See <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/116/>

------------------------------------------
[...truncated 7426 lines...]

sbin_dir = '/usr/sbin'
wrapper = '<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/ns-slapd.valgrind'>

    def valgrind_enable(sbin_dir, wrapper=None):
        '''
        Copy the valgrind ns-slapd wrapper into the /sbin directory
        (making a backup of the original ns-slapd binary).

        The script calling valgrind_enable() must be run as the 'root' user
        as selinux needs to be disabled for valgrind to work

        The server instance(s) should be stopped prior to calling this function.
        Then after calling valgrind_enable():

        - Start the server instance(s) with a timeout of 60 (valgrind takes a while to startup)
        - Run the tests
        - Stop the server
        - Get the results file
        - Run valgrind_check_file(result_file, "pattern", "pattern", ...)
        - Run valgrind_disable()

        :param sbin_dir: the location of the ns-slapd binary (e.g. /usr/sbin)
        :param wrapper: The valgrind wrapper script for ns-slapd (if not set,
                        a default wrapper is used)
        :raise IOError: If there is a problem setting up the valgrind scripts
        :raise EnvironmentError: If script is not run as 'root'
        '''
        if os.geteuid() != 0:
            log.error('This script must be run as root to use valgrind')
            raise EnvironmentError

        if not wrapper:
            # use the default ns-slapd wrapper
            wrapper = '%s/%s' % (os.path.dirname(os.path.abspath(__file__)),
                                 VALGRIND_WRAPPER)

        nsslapd_orig = '%s/ns-slapd' % sbin_dir
        nsslapd_backup = '%s/ns-slapd.original' % sbin_dir

        if os.path.isfile(nsslapd_backup):
            # There is a backup which means we never cleaned up from a previous
            # run(failed test?)
            if not filecmp.cmp(nsslapd_backup, nsslapd_orig):
                # Files are different sizes, we assume valgrind is already setup
                log.info('Valgrind is already enabled.')
                return

        # Check both nsslapd's exist
        if not os.path.isfile(wrapper):
            raise IOError('The valgrind wrapper (%s) does not exist. file=%s' %
                          (wrapper, __file__))

        if not os.path.isfile(nsslapd_orig):
            raise IOError('The binary (%s) does not exist or is not accessible.' %
                          nsslapd_orig)

        # Make a backup of the original ns-slapd and copy the wrapper into place
        try:
            shutil.copy2(nsslapd_orig, nsslapd_backup)
        except IOError as e:
            log.fatal('valgrind_enable(): failed to backup ns-slapd, error: %s' %
                      e.strerror)
            raise IOError('failed to backup ns-slapd, error: %s' % e.strerror)

        # Copy the valgrind wrapper into place
        try:
            shutil.copy2(wrapper, nsslapd_orig)
        except IOError as e:
            log.fatal('valgrind_enable(): failed to copy valgrind wrapper '
                      'to ns-slapd, error: %s' % e.strerror)
            raise IOError('failed to copy valgrind wrapper to ns-slapd, error: %s' %
>                         e.strerror)
E           IOError: failed to copy valgrind wrapper to ns-slapd, error: Text file busy

<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/utils.py>:255: IOError
---------------------------- Captured stdout setup -----------------------------
OK group dirsrv exists
OK user dirsrv exists
----------------------------- Captured stderr call -----------------------------
INFO:suites.memory_leaks.range_search_test:Initializing test_range_search...
CRITICAL:lib389.utils:valgrind_enable(): failed to copy valgrind wrapper to ns-slapd, error: Text file busy
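The copy fails with "Text file busy" because ns-slapd was still running when valgrind_enable() tried to overwrite the binary; the docstring above is explicit that the instance(s) must be stopped first. For reference, a minimal sketch of the intended enable/run/check/disable sequence (sketch only: valgrind_get_results_file(), the valgrind_disable() argument and the valgrind_check_file() return semantics are assumptions, not verified against this lib389 version):

    # Sketch of the workflow described in the valgrind_enable() docstring.
    from lib389.utils import (valgrind_enable, valgrind_disable,
                              valgrind_check_file, valgrind_get_results_file)

    def run_leak_check(inst, sbin_dir='/usr/sbin'):
        inst.stop(timeout=10)           # binary must not be in use ("Text file busy")
        valgrind_enable(sbin_dir)       # must run as root, selinux disabled
        try:
            inst.start(timeout=60)      # valgrind makes startup slow
            # ... exercise the server here ...
            inst.stop(timeout=30)
            results_file = valgrind_get_results_file(inst)   # assumed helper name
            leaks = valgrind_check_file(results_file, 'definitely lost')
            assert not leaks            # return semantics assumed: truthy when a pattern matches
        finally:
            valgrind_disable(sbin_dir)  # restore the original ns-slapd; argument assumed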
___________________________ test_multi_suffix_search ___________________________

topology = <suites.paged_results.paged_results_test.TopologyStandalone object at 0x7fd40bfa4410>
test_user = None, new_suffixes = None

    def test_multi_suffix_search(topology, test_user, new_suffixes):
        """Verify that page result search returns empty cookie
        if there is no returned entry.

        :Feature: Simple paged results

        :Setup: Standalone instance, test user for binding,
                two suffixes with backends, one is inserted into another,
                10 users for the search base within each suffix

        :Steps: 1. Bind as test user
                2. Search through all 20 added users with a simple paged control
                   using page_size = 4
                3. Wait some time for logs to be updated
                4. Check access log

        :Assert: All users should be found, the access log should contain
                 the pr_cookie for each page request and it should be equal 0,
                 except the last one should be equal -1
        """
        search_flt = r'(uid=test*)'
        searchreq_attrlist = ['dn', 'sn']
        page_size = 4
        users_num = 20

        log.info('Clear the access log')
        topology.standalone.deleteAccessLogs()

        users_list_1 = add_users(topology, users_num / 2, NEW_SUFFIX_1)
        users_list_2 = add_users(topology, users_num / 2, NEW_SUFFIX_2)

        try:
            log.info('Set DM bind')
            topology.standalone.simple_bind_s(DN_DM, PASSWORD)

            req_ctrl = SimplePagedResultsControl(True, size=page_size, cookie='')

            all_results = paged_search(topology, NEW_SUFFIX_1, [req_ctrl],
                                       search_flt, searchreq_attrlist)

            log.info('{} results'.format(len(all_results)))
            assert len(all_results) == users_num

            log.info('Restart the server to flush the logs')
            topology.standalone.restart(timeout=10)

            access_log_lines = topology.standalone.ds_access_log.match('.*pr_cookie=.*')
            pr_cookie_list = ([line.rsplit('=', 1)[-1] for line in access_log_lines])
            pr_cookie_list = [int(pr_cookie) for pr_cookie in pr_cookie_list]
            log.info('Assert that last pr_cookie == -1 and others pr_cookie == 0')
            pr_cookie_zeros = list(pr_cookie == 0 for pr_cookie in pr_cookie_list[0:-1])
            assert all(pr_cookie_zeros)
>           assert pr_cookie_list[-1] == -1
E           IndexError: list index out of range

<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/suites/paged_results/paged_results_test.py>:1198: IndexError
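The IndexError means pr_cookie_list came back empty: after the restart, ds_access_log.match('.*pr_cookie=.*') found no pr_cookie= lines, so there was no last element to compare against -1. For context, the cookie bookkeeping the test is checking comes from a loop like the following; this is a minimal python-ldap sketch of what the suite's paged_search() helper does, not the helper itself, and do_paged_search is an illustrative name:

    import ldap
    from ldap.controls import SimplePagedResultsControl

    def do_paged_search(conn, base, filterstr, attrlist, page_size=4):
        """Minimal simple paged results loop (sketch)."""
        req_ctrl = SimplePagedResultsControl(True, size=page_size, cookie='')
        results = []
        while True:
            msgid = conn.search_ext(base, ldap.SCOPE_SUBTREE, filterstr,
                                    attrlist, serverctrls=[req_ctrl])
            rtype, rdata, rmsgid, rctrls = conn.result3(msgid)
            results.extend(rdata)
            # pick the paged-results response control out of the server controls
            pctrls = [c for c in rctrls
                      if c.controlType == SimplePagedResultsControl.controlType]
            if pctrls and pctrls[0].cookie:
                req_ctrl.cookie = pctrls[0].cookie   # ask for the next page
            else:
                break                                # empty cookie: last page
        return results

The server logs pr_cookie=0 for intermediate pages and pr_cookie=-1 for the final one; an empty pr_cookie_list here suggests the access log had not been flushed (or the restart did not flush it) by the time the test read it on this run.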
---------------------------- Captured stderr setup -----------------------------
INFO:suites.paged_results.paged_results_test:Adding suffix:o=test_parent and backend: parent_base
INFO:lib389:List backend with suffix=o=test_parent
INFO:lib389:Creating a local backend
INFO:lib389:List backend cn=parent_base,cn=ldbm database,cn=plugins,cn=config
INFO:lib389:Found entry dn: cn=parent_base,cn=ldbm database,cn=plugins,cn=config cn: parent_base nsslapd-cachememsize: 10485760 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-standalone/db/parent_base nsslapd-dncachememsize: 10485760 nsslapd-readonly: off nsslapd-require-index: off nsslapd-suffix: o=test_parent objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance
INFO:lib389:Entry dn: cn="o=test_parent",cn=mapping tree,cn=config cn: o=test_parent nsslapd-backend: parent_base nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree
INFO:lib389:Found entry dn: cn=o\3Dtest_parent,cn=mapping tree,cn=config cn: o=test_parent nsslapd-backend: parent_base nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree
INFO:suites.paged_results.paged_results_test:Adding suffix:ou=child,o=test_parent and backend: child_base
INFO:lib389:List backend with suffix=ou=child,o=test_parent
INFO:lib389:Creating a local backend
INFO:lib389:List backend cn=child_base,cn=ldbm database,cn=plugins,cn=config
INFO:lib389:Found entry dn: cn=child_base,cn=ldbm database,cn=plugins,cn=config cn: child_base nsslapd-cachememsize: 10485760 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-standalone/db/child_base nsslapd-dncachememsize: 10485760 nsslapd-readonly: off nsslapd-require-index: off nsslapd-suffix: ou=child,o=test_parent objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance
INFO:lib389:Entry dn: cn="ou=child,o=test_parent",cn=mapping tree,cn=config cn: ou=child,o=test_parent nsslapd-backend: child_base nsslapd-parent-suffix: o=test_parent nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree
INFO:lib389:Found entry dn: cn=ou\3Dchild\2Co\3Dtest_parent,cn=mapping tree,cn=config cn: ou=child,o=test_parent nsslapd-backend: child_base nsslapd-parent-suffix: o=test_parent nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree
INFO:suites.paged_results.paged_results_test:Adding ACI to allow our test user to search
----------------------------- Captured stderr call -----------------------------
INFO:suites.paged_results.paged_results_test:Clear the access log
INFO:suites.paged_results.paged_results_test:Adding 10 users
INFO:suites.paged_results.paged_results_test:Adding 10 users
INFO:suites.paged_results.paged_results_test:Set DM bind
INFO:suites.paged_results.paged_results_test:Running simple paged result search with - search suffix: o=test_parent; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 4; controls: [<ldap.controls.libldap.SimplePagedResultsControl instance at 0x7fd410506200>].
INFO:suites.paged_results.paged_results_test:Getting page 0
INFO:suites.paged_results.paged_results_test:Getting page 1
INFO:suites.paged_results.paged_results_test:Getting page 2
INFO:suites.paged_results.paged_results_test:Getting page 3
INFO:suites.paged_results.paged_results_test:Getting page 4
INFO:suites.paged_results.paged_results_test:Getting page 5
INFO:suites.paged_results.paged_results_test:20 results
INFO:suites.paged_results.paged_results_test:Restart the server to flush the logs
INFO:suites.paged_results.paged_results_test:Assert that last pr_cookie == -1 and others pr_cookie == 0
INFO:suites.paged_results.paged_results_test:Remove added users
INFO:suites.paged_results.paged_results_test:Deleting 10 users
INFO:suites.paged_results.paged_results_test:Deleting 10 users
============== 32 failed, 484 passed, 5 error in 9544.66 seconds ===============
+ '[' 1 -ne 0 ']'
+ echo CI Tests 'FAILED!'
CI Tests FAILED!
+ echo
============================= test session starts ==============================
platform linux2 -- Python 2.7.12, pytest-2.9.2, py-1.4.31, pluggy-0.3.1 -- /usr/bin/python2
cachedir: .cache
rootdir: <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests,> inifile:
plugins: sourceorder-0.5, multihost-1.0
collecting ...
collected 520 items tickets/ticket1347760_test.py::test_ticket1347760 FAILED tickets/ticket365_test.py::test_ticket365 PASSED tickets/ticket47313_test.py::test_ticket47313_run PASSED tickets/ticket47384_test.py::test_ticket47384 PASSED tickets/ticket47431_test.py::test_ticket47431_0 PASSED tickets/ticket47431_test.py::test_ticket47431_1 FAILED tickets/ticket47431_test.py::test_ticket47431_2 PASSED tickets/ticket47431_test.py::test_ticket47431_3 PASSED tickets/ticket47462_test.py::test_ticket47462 FAILED tickets/ticket47490_test.py::test_ticket47490_init PASSED tickets/ticket47490_test.py::test_ticket47490_one PASSED tickets/ticket47490_test.py::test_ticket47490_two PASSED tickets/ticket47490_test.py::test_ticket47490_three PASSED tickets/ticket47490_test.py::test_ticket47490_four PASSED tickets/ticket47490_test.py::test_ticket47490_five PASSED tickets/ticket47490_test.py::test_ticket47490_six PASSED tickets/ticket47490_test.py::test_ticket47490_seven PASSED tickets/ticket47490_test.py::test_ticket47490_eight PASSED tickets/ticket47490_test.py::test_ticket47490_nine PASSED tickets/ticket47536_test.py::test_ticket47536 FAILED tickets/ticket47553_test.py::test_ticket47553 PASSED tickets/ticket47560_test.py::test_ticket47560 PASSED tickets/ticket47573_test.py::test_ticket47573_init PASSED tickets/ticket47573_test.py::test_ticket47573_one PASSED tickets/ticket47573_test.py::test_ticket47573_two PASSED tickets/ticket47573_test.py::test_ticket47573_three PASSED tickets/ticket47619_test.py::test_ticket47619_init FAILED tickets/ticket47619_test.py::test_ticket47619_create_index PASSED tickets/ticket47619_test.py::test_ticket47619_reindex PASSED tickets /ticket47619_test.py::test_ticket47619_check_indexed_search PASSED tickets/ticket47640_test.py::test_ticket47640 PASSED tickets/ticket47653MMR_test.py::test_ticket47653_init PASSED tickets/ticket47653MMR_test.py::test_ticket47653_add PASSED tickets/ticket47653MMR_test.py::test_ticket47653_modify PASSED tickets/ticket47653_test.py::test_ticket47653_init PASSED tickets/ticket47653_test.py::test_ticket47653_add PASSED tickets/ticket47653_test.py::test_ticket47653_search PASSED tickets/ticket47653_test.py::test_ticket47653_modify PASSED tickets/ticket47653_test.py::test_ticket47653_delete PASSED tickets/ticket47669_test.py::test_ticket47669_init FAILED tickets/ticket47669_test.py::test_ticket47669_changelog_maxage FAILED tickets/ticket47669_test.py::test_ticket47669_changelog_triminterval FAILED tickets/ticket47669_test.py::test_ticket47669_changelog_compactdbinterval FAILED tickets/ticket47669_test.py::test_ticket47669_retrochangelog_maxage FAILED tickets/ticket47676_test.py::test_ticket47676_init PASSED tickets/ticket47676_test.py::test_ticket47676_skip_oc_at PASSED tickets/ticket47676_test.py::test_ticket47676_reject_action PASSED tickets/ticket47714_test.py::test_ticket47714_init PASSED tickets/ticket47714_test.py::test_ticket47714_run_0 PASSED tickets/ticket47714_test.py::test_ticket47714_run_1 PASSED tickets/ticket47721_test.py::test_ticket47721_init PASSED tickets/ticket47721_test.py::test_ticket47721_0 PASSED tickets/ticket47721_test.py::test_ticket47721_1 PASSED tickets/ticket47721_test.py::test_ticket47721_2 PASSED tickets/ticket47721_test.py::test_ticket47721_3 PASSED tickets/ticket47721_test.py::test_ticket47721_4 PASSED tickets/ticket47781_test.py::test_ticket47781 PASSED tickets/ticket47787_test.py::test_ticket47787_init PASSED tickets/ticket47787_test.py::test_ticket47787_2 PASSED tickets/ticket47808_test.py::test_ticket47808_run PASSED 
tickets/ticket47815_test.py::test_ticket47815 PASSED tickets/ticket47819_test.py::test_ticket47819 PASSED tickets/ticket47823_test.py::test_ticket47823_init FAILED tick ets/ticket47823_test.py::test_ticket47823_one_container_add PASSED tickets/ticket47823_test.py::test_ticket47823_one_container_mod PASSED tickets/ticket47823_test.py::test_ticket47823_one_container_modrdn PASSED tickets/ticket47823_test.py::test_ticket47823_multi_containers_add PASSED tickets/ticket47823_test.py::test_ticket47823_multi_containers_mod PASSED tickets/ticket47823_test.py::test_ticket47823_multi_containers_modrdn PASSED tickets/ticket47823_test.py::test_ticket47823_across_multi_containers_add PASSED tickets/ticket47823_test.py::test_ticket47823_across_multi_containers_mod PASSED tickets/ticket47823_test.py::test_ticket47823_across_multi_containers_modrdn PASSED tickets/ticket47823_test.py::test_ticket47823_invalid_config_1 FAILED tickets/ticket47823_test.py::test_ticket47823_invalid_config_2 FAILED tickets/ticket47823_test.py::test_ticket47823_invalid_config_3 FAILED tickets/ticket47823_test.py::test_ticket47823_invalid_config_4 FAILED tickets/ticket47823_test.py::test_ticket47823_invalid_config_5 FAILED tickets/ticket47823_test.py::test_ticket47823_invalid_config_6 FAILED tickets/ticket47823_test.py::test_ticket47823_invalid_config_7 FAILED tickets/ticket47828_test.py::test_ticket47828_init PASSED tickets/ticket47828_test.py::test_ticket47828_run_0 PASSED tickets/ticket47828_test.py::test_ticket47828_run_1 PASSED tickets/ticket47828_test.py::test_ticket47828_run_2 PASSED tickets/ticket47828_test.py::test_ticket47828_run_3 PASSED tickets/ticket47828_test.py::test_ticket47828_run_4 PASSED tickets/ticket47828_test.py::test_ticket47828_run_5 PASSED tickets/ticket47828_test.py::test_ticket47828_run_6 PASSED tickets/ticket47828_test.py::test_ticket47828_run_7 PASSED tickets/ticket47828_test.py::test_ticket47828_run_8 PASSED tickets/ticket47828_test.py::test_ticket47828_run_9 PASSED tickets/ticket47828_test.py::test_ticket47828_run_10 PASSED tickets/ticket47828_test.py::test_ticket47828_run_11 PASSED tickets/ticket47828_test.py::test_ticket47828_run_12 PASSED tickets/ticket47828_test.py::test_ticket47828_r un_13 PASSED tickets/ticket47828_test.py::test_ticket47828_run_14 PASSED tickets/ticket47828_test.py::test_ticket47828_run_15 PASSED tickets/ticket47828_test.py::test_ticket47828_run_16 PASSED tickets/ticket47828_test.py::test_ticket47828_run_17 PASSED tickets/ticket47828_test.py::test_ticket47828_run_18 PASSED tickets/ticket47828_test.py::test_ticket47828_run_19 PASSED tickets/ticket47828_test.py::test_ticket47828_run_20 PASSED tickets/ticket47828_test.py::test_ticket47828_run_21 PASSED tickets/ticket47828_test.py::test_ticket47828_run_22 PASSED tickets/ticket47828_test.py::test_ticket47828_run_23 PASSED tickets/ticket47828_test.py::test_ticket47828_run_24 PASSED tickets/ticket47828_test.py::test_ticket47828_run_25 PASSED tickets/ticket47828_test.py::test_ticket47828_run_26 PASSED tickets/ticket47828_test.py::test_ticket47828_run_27 PASSED tickets/ticket47828_test.py::test_ticket47828_run_28 PASSED tickets/ticket47828_test.py::test_ticket47828_run_29 PASSED tickets/ticket47828_test.py::test_ticket47828_run_30 PASSED tickets/ticket47828_test.py::test_ticket47828_run_31 PASSED tickets/ticket47829_test.py::test_ticket47829_init PASSED tickets/ticket47829_test.py::test_ticket47829_mod_active_user_1 PASSED tickets/ticket47829_test.py::test_ticket47829_mod_active_user_2 PASSED 
tickets/ticket47829_test.py::test_ticket47829_mod_active_user_3 PASSED tickets/ticket47829_test.py::test_ticket47829_mod_stage_user_1 PASSED tickets/ticket47829_test.py::test_ticket47829_mod_stage_user_2 PASSED tickets/ticket47829_test.py::test_ticket47829_mod_stage_user_3 PASSED tickets/ticket47829_test.py::test_ticket47829_mod_out_user_1 PASSED tickets/ticket47829_test.py::test_ticket47829_mod_out_user_2 PASSED tickets/ticket47829_test.py::test_ticket47829_mod_out_user_3 PASSED tickets/ticket47829_test.py::test_ticket47829_mod_active_user_modrdn_active_user_1 PASSED tickets/ticket47829_test.py::test_ticket47829_mod_active_user_modrdn_stage_user_1 PASSED tickets/ticket47829_test.py::test_ticket47829_mod_active_user_modrdn_out_user_1 PASSED tick ets/ticket47829_test.py::test_ticket47829_mod_modrdn_1 PASSED tickets/ticket47829_test.py::test_ticket47829_mod_stage_user_modrdn_active_user_1 PASSED tickets/ticket47829_test.py::test_ticket47829_mod_stage_user_modrdn_stage_user_1 PASSED tickets/ticket47829_test.py::test_ticket47829_indirect_active_group_1 PASSED tickets/ticket47829_test.py::test_ticket47829_indirect_active_group_2 PASSED tickets/ticket47829_test.py::test_ticket47829_indirect_active_group_3 PASSED tickets/ticket47829_test.py::test_ticket47829_indirect_active_group_4 PASSED tickets/ticket47833_test.py::test_ticket47829_init PASSED tickets/ticket47833_test.py::test_ticket47829_mod_stage_user_modrdn_stage_user_1 PASSED tickets/ticket47869MMR_test.py::test_ticket47869_init PASSED tickets/ticket47869MMR_test.py::test_ticket47869_check PASSED tickets/ticket47871_test.py::test_ticket47871_init FAILED tickets/ticket47871_test.py::test_ticket47871_1 PASSED tickets/ticket47871_test.py::test_ticket47871_2 PASSED tickets/ticket47900_test.py::test_ticket47900 PASSED tickets/ticket47910_test.py::test_ticket47910_logconv_start_end_positive PASSED tickets/ticket47910_test.py::test_ticket47910_logconv_start_end_negative PASSED tickets/ticket47910_test.py::test_ticket47910_logconv_start_end_invalid PASSED tickets/ticket47910_test.py::test_ticket47910_logconv_noaccesslogs PASSED tickets/ticket47920_test.py::test_ticket47920_init PASSED tickets/ticket47920_test.py::test_ticket47920_mod_readentry_ctrl PASSED tickets/ticket47921_test.py::test_ticket47921 PASSED tickets/ticket47927_test.py::test_ticket47927_init PASSED tickets/ticket47927_test.py::test_ticket47927_one PASSED tickets/ticket47927_test.py::test_ticket47927_two PASSED tickets/ticket47927_test.py::test_ticket47927_three PASSED tickets/ticket47927_test.py::test_ticket47927_four PASSED tickets/ticket47927_test.py::test_ticket47927_five PASSED tickets/ticket47927_test.py::test_ticket47927_six PASSED tickets/ticket47931_test.py::test_ticket47931 PASSED tickets/ticket47937_test.py::test_ticket47937 PASSED ticke ts/ticket47950_test.py::test_ticket47950 PASSED tickets/ticket47953_test.py::test_ticket47953 PASSED tickets/ticket47963_test.py::test_ticket47963 PASSED tickets/ticket47966_test.py::test_ticket47966 PASSED tickets/ticket47970_test.py::test_ticket47970 PASSED tickets/ticket47973_test.py::test_ticket47973 PASSED tickets/ticket47976_test.py::test_ticket47976_init PASSED tickets/ticket47976_test.py::test_ticket47976_1 PASSED tickets/ticket47976_test.py::test_ticket47976_2 PASSED tickets/ticket47976_test.py::test_ticket47976_3 PASSED tickets/ticket47980_test.py::test_ticket47980 PASSED tickets/ticket47981_test.py::test_ticket47981 PASSED tickets/ticket47988_test.py::test_ticket47988_init PASSED tickets/ticket47988_test.py::test_ticket47988_1 
PASSED tickets/ticket47988_test.py::test_ticket47988_2 PASSED tickets/ticket47988_test.py::test_ticket47988_3 PASSED tickets/ticket47988_test.py::test_ticket47988_4 PASSED tickets/ticket47988_test.py::test_ticket47988_5 PASSED tickets/ticket47988_test.py::test_ticket47988_6 PASSED tickets/ticket48005_test.py::test_ticket48005_setup PASSED tickets/ticket48005_test.py::test_ticket48005_memberof PASSED tickets/ticket48005_test.py::test_ticket48005_automember PASSED tickets/ticket48005_test.py::test_ticket48005_syntaxvalidate PASSED tickets/ticket48005_test.py::test_ticket48005_usn PASSED tickets/ticket48005_test.py::test_ticket48005_schemareload PASSED tickets/ticket48013_test.py::test_ticket48013 PASSED tickets/ticket48026_test.py::test_ticket48026 PASSED tickets/ticket48109_test.py::test_ticket48109 FAILED tickets/ticket48170_test.py::test_ticket48170 PASSED tickets/ticket48194_test.py::test_init PASSED tickets/ticket48194_test.py::test_run_0 PASSED tickets/ticket48194_test.py::test_run_1 PASSED tickets/ticket48194_test.py::test_run_2 PASSED tickets/ticket48194_test.py::test_run_3 PASSED tickets/ticket48194_test.py::test_run_4 PASSED tickets/ticket48194_test.py::test_run_5 PASSED tickets/ticket48194_test.py::test_run_6 PASSED tickets/ticket48194_test.py::test_run_7 PASSED tickets/ ticket48194_test.py::test_run_8 PASSED tickets/ticket48194_test.py::test_run_9 PASSED tickets/ticket48194_test.py::test_run_10 PASSED tickets/ticket48194_test.py::test_run_11 PASSED tickets/ticket48212_test.py::test_ticket48212 PASSED tickets/ticket48214_test.py::test_ticket48214_run PASSED tickets/ticket48226_test.py::test_ticket48226_set_purgedelay PASSED tickets/ticket48226_test.py::test_ticket48226_1 PASSED tickets/ticket48228_test.py::test_ticket48228_test_global_policy PASSED tickets/ticket48228_test.py::test_ticket48228_test_subtree_policy PASSED tickets/ticket48233_test.py::test_ticket48233 PASSED tickets/ticket48234_test.py::test_ticket48234 PASSED tickets/ticket48252_test.py::test_ticket48252_setup PASSED tickets/ticket48252_test.py::test_ticket48252_run_0 PASSED tickets/ticket48252_test.py::test_ticket48252_run_1 PASSED tickets/ticket48265_test.py::test_ticket48265_test PASSED tickets/ticket48266_test.py::test_ticket48266_fractional ERROR tickets/ticket48266_test.py::test_ticket48266_check_repl_desc ERROR tickets/ticket48266_test.py::test_ticket48266_count_csn_evaluation ERROR tickets/ticket48270_test.py::test_ticket48270_init PASSED tickets/ticket48270_test.py::test_ticket48270_homeDirectory_indexed_cis FAILED tickets/ticket48270_test.py::test_ticket48270_homeDirectory_mixed_value PASSED tickets/ticket48270_test.py::test_ticket48270_extensible_search PASSED tickets/ticket48272_test.py::test_ticket48272 PASSED tickets/ticket48294_test.py::test_48294_init PASSED tickets/ticket48294_test.py::test_48294_run_0 PASSED tickets/ticket48294_test.py::test_48294_run_1 PASSED tickets/ticket48294_test.py::test_48294_run_2 PASSED tickets/ticket48295_test.py::test_48295_init PASSED tickets/ticket48295_test.py::test_48295_run PASSED tickets/ticket48312_test.py::test_ticket48312 PASSED tickets/ticket48325_test.py::test_ticket48325 PASSED tickets/ticket48342_test.py::test_ticket4026 ERROR tickets/ticket48354_test.py::test_ticket48354 PASSED tickets/ticket48362_test.py::test_ticket48362 PASSED tickets/ticket48366_test.p y::test_ticket48366_init PASSED tickets/ticket48366_test.py::test_ticket48366_search_user PASSED tickets/ticket48366_test.py::test_ticket48366_search_dm PASSED 
tickets/ticket48370_test.py::test_ticket48370 PASSED tickets/ticket48383_test.py::test_ticket48383 FAILED tickets/ticket48497_test.py::test_ticket48497_init PASSED tickets/ticket48497_test.py::test_ticket48497_homeDirectory_mixed_value PASSED tickets/ticket48497_test.py::test_ticket48497_extensible_search PASSED tickets/ticket48497_test.py::test_ticket48497_homeDirectory_index_cfg PASSED tickets/ticket48497_test.py::test_ticket48497_homeDirectory_index_run FAILED tickets/ticket48637_test.py::test_ticket48637 PASSED tickets/ticket48665_test.py::test_ticket48665 PASSED tickets/ticket48745_test.py::test_ticket48745_init PASSED tickets/ticket48745_test.py::test_ticket48745_homeDirectory_indexed_cis FAILED tickets/ticket48745_test.py::test_ticket48745_homeDirectory_mixed_value PASSED tickets/ticket48745_test.py::test_ticket48745_extensible_search_after_index PASSED tickets/ticket48746_test.py::test_ticket48746_init PASSED tickets/ticket48746_test.py::test_ticket48746_homeDirectory_indexed_cis FAILED tickets/ticket48746_test.py::test_ticket48746_homeDirectory_mixed_value PASSED tickets/ticket48746_test.py::test_ticket48746_extensible_search_after_index PASSED tickets/ticket48746_test.py::test_ticket48746_homeDirectory_indexed_ces FAILED tickets/ticket48755_test.py::test_ticket48755 PASSED tickets/ticket48759_test.py::test_ticket48759 PASSED tickets/ticket48784_test.py::test_ticket48784 PASSED tickets/ticket48798_test.py::test_ticket48798 PASSED tickets/ticket48799_test.py::test_ticket48799 PASSED tickets/ticket48808_test.py::test_ticket48808 PASSED tickets/ticket48844_test.py::test_ticket48844_init PASSED tickets/ticket48844_test.py::test_ticket48844_bitwise_on PASSED tickets/ticket48844_test.py::test_ticket48844_bitwise_off PASSED tickets/ticket48891_test.py::test_ticket48891_setup PASSED tickets/ticket48893_test.py::test_ticket48893 PASSED tickets/ticket48896_ test.py::test_ticket48896 PASSED tickets/ticket48906_test.py::test_ticket48906_setup PASSED tickets/ticket48906_test.py::test_ticket48906_dblock_default PASSED tickets/ticket48906_test.py::test_ticket48906_dblock_ldap_update FAILED tickets/ticket48906_test.py::test_ticket48906_dblock_edit_update FAILED tickets/ticket48906_test.py::test_ticket48906_dblock_robust FAILED tickets/ticket48916_test.py::test_ticket48916 PASSED tickets/ticket48956_test.py::test_ticket48956 PASSED tickets/ticket548_test.py::test_ticket548_test_with_no_policy PASSED tickets/ticket548_test.py::test_ticket548_test_global_policy PASSED tickets/ticket548_test.py::test_ticket548_test_subtree_policy PASSED suites/acct_usability_plugin/acct_usability_test.py::test_acct_usability_init PASSED suites/acct_usability_plugin/acct_usability_test.py::test_acct_usability_ PASSED suites/acctpolicy_plugin/acctpolicy_test.py::test_acctpolicy_init PASSED suites/acctpolicy_plugin/acctpolicy_test.py::test_acctpolicy_ PASSED 'suites/acl/acl_test.py::test_aci_attr_subtype_targetattr[lang-ja]' PASSED 'suites/acl/acl_test.py::test_aci_attr_subtype_targetattr[binary]' PASSED 'suites/acl/acl_test.py::test_aci_attr_subtype_targetattr[phonetic]' PASSED suites/acl/acl_test.py::test_mode_default_add_deny PASSED suites/acl/acl_test.py::test_mode_default_delete_deny PASSED 'suites/acl/acl_test.py::test_moddn_staging_prod[0-cn=staged' 'user,dc=example,dc=com-cn=accounts,dc=example,dc=com-False]' PASSED 'suites/acl/acl_test.py::test_moddn_staging_prod[1-cn=staged' 'user,dc=example,dc=com-cn=accounts,dc=example,dc=com-False]' PASSED 'suites/acl/acl_test.py::test_moddn_staging_prod[2-cn=staged' 
'user,dc=example,dc=com-cn=bad*,dc=example,dc=com-True]' PASSED 'suites/acl/acl_test.py::test_moddn_staging_prod[3-cn=st*,dc=example,dc=com-cn=accounts,dc=example,dc=com-False]' PASSED 'suites/acl/acl_test.py::test_moddn_staging_prod[4-cn=bad*,dc=example,dc=com-cn=accounts,dc=example,dc=com-True]' PASSED 'suites/acl/acl_test.py::test_moddn_staging_prod[5-cn=st*,dc=example,dc=com-cn=ac*, dc=example,dc=com-False]' PASSED 'suites/acl/acl_test.py::test_moddn_staging_prod[6-None-cn=ac*,dc=example,dc=com-False]' PASSED 'suites/acl/acl_test.py::test_moddn_staging_prod[7-cn=st*,dc=example,dc=com-None-False]' PASSED 'suites/acl/acl_test.py::test_moddn_staging_prod[8-None-None-False]' PASSED suites/acl/acl_test.py::test_moddn_staging_prod_9 PASSED suites/acl/acl_test.py::test_moddn_prod_staging PASSED suites/acl/acl_test.py::test_check_repl_M2_to_M1 PASSED suites/acl/acl_test.py::test_moddn_staging_prod_except PASSED suites/acl/acl_test.py::test_mode_default_ger_no_moddn PASSED suites/acl/acl_test.py::test_mode_default_ger_with_moddn PASSED suites/acl/acl_test.py::test_mode_switch_default_to_legacy PASSED suites/acl/acl_test.py::test_mode_legacy_ger_no_moddn1 PASSED suites/acl/acl_test.py::test_mode_legacy_ger_no_moddn2 PASSED suites/acl/acl_test.py::test_mode_legacy_ger_with_moddn PASSED suites/acl/acl_test.py::test_rdn_write_get_ger PASSED suites/acl/acl_test.py::test_rdn_write_modrdn_anonymous PASSED suites/attr_encryption/attr_encrypt_test.py::test_attr_encrypt_init PASSED suites/attr_encryption/attr_encrypt_test.py::test_attr_encrypt_ PASSED suites/attr_uniqueness_plugin/attr_uniqueness_test.py::test_attr_uniqueness_init PASSED suites/attr_uniqueness_plugin/attr_uniqueness_test.py::test_attr_uniqueness PASSED suites/automember_plugin/automember_test.py::test_automember_init PASSED suites/automember_plugin/automember_test.py::test_automember_ PASSED suites/basic/basic_test.py::test_basic_ops PASSED suites/basic/basic_test.py::test_basic_import_export PASSED suites/basic/basic_test.py::test_basic_backup PASSED suites/basic/basic_test.py::test_basic_acl PASSED suites/basic/basic_test.py::test_basic_searches PASSED suites/basic/basic_test.py::test_basic_referrals PASSED suites/basic/basic_test.py::test_basic_systemctl PASSED suites/basic/basic_test.py::test_basic_ldapagent PASSED suites/basic/basic_test.py::test_basic_dse PASSED 'suites/basic/basic_test.py::test_def_rootdse_attr[namingContexts]' PASSED ' suites/basic/basic_test.py::test_def_rootdse_attr[supportedLDAPVersion]' PASSED 'suites/basic/basic_test.py::test_def_rootdse_attr[supportedControl]' PASSED 'suites/basic/basic_test.py::test_def_rootdse_attr[supportedExtension]' PASSED 'suites/basic/basic_test.py::test_def_rootdse_attr[supportedSASLMechanisms]' PASSED 'suites/basic/basic_test.py::test_def_rootdse_attr[vendorName]' PASSED 'suites/basic/basic_test.py::test_def_rootdse_attr[vendorVersion]' PASSED 'suites/basic/basic_test.py::test_mod_def_rootdse_attr[namingContexts]' PASSED 'suites/basic/basic_test.py::test_mod_def_rootdse_attr[supportedLDAPVersion]' PASSED 'suites/basic/basic_test.py::test_mod_def_rootdse_attr[supportedControl]' PASSED 'suites/basic/basic_test.py::test_mod_def_rootdse_attr[supportedExtension]' PASSED 'suites/basic/basic_test.py::test_mod_def_rootdse_attr[supportedSASLMechanisms]' PASSED 'suites/basic/basic_test.py::test_mod_def_rootdse_attr[vendorName]' PASSED 'suites/basic/basic_test.py::test_mod_def_rootdse_attr[vendorVersion]' PASSED suites/betxns/betxn_test.py::test_betxn_init PASSED 
suites/betxns/betxn_test.py::test_betxt_7bit PASSED suites/betxns/betxn_test.py::test_betxn_attr_uniqueness PASSED suites/betxns/betxn_test.py::test_betxn_memberof PASSED suites/chaining_plugin/chaining_test.py::test_chaining_init PASSED suites/chaining_plugin/chaining_test.py::test_chaining_ PASSED suites/clu/clu_test.py::test_clu_init PASSED suites/clu/clu_test.py::test_clu_pwdhash PASSED suites/clu/db2ldif_test.py::test_db2ldif_init PASSED suites/collation_plugin/collatation_test.py::test_collatation_init PASSED suites/collation_plugin/collatation_test.py::test_collatation_ PASSED suites/config/config_test.py::test_maxbersize_repl PASSED suites/config/config_test.py::test_config_listen_backport_size PASSED suites/config/config_test.py::test_config_deadlock_policy PASSED suites/cos_plugin/cos_test.py::test_cos_init PASSED suites/cos_plugin/cos_test.py::test_cos_ PASSED suites/deref_plugin/deref_test.py::test_deref_init PASSED suites/deref_plugin/ deref_test.py::test_deref_ PASSED suites/disk_monitoring/disk_monitor_test.py::test_disk_monitor_init PASSED suites/disk_monitoring/disk_monitor_test.py::test_disk_monitor_ PASSED suites/distrib_plugin/distrib_test.py::test_distrib_init PASSED suites/distrib_plugin/distrib_test.py::test_distrib_ PASSED suites/dna_plugin/dna_test.py::test_dna_init PASSED suites/dna_plugin/dna_test.py::test_dna_ PASSED suites/ds_logs/ds_logs_test.py::test_ds_logs_init PASSED suites/ds_logs/ds_logs_test.py::test_ds_logs_ PASSED suites/dynamic-plugins/test_dynamic_plugins.py::test_dynamic_plugins FAILED suites/filter/filter_test.py::test_filter_init PASSED suites/filter/filter_test.py::test_filter_escaped PASSED suites/filter/filter_test.py::test_filter_search_original_attrs PASSED suites/filter/rfc3673_all_oper_attrs_test.py::test_supported_features PASSED 'suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[-False-oper_attr_list0]' PASSED 'suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[-False-oper_attr_list0-*]' PASSED 'suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[-False-oper_attr_list0-objectClass]' PASSED 'suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[-True-oper_attr_list1]' PASSED 'suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[-True-oper_attr_list1-*]' PASSED 'suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[-True-oper_attr_list1-objectClass]' PASSED 'suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[ou=people,dc=example,dc=com-False-oper_attr_list2]' PASSED 'suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[ou=people,dc=example,dc=com-False-oper_attr_list2-*]' PASSED 'suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[ou=people,dc=example,dc=com-False-oper_attr_list2-objectClass]' PASSED 'suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[ou=people,dc=example,dc=com-True-oper_attr_list3]' PASSED 'suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[ou=people,dc=examp le,dc=com-True-oper_attr_list3-*]' PASSED 'suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[ou=people,dc=example,dc=com-True-oper_attr_list3-objectClass]' PASSED 'suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[uid=all_attrs_test,ou=people,dc=example,dc=com-False-oper_attr_list4]' PASSED 'suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[uid=all_attrs_test,ou=people,dc=example,dc=com-False-oper_attr_list4-*]' PASSED 
'suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[uid=all_attrs_test,ou=people,dc=example,dc=com-False-oper_attr_list4-objectClass]' PASSED 'suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[uid=all_attrs_test,ou=people,dc=example,dc=com-True-oper_attr_list5]' PASSED 'suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[uid=all_attrs_test,ou=people,dc=example,dc=com-True-oper_attr_list5-*]' PASSED 'suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[uid=all_attrs_test,ou=people,dc=example,dc=com-True-oper_attr_list5-objectClass]' PASSED 'suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[cn=config-False-oper_attr_list6]' PASSED 'suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[cn=config-False-oper_attr_list6-*]' PASSED 'suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[cn=config-False-oper_attr_list6-objectClass]' PASSED suites/get_effective_rights/ger_test.py::test_ger_init PASSED suites/get_effective_rights/ger_test.py::test_ger_ PASSED suites/gssapi_repl/gssapi_repl_test.py::test_gssapi_repl PASSED suites/ldapi/ldapi_test.py::test_ldapi_init PASSED suites/ldapi/ldapi_test.py::test_ldapi_ PASSED suites/linkedattrs_plugin/linked_attrs_test.py::test_linked_attrs_init PASSED suites/linkedattrs_plugin/linked_attrs_test.py::test_linked_attrs_ PASSED suites/mapping_tree/mapping_tree_test.py::test_mapping_tree_init PASSED suites/mapping_tree/mapping_tree_test.py::test_mapping_tree_ PASSED suites/memberof_plugin/memberof_test.py::test_memberof_auto_add_oc PASSED suites/m emory_leaks/range_search_test.py::test_range_search_init FAILED suites/memory_leaks/range_search_test.py::test_range_search PASSED suites/memory_leaks/range_search_test.py::test_range_search ERROR suites/monitor/monitor_test.py::test_monitor_init PASSED suites/monitor/monitor_test.py::test_monitor_ PASSED 'suites/paged_results/paged_results_test.py::test_search_success[6-5]' PASSED 'suites/paged_results/paged_results_test.py::test_search_success[5-5]' PASSED 'suites/paged_results/paged_results_test.py::test_search_success[5-25]' PASSED 'suites/paged_results/paged_results_test.py::test_search_limits_fail[50-200-cn=config,cn=ldbm' 'database,cn=plugins,cn=config-nsslapd-idlistscanlimit-100-UNWILLING_TO_PERFORM]' PASSED 'suites/paged_results/paged_results_test.py::test_search_limits_fail[5-15-cn=config-nsslapd-timelimit-20-UNAVAILABLE_CRITICAL_EXTENSION]' PASSED 'suites/paged_results/paged_results_test.py::test_search_limits_fail[21-50-cn=config-nsslapd-sizelimit-20-SIZELIMIT_EXCEEDED]' PASSED 'suites/paged_results/paged_results_test.py::test_search_limits_fail[21-50-cn=config-nsslapd-pagedsizelimit-5-SIZELIMIT_EXCEEDED]' PASSED 'suites/paged_results/paged_results_test.py::test_search_limits_fail[5-50-cn=config,cn=ldbm' 'database,cn=plugins,cn=config-nsslapd-lookthroughlimit-20-ADMINLIMIT_EXCEEDED]' PASSED suites/paged_results/paged_results_test.py::test_search_sort_success PASSED suites/paged_results/paged_results_test.py::test_search_abandon PASSED suites/paged_results/paged_results_test.py::test_search_with_timelimit PASSED 'suites/paged_results/paged_results_test.py::test_search_dns_ip_aci[dns' = '"localhost.localdomain"]' PASSED 'suites/paged_results/paged_results_test.py::test_search_dns_ip_aci[ip' = '"::1"' or ip = '"127.0.0.1"]' PASSED suites/paged_results/paged_results_test.py::test_search_multiple_paging PASSED 'suites/paged_results/paged_results_test.py::test_search_invalid_cookie[1000]' PASSED 
'suites/paged_results/paged_results_test.py::test_search_invalid_cookie[-1]' PASSED suites/paged_results/paged_re sults_test.py::test_search_abandon_with_zero_size PASSED suites/paged_results/paged_results_test.py::test_search_pagedsizelimit_success PASSED 'suites/paged_results/paged_results_test.py::test_search_nspagedsizelimit[5-15-PASS]' PASSED 'suites/paged_results/paged_results_test.py::test_search_nspagedsizelimit[15-5-SIZELIMIT_EXCEEDED]' PASSED 'suites/paged_results/paged_results_test.py::test_search_paged_limits[conf_attr_values0-ADMINLIMIT_EXCEEDED]' PASSED 'suites/paged_results/paged_results_test.py::test_search_paged_limits[conf_attr_values1-PASS]' PASSED 'suites/paged_results/paged_results_test.py::test_search_paged_user_limits[conf_attr_values0-ADMINLIMIT_EXCEEDED]' PASSED 'suites/paged_results/paged_results_test.py::test_search_paged_user_limits[conf_attr_values1-PASS]' PASSED suites/paged_results/paged_results_test.py::test_ger_basic PASSED suites/paged_results/paged_results_test.py::test_multi_suffix_search FAILED 'suites/paged_results/paged_results_test.py::test_maxsimplepaged_per_conn_success[None]' PASSED 'suites/paged_results/paged_results_test.py::test_maxsimplepaged_per_conn_success[-1]' PASSED 'suites/paged_results/paged_results_test.py::test_maxsimplepaged_per_conn_success[1000]' PASSED 'suites/paged_results/paged_results_test.py::test_maxsimplepaged_per_conn_failure[0]' PASSED 'suites/paged_results/paged_results_test.py::test_maxsimplepaged_per_conn_failure[1]' PASSED suites/pam_passthru_plugin/pam_test.py::test_pam_init PASSED suites/pam_passthru_plugin/pam_test.py::test_pam_ PASSED suites/passthru_plugin/passthru_test.py::test_passthru_init PASSED suites/passthru_plugin/passthru_test.py::test_passthru_ PASSED suites/password/password_test.py::test_password_init PASSED suites/password/password_test.py::test_password_delete_specific_password PASSED suites/password/pwdAdmin_test.py::test_pwdAdmin_init PASSED suites/password/pwdAdmin_test.py::test_pwdAdmin PASSED suites/password/pwdAdmin_test.py::test_pwdAdmin_config_validation PASSED 'suites/password/pwdPolicy_attribute_test.py::test_change_pwd[on-of f-UNWILLING_TO_PERFORM]' PASSED 'suites/password/pwdPolicy_attribute_test.py::test_change_pwd[off-off-UNWILLING_TO_PERFORM]' PASSED 'suites/password/pwdPolicy_attribute_test.py::test_change_pwd[off-on-None]' PASSED 'suites/password/pwdPolicy_attribute_test.py::test_change_pwd[on-on-None]' PASSED suites/password/pwdPolicy_attribute_test.py::test_pwd_min_age PASSED 'suites/password/pwdPolicy_inherit_global_test.py::test_entry_has_no_restrictions[off-off]' PASSED 'suites/password/pwdPolicy_inherit_global_test.py::test_entry_has_no_restrictions[on-off]' PASSED 'suites/password/pwdPolicy_inherit_global_test.py::test_entry_has_no_restrictions[off-on]' PASSED 'suites/password/pwdPolicy_inherit_global_test.py::test_entry_has_restrictions[cn=config]' PASSED 'suites/password/pwdPolicy_inherit_global_test.py::test_entry_has_restrictions[cn="cn=nsPwPolicyEntry,ou=People,dc=example,dc=com",cn=nsPwPolicyContainer,ou=People,dc=example,dc=com]' PASSED suites/password/pwdPolicy_syntax_test.py::test_pwdPolicy_syntax PASSED 'suites/password/pwdPolicy_warning_test.py::test_different_values[' ']' PASSED 'suites/password/pwdPolicy_warning_test.py::test_different_values[junk123]' PASSED 'suites/password/pwdPolicy_warning_test.py::test_different_values[on]' PASSED 'suites/password/pwdPolicy_warning_test.py::test_different_values[off]' PASSED 
suites/password/pwdPolicy_warning_test.py::test_expiry_time PASSED 'suites/password/pwdPolicy_warning_test.py::test_password_warning[passwordSendExpiringTime-off]' PASSED 'suites/password/pwdPolicy_warning_test.py::test_password_warning[passwordWarning-3600]' PASSED suites/password/pwdPolicy_warning_test.py::test_with_different_password_states PASSED suites/password/pwdPolicy_warning_test.py::test_default_behavior PASSED suites/password/pwdPolicy_warning_test.py::test_with_local_policy PASSED suites/password/pwp_history_test.py::test_pwp_history_test PASSED suites/posix_winsync_plugin/posix_winsync_test.py::test_posix_winsync_init PASSED suites/posix_winsync_plugin/posix_winsync_test.py::test_posix_ winsync_ PASSED suites/psearch/psearch_test.py::test_psearch_init PASSED suites/psearch/psearch_test.py::test_psearch_ PASSED suites/referint_plugin/referint_test.py::test_referint_init PASSED suites/referint_plugin/referint_test.py::test_referint_ PASSED suites/replication/cleanallruv_test.py::test_cleanallruv_init PASSED suites/replication/cleanallruv_test.py::test_cleanallruv_clean PASSED suites/replication/cleanallruv_test.py::test_cleanallruv_clean_restart PASSED suites/replication/cleanallruv_test.py::test_cleanallruv_clean_force PASSED suites/replication/cleanallruv_test.py::test_cleanallruv_abort PASSED suites/replication/cleanallruv_test.py::test_cleanallruv_abort_restart PASSED suites/replication/cleanallruv_test.py::test_cleanallruv_abort_certify PASSED suites/replication/cleanallruv_test.py::test_cleanallruv_stress_clean PASSED suites/replication/wait_for_async_feature_test.py::test_not_int_value PASSED suites/replication/wait_for_async_feature_test.py::test_multi_value PASSED 'suites/replication/wait_for_async_feature_test.py::test_value_check[waitfor_async_attr0]' PASSED 'suites/replication/wait_for_async_feature_test.py::test_value_check[waitfor_async_attr1]' PASSED 'suites/replication/wait_for_async_feature_test.py::test_value_check[waitfor_async_attr2]' PASSED 'suites/replication/wait_for_async_feature_test.py::test_value_check[waitfor_async_attr3]' PASSED 'suites/replication/wait_for_async_feature_test.py::test_behavior_with_value[waitfor_async_attr0]' PASSED 'suites/replication/wait_for_async_feature_test.py::test_behavior_with_value[waitfor_async_attr1]' PASSED 'suites/replication/wait_for_async_feature_test.py::test_behavior_with_value[waitfor_async_attr2]' PASSED 'suites/replication/wait_for_async_feature_test.py::test_behavior_with_value[waitfor_async_attr3]' PASSED suites/replsync_plugin/repl_sync_test.py::test_repl_sync_init PASSED suites/replsync_plugin/repl_sync_test.py::test_repl_sync_ PASSED suites/resource_limits/res_limits_test.py::test_res_limits_init PASSED suites/resource_limits/ res_limits_test.py::test_res_limits_ PASSED suites/retrocl_plugin/retrocl_test.py::test_retrocl_init PASSED suites/retrocl_plugin/retrocl_test.py::test_retrocl_ PASSED suites/reverpwd_plugin/reverpwd_test.py::test_reverpwd_init PASSED suites/reverpwd_plugin/reverpwd_test.py::test_reverpwd_ PASSED suites/roles_plugin/roles_test.py::test_roles_init PASSED suites/roles_plugin/roles_test.py::test_roles_ PASSED suites/rootdn_plugin/rootdn_plugin_test.py::test_rootdn_init PASSED suites/rootdn_plugin/rootdn_plugin_test.py::test_rootdn_access_specific_time PASSED suites/rootdn_plugin/rootdn_plugin_test.py::test_rootdn_access_day_of_week PASSED suites/rootdn_plugin/rootdn_plugin_test.py::test_rootdn_access_denied_ip PASSED 
suites/rootdn_plugin/rootdn_plugin_test.py::test_rootdn_access_denied_host PASSED suites/rootdn_plugin/rootdn_plugin_test.py::test_rootdn_access_allowed_ip PASSED suites/rootdn_plugin/rootdn_plugin_test.py::test_rootdn_access_allowed_host PASSED suites/rootdn_plugin/rootdn_plugin_test.py::test_rootdn_config_validate PASSED suites/sasl/sasl_test.py::test_sasl_init PASSED suites/sasl/sasl_test.py::test_sasl_ PASSED suites/schema/test_schema.py::test_schema_comparewithfiles PASSED suites/schema_reload_plugin/schema_reload_test.py::test_schema_reload_init PASSED suites/schema_reload_plugin/schema_reload_test.py::test_schema_reload_ PASSED suites/snmp/snmp_test.py::test_snmp_init PASSED suites/snmp/snmp_test.py::test_snmp_ PASSED suites/ssl/ssl_test.py::test_ssl_init PASSED suites/ssl/ssl_test.py::test_ssl_ PASSED suites/syntax_plugin/syntax_test.py::test_syntax_init PASSED suites/syntax_plugin/syntax_test.py::test_syntax_ PASSED suites/usn_plugin/usn_test.py::test_usn_init PASSED suites/usn_plugin/usn_test.py::test_usn_ PASSED suites/views_plugin/views_test.py::test_views_init PASSED suites/views_plugin/views_test.py::test_views_ PASSED suites/vlv/vlv_test.py::test_vlv_init PASSED suites/vlv/vlv_test.py::test_vlv_ PASSED suites/whoami_plugin/whoami_test.py::test_whoami_init PASSED suites/whoami_plugin/whoami_test.py::test_whoami_ PASSED
==================================== ERRORS ====================================
________________ ERROR at setup of test_ticket48266_fractional _________________

request = <SubRequest 'topology' for <Function 'test_ticket48266_fractional'>>

    @pytest.fixture(scope="module")
    def topology(request):
        global installation1_prefix
        if installation1_prefix:
            args_instance[SER_DEPLOYED_DIR] = installation1_prefix

        # Creating master 1...
        master1 = DirSrv(verbose=False)
        if installation1_prefix:
            args_instance[SER_DEPLOYED_DIR] = installation1_prefix
        args_instance[SER_HOST] = HOST_MASTER_1
        args_instance[SER_PORT] = PORT_MASTER_1
        args_instance[SER_SERVERID_PROP] = SERVERID_MASTER_1
        args_instance[SER_CREATION_SUFFIX] = DEFAULT_SUFFIX
        args_master = args_instance.copy()
        master1.allocate(args_master)
        instance_master1 = master1.exists()
        if instance_master1:
            master1.delete()
        master1.create()
        master1.open()
        master1.replica.enableReplication(suffix=SUFFIX,
                                          role=REPLICAROLE_MASTER,
                                          replicaId=REPLICAID_MASTER_1)

        # Creating master 2...
        master2 = DirSrv(verbose=False)
        if installation1_prefix:
            args_instance[SER_DEPLOYED_DIR] = installation1_prefix
        args_instance[SER_HOST] = HOST_MASTER_2
        args_instance[SER_PORT] = PORT_MASTER_2
        args_instance[SER_SERVERID_PROP] = SERVERID_MASTER_2
        args_instance[SER_CREATION_SUFFIX] = DEFAULT_SUFFIX
        args_master = args_instance.copy()
        master2.allocate(args_master)
        instance_master2 = master2.exists()
        if instance_master2:
            master2.delete()
        master2.create()
        master2.open()
        master2.replica.enableReplication(suffix=SUFFIX,
                                          role=REPLICAROLE_MASTER,
                                          replicaId=REPLICAID_MASTER_2)

        #
        # Create all the agreements
        #
        # Creating agreement from master 1 to master 2
        properties = {RA_NAME: r'meTo_$host:$port',
                      RA_BINDDN: defaultProperties[REPLICATION_BIND_DN],
                      RA_BINDPW: defaultProperties[REPLICATION_BIND_PW],
                      RA_METHOD: defaultProperties[REPLICATION_BIND_METHOD],
                      RA_TRANSPORT_PROT: defaultProperties[REPLICATION_TRANSPORT]}
        m1_m2_agmt = master1.agreement.create(suffix=SUFFIX, host=master2.host,
                                              port=master2.port,
                                              properties=properties)
        if not m1_m2_agmt:
            log.fatal("Fail to create a master -> master replica agreement")
            sys.exit(1)
        log.debug("%s created" % m1_m2_agmt)

        # Creating agreement from master 2 to master 1
        properties = {RA_NAME: r'meTo_$host:$port',
                      RA_BINDDN: defaultProperties[REPLICATION_BIND_DN],
                      RA_BINDPW: defaultProperties[REPLICATION_BIND_PW],
                      RA_METHOD: defaultProperties[REPLICATION_BIND_METHOD],
                      RA_TRANSPORT_PROT: defaultProperties[REPLICATION_TRANSPORT]}
        m2_m1_agmt = master2.agreement.create(suffix=SUFFIX, host=master1.host,
                                              port=master1.port,
                                              properties=properties)
        if not m2_m1_agmt:
            log.fatal("Fail to create a master -> master replica agreement")
            sys.exit(1)
        log.debug("%s created" % m2_m1_agmt)

        # Allow the replicas to get situated with the new agreements...
        time.sleep(5)

        #
        # Initialize all the agreements
        #
        master1.agreement.init(SUFFIX, HOST_MASTER_2, PORT_MASTER_2)
>       master1.waitForReplInit(m1_m2_agmt)

<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48266_test.py>:104:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:2177: in waitForReplInit
    return self.replica.wait_init(agmtdn)
<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/replica.py>:596: in wait_init
    done, haserror = self.check_init(agmtdn)
<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/replica.py>:548: in check_init
    agmtdn, ldap.SCOPE_BASE, "(objectclass=*)", attrlist)
<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:1574: in getEntry
    restype, obj = self.result(res)
<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:127: in inner
    objtype, data = f(*args, **kwargs)
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:503: in result
    resp_type, resp_data, resp_msgid = self.result2(msgid,all,timeout)
<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:159: in inner
    return f(*args, **kwargs)
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:507: in result2
    resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all,timeout)
<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:159: in inner
    return f(*args, **kwargs)
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:514: in result3
    resp_ctrl_classes=resp_ctrl_classes
<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:159: in inner
    return f(*args, **kwargs)
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:521: in result4
    ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop)
<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:159: in inner
    return f(*args, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv instance at 0x7fd4151f0440>
func = <built-in method result4 of LDAP object at 0x7fd415e43468>
args = (17, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None
e = SERVER_DOWN({'desc': "Can't contact LDAP server"},)

    def _ldap_call(self,func,*args,**kwargs):
      """
      Wrapper method mainly for serializing calls into OpenLDAP libs
      and trace logs
      """
      self._ldap_object_lock.acquire()
      if __debug__:
        if self._trace_level>=1:
          self._trace_file.write('*** %s %s - %s\n%s\n' % (
            repr(self),
            self._uri,
            '.'.join((self.__class__.__name__,func.__name__)),
            pprint.pformat((args,kwargs))
          ))
          if self._trace_level>=9:
            traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
      diagnostic_message_success = None
      try:
        try:
>         result = func(*args,**kwargs)
E         SERVER_DOWN: {'desc': "Can't contact LDAP server"}

/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:106: SERVER_DOWN
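All three ticket48266 setup errors below appear to be the same event: the module-scoped topology fixture dies with SERVER_DOWN while master1.waitForReplInit() is polling the agreement status, i.e. master1 stopped answering during the total init, and the remaining tests then inherit the cached fixture failure. A small defensive wrapper (a sketch only, reusing the fixture's names master1/m1_m2_agmt/log; the single re-open retry is an illustration, not the suite's actual handling) would at least report which instance dropped:

    import ldap

    def wait_for_init_or_report(master, agmt_dn, retries=1):
        # Sketch: wrap the waitForReplInit() call that raised SERVER_DOWN above.
        for attempt in range(retries + 1):
            try:
                return master.waitForReplInit(agmt_dn)
            except ldap.SERVER_DOWN:
                log.error("%s:%s stopped answering while waiting for total init "
                          "of %s (attempt %d)", master.host, master.port,
                          agmt_dn, attempt)
                if attempt == retries:
                    raise
                master.open()   # re-bind and poll once more

Usage would be wait_for_init_or_report(master1, m1_m2_agmt) in place of the bare waitForReplInit() call.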
---------------------------- Captured stdout setup -----------------------------
OK group dirsrv exists
OK user dirsrv exists
OK group dirsrv exists
OK user dirsrv exists
---------------------------- Captured stderr setup -----------------------------
INFO:lib389:List backend with suffix=dc=example,dc=com
INFO:lib389:Found entry dn: cn=replrepl,cn=config cn: bind dn pseudo user cn: replrepl objectClass: top objectClass: person sn: bind dn pseudo user userPassword: {SSHA512}/ecTMmFs5i6NRXNp+xUCcKLH/Jaog39xn+WYAD48Z3ZAFLOAL41fWMR3txL8mAi06R3LlnzlKibM+BOnYu+Notrol10Ax2ij
INFO:lib389:List backend with suffix=dc=example,dc=com
INFO:lib389:Found entry dn: cn=replrepl,cn=config cn: bind dn pseudo user cn: replrepl objectClass: top objectClass: person sn: bind dn pseudo user userPassword: {SSHA512}AaqPX6+DZ67BObb2+or3fL6vmwppkqQmuLKcL4eNeHm4Wi5wGHQyriWVm23w012f9LdsJigMh39kL/PTS2wRTOPcW/yTWsbK
DEBUG:tickets.ticket48266_test:cn=meTo_$host:$port,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config created
DEBUG:tickets.ticket48266_test:cn=meTo_$host:$port,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config created
INFO:lib389:Starting total init cn=meTo_$host:$port,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
______________ ERROR at setup of test_ticket48266_check_repl_desc ______________

request = <SubRequest 'topology' for <Function 'test_ticket48266_fractional'>>

    @pytest.fixture(scope="module")
    def topology(request):
        global installation1_prefix
        if installation1_prefix:
            args_instance[SER_DEPLOYED_DIR] = installation1_prefix

        # Creating master 1...
        master1 = DirSrv(verbose=False)
        if installation1_prefix:
            args_instance[SER_DEPLOYED_DIR] = installation1_prefix
        args_instance[SER_HOST] = HOST_MASTER_1
        args_instance[SER_PORT] = PORT_MASTER_1
        args_instance[SER_SERVERID_PROP] = SERVERID_MASTER_1
        args_instance[SER_CREATION_SUFFIX] = DEFAULT_SUFFIX
        args_master = args_instance.copy()
        master1.allocate(args_master)
        instance_master1 = master1.exists()
        if instance_master1:
            master1.delete()
        master1.create()
        master1.open()
        master1.replica.enableReplication(suffix=SUFFIX,
                                          role=REPLICAROLE_MASTER,
                                          replicaId=REPLICAID_MASTER_1)

        # Creating master 2...
        master2 = DirSrv(verbose=False)
        if installation1_prefix:
            args_instance[SER_DEPLOYED_DIR] = installation1_prefix
        args_instance[SER_HOST] = HOST_MASTER_2
        args_instance[SER_PORT] = PORT_MASTER_2
        args_instance[SER_SERVERID_PROP] = SERVERID_MASTER_2
        args_instance[SER_CREATION_SUFFIX] = DEFAULT_SUFFIX
        args_master = args_instance.copy()
        master2.allocate(args_master)
        instance_master2 = master2.exists()
        if instance_master2:
            master2.delete()
        master2.create()
        master2.open()
        master2.replica.enableReplication(suffix=SUFFIX,
                                          role=REPLICAROLE_MASTER,
                                          replicaId=REPLICAID_MASTER_2)

        #
        # Create all the agreements
        #
        # Creating agreement from master 1 to master 2
        properties = {RA_NAME: r'meTo_$host:$port',
                      RA_BINDDN: defaultProperties[REPLICATION_BIND_DN],
                      RA_BINDPW: defaultProperties[REPLICATION_BIND_PW],
                      RA_METHOD: defaultProperties[REPLICATION_BIND_METHOD],
                      RA_TRANSPORT_PROT: defaultProperties[REPLICATION_TRANSPORT]}
        m1_m2_agmt = master1.agreement.create(suffix=SUFFIX, host=master2.host,
                                              port=master2.port,
                                              properties=properties)
        if not m1_m2_agmt:
            log.fatal("Fail to create a master -> master replica agreement")
            sys.exit(1)
        log.debug("%s created" % m1_m2_agmt)

        # Creating agreement from master 2 to master 1
        properties = {RA_NAME: r'meTo_$host:$port',
                      RA_BINDDN: defaultProperties[REPLICATION_BIND_DN],
                      RA_BINDPW: defaultProperties[REPLICATION_BIND_PW],
                      RA_METHOD: defaultProperties[REPLICATION_BIND_METHOD],
                      RA_TRANSPORT_PROT: defaultProperties[REPLICATION_TRANSPORT]}
        m2_m1_agmt = master2.agreement.create(suffix=SUFFIX, host=master1.host,
                                              port=master1.port,
                                              properties=properties)
        if not m2_m1_agmt:
            log.fatal("Fail to create a master -> master replica agreement")
            sys.exit(1)
        log.debug("%s created" % m2_m1_agmt)

        # Allow the replicas to get situated with the new agreements...
'time.sleep(5)' '#' '#' Initialize all the agreements '#' 'master1.agreement.init(SUFFIX,' HOST_MASTER_2, 'PORT_MASTER_2)' '>' 'master1.waitForReplInit(m1_m2_agmt)' <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48266_test.py>:104: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:2177: in waitForReplInit return 'self.replica.wait_init(agmtdn)' <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/replica.py>:596: in wait_init done, haserror = 'self.check_init(agmtdn)' <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/replica.py>:548: in check_init agmtdn, ldap.SCOPE_BASE, '"(objectclass=*)",' 'attrlist)' <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:1574: in getEntry restype, obj = 'self.result(res)' <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:127: in inner objtype, data = 'f(*args,' '**kwargs)' /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:503: in result resp_type, resp_data, resp_msgid = 'self.result2(msgid,all,timeout)' <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:159: in inner return 'f(*args,' '**kwargs)' /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:507: in result2 resp_typ e, resp_data, resp_msgid, resp_ctrls = 'self.result3(msgid,all,timeout)' <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:159: in inner return 'f(*args,' '**kwargs)' /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:514: in result3 resp_ctrl_classes=resp_ctrl_classes <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:159: in inner return 'f(*args,' '**kwargs)' /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:521: in result4 ldap_result = 'self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop)' <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:159: in inner return 'f(*args,' '**kwargs)' _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = '<lib389.DirSrv' instance at '0x7fd4151f0440>' func = '<built-in' method result4 of LDAP object at '0x7fd415e43468>' args = '(17,' 1, -1, 0, 0, '0),' kwargs = '{},' diagnostic_message_success = None e = 'SERVER_DOWN({'\''desc'\'':' '"Can'\''t' contact LDAP 'server"},)' def '_ldap_call(self,func,*args,**kwargs):' '"""' Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs '"""' 'self._ldap_object_lock.acquire()' if __debug__: if 'self._trace_level>=1:' 'self._trace_file.write('\''***' %s %s - '%s\n%s\n'\''' % '(' 'repr(self),' self._uri, ''\''.'\''.join((self.__class__.__name__,func.__name__)),' 'pprint.pformat((args,kwargs))' '))' if 'self._trace_level>=9:' 'traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)' diagnostic_message_success = None try: try: '>' result = 'func(*args,**kwargs)' E SERVER_DOWN: '{'\''desc'\'':' '"Can'\''t' contact LDAP 'server"}' /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:106: SERVER_DOWN ___________ ERROR at setup of 
test_ticket48266_count_csn_evaluation ____________ request = '<SubRequest' ''\''topology'\''' for '<Function' ''\''test_ticket 48266_fractional'\''>>' '@pytest.fixture(scope="module")' def 'topology(request):' global installation1_prefix if installation1_prefix: 'args_instance[SER_DEPLOYED_DIR]' = installation1_prefix '#' Creating master 1... master1 = 'DirSrv(verbose=False)' if installation1_prefix: 'args_instance[SER_DEPLOYED_DIR]' = installation1_prefix 'args_instance[SER_HOST]' = HOST_MASTER_1 'args_instance[SER_PORT]' = PORT_MASTER_1 'args_instance[SER_SERVERID_PROP]' = SERVERID_MASTER_1 'args_instance[SER_CREATION_SUFFIX]' = DEFAULT_SUFFIX args_master = 'args_instance.copy()' 'master1.allocate(args_master)' instance_master1 = 'master1.exists()' if instance_master1: 'master1.delete()' 'master1.create()' 'master1.open()' 'master1.replica.enableReplication(suffix=SUFFIX,' role=REPLICAROLE_MASTER, 'replicaId=REPLICAID_MASTER_1)' '#' Creating master 2... master2 = 'DirSrv(verbose=False)' if installation1_prefix: 'args_instance[SER_DEPLOYED_DIR]' = installation1_prefix 'args_instance[SER_HOST]' = HOST_MASTER_2 'args_instance[SER_PORT]' = PORT_MASTER_2 'args_instance[SER_SERVERID_PROP]' = SERVERID_MASTER_2 'args_instance[SER_CREATION_SUFFIX]' = DEFAULT_SUFFIX args_master = 'args_instance.copy()' 'master2.allocate(args_master)' instance_master2 = 'master2.exists()' if instance_master2: 'master2.delete()' 'master2.create()' 'master2.open()' 'master2.replica.enableReplication(suffix=SUFFIX,' role=REPLICAROLE_MASTER, 'replicaId=REPLICAID_MASTER_2)' '#' '#' Create all the agreements '#' '#' Creating agreement from master 1 to master 2 properties = '{RA_NAME:' 'r'\''meTo_$host:$port'\'',' RA_BINDDN: 'defaultProperties[REPLICATION_BIND_DN],' RA_BINDPW: 'defaultProperties[REPLICATION_BIND_PW],' RA_METHOD: 'defaultProperties[REPLICATION_BIND_METHOD],' RA_TRANSPORT_PROT: 'defaultProperties[REPLICATION_TRANSPORT]}' m1_m2_agmt = 'master1.agreement.create(suffix=SUFFIX,' host=master2.host, port=master2.port, 'properties=properties)' if not m1_m2_agmt: 'log.fatal("Fail' to create a master '->' master replica 'agreement")' 'sys.exit(1)' 'log.debug("%s' 'created"' % 'm1_m2_agmt)' '#' Creating agreement from master 2 to master 1 properties = '{RA_NAME:' 'r'\''meTo_$host:$port'\'',' RA_BINDDN: 'defaultProperties[REPLICATION_BIND_DN],' RA_BINDPW: 'defaultProperties[REPLICATION_BIND_PW],' RA_METHOD: 'defaultProperties[REPLICATION_BIND_METHOD],' RA_TRANSPORT_PROT: 'defaultProperties[REPLICATION_TRANSPORT]}' m2_m1_agmt = 'master2.agreement.create(suffix=SUFFIX,' host=master1.host, port=master1.port, 'properties=properties)' if not m2_m1_agmt: 'log.fatal("Fail' to create a master '->' master replica 'agreement")' 'sys.exit(1)' 'log.debug("%s' 'created"' % 'm2_m1_agmt)' '#' Allow the replicas to get situated with the new agreements... 
'time.sleep(5)' '#' '#' Initialize all the agreements '#' 'master1.agreement.init(SUFFIX,' HOST_MASTER_2, 'PORT_MASTER_2)' '>' 'master1.waitForReplInit(m1_m2_agmt)' <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48266_test.py>:104: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:2177: in waitForReplInit return 'self.replica.wait_init(agmtdn)' <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/replica.py>:596: in wait_init done, haserror = 'self.check_init(agmtdn)' <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/replica.py>:548: in check_init agmtdn, ldap.SCOPE_BASE, '"(objectclass=*)",' 'attrlist)' <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:1574: in getEntry restype, obj = 'self.result(res)' <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:127: in inner objtype, data = 'f(*args,' '**kwargs)' /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:503: in result resp_type, resp_data, resp_msgid = 'self.result 2(msgid,all,timeout)' <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:159: in inner return 'f(*args,' '**kwargs)' /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:507: in result2 resp_type, resp_data, resp_msgid, resp_ctrls = 'self.result3(msgid,all,timeout)' <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:159: in inner return 'f(*args,' '**kwargs)' /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:514: in result3 resp_ctrl_classes=resp_ctrl_classes <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:159: in inner return 'f(*args,' '**kwargs)' /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:521: in result4 ldap_result = 'self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop)' <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:159: in inner return 'f(*args,' '**kwargs)' _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = '<lib389.DirSrv' instance at '0x7fd4151f0440>' func = '<built-in' method result4 of LDAP object at '0x7fd415e43468>' args = '(17,' 1, -1, 0, 0, '0),' kwargs = '{},' diagnostic_message_success = None e = 'SERVER_DOWN({'\''desc'\'':' '"Can'\''t' contact LDAP 'server"},)' def '_ldap_call(self,func,*args,**kwargs):' '"""' Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs '"""' 'self._ldap_object_lock.acquire()' if __debug__: if 'self._trace_level>=1:' 'self._trace_file.write('\''***' %s %s - '%s\n%s\n'\''' % '(' 'repr(self),' self._uri, ''\''.'\''.join((self.__class__.__name__,func.__name__)),' 'pprint.pformat((args,kwargs))' '))' if 'self._trace_level>=9:' 'traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)' diagnostic_message_success = None try: try: '>' result = 'func(*args,**kwargs)' E SERVER_DOWN: '{'\''desc'\'':' '"Can'\' 't' contact LDAP 'server"}' /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:106: SERVER_DOWN ______________________ ERROR at setup of 
test_ticket4026 _______________________ request = '<SubRequest' ''\''topology'\''' for '<Function' ''\''test_ticket4026'\''>>' '@pytest.fixture(scope="module")' def 'topology(request):' global installation1_prefix if installation1_prefix: 'args_instance[SER_DEPLOYED_DIR]' = installation1_prefix '#' Creating master 1... master1 = 'DirSrv(verbose=False)' if installation1_prefix: 'args_instance[SER_DEPLOYED_DIR]' = installation1_prefix 'args_instance[SER_HOST]' = HOST_MASTER_1 'args_instance[SER_PORT]' = PORT_MASTER_1 'args_instance[SER_SERVERID_PROP]' = SERVERID_MASTER_1 'args_instance[SER_CREATION_SUFFIX]' = DEFAULT_SUFFIX args_master = 'args_instance.copy()' 'master1.allocate(args_master)' instance_master1 = 'master1.exists()' if instance_master1: 'master1.delete()' 'master1.create()' 'master1.open()' 'master1.replica.enableReplication(suffix=SUFFIX,' role=REPLICAROLE_MASTER, 'replicaId=REPLICAID_MASTER_1)' '#' Creating master 2... master2 = 'DirSrv(verbose=False)' if installation1_prefix: 'args_instance[SER_DEPLOYED_DIR]' = installation1_prefix 'args_instance[SER_HOST]' = HOST_MASTER_2 'args_instance[SER_PORT]' = PORT_MASTER_2 'args_instance[SER_SERVERID_PROP]' = SERVERID_MASTER_2 'args_instance[SER_CREATION_SUFFIX]' = DEFAULT_SUFFIX args_master = 'args_instance.copy()' 'master2.allocate(args_master)' instance_master2 = 'master2.exists()' if instance_master2: 'master2.delete()' 'master2.create()' 'master2.open()' 'master2.replica.enableReplication(suffix=SUFFIX,' role=REPLICAROLE_MASTER, 'replicaId=REPLICAID_MASTER_2)' '#' Creating master 3... master3 = 'DirSrv(verbose=False)' if installation1_prefix: 'args_instance[SER_DEPLOYED_DIR]' = installation1_prefix 'args_instance[SER_HOST]' = HOST_MASTER_3 'args_instance[SER_PORT]' = PORT_MASTER_3 'args_instance[SER_SERVERID_PROP]' = SERVERID_MASTER_3 'args_instance[SER_CREATION_SUFFIX]' = DEFAULT_SUFFIX args_master = 'args_instance .copy()' 'master3.allocate(args_master)' instance_master3 = 'master3.exists()' if instance_master3: 'master3.delete()' 'master3.create()' 'master3.open()' 'master3.replica.enableReplication(suffix=SUFFIX,' role=REPLICAROLE_MASTER, 'replicaId=REPLICAID_MASTER_3)' '#' '#' Create all the agreements '#' '#' Creating agreement from master 1 to master 2 properties = '{RA_BINDDN:' 'defaultProperties[REPLICATION_BIND_DN],' RA_BINDPW: 'defaultProperties[REPLICATION_BIND_PW],' RA_METHOD: 'defaultProperties[REPLICATION_BIND_METHOD],' RA_TRANSPORT_PROT: 'defaultProperties[REPLICATION_TRANSPORT]}' m1_m2_agmt = 'master1.agreement.create(suffix=SUFFIX,' host=master2.host, port=master2.port, 'properties=properties)' if not m1_m2_agmt: 'log.fatal("Fail' to create a master '->' master replica 'agreement")' 'sys.exit(1)' 'log.debug("%s' 'created"' % 'm1_m2_agmt)' '#' Creating agreement from master 1 to master 3 '#' properties = '{RA_NAME:' 'r'\''meTo_$host:$port'\'',' '#' RA_BINDDN: 'defaultProperties[REPLICATION_BIND_DN],' '#' RA_BINDPW: 'defaultProperties[REPLICATION_BIND_PW],' '#' RA_METHOD: 'defaultProperties[REPLICATION_BIND_METHOD],' '#' RA_TRANSPORT_PROT: 'defaultProperties[REPLICATION_TRANSPORT]}' '#' m1_m3_agmt = 'master1.agreement.create(suffix=SUFFIX,' host=master3.host, port=master3.port, 'properties=properties)' '#' if not m1_m3_agmt: '#' 'log.fatal("Fail' to create a master '->' master replica 'agreement")' '#' 'sys.exit(1)' '#' 'log.debug("%s' 'created"' % 'm1_m3_agmt)' '#' Creating agreement from master 2 to master 1 properties = '{RA_BINDDN:' 'defaultProperties[REPLICATION_BIND_DN],' RA_BINDPW: 
'defaultProperties[REPLICATION_BIND_PW],' RA_METHOD: 'defaultProperties[REPLICATION_BIND_METHOD],' RA_TRANSPORT_PROT: 'defaultProperties[REPLICATION_TRANSPORT]}' m2_m1_agmt = 'master2.agreement.create(suffix=SUFFIX,' host=master1.host, port=master1.port, 'properties=properties)' if not m2_m1_agmt: 'log.fatal("Fail' to create a master '->' master replica 'agreement")' 'sys.exit(1)' 'log.debug("%s' 'created"' % 'm2_m1_agmt)' ' #' Creating agreement from master 2 to master 3 properties = '{RA_BINDDN:' 'defaultProperties[REPLICATION_BIND_DN],' RA_BINDPW: 'defaultProperties[REPLICATION_BIND_PW],' RA_METHOD: 'defaultProperties[REPLICATION_BIND_METHOD],' RA_TRANSPORT_PROT: 'defaultProperties[REPLICATION_TRANSPORT]}' m2_m3_agmt = 'master2.agreement.create(suffix=SUFFIX,' host=master3.host, port=master3.port, 'properties=properties)' if not m2_m3_agmt: 'log.fatal("Fail' to create a master '->' master replica 'agreement")' 'sys.exit(1)' 'log.debug("%s' 'created"' % 'm2_m3_agmt)' '#' Creating agreement from master 3 to master 1 '#' properties = '{RA_NAME:' 'r'\''meTo_$host:$port'\'',' '#' RA_BINDDN: 'defaultProperties[REPLICATION_BIND_DN],' '#' RA_BINDPW: 'defaultProperties[REPLICATION_BIND_PW],' '#' RA_METHOD: 'defaultProperties[REPLICATION_BIND_METHOD],' '#' RA_TRANSPORT_PROT: 'defaultProperties[REPLICATION_TRANSPORT]}' '#' m3_m1_agmt = 'master3.agreement.create(suffix=SUFFIX,' host=master1.host, port=master1.port, 'properties=properties)' '#' if not m3_m1_agmt: '#' 'log.fatal("Fail' to create a master '->' master replica 'agreement")' '#' 'sys.exit(1)' '#' 'log.debug("%s' 'created"' % 'm3_m1_agmt)' '#' Creating agreement from master 3 to master 2 properties = '{RA_BINDDN:' 'defaultProperties[REPLICATION_BIND_DN],' RA_BINDPW: 'defaultProperties[REPLICATION_BIND_PW],' RA_METHOD: 'defaultProperties[REPLICATION_BIND_METHOD],' RA_TRANSPORT_PROT: 'defaultProperties[REPLICATION_TRANSPORT]}' m3_m2_agmt = 'master3.agreement.create(suffix=SUFFIX,' host=master2.host, port=master2.port, 'properties=properties)' if not m3_m2_agmt: 'log.fatal("Fail' to create a master '->' master replica 'agreement")' 'sys.exit(1)' 'log.debug("%s' 'created"' % 'm3_m2_agmt)' '#' Allow the replicas to get situated with the new agreements... 
'time.sleep(5)' '#' '#' Initialize all the agreements '#' 'master1.agreement.init(SUFFIX,' HOST_MASTER_2, 'PORT_MASTER_2)' 'master1.waitForReplInit(m1_m2_agmt)' 'time.sleep(5)' '#' just to be safe 'master2.agreement.init(SUFFIX,' HOST_MA STER_3, 'PORT_MASTER_3)' '>' 'master2.waitForReplInit(m2_m3_agmt)' <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48342_test.py>:171: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:2177: in waitForReplInit return 'self.replica.wait_init(agmtdn)' <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/replica.py>:596: in wait_init done, haserror = 'self.check_init(agmtdn)' <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/replica.py>:548: in check_init agmtdn, ldap.SCOPE_BASE, '"(objectclass=*)",' 'attrlist)' <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:1574: in getEntry restype, obj = 'self.result(res)' <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:127: in inner objtype, data = 'f(*args,' '**kwargs)' /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:503: in result resp_type, resp_data, resp_msgid = 'self.result2(msgid,all,timeout)' <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:159: in inner return 'f(*args,' '**kwargs)' /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:507: in result2 resp_type, resp_data, resp_msgid, resp_ctrls = 'self.result3(msgid,all,timeout)' <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:159: in inner return 'f(*args,' '**kwargs)' /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:514: in result3 resp_ctrl_classes=resp_ctrl_classes <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:159: in inner return 'f(*args,' '**kwargs)' /usr/lib64/python2.7/site-packages/ldap/l dapobject.py:521: in result4 ldap_result = 'self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop)' <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:159: in inner return 'f(*args,' '**kwargs)' _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = '<lib389.DirSrv' instance at '0x7fd41476ae18>' func = '<built-in' method result4 of LDAP object at '0x7fd415cf5dc8>' args = '(21,' 1, -1, 0, 0, '0),' kwargs = '{},' diagnostic_message_success = None e = 'SERVER_DOWN({'\''desc'\'':' '"Can'\''t' contact LDAP 'server"},)' def '_ldap_call(self,func,*args,**kwargs):' '"""' Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs '"""' 'self._ldap_object_lock.acquire()' if __debug__: if 'self._trace_level>=1:' 'self._trace_file.write('\''***' %s %s - '%s\n%s\n'\''' % '(' 'repr(self),' self._uri, ''\''.'\''.join((self.__class__.__name__,func.__name__)),' 'pprint.pformat((args,kwargs))' '))' if 'self._trace_level>=9:' 'traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)' diagnostic_message_success = None try: try: '>' result = 'func(*args,**kwargs)' E SERVER_DOWN: '{'\''desc'\'':' '"Can'\''t' 
contact LDAP server"}
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:106: SERVER_DOWN
---------------------------- Captured stdout setup -----------------------------
OK group dirsrv exists
OK user dirsrv exists
Instance slapd-master_2 removed.
OK group dirsrv exists
OK user dirsrv exists
OK group dirsrv exists
OK user dirsrv exists
('Update succeeded: status ', '0 Total update succeeded')
---------------------------- Captured stderr setup -----------------------------
INFO:lib389:List backend with suffix=dc=example,dc=com
INFO:lib389:Found entry dn: cn=replrepl,cn=config cn: bind dn pseudo user cn: replrepl objectClass: top objectClass: person sn: bind dn pseudo user userPassword: {SSHA512}9eNP/JN3T5BbuSmXd4mH30B092YbFrY9EGvjXepy1jcqC6hN5C6pf37nZaI3qnnpUXE1zH8UF1SA5I/D1U+0Wia91ccC39Lb
INFO:lib389:List backend with suffix=dc=example,dc=com
INFO:lib389:Found entry dn: cn=replrepl,cn=config cn: bind dn pseudo user cn: replrepl objectClass: top objectClass: person sn: bind dn pseudo user userPassword: {SSHA512}BO+TCDI/Ze/G1j1sKbzu2o4/Bb4tUsXhuO95rjOygP/tEwmhfrX0eHF1ZA6H2ltoU2inw9SISUR+4hwvvskVb2a672svxdld
INFO:lib389:List backend with suffix=dc=example,dc=com
INFO:lib389:Found entry dn: cn=replrepl,cn=config cn: bind dn pseudo user cn: replrepl objectClass: top objectClass: person sn: bind dn pseudo user userPassword: {SSHA512}z6/245iCbldFkSrBhHJRhc/5XGwZpL9J+OWcuybW9GU3bHio82qdtIj1XPbR4lvr3bXRwWDd0yb9SLwi4L1g/uVlOjpP7a9R
DEBUG:tickets.ticket48342_test:cn=meTo_localhost.localdomain:38942,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config created
DEBUG:tickets.ticket48342_test:cn=meTo_localhost.localdomain:38941,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config created
DEBUG:tickets.ticket48342_test:cn=meTo_localhost.localdomain:38943,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config created
DEBUG:tickets.ticket48342_test:cn=meTo_localhost.localdomain:38942,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config created
INFO:lib389:Starting total init cn=meTo_localhost.localdomain:38942,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO:lib389:Starting total init cn=meTo_localhost.localdomain:38943,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
____________________ ERROR at teardown of test_range_search ____________________
    def fin():
        standalone.delete()
        if not standalone.has_asan():
            sbin_dir = standalone.get_sbin_dir()
>           valgrind_disable(sbin_dir)
<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/suites/memory_leaks/range_search_test.py>:61:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
sbin_dir = '/usr/sbin'
    def valgrind_disable(sbin_dir):
        '''
        Restore the ns-slapd binary to its original state - the server
        instances are expected to be stopped.
        Note - selinux is enabled at the end of this process.
        :param sbin_dir - the location of the ns-slapd binary (e.g. /usr/sbin)
        :raise ValueError
        :raise EnvironmentError: If script is not run as 'root'
        '''
        if os.geteuid() != 0:
            log.error('This script must be run as root to use valgrind')
            raise EnvironmentError
        nsslapd_orig = '%s/ns-slapd' % sbin_dir
        nsslapd_backup = '%s/ns-slapd.original' % sbin_dir
        # Restore the original ns-slapd
        try:
            shutil.copyfile(nsslapd_backup, nsslapd_orig)
        except IOError as e:
            log.fatal('valgrind_disable: failed to restore ns-slapd, error: %s' % e.strerror)
>           raise ValueError('failed to restore ns-slapd, error: %s' % e.strerror)
E           ValueError: failed to restore ns-slapd, error: Text file busy
<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/utils.py>:288: ValueError
----------------------------- Captured stderr call -----------------------------
INFO:suites.memory_leaks.range_search_test:Running test_range_search...
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user1,dc=example,dc=com: error Can't contact LDAP server
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user2,dc=example,dc=com: error Can't contact LDAP server
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user3,dc=example,dc=com: error Can't contact LDAP server
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user4,dc=example,dc=com: error Can't contact LDAP server
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user5,dc=example,dc=com: error Can't contact LDAP server
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user6,dc=example,dc=com: error Can't contact LDAP server
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user7,dc=example,dc=com: error Can't contact LDAP server
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user8,dc=example,dc=com: error Can't contact LDAP server
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user9,dc=example,dc=com: error Can't contact LDAP server
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user10,dc=example,dc=com: error Can't contact LDAP server
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user11,dc=example,dc=com: error Can't contact LDAP server
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user12,dc=example,dc=com: error Can't contact LDAP server
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user13,dc=example,dc=com: error Can't contact LDAP server
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user14,dc=example,dc=com: error Can't contact LDAP server
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user15,dc=example,dc=com: error Can't contact LDAP server
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user16,dc=example,dc=com: error Can't contact LDAP server
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add
test user uid=user17,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user18,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add t est user uid=user19,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user20,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user21,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user22,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user23,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user24,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user25,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user26,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user27,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user28,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user29,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user30,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user31,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test use r uid=user32,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user33,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user34,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user35,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user36,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user37,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user38,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user39,dc=example,dc=com: error 'Can'\''t' contact LDAP server 
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user40,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user41,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user42,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user43,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user44,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=u ser45,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user46,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user47,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user48,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user49,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user50,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user51,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user52,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user53,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user54,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user55,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user56,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user57,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user58,d c=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user59,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user60,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user61,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user 
uid=user62,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user63,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user64,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user65,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user66,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user67,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user68,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user69,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user70,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user71,dc=examp le,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user72,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user73,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user74,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user75,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user76,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user77,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user78,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user79,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user80,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user81,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user82,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user83,dc=example,dc=com: error 'Can'\''t' contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user84,dc=example,dc=c om: error 'Can'\''t' contact LDAP server 
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user85,dc=example,dc=com: error Can't contact LDAP server
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user86,dc=example,dc=com: error Can't contact LDAP server
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user87,dc=example,dc=com: error Can't contact LDAP server
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user88,dc=example,dc=com: error Can't contact LDAP server
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user89,dc=example,dc=com: error Can't contact LDAP server
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user90,dc=example,dc=com: error Can't contact LDAP server
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user91,dc=example,dc=com: error Can't contact LDAP server
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user92,dc=example,dc=com: error Can't contact LDAP server
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user93,dc=example,dc=com: error Can't contact LDAP server
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user94,dc=example,dc=com: error Can't contact LDAP server
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user95,dc=example,dc=com: error Can't contact LDAP server
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user96,dc=example,dc=com: error Can't contact LDAP server
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user97,dc=example,dc=com: error Can't contact LDAP server
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user98,dc=example,dc=com: error Can't contact LDAP server
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user99,dc=example,dc=com: error Can't contact LDAP server
INFO:suites.memory_leaks.range_search_test:test_range_search: PASSED
--------------------------- Captured stdout teardown ---------------------------
Instance slapd-standalone removed.
--------------------------- Captured stderr teardown ---------------------------
CRITICAL:lib389.utils:valgrind_disable: failed to restore ns-slapd, error: Text file busy
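A note on the ValueError in the teardown above, offered as a reading of the errno rather than a confirmed diagnosis: "Text file busy" (ETXTBSY) is what the kernel returns when shutil.copyfile() opens /usr/sbin/ns-slapd for writing while some process is still executing that binary. Stopping every instance that still runs it before the restore avoids the error, and so does replacing the path instead of rewriting the file in place, as in the minimal sketch below (restore_ns_slapd is a hypothetical helper, not part of lib389):

    import os
    import shutil

    def restore_ns_slapd(sbin_dir):
        # Hypothetical helper: put the original ns-slapd back by replacing the
        # path rather than overwriting the running binary in place.
        nsslapd_orig = '%s/ns-slapd' % sbin_dir
        nsslapd_backup = '%s/ns-slapd.original' % sbin_dir
        if not os.path.isfile(nsslapd_backup):
            return
        if os.path.exists(nsslapd_orig):
            # Unlinking is allowed even while a process executes the old file;
            # opening it for writing is what raises ETXTBSY.
            os.unlink(nsslapd_orig)
        shutil.copy2(nsslapd_backup, nsslapd_orig)
        os.remove(nsslapd_backup)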
'"""' 'log.info('\''Testing' Bug 1347760 - Information disclosure via repeated use of LDAP ADD operation, 'etc.'\'')' 'log.info('\''Disabling' accesslog 'logbuffering'\'')' 'topology.standalone.modify_s(CONFIG_DN,' '[(ldap.MOD_REPLACE,' ''\''nsslapd-accesslog-logbuffering'\'',' ''\''off'\'')])' 'log.info('\''Bind' as '{%s,%s}'\''' % '(DN_DM,' 'PASSWORD))' 'topology.standalone.simple_bind_s(DN_DM,' 'PASSWORD)' 'log.info('\''Adding' ou=%s a bind user belongs 'to.'\''' % 'BOU)' 'topology.standalone.add_s(Entry((BINDOU,' '{' ''\''objectclass'\'':' ''\''top' 'organizationalunit'\''.split(),' ''\''ou'\'':' 'BOU})))' 'log.info('\''Adding' a bind 'user.'\'')' 'topology.standalone.add_s(Entry((BINDDN,' '{'\''objectclass'\'':' '"top' person organizationalPerson 'inetOrgPerson".split(),' ''\''cn'\'':' ''\''bind' 'user'\'',' ''\''sn'\'':' ''\''user'\'',' ''\''userPassword'\'':' 'BINDPW})))' 'log.info('\''Adding' a test 'user.'\'')' 'topology.standalone.add_s(Entry((TESTDN,' '{'\''object class'\'':' '"top' person organizationalPerson 'inetOrgPerson".split(),' ''\''cn'\'':' ''\''test' 'user'\'',' ''\''sn'\'':' ''\''user'\'',' ''\''userPassword'\'':' 'TESTPW})))' 'log.info('\''Deleting' aci in '%s.'\''' % 'DEFAULT_SUFFIX)' 'topology.standalone.modify_s(DEFAULT_SUFFIX,' '[(ldap.MOD_DELETE,' ''\''aci'\'',' 'None)])' 'log.info('\''Bind' case 1. the bind user has no rights to read the entry itself, bind should be 'successful.'\'')' 'log.info('\''Bind' as '{%s,%s}' who has no access 'rights.'\''' % '(BINDDN,' 'BINDPW))' try: 'topology.standalone.simple_bind_s(BINDDN,' 'BINDPW)' except ldap.LDAPError as e: 'log.info('\''Desc' \' + 'e.message['\''desc'\''])' assert False file_path = 'os.path.join(topology.standalone.prefix,' ''\''var/log/dirsrv/slapd-%s/access'\''' % 'topology.standalone.serverid)' '>' file_obj = 'open(file_path,' '"r")' E IOError: '[Errno' '2]' No such file or directory: ''\''/usr/var/log/dirsrv/slapd-standalone/access'\''' tickets/ticket1347760_test.py:236: IOError ---------------------------- Captured stdout setup ----------------------------- OK group dirsrv exists OK user dirsrv exists ----------------------------- Captured stderr call ----------------------------- INFO:tickets.ticket1347760_test:Testing Bug 1347760 - Information disclosure via repeated use of LDAP ADD operation, etc. INFO:tickets.ticket1347760_test:Disabling accesslog logbuffering INFO:tickets.ticket1347760_test:Bind as '{cn=Directory' 'Manager,password}' INFO:tickets.ticket1347760_test:Adding ou=BOU a bind user belongs to. INFO:tickets.ticket1347760_test:Adding a bind user. INFO:tickets.ticket1347760_test:Adding a test user. INFO:tickets.ticket1347760_test:Deleting aci in dc=example,dc=com. INFO:tickets.ticket1347760_test:Bind case 1. the bind user has no rights to read the entry itself, bind should be successful. INFO:tickets.ticket1347760_test:Bind as '{uid=buser123,ou=BOU,dc=example,dc=com,buser123}' who has no access rights. ______________________________ test_ticket47431_1 ______________________________ topolo gy = '<tickets.ticket47431_test.TopologyStandalone' object at '0x7fd4152d89d0>' def 'test_ticket47431_1(topology):' ''\'''\'''\''' nsslapd-pluginarg0: uid nsslapd-pluginarg1: mail nsslapd-pluginarg2: userpassword '<==' repeat 27 times nsslapd-pluginarg3: , nsslapd-pluginarg4: dc=example,dc=com The duplicated values are removed by str2entry_dupcheck as follows: '[..]' - str2entry_dupcheck: 27 duplicate values for attribute type nsslapd-pluginarg2 detected in entry cn=7-bit check,cn=plugins,cn=config. 
Extra values ignored. ''\'''\'''\''' 'log.info("Ticket' 47431 - 1: Check 26 duplicate values are treated as 'one...")' expected = '"str2entry_dupcheck' - . .. .cache duplicate values for attribute type nsslapd-pluginarg2 detected in entry cn=7-bit 'check,cn=plugins,cn=config."' 'log.debug('\''modify_s' '%s'\''' % 'DN_7BITPLUGIN)' try: 'topology.standalone.modify_s(DN_7BITPLUGIN,' '[(ldap.MOD_REPLACE,' ''\''nsslapd-pluginarg0'\'',' '"uid"),' '(ldap.MOD_REPLACE,' ''\''nsslapd-pluginarg1'\'',' '"mail"),' '(ldap.MOD_REPLACE,' ''\''nsslapd-pluginarg2'\'',' '"userpassword"),' '(ldap.MOD_REPLACE,' ''\''nsslapd-pluginarg3'\'',' '","),' '(ldap.MOD_REPLACE,' ''\''nsslapd-pluginarg4'\'',' 'SUFFIX)])' except ValueError: 'log.error('\''modify' failed: Some problem occured with a value that was 'provided'\'')' assert False arg2 = '"nsslapd-pluginarg2:' 'userpassword"' 'topology.standalone.stop(timeout=10)' dse_ldif = topology.standalone.confdir + ''\''/dse.ldif'\''' 'os.system('\''mv' %s '%s.47431'\''' % '(dse_ldif,' 'dse_ldif))' 'os.system('\''sed' -e '"s/\\(%s\\)/\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1/"' %s.47431 '>' '%s'\''' % '(arg2,' dse_ldif, 'dse_ldif))' 'topology.standalone.start(timeout=10)' cmdline = ''\''egrep' -i '"%s"' '%s'\''' % '(expected,' 'topology.standalone.errlog)' p = 'os.popen(cmdline,' '"r")' line = 'p.readline()' if line == '"":' 'log.error('\''Expected' error '"%s"' not logged in '%s'\''' % '(expected,' 'topology.standalone.errlog))' '>' assert False E assert False tickets/ticket47431_test.py:110: AssertionError ----------------------------- Captured stderr call ----------------------------- INFO:tickets.ticket47431_test:Ticket 47431 - 1: Check 26 duplicate values are treated as one... DEBUG:tickets.ticket47431_test:modify_s cn=7-bit check,cn=plugins,cn=config grep: /var/log/dirsrv/slapd-standalone/error: No such file or directory ERROR:tickets.ticket47431_test:Expected error '"str2entry_dupcheck' - . .. .cache duplicate values for attribute type nsslapd-pluginarg2 detected in entry cn=7-bit 'check,cn=plugins,cn=config."' not logged in /var/log/dirsrv/slapd-standalone/error _______________________________ test_ticket47462 _______________________________ topology = '<tickets.ticket47462_test.TopologyMaster1Master2' object at '0x7fd41598fd90>' def 'test_ticket47462(topology):' '"""' Test that AES properly replaces DES during an update/restart, and that replication also works correctly. '"""' '#' '#' First set config as if 'it'\''s' an older version. Set DES to use '#' libdes-plugin, MMR to depend on DES, delete the existing AES plugin, '#' and set a DES password for the replication agreement. 
'#' '#' Add an extra attribute to the DES plugin args '#' try: 'topology.master1.modify_s(DES_PLUGIN,' '[(ldap.MOD_REPLACE,' ''\''nsslapd-pluginEnabled'\'',' ''\''on'\'')])' except ldap.LDAPError as e: 'log.fatal('\''Failed' to enable DES plugin, error: \' + 'e.message['\''desc'\''])' assert False try: 'topology.master1.modify_s(DES_PLUGIN,' '[(ldap.MOD_ADD,' ''\''nsslapd-pluginarg2'\'',' ''\''description'\'')])' except ldap.LDAPError as e: 'log.fatal('\''Failed' to reset DES plugin, error: \' + 'e.message['\''desc'\''])' assert False try: 'topology.master1.modify_s(MMR_PLUGIN,' '[(ldap.MOD_DELETE,' ''\''nsslapd-plugin-depends-on-named'\'',' ''\''AES'\'')])' except ldap.NO_SUCH_ATTRIBUTE: pass except ldap.LDAPError as e: 'log.fatal('\''Failed' to reset MMR plugin, error: \' + 'e.message['\''desc'\''])' assert False '#' '#' Delete the AES plugin '#' try: 'topology.master1.delete_s(AES_PLUGIN)' except ldap.NO_SUCH_OBJECT: pass except ldap.LDAPError as e: 'log.fatal('\''Failed' to delete AES plugin, error: \' + 'e.message['\''desc'\''])' assert False '#' restart the server so we must use DES plugin 'topology.master1.restart(timeout=10)' '#' '#' Get the agmt dn, and set the password '#' try: entry = 'topology.master1.search_s('\''cn=config'\'',' ldap.SCOPE_SUBTREE, ''\''objectclass=nsDS5ReplicationAgreement'\'')' if entry: agmt_dn = 'entry[0].dn' 'log.info('\''Found' agmt dn '(%s)'\''' % 'agmt_dn)' else: 'log.fatal('\''No' replication 'agreements!'\'')' assert False except ldap.LDAPError as e: 'log.fatal('\''Failed' to search for replica credentials: \' + 'e.message['\''desc'\''])' assert False try: properties = '{RA_BINDPW:' '"password"}' 'topology.master1.agreement.setProperties(None,' agmt_dn, None, 'properties)' 'log.info('\''Successfully' modified replication 'agreement'\'')' except ValueError: 'log.error('\''Failed' to update replica agreement: \' + 'AGMT_DN)' assert False '#' '#' Check replication works with the new DES password '#' try: 'topology.master1.add_s(Entry((USER1_DN,' '{'\''objectclass'\'':' '"top' 'person".split(),' ''\''sn'\'':' ''\''sn'\'',' ''\''description'\'':' ''\''DES' value to 'convert'\'',' ''\''cn'\'':' ''\''test_user'\''})))' loop = 0 ent = None while loop '<=' 10: try: ent = 'topology.master2.getEntry(USER1_DN,' ldap.SCOPE_BASE, '"(objectclass=*)")' break except ldap.NO_SUCH_OBJECT: 'time.sleep(1)' loop += 1 if not ent: 'log.fatal('\''Replication' test failed fo 'user1!'\'')' assert False else: 'log.info('\''Replication' test 'passed'\'')' except ldap.LDAPError as e: 'log.fatal('\''Failed' to add test user: \' + 'e.message['\''desc'\''])' assert False '#' '#' Add a backend '(that' has no 'entries)' '#' try: 'topology.master1.backend.create("o=empty",' '{BACKEND_NAME:' '"empty"})' except ldap.LDAPError as e: 'log.fatal('\''Failed' to create extra/empty backend: \' + 'e.message['\''desc'\''])' assert False '#' '#' Run the upgrade... '#' '>' 'topology.master1.upgrade('\''online'\'')' tickets/ticket47462_test.py:269: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ ../../../lib389/lib389/__init__.py:2500: in upgrade 'DirSrvTools.runUpgrade(self.prefix,' 'online)' _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ prefix = ''\''/usr'\'',' online = True @staticmethod def 'runUpgrade(prefix,' 'online=True):' ''\'''\'''\''' Run '"setup-ds.pl' '--update"' We simply pass in one DirSrv isntance, and this will update all the instances that are in this prefix. 
For the update to work we must fix/adjust the permissions of the scripts in: '/prefix/lib[64]/dirsrv/slapd-INSTANCE/' ''\'''\'''\''' if not prefix: prefix = ''\'''\''' '#' This is an RPM run - check if /lib exists, if not use /lib64 if 'os.path.isdir('\''/usr/lib/dirsrv'\''):' libdir = ''\''/usr/lib/dirsrv/'\''' else: if 'os.path.isdir('\''/usr/lib64/dirsrv'\''):' libdir = ''\''/usr/lib64/dirsrv/'\''' else: 'log.fatal('\''runUpgrade:' failed to find slapd lib 'dir!'\'')' assert False else: '#' Standard prefix lib location if 'os.path.isdir('\''/usr/lib64/dirsrv'\''):' libdir = ''\''/usr/lib64/dirsrv/'\''' else: libdir = ''\''/lib/dirsrv/'\''' '#' Gather all the instances so we can adjust the permissions, otherwise servers = '[]' path = prefix + ''\''/etc/dirsrv'\''' '>' for files in 'os.listdir(path):' E OSError: '[Errno' '2]' No such file or directory: ''\''/usr/etc/dirsrv'\''' ../../../lib389/lib389/tools.py:932: OSError ---------------------------- Captured stdout setup ----------------------------- OK group dirsrv exists OK user dirsrv exists OK group dirsrv exists OK user dirsrv exists '('\''Update' succeeded: status ''\'',' ''\''0' Total update 'succeeded'\'')' ---------------------------- Captured stderr setup ----------------------------- INFO:lib389:List backend with suffix=dc=example,dc=com INFO:lib389:Found entry dn: cn=replrepl,cn=config cn: bind dn pseudo user cn: replrepl objectClass : top objectClass: person sn: bind dn pseudo user userPassword: '{SSHA512}mE9qv43agtT5lEz/TTqQv+2Ft4GvxreW8ceX9JrNVWsviG7H+bYTuN8rzhxruDhCzFLwowsrPTMeVRz+WFm+ZSjq7hUBtR4/' INFO:lib389:List backend with suffix=dc=example,dc=com INFO:lib389:Found entry dn: cn=replrepl,cn=config cn: bind dn pseudo user cn: replrepl objectClass: top objectClass: person sn: bind dn pseudo user userPassword: '{SSHA512}G2NatsHbvZNupGcfcysPi4oH3D3yIJFdbCd+Sy9GNysCZgtxcNzg6cwZjlluW2F5/5crqYCowBYCNT6ZehQB08Cmhmga+XT8' 'DEBUG:tickets.ticket47462_test:cn=meTo_$host:$port,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping' tree,cn=config created INFO:lib389:Starting total init 'cn=meTo_$host:$port,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping' tree,cn=config INFO:tickets.ticket47462_test:Replication is working. ----------------------------- Captured stderr call ----------------------------- INFO:tickets.ticket47462_test:Found agmt dn '(cn=meTo_$host:$port,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping' 'tree,cn=config)' INFO:tickets.ticket47462_test:Successfully modified replication agreement INFO:tickets.ticket47462_test:Replication test passed INFO:lib389:List backend with suffix=o=empty INFO:lib389:Creating a local backend INFO:lib389:List backend cn=empty,cn=ldbm database,cn=plugins,cn=config INFO:lib389:Found entry dn: cn=empty,cn=ldbm database,cn=plugins,cn=config cn: empty nsslapd-cachememsize: 10485760 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-master_1/db/empty nsslapd-dncachememsize: 10485760 nsslapd-readonly: off nsslapd-require-index: off nsslapd-suffix: o=empty objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance _______________________________ test_ticket47536 _______________________________ topology = '<tickets.ticket47536_test.TopologyReplication' object at '0x7fd415b65110>' def 'test_ticket47536(topology):' '"""' Set up 2way MMR: master_1 ----- startTLS '----->' master_2 master_1 '<--' TLS_clientAuth -- master_2 Check CA cert, Server-Cert and Key are retrieved as PEM from cert d b when the server is started. 
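The OSError above is a path-construction issue: runUpgrade() computes path = prefix + '/etc/dirsrv', and with prefix = '/usr' (an RPM install) that yields the non-existent /usr/etc/dirsrv, while the instance configuration actually lives under /etc/dirsrv. The same prefix join is what produced the missing /usr/var/log/dirsrv/slapd-standalone/access path in test_ticket1347760 earlier. A minimal sketch of the mapping, assuming only the RPM layout ('' or '/usr') and a genuine --prefix build need to be distinguished; sysconf_dir is an illustrative name, not an existing lib389 helper, and log paths would need an analogous localstatedir mapping:

    import os

    def sysconf_dir(prefix):
        # RPM installs report '' or '/usr' as their prefix but keep the slapd-*
        # configuration under /etc/dirsrv; only a real --prefix build nests it
        # below the prefix.
        if prefix in (None, '', '/usr'):
            return '/etc/dirsrv'
        return os.path.join(prefix, 'etc', 'dirsrv')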
First, the file names are not specified and the default names derived from the cert nicknames. Next, the file names are specified in the encryption config entries. Each time add 5 entries to master 1 and 2 and check they are replicated. '"""' 'log.info("Ticket' 47536 - Allow usage of OpenLDAP libraries that 'don'\''t' use NSS for 'crypto")' 'create_keys_certs(topology)' 'config_tls_agreements(topology)' 'add_entry(topology.master1,' ''\''master1'\'',' ''\''uid=m1user'\'',' 0, '5)' 'add_entry(topology.master2,' ''\''master2'\'',' ''\''uid=m2user'\'',' 0, '5)' 'time.sleep(1)' 'log.info('\''#####' Searching for entries on 'master1...'\'')' entries = 'topology.master1.search_s(DEFAULT_SUFFIX,' ldap.SCOPE_SUBTREE, ''\''(uid=*)'\'')' assert 10 == 'len(entries)' 'log.info('\''#####' Searching for entries on 'master2...'\'')' entries = 'topology.master2.search_s(DEFAULT_SUFFIX,' ldap.SCOPE_SUBTREE, ''\''(uid=*)'\'')' '>' assert 10 == 'len(entries)' E assert 10 == 5 E + where 5 = 'len([dn:' 'uid=m2user0,dc=example,dc=com\ncn:' master2 'user0\nobjectClass:' 'top\nobjectClass:' 'person\nobjectClass:' extensibleObjec...er2 'user4\nobjectClass:' 'top\nobjectClass:' 'person\nobjectClass:' 'extensibleObject\nsn:' 'user4\nuid:' 'uid=m2user4\nuid:' 'm2user4\n\n])' tickets/ticket47536_test.py:494: AssertionError ---------------------------- Captured stdout setup ----------------------------- OK group dirsrv exists OK user dirsrv exists OK group dirsrv exists OK user dirsrv exists '('\''Update' succeeded: status ''\'',' ''\''0' Total update 'succeeded'\'')' ---------------------------- Captured stderr setup ----------------------------- INFO:lib389:List backend with suffix=dc=example,dc=com INFO:lib389:Found entry dn: cn=replrepl,cn=config cn: bind dn pseudo user cn: replrepl objectClass: top objectClass: person sn: bind dn pseudo user userPassword: '{SSHA512}4i908B6GLJzSpF8N7NvSIqTyO9RflHjGkkYn7naXDSOZW95jnjYd/kNlZ2CI70plZ5mK+EyRWGVXCcajcT3wIlldZJVj+8Tk' INFO:lib389:List backend with suffix=dc=example ,dc=com INFO:lib389:Found entry dn: cn=replrepl,cn=config cn: bind dn pseudo user cn: replrepl objectClass: top objectClass: person sn: bind dn pseudo user userPassword: '{SSHA512}GBl4QZQv6FqBUZI2NhsPArjfcv6MzyB1l0rr/BndQK2EQHa8r74UOZhcRzYpM1OcWBqEOdeE8/X1ZWuFBhY2Jpe2RbvqHYo0' 'DEBUG:tickets.ticket47536_test:cn=meTo_localhost.localdomain:38942,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping' tree,cn=config created 'DEBUG:tickets.ticket47536_test:cn=meTo_localhost.localdomain:38941,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping' tree,cn=config created INFO:lib389:Starting total init 'cn=meTo_localhost.localdomain:38942,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping' tree,cn=config INFO:tickets.ticket47536_test:Replication is working. ----------------------------- Captured stdout call ----------------------------- Is this a CA certificate '[y/N]?' Enter the path length constraint, enter to skip '[<0' for unlimited 'path]:' '>' Is this a critical extension '[y/N]?' 
pk12util: PKCS12 EXPORT SUCCESSFUL pk12util: PKCS12 IMPORT SUCCESSFUL ----------------------------- Captured stderr call ----------------------------- INFO:tickets.ticket47536_test:Ticket 47536 - Allow usage of OpenLDAP libraries that 'don'\''t' use NSS for crypto INFO:tickets.ticket47536_test: '#########################' Creating SSL Keys and Certs '######################' INFO:tickets.ticket47536_test:##### shutdown master1 INFO:tickets.ticket47536_test:##### Creating a password file INFO:tickets.ticket47536_test:##### create the pin file INFO:tickets.ticket47536_test:##### Creating a noise file INFO:tickets.ticket47536_test:##### Create key3.db and cert8.db database '(master1):' '['\''certutil'\'',' ''\''-N'\'',' ''\''-d'\'',' ''\''/etc/dirsrv/slapd-master_1'\'',' ''\''-f'\'',' ''\''/etc/dirsrv/slapd-master_1/pwdfile.txt'\'']' INFO:tickets.ticket47536_test: OUT: INFO:tickets.ticket47536_test: ERR: INFO:tickets.ticket47536_test:##### Creating encryption key for CA '(master1):' '['\''certutil'\'',' ''\''-G'\'',' ''\''-d'\'',' ''\''/etc/dirsrv/slapd- master_1'\'',' ''\''-z'\'',' ''\''/etc/dirsrv/slapd-master_1/noise.txt'\'',' ''\''-f'\'',' ''\''/etc/dirsrv/slapd-master_1/pwdfile.txt'\'']' INFO:tickets.ticket47536_test: OUT: INFO:tickets.ticket47536_test: ERR: INFO:tickets.ticket47536_test:##### Creating self-signed CA certificate '(master1)' -- nickname CAcertificate Generating key. This may take a few moments... INFO:tickets.ticket47536_test:##### Creating Server certificate -- nickname Server-Cert1: '['\''certutil'\'',' ''\''-S'\'',' ''\''-n'\'',' ''\''Server-Cert1'\'',' ''\''-s'\'',' ''\''CN=localhost.localdomain,OU=389' Directory 'Server'\'',' ''\''-c'\'',' ''\''CAcertificate'\'',' ''\''-t'\'',' ''\'',,'\'',' ''\''-m'\'',' ''\''1001'\'',' ''\''-v'\'',' ''\''120'\'',' ''\''-d'\'',' ''\''/etc/dirsrv/slapd-master_1'\'',' ''\''-z'\'',' ''\''/etc/dirsrv/slapd-master_1/noise.txt'\'',' ''\''-f'\'',' ''\''/etc/dirsrv/slapd-master_1/pwdfile.txt'\'']' INFO:tickets.ticket47536_test: OUT: INFO:tickets.ticket47536_test: ERR: INFO:tickets.ticket47536_test:##### Creating Server certificate -- nickname Server-Cert2: '['\''certutil'\'',' ''\''-S'\'',' ''\''-n'\'',' ''\''Server-Cert2'\'',' ''\''-s'\'',' ''\''CN=localhost.localdomain,OU=390' Directory 'Server'\'',' ''\''-c'\'',' ''\''CAcertificate'\'',' ''\''-t'\'',' ''\'',,'\'',' ''\''-m'\'',' ''\''1002'\'',' ''\''-v'\'',' ''\''120'\'',' ''\''-d'\'',' ''\''/etc/dirsrv/slapd-master_1'\'',' ''\''-z'\'',' ''\''/etc/dirsrv/slapd-master_1/noise.txt'\'',' ''\''-f'\'',' ''\''/etc/dirsrv/slapd-master_1/pwdfile.txt'\'']' INFO:tickets.ticket47536_test: OUT: INFO:tickets.ticket47536_test: ERR: INFO:tickets.ticket47536_test:##### start master1 INFO:tickets.ticket47536_test:##### enable SSL in master1 with all ciphers INFO:tickets.ticket47536_test: '#########################' Enabling SSL LDAPSPORT 41636 '######################' INFO:tickets.ticket47536_test:##### Check the cert db: '['\''certutil'\'',' ''\''-L'\'',' ''\''-d'\'',' ''\''/etc/dirsrv/slapd-master_1'\'']' INFO:tickets.ticket47536_test: OUT: INFO:tickets.ticket47536_test: I NFO:tickets.ticket47536_test: Certificate Nickname Trust Attributes INFO:tickets.ticket47536_test: SSL,S/MIME,JAR/XPI INFO:tickets.ticket47536_test: INFO:tickets.ticket47536_test: CAcertificate CTu,u,u INFO:tickets.ticket47536_test: Server-Cert2 u,u,u INFO:tickets.ticket47536_test: Server-Cert1 u,u,u INFO:tickets.ticket47536_test: ERR: INFO:tickets.ticket47536_test:##### restart master1 INFO:tickets.ticket47536_test:##### Check PEM 
INFO:tickets.ticket47536_test: ######################### Check PEM files (CAcertificate, Server-Cert1, Server-Cert1-Key) not in /etc/dirsrv/slapd-master_1 ######################
INFO:tickets.ticket47536_test:/etc/dirsrv/slapd-master_1/CAcertificate.pem is correctly not generated.
INFO:tickets.ticket47536_test:/etc/dirsrv/slapd-master_1/Server-Cert1.pem is correctly not generated.
INFO:tickets.ticket47536_test:/etc/dirsrv/slapd-master_1/Server-Cert1-Key.pem is correctly not generated.
INFO:tickets.ticket47536_test:##### Set on to nsslapd-extract-pemfiles
INFO:tickets.ticket47536_test:##### restart master1
INFO:tickets.ticket47536_test:##### Check PEM files of master1 (after setting nsslapd-extract-pemfiles
INFO:tickets.ticket47536_test: ######################### Check PEM files (CAcertificate, Server-Cert1, Server-Cert1-Key) in /etc/dirsrv/slapd-master_1 ######################
INFO:tickets.ticket47536_test:/etc/dirsrv/slapd-master_1/CAcertificate.pem is successfully generated.
INFO:tickets.ticket47536_test:/etc/dirsrv/slapd-master_1/Server-Cert1.pem is successfully generated.
INFO:tickets.ticket47536_test:/etc/dirsrv/slapd-master_1/Server-Cert1-Key.pem is successfully generated.
INFO:tickets.ticket47536_test:##### Extract PK12 file for master2: pk12util -o /tmp/Server-Cert2.pk12 -n "Server-Cert2" -d /etc/dirsrv/slapd-master_1 -w /etc/dirsrv/slapd-master_1/pwdfile.txt -k /etc/dirsrv/slapd-master_1/pwdfile.txt
INFO:tickets.ticket47536_test:##### Check PK12 files
INFO:tickets.ticket47536_test:/tmp/Server-Cert2.pk12 is successfully extracted.
INFO:tickets.ticket47536_test:##### stop master2
INFO:tickets.ticket47536_test:##### Initialize Cert DB for master2
INFO:tickets.ticket47536_test:##### Create key3.db and cert8.db database (master2): ['certutil', '-N', '-d', '/etc/dirsrv/slapd-master_2', '-f', '/etc/dirsrv/slapd-master_1/pwdfile.txt']
INFO:tickets.ticket47536_test: OUT:
INFO:tickets.ticket47536_test: ERR:
INFO:tickets.ticket47536_test:##### Import certs to master2
INFO:tickets.ticket47536_test:Importing CAcertificate
INFO:tickets.ticket47536_test:##### Importing Server-Cert2 to master2: pk12util -i /tmp/Server-Cert2.pk12 -n "Server-Cert2" -d /etc/dirsrv/slapd-master_2 -w /etc/dirsrv/slapd-master_1/pwdfile.txt -k /etc/dirsrv/slapd-master_1/pwdfile.txt
INFO:tickets.ticket47536_test:copy /etc/dirsrv/slapd-master_1/pin.txt to /etc/dirsrv/slapd-master_2/pin.txt
INFO:tickets.ticket47536_test:##### start master2
INFO:tickets.ticket47536_test:##### enable SSL in master2 with all ciphers
INFO:tickets.ticket47536_test: ######################### Enabling SSL LDAPSPORT 42636 ######################
INFO:tickets.ticket47536_test:##### restart master2
INFO:tickets.ticket47536_test:##### Check PEM files of master2 (before setting nsslapd-extract-pemfiles
INFO:tickets.ticket47536_test: ######################### Check PEM files (CAcertificate, Server-Cert2, Server-Cert2-Key) not in /etc/dirsrv/slapd-master_2 ######################
INFO:tickets.ticket47536_test:/etc/dirsrv/slapd-master_2/CAcertificate.pem is correctly not generated.
INFO:tickets.ticket47536_test:/etc/dirsrv/slapd-master_2/Server-Cert2.pem is correctly not generated.
INFO:tickets.ticket47536_test:/etc/dirsrv/slapd-master_2/Server-Cert2-Key.pem is correctly not generated.
INFO:tickets.ticket47536_test:##### Set on to nsslapd-extract-pemfiles
INFO:tickets.ticket47536_test:##### restart master2
INFO:tickets.ticket47536_test:##### Check PEM files of master2 (after setting nsslapd-extract-pemfiles
INFO:tickets.ticket47536_test: ######################### Check PEM files (CAcertificate, Server-Cert2, Server-Cert2-Key) in /etc/dirsrv/slapd-master_2 ######################
INFO:tickets.ticket47536_test:/etc/dirsrv/slapd-master_2/CAcertificate.pem is successfully generated.
INFO:tickets.ticket47536_test:/etc/dirsrv/slapd-master_2/Server-Cert2.pem is successfully generated.
INFO:tickets.ticket47536_test:/etc/dirsrv/slapd-master_2/Server-Cert2-Key.pem is successfully generated.
INFO:tickets.ticket47536_test:##### restart master1
INFO:tickets.ticket47536_test: ######################### Creating SSL Keys and Certs Done ######################
INFO:tickets.ticket47536_test:######################### Configure SSL/TLS agreements ######################
INFO:tickets.ticket47536_test:######################## master1 -- startTLS -> master2 #####################
INFO:tickets.ticket47536_test:##################### master1 <- tls_clientAuth -- master2 ##################
INFO:tickets.ticket47536_test:##### Update the agreement of master1
INFO:tickets.ticket47536_test:##### Add the cert to the repl manager on master1
INFO:tickets.ticket47536_test:##### master2 Server Cert in base64 format: MIICyjCCAbKgAwIBAgICA+owDQYJKoZIhvcNAQELBQAwETEPMA0GA1UEAxMGQ0FjZXJ0MB4XDTE2MTAyOTIyNDAzM1oXDTI2MTAyOTIyNDAzM1owPzEdMBsGA1UECxMUMzkwIERpcmVjdG9yeSBTZXJ2ZXIxHjAcBgNVBAMTFWxvY2FsaG9zdC5sb2NhbGRvbWFpbjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMrZm+30t6wqcSdSUkFUQp7QOzPA4ppptNN6+5VbpOhV6F5b+Sd6crL3LmrZBvjzR5CN0dFzXHB9tUeGTVHcMzgIVEmFwS5O/K9LBlrO2DpZPI0qO6RAc3PxLn++PMRIUBiRAYis796mvW0bmVuYvvX3gtXUdhuEZbszEdIpOWQXuwcNX9k68fKY37jER2LIdcVQNQgm0R2S0J1Y9YTScKeF0A+Wn3CI0Ce5Gtp9xnJC35+KtqUI8KoYaSYHjUw+1hYglm4Nf36HdebmYHgTe40KNsOsGRQDikwFW7ijEsXFZYy0P0PJUyb8zWAyCBrmIttoFT8kdfwKwBmF9Z7/kM0CAwEAATANBgkqhkiG9w0BAQsFAAOCAQEA05bqFI/yOGnwYATWPNDTnC176ZMM3OcywI1DNR99TRirQjcRTWtE4VPaJsbVoydz3h1yOXFuM15vXGTXDPSGPZFypmZUXmyLpRG9QWr5UtVGGzl/T8EJMtEs0XU62JNC4XphFXG/yPM3eUfK9h4MeG1dYtYRncrVFLZ1KGROz4QnLdD4UZ6L1yCULF9Cm7L6rNSW5LlxVbY5vtgrQCfVadLarY8N+LhnRUaG7mxAUesBGsXLm/ojghWi/Ch0b4nC92fkwa5jVHzeYjPy8wIBs/bfkBbO6IpaFSWZ5m3oWlLkmxSuPDyp8MAjRbDGS9T19Ac4dyzTkYl4nWJfx9ESWA==
INFO:tickets.ticket47536_test:##### Replication manager on master1: cn=replrepl,cn=config
INFO:tickets.ticket47536_test:   ObjectClass:
INFO:tickets.ticket47536_test:        : top
INFO:tickets.ticket47536_test:        : person
INFO:tickets.ticket47536_test:##### Modify the certmap.conf on master1
INFO:tickets.ticket47536_test:##### Update the agreement of master2
INFO:tickets.ticket47536_test: ######################### Configure SSL/TLS agreements Done ######################
INFO:tickets.ticket47536_test: ######################### Adding 5 entries to master1 ######################
INFO:tickets.ticket47536_test: ######################### Adding 5 entries to master2 ######################
INFO:tickets.ticket47536_test:##### Searching for entries on master1...
INFO:tickets.ticket47536_test:##### Searching for entries on master2...
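The "assert 10 == 5" above shows that the five entries added on master1 never reached master2 before the search ran, so either the startTLS agreement is broken or the single time.sleep(1) in the test is simply too short for a TLS-protected total update. A retry loop before the final assertion makes this check far less timing-sensitive; the helper below is a hypothetical sketch (not part of ticket47536_test.py) that reuses the same search_s() call and '(uid=*)' filter seen in the test source:

    import time
    import ldap

    def wait_for_entries(master, suffix, expected, timeout=30):
        # Poll until the expected number of uid=* entries is visible, or give up.
        # Hypothetical helper -- sketch only, not the test's actual method.
        deadline = time.time() + timeout
        while time.time() < deadline:
            entries = master.search_s(suffix, ldap.SCOPE_SUBTREE, '(uid=*)')
            if len(entries) == expected:
                return True
            time.sleep(1)
        return False

    # e.g. instead of a bare sleep(1) followed by a hard assert:
    #     assert wait_for_entries(topology.master2, DEFAULT_SUFFIX, 10)

If the count never converges within the timeout, the failure points at the agreement itself rather than at timing.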
____________________________ test_ticket47619_init _____________________________
topology = Master[localhost.localdomain:38941] -> Consumer[localhost.localdomain:38961

    def test_ticket47619_init(topology):
        """
        Initialize the test environment
        """
        topology.master.plugins.enable(name=PLUGIN_RETRO_CHANGELOG)
        #topology.master.plugins.enable(name=PLUGIN_MEMBER_OF)
        #topology.master.plugins.enable(name=PLUGIN_REFER_INTEGRITY)
        topology.master.stop(timeout=10)
        topology.master.start(timeout=10)

        topology.master.log.info("test_ticket47619_init topology %r" % (topology))
        # the test case will check if a warning message is logged in the
        # error log of the supplier
>       topology.master.errorlog_file = open(topology.master.errlog, "r")
E       IOError: [Errno 2] No such file or directory: '/var/log/dirsrv/slapd-master_1/error'

tickets/ticket47619_test.py:141: IOError
---------------------------- Captured stdout setup -----------------------------
OK group dirsrv exists
OK user dirsrv exists
OK group dirsrv exists
OK user dirsrv exists
('Update succeeded: status ', '0 Total update succeeded')
---------------------------- Captured stderr setup -----------------------------
INFO:lib389:List backend with suffix=dc=example,dc=com
INFO:lib389:Found entry dn: cn=replrepl,cn=config
cn: bind dn pseudo user
cn: replrepl
objectClass: top
objectClass: person
sn: bind dn pseudo user
userPassword: {SSHA512}TQExc/+oPZQzlnPgZjnRRqJNK14VgdNTQC8MZO5UHpjE4vXZij6XX6GxrWJLsii6eIBbDZWxWvQnWDAN33dCkBH1WqG5EI2q
INFO:lib389:List backend with suffix=dc=example,dc=com
INFO:lib389:Found entry dn: cn=replrepl,cn=config
cn: bind dn pseudo user
cn: replrepl
objectClass: top
objectClass: person
sn: bind dn pseudo user
userPassword: {SSHA512}NjuZ9tK8PgQ4S7q0rWsY3jYIC9mH5M9a0jKOKJjO3/GY75ZAK7EoLrrEWpg9Fk8+7PpVczdNKYHzbdDguZ4Sekq+67bVVU/r
DEBUG:tickets.ticket47619_test:cn=meTo_$host:$port,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config created
INFO:lib389:Starting total init cn=meTo_$host:$port,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO:tickets.ticket47619_test:Replication is working.
----------------------------- Captured stderr call -----------------------------
INFO:lib389:test_ticket47619_init topology Master[localhost.localdomain:38941] -> Consumer[localhost.localdomain:38961
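This failure is not about the retro changelog itself: the test dies opening topology.master.errlog, which points at /var/log/dirsrv/slapd-master_1/error, while the instance on this build writes its error log as "errors". A tolerant lookup avoids the hard-coded name; the helper below is a hypothetical sketch (open_error_log and its candidate list are not part of lib389 or the test, and assume errlog is the attribute used in the test source above):

    import os

    def open_error_log(instance):
        # Try the path lib389 reports first, then the two common file names.
        # Hypothetical helper -- sketch only.
        log_dir = os.path.dirname(instance.errlog)
        for path in [instance.errlog,
                     os.path.join(log_dir, 'errors'),
                     os.path.join(log_dir, 'error')]:
            if os.path.isfile(path):
                return open(path, 'r')
        raise IOError('no error log found under %s' % log_dir)

The same IOError shows up again in ticket47823_init and ticket47871_init below.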
____________________________ test_ticket47669_init _____________________________
topology = <tickets.ticket47669_test.TopologyStandalone object at 0x7fd41528f7d0>

    def test_ticket47669_init(topology):
        """
        Add cn=changelog5,cn=config
        Enable cn=Retro Changelog Plugin,cn=plugins,cn=config
        """
        log.info('Testing Ticket 47669 - Test duration syntax in the changelogs')

        # bind as directory manager
        topology.standalone.log.info("Bind as %s" % DN_DM)
        topology.standalone.simple_bind_s(DN_DM, PASSWORD)

        try:
            changelogdir = "%s/changelog" % topology.standalone.dbdir
            topology.standalone.add_s(Entry((CHANGELOG,
                                             {'objectclass': 'top extensibleObject'.split(),
                                              'nsslapd-changelogdir': changelogdir})))
        except ldap.LDAPError as e:
            log.error('Failed to add ' + CHANGELOG + ': error ' + e.message['desc'])
            assert False

        try:
            topology.standalone.modify_s(RETROCHANGELOG, [(ldap.MOD_REPLACE, 'nsslapd-pluginEnabled', 'on')])
        except ldap.LDAPError as e:
            log.error('Failed to enable ' + RETROCHANGELOG + ': error ' + e.message['desc'])
            assert False

        # restart the server
>       topology.standalone.restart(timeout=10)

tickets/ticket47669_test.py:103:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../../lib389/lib389/__init__.py:1215: in restart
    self.start(timeout)
../../../lib389/lib389/__init__.py:1096: in start
    "dirsrv@%s" % self.serverid])
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
popenargs = (['/usr/bin/systemctl', 'start', 'dirsrv@standalone'],), kwargs = {}
retcode = 1, cmd = ['/usr/bin/systemctl', 'start', 'dirsrv@standalone']

    def check_call(*popenargs, **kwargs):
        """Run command with arguments.  Wait for command to complete.  If
        the exit code was zero then return, otherwise raise
        CalledProcessError.  The CalledProcessError object will have the
        return code in the returncode attribute.

        The arguments are the same as for the Popen constructor.  Example:

        check_call(["ls", "-l"])
        """
        retcode = call(*popenargs, **kwargs)
        if retcode:
            cmd = kwargs.get("args")
            if cmd is None:
                cmd = popenargs[0]
>           raise CalledProcessError(retcode, cmd)
E           CalledProcessError: Command '['/usr/bin/systemctl', 'start', 'dirsrv@standalone']' returned non-zero exit status 1

/usr/lib64/python2.7/subprocess.py:541: CalledProcessError
---------------------------- Captured stdout setup -----------------------------
OK group dirsrv exists
OK user dirsrv exists
----------------------------- Captured stderr call -----------------------------
INFO:tickets.ticket47669_test:Testing Ticket 47669 - Test duration syntax in the changelogs
INFO:lib389:Bind as cn=Directory Manager
Job for dirsrv@standalone.service failed because the control process exited with error code. See "systemctl status dirsrv@standalone.service" and "journalctl -xe" for details.
______________________ test_ticket47669_changelog_maxage _______________________
topology = <tickets.ticket47669_test.TopologyStandalone object at 0x7fd41528f7d0>

    def test_ticket47669_changelog_maxage(topology):
        """
        Test nsslapd-changelogmaxage in cn=changelog5,cn=config
        """
        log.info('1. Test nsslapd-changelogmaxage in cn=changelog5,cn=config')

        # bind as directory manager
        topology.standalone.log.info("Bind as %s" % DN_DM)
>       topology.standalone.simple_bind_s(DN_DM, PASSWORD)

tickets/ticket47669_test.py:159:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:223: in simple_bind_s
    resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout)
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:514: in result3
    resp_ctrl_classes=resp_ctrl_classes
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:521: in result4
    ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop)
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <lib389.DirSrv instance at 0x7fd414fe4200>
func = <built-in method result4 of LDAP object at 0x7fd415cf1a08>
args = (13, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None
e = SERVER_DOWN({'desc': "Can't contact LDAP server"},)

    def _ldap_call(self,func,*args,**kwargs):
        """
        Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs
        """
        self._ldap_object_lock.acquire()
        if __debug__:
          if self._trace_level>=1:
            self._trace_file.write('*** %s %s - %s\n%s\n' % (
              repr(self), self._uri,
              '.'.join((self.__class__.__name__,func.__name__)),
              pprint.pformat((args,kwargs))
            ))
            if self._trace_level>=9:
              traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
        diagnostic_message_success = None
        try:
          try:
>           result = func(*args,**kwargs)
E           SERVER_DOWN: {'desc': "Can't contact LDAP server"}

/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:106: SERVER_DOWN
----------------------------- Captured stderr call -----------------------------
INFO:tickets.ticket47669_test:1. Test nsslapd-changelogmaxage in cn=changelog5,cn=config
INFO:lib389:Bind as cn=Directory Manager
___________________ test_ticket47669_changelog_triminterval ____________________
topology = <tickets.ticket47669_test.TopologyStandalone object at 0x7fd41528f7d0>

    def test_ticket47669_changelog_triminterval(topology):
        """
        Test nsslapd-changelogtrim-interval in cn=changelog5,cn=config
        """
        log.info('2. Test nsslapd-changelogtrim-interval in cn=changelog5,cn=config')

        # bind as directory manager
        topology.standalone.log.info("Bind as %s" % DN_DM)
>       topology.standalone.simple_bind_s(DN_DM, PASSWORD)

tickets/ticket47669_test.py:179:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:222: in simple_bind_s
    msgid = self.simple_bind(who,cred,serverctrls,clientctrls)
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:216: in simple_bind
    return self._ldap_call(self._l.simple_bind,who,cred,RequestControlTuples(serverctrls),RequestControlTuples(clientctrls))
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <lib389.DirSrv instance at 0x7fd414fe4200>
func = <built-in method simple_bind of LDAP object at 0x7fd415cf1a08>
args = ('cn=Directory Manager', 'password', None, None), kwargs = {}
diagnostic_message_success = None
e = SERVER_DOWN({'desc': "Can't contact LDAP server"},)

    def _ldap_call(self,func,*args,**kwargs):
        """
        Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs
        """
        self._ldap_object_lock.acquire()
        if __debug__:
          if self._trace_level>=1:
            self._trace_file.write('*** %s %s - %s\n%s\n' % (
              repr(self), self._uri,
              '.'.join((self.__class__.__name__,func.__name__)),
              pprint.pformat((args,kwargs))
            ))
            if self._trace_level>=9:
              traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
        diagnostic_message_success = None
        try:
          try:
>           result = func(*args,**kwargs)
E           SERVER_DOWN: {'desc': "Can't contact LDAP server"}

/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:106: SERVER_DOWN
----------------------------- Captured stderr call -----------------------------
INFO:tickets.ticket47669_test:2. Test nsslapd-changelogtrim-interval in cn=changelog5,cn=config
INFO:lib389:Bind as cn=Directory Manager
_________________ test_ticket47669_changelog_compactdbinterval _________________
topology = <tickets.ticket47669_test.TopologyStandalone object at 0x7fd41528f7d0>

    def test_ticket47669_changelog_compactdbinterval(topology):
        """
        Test nsslapd-changelogcompactdb-interval in cn=changelog5,cn=config
        """
        log.info('3. Test nsslapd-changelogcompactdb-interval in cn=changelog5,cn=config')

        # bind as directory manager
        topology.standalone.log.info("Bind as %s" % DN_DM)
>       topology.standalone.simple_bind_s(DN_DM, PASSWORD)

tickets/ticket47669_test.py:199:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:222: in simple_bind_s
    msgid = self.simple_bind(who,cred,serverctrls,clientctrls)
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:216: in simple_bind
    return self._ldap_call(self._l.simple_bind,who,cred,RequestControlTuples(serverctrls),RequestControlTuples(clientctrls))
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <lib389.DirSrv instance at 0x7fd414fe4200>
func = <built-in method simple_bind of LDAP object at 0x7fd415cf1a08>
args = ('cn=Directory Manager', 'password', None, None), kwargs = {}
diagnostic_message_success = None
e = SERVER_DOWN({'desc': "Can't contact LDAP server"},)

    def _ldap_call(self,func,*args,**kwargs):
        """
        Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs
        """
        self._ldap_object_lock.acquire()
        if __debug__:
          if self._trace_level>=1:
            self._trace_file.write('*** %s %s - %s\n%s\n' % (
              repr(self), self._uri,
              '.'.join((self.__class__.__name__,func.__name__)),
              pprint.pformat((args,kwargs))
            ))
            if self._trace_level>=9:
              traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
        diagnostic_message_success = None
        try:
          try:
>           result = func(*args,**kwargs)
E           SERVER_DOWN: {'desc': "Can't contact LDAP server"}

/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:106: SERVER_DOWN
----------------------------- Captured stderr call -----------------------------
INFO:tickets.ticket47669_test:3. Test nsslapd-changelogcompactdb-interval in cn=changelog5,cn=config
INFO:lib389:Bind as cn=Directory Manager
____________________ test_ticket47669_retrochangelog_maxage ____________________
topology = <tickets.ticket47669_test.TopologyStandalone object at 0x7fd41528f7d0>

    def test_ticket47669_retrochangelog_maxage(topology):
        """
        Test nsslapd-changelogmaxage in cn=Retro Changelog Plugin,cn=plugins,cn=config
        """
        log.info('4. Test nsslapd-changelogmaxage in cn=Retro Changelog Plugin,cn=plugins,cn=config')

        # bind as directory manager
        topology.standalone.log.info("Bind as %s" % DN_DM)
>       topology.standalone.simple_bind_s(DN_DM, PASSWORD)

tickets/ticket47669_test.py:219:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:222: in simple_bind_s
    msgid = self.simple_bind(who,cred,serverctrls,clientctrls)
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:216: in simple_bind
    return self._ldap_call(self._l.simple_bind,who,cred,RequestControlTuples(serverctrls),RequestControlTuples(clientctrls))
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <lib389.DirSrv instance at 0x7fd414fe4200>
func = <built-in method simple_bind of LDAP object at 0x7fd415cf1a08>
args = ('cn=Directory Manager', 'password', None, None), kwargs = {}
diagnostic_message_success = None
e = SERVER_DOWN({'desc': "Can't contact LDAP server"},)

    def _ldap_call(self,func,*args,**kwargs):
        """
        Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs
        """
        self._ldap_object_lock.acquire()
        if __debug__:
          if self._trace_level>=1:
            self._trace_file.write('*** %s %s - %s\n%s\n' % (
              repr(self), self._uri,
              '.'.join((self.__class__.__name__,func.__name__)),
              pprint.pformat((args,kwargs))
            ))
            if self._trace_level>=9:
              traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
        diagnostic_message_success = None
        try:
          try:
>           result = func(*args,**kwargs)
E           SERVER_DOWN: {'desc': "Can't contact LDAP server"}

/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:106: SERVER_DOWN
----------------------------- Captured stderr call -----------------------------
INFO:tickets.ticket47669_test:4. Test nsslapd-changelogmaxage in cn=Retro Changelog Plugin,cn=plugins,cn=config
INFO:lib389:Bind as cn=Directory Manager
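All four ticket47669 tests report the same SERVER_DOWN because the instance never came back up in test_ticket47669_init (the systemctl start failed), so none of them reaches the configuration it is meant to exercise. For orientation, what they would exercise is duration syntax ('5s', '30m', '1d', ...) on the changelog attributes named in the test names above; the sketch below is hypothetical (set_changelog_durations is not part of the test), assuming the usual cn=changelog5 and Retro Changelog Plugin entries:

    import ldap

    def set_changelog_durations(inst, maxage='30d', trim_interval='5m', compact='7d'):
        # Duration-syntax values on the changelog entries -- sketch only.
        inst.modify_s('cn=changelog5,cn=config',
                      [(ldap.MOD_REPLACE, 'nsslapd-changelogmaxage', maxage),
                       (ldap.MOD_REPLACE, 'nsslapd-changelogtrim-interval', trim_interval),
                       (ldap.MOD_REPLACE, 'nsslapd-changelogcompactdb-interval', compact)])
        inst.modify_s('cn=Retro Changelog Plugin,cn=plugins,cn=config',
                      [(ldap.MOD_REPLACE, 'nsslapd-changelogmaxage', maxage)])

The real problem to chase is in the journal for dirsrv@standalone.service, as the captured stderr above suggests.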
____________________________ test_ticket47823_init _____________________________
topology = <tickets.ticket47823_test.TopologyStandalone object at 0x7fd4151fc490>

    def test_ticket47823_init(topology):
        """
        """
        # Enabled the plugins
        topology.standalone.plugins.enable(name=PLUGIN_ATTR_UNIQUENESS)
        topology.standalone.restart(timeout=120)

        topology.standalone.add_s(Entry((PROVISIONING_DN, {'objectclass': "top nscontainer".split(),
                                                           'cn': PROVISIONING_CN})))
        topology.standalone.add_s(Entry((ACTIVE_DN, {'objectclass': "top nscontainer".split(),
                                                     'cn': ACTIVE_CN})))
        topology.standalone.add_s(Entry((STAGE_DN, {'objectclass': "top nscontainer".split(),
                                                    'cn': STAGE_CN})))
        topology.standalone.add_s(Entry((DELETE_DN, {'objectclass': "top nscontainer".split(),
                                                     'cn': DELETE_CN})))
>       topology.standalone.errorlog_file = open(topology.standalone.errlog, "r")
E       IOError: [Errno 2] No such file or directory: '/var/log/dirsrv/slapd-standalone/error'

tickets/ticket47823_test.py:477: IOError
---------------------------- Captured stdout setup -----------------------------
OK group dirsrv exists
OK user dirsrv exists
______________________ test_ticket47823_invalid_config_1 _______________________
topology = <tickets.ticket47823_test.TopologyStandalone object at 0x7fd4151fc490>

    def test_ticket47823_invalid_config_1(topology):
        '''
        Check that an invalid config is detected. No uniqueness enforced
        Using old config: arg0 is missing
        '''
        _header(topology, "Invalid config (old): arg0 is missing")

        _config_file(topology, action='save')

        # create an invalid config without arg0
        config = _build_config(topology, attr_name='cn', subtree_1=ACTIVE_DN, subtree_2=None,
                               type_config='old', across_subtrees=False)

        del config.data['nsslapd-pluginarg0']
        # replace 'cn' uniqueness entry
        try:
            topology.standalone.delete_s(config.dn)
        except ldap.NO_SUCH_OBJECT:
            pass
        topology.standalone.add_s(config)

        topology.standalone.getEntry(config.dn, ldap.SCOPE_BASE, "(objectclass=nsSlapdPlugin)", ALL_CONFIG_ATTRS)

        # Check the server did not restart
        topology.standalone.modify_s(DN_CONFIG, [(ldap.MOD_REPLACE, 'nsslapd-errorlog-level', '65536')])
        try:
>           topology.standalone.restart(timeout=5)

tickets/ticket47823_test.py:636:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../../lib389/lib389/__init__.py:1215: in restart
    self.start(timeout)
../../../lib389/lib389/__init__.py:1096: in start
    "dirsrv@%s" % self.serverid])
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
popenargs = (['/usr/bin/systemctl', 'start', 'dirsrv@standalone'],), kwargs = {}
retcode = 1, cmd = ['/usr/bin/systemctl', 'start', 'dirsrv@standalone']
    def check_call(*popenargs, **kwargs):
        """Run command with arguments.  Wait for command to complete.  If
        the exit code was zero then return, otherwise raise
        CalledProcessError.  The CalledProcessError object will have the
        return code in the returncode attribute.

        The arguments are the same as for the Popen constructor.  Example:

        check_call(["ls", "-l"])
        """
        retcode = call(*popenargs, **kwargs)
        if retcode:
            cmd = kwargs.get("args")
            if cmd is None:
                cmd = popenargs[0]
>           raise CalledProcessError(retcode, cmd)
E           CalledProcessError: Command '['/usr/bin/systemctl', 'start', 'dirsrv@standalone']' returned non-zero exit status 1

/usr/lib64/python2.7/subprocess.py:541: CalledProcessError
----------------------------- Captured stderr call -----------------------------
INFO:lib389: ###############################################
INFO:lib389:#######
INFO:lib389:####### Invalid config (old): arg0 is missing
INFO:lib389:#######
INFO:lib389:###############################################
Job for dirsrv@standalone.service failed because the control process exited with error code. See "systemctl status dirsrv@standalone.service" and "journalctl -xe" for details.
______________________ test_ticket47823_invalid_config_2 _______________________
topology = <tickets.ticket47823_test.TopologyStandalone object at 0x7fd4151fc490>

    def test_ticket47823_invalid_config_2(topology):
        '''
        Check that an invalid config is detected. No uniqueness enforced
        Using old config: arg1 is missing
        '''
        _header(topology, "Invalid config (old): arg1 is missing")

        _config_file(topology, action='save')

        # create an invalid config without arg0
>       config = _build_config(topology, attr_name='cn', subtree_1=ACTIVE_DN, subtree_2=None,
                               type_config='old', across_subtrees=False)

tickets/ticket47823_test.py:672:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
tickets/ticket47823_test.py:124: in _build_config
    config = _uniqueness_config_entry(topology, attr_name)
tickets/ticket47823_test.py:112: in _uniqueness_config_entry
    'nsslapd-pluginDescription'])
../../../lib389/lib389/__init__.py:1574: in getEntry
    restype, obj = self.result(res)
../../../lib389/lib389/__init__.py:127: in inner
    objtype, data = f(*args, **kwargs)
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:503: in result
    resp_type, resp_data, resp_msgid = self.result2(msgid,all,timeout)
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:507: in result2
    resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all,timeout)
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:514: in result3
    resp_ctrl_classes=resp_ctrl_classes
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:521: in result4
    ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop)
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <lib389.DirSrv instance at 0x7fd415907128>
func = <built-in method result4 of LDAP object at 0x7fd415de00d0>
args = (15, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None
e = SERVER_DOWN({'desc': "Can't contact LDAP server"},)

    def _ldap_call(self,func,*args,**kwargs):
        """
        Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs
        """
        self._ldap_object_lock.acquire()
        if __debug__:
          if self._trace_level>=1:
            self._trace_file.write('*** %s %s - %s\n%s\n' % (
              repr(self), self._uri,
              '.'.join((self.__class__.__name__,func.__name__)),
              pprint.pformat((args,kwargs))
            ))
            if self._trace_level>=9:
              traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
        diagnostic_message_success = None
        try:
          try:
>           result = func(*args,**kwargs)
E           SERVER_DOWN: {'desc': "Can't contact LDAP server"}

/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:106: SERVER_DOWN
----------------------------- Captured stderr call -----------------------------
INFO:lib389: ###############################################
INFO:lib389:#######
INFO:lib389:####### Invalid config (old): arg1 is missing
INFO:lib389:#######
INFO:lib389:###############################################
______________________ test_ticket47823_invalid_config_3 _______________________
topology = <tickets.ticket47823_test.TopologyStandalone object at 0x7fd4151fc490>

    def test_ticket47823_invalid_config_3(topology):
        '''
        Check that an invalid config is detected. No uniqueness enforced
        Using old config: arg0 is missing
        '''
        _header(topology, "Invalid config (old): arg0 is missing but new config attrname exists")

        _config_file(topology, action='save')

        # create an invalid config without arg0
>       config = _build_config(topology, attr_name='cn', subtree_1=ACTIVE_DN, subtree_2=None,
                               type_config='old', across_subtrees=False)

tickets/ticket47823_test.py:723:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
tickets/ticket47823_test.py:124: in _build_config
    config = _uniqueness_config_entry(topology, attr_name)
tickets/ticket47823_test.py:112: in _uniqueness_config_entry
    'nsslapd-pluginDescription'])
../../../lib389/lib389/__init__.py:1573: in getEntry
    res = self.search(*args, **kwargs)
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:594: in search
    return self.search_ext(base,scope,filterstr,attrlist,attrsonly,None,None)
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:586: in search_ext
    timeout,sizelimit,
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <lib389.DirSrv instance at 0x7fd415907128>
func = <built-in method search_ext of LDAP object at 0x7fd415de00d0>
args = ('cn=attribute uniqueness,cn=plugins,cn=config', 0, '(objectclass=nsSlapdPlugin)', ['objectClass', 'cn', 'nsslapd-pluginPath', 'nsslapd-pluginInitfunc', 'nsslapd-pluginType', 'nsslapd-pluginEnabled', ...], 0, None, ...)
kwargs = {}, diagnostic_message_success = None
e = SERVER_DOWN({'desc': "Can't contact LDAP server"},)

    def _ldap_call(self,func,*args,**kwargs):
        """
        Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs
        """
        self._ldap_object_lock.acquire()
        if __debug__:
          if self._trace_level>=1:
            self._trace_file.write('*** %s %s - %s\n%s\n' % (
              repr(self), self._uri,
              '.'.join((self.__class__.__name__,func.__name__)),
              pprint.pformat((args,kwargs))
            ))
            if self._trace_level>=9:
              traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
        diagnostic_message_success = None
        try:
          try:
>           result = func(*args,**kwargs)
E           SERVER_DOWN: {'desc': "Can't contact LDAP server"}

/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:106: SERVER_DOWN
----------------------------- Captured stderr call -----------------------------
INFO:lib389: ###############################################
INFO:lib389:#######
INFO:lib389:####### Invalid config (old): arg0 is missing but new config attrname exists
INFO:lib389:#######
INFO:lib389:###############################################
______________________ test_ticket47823_invalid_config_4 _______________________
topology = <tickets.ticket47823_test.TopologyStandalone object at 0x7fd4151fc490>

    def test_ticket47823_invalid_config_4(topology):
        '''
        Check that an invalid config is detected. No uniqueness enforced
        Using old config: arg1 is missing
        '''
        _header(topology, "Invalid config (old): arg1 is missing but new config exist")

        _config_file(topology, action='save')

        # create an invalid config without arg0
>       config = _build_config(topology, attr_name='cn', subtree_1=ACTIVE_DN, subtree_2=None,
                               type_config='old', across_subtrees=False)

tickets/ticket47823_test.py:776:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
tickets/ticket47823_test.py:124: in _build_config
    config = _uniqueness_config_entry(topology, attr_name)
tickets/ticket47823_test.py:112: in _uniqueness_config_entry
    'nsslapd-pluginDescription'])
../../../lib389/lib389/__init__.py:1573: in getEntry
    res = self.search(*args, **kwargs)
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:594: in search
    return self.search_ext(base,scope,filterstr,attrlist,attrsonly,None,None)
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:586: in search_ext
    timeout,sizelimit,
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <lib389.DirSrv instance at 0x7fd415907128>
func = <built-in method search_ext of LDAP object at 0x7fd415de00d0>
args = ('cn=attribute uniqueness,cn=plugins,cn=config', 0, '(objectclass=nsSlapdPlugin)', ['objectClass', 'cn', 'nsslapd-pluginPath', 'nsslapd-pluginInitfunc', 'nsslapd-pluginType', 'nsslapd-pluginEnabled', ...], 0, None, ...)
kwargs = {}, diagnostic_message_success = None
e = SERVER_DOWN({'desc': "Can't contact LDAP server"},)

    def _ldap_call(self,func,*args,**kwargs):
        """
        Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs
        """
        self._ldap_object_lock.acquire()
        if __debug__:
          if self._trace_level>=1:
            self._trace_file.write('*** %s %s - %s\n%s\n' % (
              repr(self), self._uri,
              '.'.join((self.__class__.__name__,func.__name__)),
              pprint.pformat((args,kwargs))
            ))
            if self._trace_level>=9:
              traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
        diagnostic_message_success = None
        try:
          try:
>           result = func(*args,**kwargs)
E           SERVER_DOWN: {'desc': "Can't contact LDAP server"}

/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:106: SERVER_DOWN
----------------------------- Captured stderr call -----------------------------
INFO:lib389: ###############################################
INFO:lib389:#######
INFO:lib389:####### Invalid config (old): arg1 is missing but new config exist
INFO:lib389:#######
INFO:lib389:###############################################
______________________ test_ticket47823_invalid_config_5 _______________________
topology = <tickets.ticket47823_test.TopologyStandalone object at 0x7fd4151fc490>

    def test_ticket47823_invalid_config_5(topology):
        '''
        Check that an invalid config is detected. No uniqueness enforced
        Using new config: uniqueness-attribute-name is missing
        '''
        _header(topology, "Invalid config (new): uniqueness-attribute-name is missing")

        _config_file(topology, action='save')

        # create an invalid config without arg0
>       config = _build_config(topology, attr_name='cn', subtree_1=ACTIVE_DN, subtree_2=None,
                               type_config='new', across_subtrees=False)

tickets/ticket47823_test.py:828:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
tickets/ticket47823_test.py:131: in _build_config
    config = _uniqueness_config_entry(topology, attr_name)
tickets/ticket47823_test.py:112: in _uniqueness_config_entry
    'nsslapd-pluginDescription'])
../../../lib389/lib389/__init__.py:1573: in getEntry
    res = self.search(*args, **kwargs)
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:594: in search
    return self.search_ext(base,scope,filterstr,attrlist,attrsonly,None,None)
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:586: in search_ext
    timeout,sizelimit,
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <lib389.DirSrv instance at 0x7fd415907128>
func = <built-in method search_ext of LDAP object at 0x7fd415de00d0>
args = ('cn=attribute uniqueness,cn=plugins,cn=config', 0, '(objectclass=nsSlapdPlugin)', ['objectClass', 'cn', 'nsslapd-pluginPath', 'nsslapd-pluginInitfunc', 'nsslapd-pluginType', 'nsslapd-pluginEnabled', ...], 0, None, ...)
kwargs = {}, diagnostic_message_success = None
e = SERVER_DOWN({'desc': "Can't contact LDAP server"},)

    def _ldap_call(self,func,*args,**kwargs):
        """
        Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs
        """
        self._ldap_object_lock.acquire()
        if __debug__:
          if self._trace_level>=1:
            self._trace_file.write('*** %s %s - %s\n%s\n' % (
              repr(self), self._uri,
              '.'.join((self.__class__.__name__,func.__name__)),
              pprint.pformat((args,kwargs))
            ))
            if self._trace_level>=9:
              traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
        diagnostic_message_success = None
        try:
          try:
>           result = func(*args,**kwargs)
E           SERVER_DOWN: {'desc': "Can't contact LDAP server"}

/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:106: SERVER_DOWN
----------------------------- Captured stderr call -----------------------------
INFO:lib389: ###############################################
INFO:lib389:#######
INFO:lib389:####### Invalid config (new): uniqueness-attribute-name is missing
INFO:lib389:#######
INFO:lib389:###############################################
______________________ test_ticket47823_invalid_config_6 _______________________
topology = <tickets.ticket47823_test.TopologyStandalone object at 0x7fd4151fc490>

    def test_ticket47823_invalid_config_6(topology):
        '''
        Check that an invalid config is detected. No uniqueness enforced
        Using new config: uniqueness-subtrees is missing
        '''
        _header(topology, "Invalid config (new): uniqueness-subtrees is missing")

        _config_file(topology, action='save')

        # create an invalid config without arg0
>       config = _build_config(topology, attr_name='cn', subtree_1=ACTIVE_DN, subtree_2=None,
                               type_config='new', across_subtrees=False)

tickets/ticket47823_test.py:879:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
tickets/ticket47823_test.py:131: in _build_config
    config = _uniqueness_config_entry(topology, attr_name)
tickets/ticket47823_test.py:112: in _uniqueness_config_entry
    'nsslapd-pluginDescription'])
../../../lib389/lib389/__init__.py:1573: in getEntry
    res = self.search(*args, **kwargs)
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:594: in search
    return self.search_ext(base,scope,filterstr,attrlist,attrsonly,None,None)
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:586: in search_ext
    timeout,sizelimit,
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <lib389.DirSrv instance at 0x7fd415907128>
func = <built-in method search_ext of LDAP object at 0x7fd415de00d0>
args = ('cn=attribute uniqueness,cn=plugins,cn=config', 0, '(objectclass=nsSlapdPlugin)', ['objectClass', 'cn', 'nsslapd-pluginPath', 'nsslapd-pluginInitfunc', 'nsslapd-pluginType', 'nsslapd-pluginEnabled', ...], 0, None, ...)
kwargs = {}, diagnostic_message_success = None
e = SERVER_DOWN({'desc': "Can't contact LDAP server"},)

    def _ldap_call(self,func,*args,**kwargs):
        """
        Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs
        """
        self._ldap_object_lock.acquire()
        if __debug__:
          if self._trace_level>=1:
            self._trace_file.write('*** %s %s - %s\n%s\n' % (
              repr(self), self._uri,
              '.'.join((self.__class__.__name__,func.__name__)),
              pprint.pformat((args,kwargs))
            ))
            if self._trace_level>=9:
              traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
        diagnostic_message_success = None
        try:
          try:
>           result = func(*args,**kwargs)
E           SERVER_DOWN: {'desc': "Can't contact LDAP server"}

/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:106: SERVER_DOWN
----------------------------- Captured stderr call -----------------------------
INFO:lib389: ###############################################
INFO:lib389:#######
INFO:lib389:####### Invalid config (new): uniqueness-subtrees is missing
INFO:lib389:#######
INFO:lib389:###############################################
______________________ test_ticket47823_invalid_config_7 _______________________
topology = <tickets.ticket47823_test.TopologyStandalone object at 0x7fd4151fc490>

    def test_ticket47823_invalid_config_7(topology):
        '''
        Check that an invalid config is detected. No uniqueness enforced
        Using new config: uniqueness-subtrees is missing
        '''
        _header(topology, "Invalid config (new): uniqueness-subtrees are invalid")

        _config_file(topology, action='save')

        # create an invalid config without arg0
>       config = _build_config(topology, attr_name='cn', subtree_1="this_is dummy DN", subtree_2="an other=dummy DN",
                               type_config='new', across_subtrees=False)

tickets/ticket47823_test.py:930:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
tickets/ticket47823_test.py:131: in _build_config
    config = _uniqueness_config_entry(topology, attr_name)
tickets/ticket47823_test.py:112: in _uniqueness_config_entry
    'nsslapd-pluginDescription'])
../../../lib389/lib389/__init__.py:1573: in getEntry
    res = self.search(*args, **kwargs)
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:594: in search
    return self.search_ext(base,scope,filterstr,attrlist,attrsonly,None,None)
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:586: in search_ext
    timeout,sizelimit,
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <lib389.DirSrv instance at 0x7fd415907128>
func = <built-in method search_ext of LDAP object at 0x7fd415de00d0>
args = ('cn=attribute uniqueness,cn=plugins,cn=config', 0, '(objectclass=nsSlapdPlugin)', ['objectClass', 'cn', 'nsslapd-pluginPath', 'nsslapd-pluginInitfunc', 'nsslapd-pluginType', 'nsslapd-pluginEnabled', ...], 0, None, ...)
kwargs = {}, diagnostic_message_success = None
e = SERVER_DOWN({'desc': "Can't contact LDAP server"},)

    def _ldap_call(self,func,*args,**kwargs):
        """
        Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs
        """
        self._ldap_object_lock.acquire()
        if __debug__:
          if self._trace_level>=1:
            self._trace_file.write('*** %s %s - %s\n%s\n' % (
              repr(self), self._uri,
              '.'.join((self.__class__.__name__,func.__name__)),
              pprint.pformat((args,kwargs))
            ))
            if self._trace_level>=9:
              traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
        diagnostic_message_success = None
        try:
          try:
>           result = func(*args,**kwargs)
E           SERVER_DOWN: {'desc': "Can't contact LDAP server"}

/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:106: SERVER_DOWN
----------------------------- Captured stderr call -----------------------------
INFO:lib389: ###############################################
INFO:lib389:#######
INFO:lib389:####### Invalid config (new): uniqueness-subtrees are invalid
INFO:lib389:#######
INFO:lib389:###############################################
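test_ticket47823_invalid_config_2 through _7 all fail the same way: the instance never restarted after invalid_config_1, so every later _build_config() dies with SERVER_DOWN on its first search of cn=attribute uniqueness,cn=plugins,cn=config. For reference, the two configuration styles these tests build are the old positional arguments versus the new named attributes of the Attribute Uniqueness plugin; the snippet below is only illustrative (the subtree DN value is hypothetical; the attribute names come from the test headers and source above):

    # Old style: positional nsslapd-pluginargN values
    old_style = {
        'nsslapd-pluginarg0': 'cn',                             # attribute to keep unique
        'nsslapd-pluginarg1': 'cn=accounts,dc=example,dc=com',  # subtree (hypothetical value)
    }

    # New style: named attributes
    new_style = {
        'uniqueness-attribute-name': 'cn',
        'uniqueness-subtrees': 'cn=accounts,dc=example,dc=com',  # hypothetical value
    }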
____________________________ test_ticket47871_init _____________________________
topology = Master[localhost.localdomain:38941] -> Consumer[localhost.localdomain:38961

    def test_ticket47871_init(topology):
        """
        Initialize the test environment
        """
        topology.master.plugins.enable(name=PLUGIN_RETRO_CHANGELOG)
        mod = [(ldap.MOD_REPLACE, 'nsslapd-changelogmaxage', "10s"),  # 10 second triming
               (ldap.MOD_REPLACE, 'nsslapd-changelog-trim-interval', "5s")]
        topology.master.modify_s("cn=%s,%s" % (PLUGIN_RETRO_CHANGELOG, DN_PLUGIN), mod)
        #topology.master.plugins.enable(name=PLUGIN_MEMBER_OF)
        #topology.master.plugins.enable(name=PLUGIN_REFER_INTEGRITY)
        topology.master.stop(timeout=10)
        topology.master.start(timeout=10)

        topology.master.log.info("test_ticket47871_init topology %r" % (topology))
        # the test case will check if a warning message is logged in the
        # error log of the supplier
>       topology.master.errorlog_file = open(topology.master.errlog, "r")
E       IOError: [Errno 2] No such file or directory: '/var/log/dirsrv/slapd-master_1/error'

tickets/ticket47871_test.py:147: IOError
---------------------------- Captured stdout setup -----------------------------
OK group dirsrv exists
OK user dirsrv exists
OK group dirsrv exists
OK user dirsrv exists
('Update succeeded: status ', '0 Total update succeeded')
---------------------------- Captured stderr setup -----------------------------
INFO:lib389:List backend with suffix=dc=example,dc=com
INFO:lib389:Found entry dn: cn=replrepl,cn=config
cn: bind dn pseudo user
cn: replrepl
objectClass: top
objectClass: person
sn: bind dn pseudo user
userPassword: {SSHA512}ExX81be84URksIaqLWqKsoBAFQwBAiHltdNM8jhnaOGwIKzOExXuKuYzHtP+fMBy+ObdxOiJoY2XvJnFAYYfm3QWuNKWs7QN
INFO:lib389:List backend with suffix=dc=example,dc=com
INFO:lib389:Found entry dn: cn=replrepl,cn=config
cn: bind dn pseudo user
cn: replrepl
objectClass: top
objectClass: person
sn: bind dn pseudo user
userPassword: {SSHA512}PJRtPwGd3e+KDL3ErsG5KTyo1qq+tvNLdJSKxlWUfmviHbGK8sMyeob38mTsYNhWXReDZpvnchh2WuoJXxwLWXpBzSOz0Htp
DEBUG:tickets.ticket47871_test:cn=meTo_$host:$port,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config created
INFO:lib389:Starting total init cn=meTo_$host:$port,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO:tickets.ticket47871_test:Replication is working.
----------------------------- Captured stderr call -----------------------------
INFO:lib389:test_ticket47871_init topology Master[localhost.localdomain:38941] -> Consumer[localhost.localdomain:38961
_______________________________ test_ticket48109 _______________________________
topology = <tickets.ticket48109_test.TopologyStandalone object at 0x7fd415228c10>

    def test_ticket48109(topology):
        '''
        Set SubStr lengths to cn=uid,cn=index,...
          objectClass: extensibleObject
          nsIndexType: sub
          nsSubStrBegin: 2
          nsSubStrEnd: 2
        '''
        log.info('Test case 0')
        # add substr setting to UID_INDEX
        try:
            topology.standalone.modify_s(UID_INDEX,
                                         [(ldap.MOD_ADD, 'objectClass', 'extensibleObject'),
                                          (ldap.MOD_ADD, 'nsIndexType', 'sub'),
                                          (ldap.MOD_ADD, 'nsSubStrBegin', '2'),
                                          (ldap.MOD_ADD, 'nsSubStrEnd', '2')])
        except ldap.LDAPError as e:
            log.error('Failed to add substr lengths: error ' + e.message['desc'])
            assert False

        # restart the server to apply the indexing
        topology.standalone.restart(timeout=10)

        # add a test user
        UID = 'auser0'
        USER_DN = 'uid=%s,%s' % (UID, SUFFIX)
        try:
            topology.standalone.add_s(Entry((USER_DN, {
                'objectclass': 'top person organizationalPerson inetOrgPerson'.split(),
                'cn': 'a user0',
                'sn': 'user0',
                'givenname': 'a',
                'mail': UID})))
        except ldap.LDAPError as e:
            log.error('Failed to add ' + USER_DN + ': error ' + e.message['desc'])
            assert False

        entries = topology.standalone.search_s(SUFFIX, ldap.SCOPE_SUBTREE, '(uid=a*)')
        assert len(entries) == 1

        # restart the server to check the access log
        topology.standalone.restart(timeout=10)

        cmdline = 'egrep %s %s | egrep "uid=a\*"' % (SUFFIX, topology.standalone.accesslog)
        p = os.popen(cmdline, "r")
        l0 = p.readline()
        if l0 == "":
            log.error('Search with "(uid=a*)" is not logged in ' + topology.standalone.accesslog)
>           assert False
E           assert False

<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48109_test.py>:121: AssertionError
---------------------------- Captured stdout setup -----------------------------
OK group dirsrv exists
OK user dirsrv exists
----------------------------- Captured stderr call -----------------------------
INFO:tickets.ticket48109_test:Test case 0
ERROR:tickets.ticket48109_test:Search with "(uid=a*)" is not logged in /var/log/dirsrv/slapd-standalone/access
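Here the substring index was configured and the search returned the entry, but the final check greps the access log through os.popen('egrep ... | egrep "uid=a\*"') and finds nothing, so it is hard to tell whether the search was never logged or the shell pipeline simply failed to match. Scanning the log from Python sidesteps the quoting problem; a minimal sketch (access_log_has_filter is a hypothetical helper, not part of the test):

    import re

    def access_log_has_filter(accesslog_path, suffix, pattern=r'uid=a\*'):
        # Look for an access-log line that mentions both the suffix and the filter.
        # Hypothetical helper -- sketch only.
        with open(accesslog_path) as logfile:
            for line in logfile:
                if suffix in line and re.search(pattern, line):
                    return True
        return False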
__________________ test_ticket48270_homeDirectory_indexed_cis __________________
topology = <tickets.ticket48270_test.TopologyStandalone object at 0x7fd415217790>

    def test_ticket48270_homeDirectory_indexed_cis(topology):
        log.info("\n\nindex homeDirectory in caseIgnoreIA5Match and caseExactIA5Match")
        try:
            ent = topology.standalone.getEntry(HOMEDIRECTORY_INDEX, ldap.SCOPE_BASE)
        except ldap.NO_SUCH_OBJECT:
            topology.standalone.add_s(Entry((HOMEDIRECTORY_INDEX, {
                'objectclass': "top nsIndex".split(),
                'cn': HOMEDIRECTORY_CN,
                'nsSystemIndex': 'false',
                'nsIndexType': 'eq'})))
        #log.info("attach debugger")
        #time.sleep(60)

        IGNORE_MR_NAME='caseIgnoreIA5Match'
        EXACT_MR_NAME='caseExactIA5Match'
        mod = [(ldap.MOD_REPLACE, MATCHINGRULE, (IGNORE_MR_NAME, EXACT_MR_NAME))]
        topology.standalone.modify_s(HOMEDIRECTORY_INDEX, mod)

        #topology.standalone.stop(timeout=10)
        log.info("successfully checked that filter with exact mr , a filter with lowercase eq is failing")
        #assert topology.standalone.db2index(bename=DEFAULT_BENAME, suffixes=None, attrs=['homeDirectory'])
        #topology.standalone.start(timeout=10)
        args = {TASK_WAIT: True}
        topology.standalone.tasks.reindex(suffix=SUFFIX, attrname='homeDirectory', args=args)

        log.info("Check indexing succeeded with a specified matching rule")
        file_path = os.path.join(topology.standalone.prefix, "var/log/dirsrv/slapd-%s/errors" % topology.standalone.serverid)
>       file_obj = open(file_path, "r")
E       IOError: [Errno 2] No such file or directory: '/usr/var/log/dirsrv/slapd-standalone/errors'

<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48270_test.py>:100: IOError
----------------------------- Captured stderr call -----------------------------
INFO:tickets.ticket48270_test: index homeDirectory in caseIgnoreIA5Match and caseExactIA5Match
INFO:tickets.ticket48270_test:successfully checked that filter with exact mr , a filter with lowercase eq is failing
INFO:lib389:List backend with suffix=dc=example,dc=com
INFO:lib389:Index task index_homeDirectory_10302016_013453 completed successfully
INFO:tickets.ticket48270_test:Check indexing succeeded with a specified matching rule
_______________________________ test_ticket48383 _______________________________
topology = <tickets.ticket48383_test.TopologyStandalone object at 0x7fd4104d2150>

    def test_ticket48383(topology):
        """
        This test case will check that we re-alloc buffer sizes on import.c

        We achieve this by setting the servers dbcachesize to a stupid small value
        and adding huge objects to ds.
        Then when we run db2index, either:
        data stress suites tickets tmp
        If we are not using the re-alloc code, it will FAIL (Bad)
        data stress suites tickets tmp
        If we re-alloc properly, it all works regardless.
        """
        topology.standalone.config.set('nsslapd-maxbersize', '200000000')
        topology.standalone.restart()

        # Create some stupid huge objects / attributes in DS.
        # seeAlso is indexed by default. Lets do that!
        # This will take a while ...
        data = [random.choice(string.letters) for x in xrange(10000000)]
        s = "".join(data)

        # This was here for an iteration test.
        i = 1
        USER_DN = 'uid=user%s,ou=people,%s' % (i, DEFAULT_SUFFIX)
        padding = ['%s' % n for n in range(400)]
        user = Entry((USER_DN, {
            'objectclass': 'top posixAccount person extensibleObject'.split(),
            'uid': 'user%s' % (i),
            'cn': 'user%s' % (i),
            'uidNumber': '%s' % (i),
            'gidNumber': '%s' % (i),
            'homeDirectory': '/home/user%s' % (i),
            'description': 'user description',
            'sn': s,
            'padding': padding,
        }))
        try:
            topology.standalone.add_s(user)
        except ldap.LDAPError as e:
            log.fatal('test 48383: Failed to user%s: error %s ' % (i, e.message['desc']))
            assert False

        # Set the dbsize really low.
        try:
            topology.standalone.modify_s(DEFAULT_BENAME, [(ldap.MOD_REPLACE, 'nsslapd-cachememsize', '1')])
        except ldap.LDAPError as e:
            log.fatal('Failed to change nsslapd-cachememsize ' + e.message['desc'])
        ## Does ds try and set a minimum possible value for this?
        ## Yes: [16/Feb/2016:16:39:18 +1000] - WARNING: cache too small, increasing to 500K bytes
        # Given the formula, by default, this means DS will make the buffsize 400k
        # So an object with a 1MB attribute should break indexing

        # stop the server
        topology.standalone.stop(timeout=30)
        # Now export and import the DB. It's easier than db2index ...
        topology.standalone.db2ldif(bename=DEFAULT_BENAME, suffixes=[DEFAULT_SUFFIX],
                                    excludeSuffixes=[], encrypt=False, repl_data=True,
                                    outputfile='%s/ldif/%s.ldif' % (topology.standalone.dbdir, SERVERID_STANDALONE))
        result = topology.standalone.ldif2db(DEFAULT_BENAME, None, None, False,
                                             '%s/ldif/%s.ldif' % (topology.standalone.dbdir, SERVERID_STANDALONE))
>       assert(result)
E       assert False

<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48383_test.py>:123: AssertionError
---------------------------- Captured stdout setup -----------------------------
OK group dirsrv exists
OK user dirsrv exists
----------------------------- Captured stdout call -----------------------------
OK group dirsrv exists
OK user dirsrv exists
Exported ldif file: /var/lib/dirsrv/slapd-standalone/db/ldif/standalone.ldif
OK group dirsrv exists
OK user dirsrv exists
----------------------------- Captured stderr call -----------------------------
CRITICAL:tickets.ticket48383_test:Failed to change nsslapd-cachememsize No such object
INFO:lib389:Running script: /usr/sbin/db2ldif -Z standalone -n userRoot -s dc=example,dc=com -a /var/lib/dirsrv/slapd-standalone/db/ldif/standalone.ldif -r
[30/Oct/2016:01:46:17.520373120 +0200] - DEBUG - ldbm_back_start - userRoot: entry cache size: 10485760 B; db size: 10321920 B
[30/Oct/2016:01:46:17.939192514 +0200] - DEBUG - ldbm_back_start - total cache size: 20971520 B;
[30/Oct/2016:01:46:17.983696414 +0200] - DEBUG - ldbm_back_start - Total entry cache size: 20971520 B; dbcache size: 10000000 B; available memory size: 2143031296 B;
[30/Oct/2016:01:46:18.006396738 +0200] - NOTICE - dblayer_start - Detected Disorderly Shutdown last time Directory Server was running, recovering database.
ldiffile: /var/lib/dirsrv/slapd-standalone/db/ldif/standalone.ldif
[30/Oct/2016:01:46:18.936350117 +0200] - ERR - ldbm_back_ldbm2ldif - db2ldif: can't open /var/lib/dirsrv/slapd-standalone/db/ldif/standalone.ldif: 2 (No such file or directory)
[30/Oct/2016:01:46:19.436065901 +0200] - INFO - dblayer_pre_close - Waiting for 4 database threads to stop
[30/Oct/2016:01:46:19.871891563 +0200] - INFO - dblayer_pre_close - All database threads now stopped
ERROR:lib389:ldif2db: Can't find file: /var/lib/dirsrv/slapd-standalone/db/ldif/standalone.ldif
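The captured output explains the assert False: db2ldif never managed to open /var/lib/dirsrv/slapd-standalone/db/ldif/standalone.ldif, so the export produced nothing and ldif2db then could not find the file. Checking for the LDIF between the two steps points the failure at the export instead of the import; the sketch below reuses the same db2ldif/ldif2db calls and arguments shown in the test source (export_then_import itself is hypothetical):

    import os

    def export_then_import(inst, bename, suffix, serverid):
        # Export, verify the LDIF really exists, then import -- sketch only.
        ldif_path = '%s/ldif/%s.ldif' % (inst.dbdir, serverid)
        inst.db2ldif(bename=bename, suffixes=[suffix], excludeSuffixes=[],
                     encrypt=False, repl_data=True, outputfile=ldif_path)
        if not os.path.isfile(ldif_path):
            raise IOError('db2ldif did not produce %s' % ldif_path)
        return inst.ldif2db(bename, None, None, False, ldif_path)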
___________________ test_ticket48497_homeDirectory_index_run ___________________

topology = <tickets.ticket48497_test.TopologyStandalone object at 0x7fd41049f650>

    def test_ticket48497_homeDirectory_index_run(topology):
        args = {TASK_WAIT: True}
        topology.standalone.tasks.reindex(suffix=SUFFIX, attrname='homeDirectory', args=args)
        log.info("Check indexing succeeded with a specified matching rule")
        file_path = os.path.join(topology.standalone.prefix, "var/log/dirsrv/slapd-%s/errors" % topology.standalone.serverid)
>       file_obj = open(file_path, "r")
E       IOError: [Errno 2] No such file or directory: '/usr/var/log/dirsrv/slapd-standalone/errors'

<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48497_test.py>:139: IOError
----------------------------- Captured stderr call -----------------------------
INFO:lib389:List backend with suffix=dc=example,dc=com
INFO:lib389:Index task index_homeDirectory_10302016_014648 completed successfully
INFO:tickets.ticket48497_test:Check indexing succeeded with a specified matching rule
__________________ test_ticket48745_homeDirectory_indexed_cis __________________

topology = <tickets.ticket48745_test.TopologyStandalone object at 0x7fd414b5d3d0>

    def test_ticket48745_homeDirectory_indexed_cis(topology):
        log.info("\n\nindex homeDirectory in caseIgnoreIA5Match and caseExactIA5Match")
        try:
            ent = topology.standalone.getEntry(HOMEDIRECTORY_INDEX, ldap.SCOPE_BASE)
        except ldap.NO_SUCH_OBJECT:
            topology.standalone.add_s(Entry((HOMEDIRECTORY_INDEX, {
                'objectclass': "top nsIndex".split(),
                'cn': HOMEDIRECTORY_CN,
                'nsSystemIndex': 'false',
                'nsIndexType': 'eq'})))
        #log.info("attach debugger")
        #time.sleep(60)
        IGNORE_MR_NAME='caseIgnoreIA5Match'
        EXACT_MR_NAME='caseExactIA5Match'
        mod = [(ldap.MOD_REPLACE, MATCHINGRULE, (IGNORE_MR_NAME, EXACT_MR_NAME))]
        topology.standalone.modify_s(HOMEDIRECTORY_INDEX, mod)
        #topology.standalone.stop(timeout=10)
        log.info("successfully checked that filter with exact mr , a filter with lowercase eq is failing")
        #assert topology.standalone.db2index(bename=DEFAULT_BENAME, suffixes=None, attrs=['homeDirectory'])
        #topology.standalone.start(timeout=10)
        args = {TASK_WAIT: True}
        topology.standalone.tasks.reindex(suffix=SUFFIX, attrname='homeDirectory', args=args)
        log.info("Check indexing succeeded with a specified matching rule")
        file_path = os.path.join(topology.standalone.prefix, "var/log/dirsrv/slapd-%s/errors" % topology.standalone.serverid)
>       file_obj = open(file_path, "r")
E       IOError: [Errno 2] No such file or directory: '/usr/var/log/dirsrv/slapd-standalone/errors'

<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48745_test.py>:110: IOError
----------------------------- Captured stderr call -----------------------------
INFO:tickets.ticket48745_test: index homeDirectory in caseIgnoreIA5Match and caseExactIA5Match
INFO:tickets.ticket48745_test:successfully checked that filter with exact mr , a filter with lowercase eq is failing
INFO:lib389:List backend with suffix=dc=example,dc=com
INFO:lib389:Index task index_homeDirectory_10302016_014817 completed successfully
INFO:tickets.ticket48745_test:Check indexing succeeded with a specified matching rule
__________________ test_ticket48746_homeDirectory_indexed_cis __________________

topology = <tickets.ticket48746_test.TopologyStandalone object at 0x7fd410970790>

    def test_ticket48746_homeDirectory_indexed_cis(topology):
        log.info("\n\nindex homeDirectory in caseIgnoreIA5Match and caseExactIA5Match")
        try:
            ent = topology.standalone.getEntry(HOMEDIRECTORY_INDEX, ldap.SCOPE_BASE)
        except ldap.NO_SUCH_OBJECT:
            topology.standalone.add_s(Entry((HOMEDIRECTORY_INDEX, {
                'objectclass': "top nsIndex".split(),
                'cn': HOMEDIRECTORY_CN,
                'nsSystemIndex': 'false',
                'nsIndexType': 'eq'})))
        #log.info("attach debugger")
        #time.sleep(60)
        IGNORE_MR_NAME='caseIgnoreIA5Match'
        EXACT_MR_NAME='caseExactIA5Match'
        mod = [(ldap.MOD_REPLACE, MATCHINGRULE, (IGNORE_MR_NAME, EXACT_MR_NAME))]
        topology.standalone.modify_s(HOMEDIRECTORY_INDEX, mod)
        #topology.standalone.stop(timeout=10)
        log.info("successfully checked that filter with exact mr , a filter with lowercase eq is failing")
        #assert topology.standalone.db2index(bename=DEFAULT_BENAME, suffixes=None, attrs=['homeDirectory'])
        #topology.standalone.start(timeout=10)
        args = {TASK_WAIT: True}
        topology.standalone.tasks.reindex(suffix=SUFFIX, attrname='homeDirectory', args=args)
        log.info("Check indexing succeeded with a specified matching rule")
        file_path = os.path.join(topology.standalone.prefix, "var/log/dirsrv/slapd-%s/errors" % topology.standalone.serverid)
>       file_obj = open(file_path, "r")
E       IOError: [Errno 2] No such file or directory: '/usr/var/log/dirsrv/slapd-standalone/errors'

<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48746_test.py>:108: IOError
----------------------------- Captured stderr call -----------------------------
INFO:tickets.ticket48746_test: index homeDirectory in caseIgnoreIA5Match and caseExactIA5Match
INFO:tickets.ticket48746_test:successfully checked that filter with exact mr , a filter with lowercase eq is failing
INFO:lib389:List backend with suffix=dc=example,dc=com
INFO:lib389:Index task index_homeDirectory_10302016_014853 completed successfully
INFO:tickets.ticket48746_test:Check indexing succeeded with a specified matching rule
__________________ test_ticket48746_homeDirectory_indexed_ces __________________

topology = <tickets.ticket48746_test.TopologyStandalone object at 0x7fd410970790>

    def test_ticket48746_homeDirectory_indexed_ces(topology):
        log.info("\n\nindex homeDirectory in caseExactIA5Match, this would trigger the crash")
        try:
            ent = topology.standalone.getEntry(HOMEDIRECTORY_INDEX, ldap.SCOPE_BASE)
        except ldap.NO_SUCH_OBJECT:
            topology.standalone.add_s(Entry((HOMEDIRECTORY_INDEX, {
                'objectclass': "top nsIndex".split(),
                'cn': HOMEDIRECTORY_CN,
                'nsSystemIndex': 'false',
                'nsIndexType': 'eq'})))
        # log.info("attach debugger ")
        # time.sleep(60)
        EXACT_MR_NAME='caseExactIA5Match'
        mod = [(ldap.MOD_REPLACE, MATCHINGRULE, (EXACT_MR_NAME))]
        topology.standalone.modify_s(HOMEDIRECTORY_INDEX, mod)
        #topology.standalone.stop(timeout=10)
        log.info("successfully checked that filter with exact mr , a filter with lowercase eq is failing")
        #assert topology.standalone.db2index(bename=DEFAULT_BENAME, suffixes=None, attrs=['homeDirectory'])
        #topology.standalone.start(timeout=10)
        args = {TASK_WAIT: True}
        topology.standalone.tasks.reindex(suffix=SUFFIX, attrname='homeDirectory', args=args)
        log.info("Check indexing succeeded with a specified matching rule")
        file_path = os.path.join(topology.standalone.prefix, "var/log/dirsrv/slapd-%s/errors" % topology.standalone.serverid)
>       file_obj = open(file_path, "r")
E       IOError: [Errno 2] No such file or directory: '/usr/var/log/dirsrv/slapd-standalone/errors'

<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48746_test.py>:172: IOError
----------------------------- Captured stderr call -----------------------------
INFO:tickets.ticket48746_test: index homeDirectory in caseExactIA5Match, this would trigger the crash
INFO:tickets.ticket48746_test:successfully checked that filter with exact mr , a filter with lowercase eq is failing
INFO:lib389:List backend with suffix=dc=example,dc=com
INFO:lib389:Index task index_homeDirectory_10302016_014855 completed successfully
INFO:tickets.ticket48746_test:Check indexing succeeded with a specified matching rule
_____________________ test_ticket48906_dblock_ldap_update ______________________

topology = <tickets.ticket48906_test.TopologyStandalone object at 0x7fd4104a3590>

    def test_ticket48906_dblock_ldap_update(topology):
        topology.standalone.log.info('###################################')
        topology.standalone.log.info('###')
        topology.standalone.log.info('### Check that after ldap update')
        topology.standalone.log.info('### - monitor contains DEFAULT')
        topology.standalone.log.info('### - configured contains DBLOCK_LDAP_UPDATE')
        topology.standalone.log.info('### - After stop dse.ldif contains DBLOCK_LDAP_UPDATE')
        topology.standalone.log.info('### - After stop guardian contains DEFAULT')
        topology.standalone.log.info('### In fact guardian should differ from config to recreate the env')
        topology.standalone.log.info('### Check that after restart (DBenv recreated)')
        topology.standalone.log.info('### - monitor contains DBLOCK_LDAP_UPDATE ')
        topology.standalone.log.info('### - configured contains DBLOCK_LDAP_UPDATE')
        topology.standalone.log.info('### - dse.ldif contains DBLOCK_LDAP_UPDATE')
        topology.standalone.log.info('###')
        topology.standalone.log.info('###################################')
        topology.standalone.modify_s(ldbm_config, [(ldap.MOD_REPLACE, DBLOCK_ATTR_CONFIG, DBLOCK_LDAP_UPDATE)])
        _check_monitored_value(topology, DBLOCK_DEFAULT)
        _check_configured_value(topology, attr=DBLOCK_ATTR_CONFIG, expected_value=DBLOCK_LDAP_UPDATE, required=True)
        topology.standalone.stop(timeout=10)
        _check_dse_ldif_value(topology, attr=DBLOCK_ATTR_CONFIG, expected_value=DBLOCK_LDAP_UPDATE)
>       _check_guardian_value(topology, attr=DBLOCK_ATTR_GUARDIAN, expected_value=DBLOCK_DEFAULT)

<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48906_test.py>:218:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

topology = <tickets.ticket48906_test.TopologyStandalone object at 0x7fd4104a3590>
attr = 'locks', expected_value = '10000'

    def _check_guardian_value(topology, attr=DBLOCK_ATTR_CONFIG, expected_value=None):
        guardian_file = topology.standalone.dbdir + '/db/guardian'
>       assert(os.path.exists(guardian_file))
E       assert <function exists at 0x7fd425a60050>('/var/lib/dirsrv/slapd-standalone/db/db/guardian')
E        +  where <function exists at 0x7fd425a60050> = <module 'posixpath' from '/usr/lib64/python2.7/posixpath.pyc'>.exists
E        +    where <module 'posixpath' from '/usr/lib64/python2.7/posixpath.pyc'> = os.path

<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48906_test.py>:164: AssertionError
----------------------------- Captured stderr call -----------------------------
INFO:lib389:###################################
INFO:lib389:###
INFO:lib389:### Check that after ldap update
INFO:lib389:### - monitor contains DEFAULT
INFO:lib389:### - configured contains DBLOCK_LDAP_UPDATE
INFO:lib389:### - After stop dse.ldif contains DBLOCK_LDAP_UPDATE
INFO:lib389:### - After stop guardian contains DEFAULT
INFO:lib389:### In fact guardian should differ from config to recreate the env
INFO:lib389:### Check that after restart (DBenv recreated)
INFO:lib389:### - monitor contains DBLOCK_LDAP_UPDATE
INFO:lib389:### - configured contains DBLOCK_LDAP_UPDATE
INFO:lib389:### - dse.ldif contains DBLOCK_LDAP_UPDATE
INFO:lib389:###
INFO:lib389:###################################
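Both ticket48906 guardian checks fail on the path rather than on the value: _check_guardian_value() appends '/db/guardian' to topology.standalone.dbdir, but dbdir already ends in .../db on this instance, so the assertion looks at a doubled path. A short illustration; the values are copied from the assertion above, and the single-level path is only the presumed intent, not something this run verified:

    import os

    dbdir = '/var/lib/dirsrv/slapd-standalone/db'   # from the failing assertion
    print(dbdir + '/db/guardian')                   # .../db/db/guardian (the path checked, missing)
    print(os.path.join(dbdir, 'guardian'))          # .../db/guardian (presumably the intended file)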
_____________________ test_ticket48906_dblock_edit_update ______________________

topology = <tickets.ticket48906_test.TopologyStandalone object at 0x7fd4104a3590>

    def test_ticket48906_dblock_edit_update(topology):
        topology.standalone.log.info('###################################')
        topology.standalone.log.info('###')
        topology.standalone.log.info('### Check that after stop')
        topology.standalone.log.info('### - dse.ldif contains DBLOCK_LDAP_UPDATE')
        topology.standalone.log.info('### - guardian contains DBLOCK_LDAP_UPDATE')
        topology.standalone.log.info('### Check that edit dse+restart')
        topology.standalone.log.info('### - monitor contains DBLOCK_EDIT_UPDATE')
        topology.standalone.log.info('### - configured contains DBLOCK_EDIT_UPDATE')
        topology.standalone.log.info('### Check that after stop')
        topology.standalone.log.info('### - dse.ldif contains DBLOCK_EDIT_UPDATE')
        topology.standalone.log.info('### - guardian contains DBLOCK_EDIT_UPDATE')
        topology.standalone.log.info('###')
        topology.standalone.log.info('###################################')
        topology.standalone.stop(timeout=10)
        _check_dse_ldif_value(topology, attr=DBLOCK_ATTR_CONFIG, expected_value=DBLOCK_LDAP_UPDATE)
>       _check_guardian_value(topology, attr=DBLOCK_ATTR_GUARDIAN, expected_value=DBLOCK_LDAP_UPDATE)

<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48906_test.py>:243:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

topology = <tickets.ticket48906_test.TopologyStandalone object at 0x7fd4104a3590>
attr = 'locks', expected_value = '20000'

    def _check_guardian_value(topology, attr=DBLOCK_ATTR_CONFIG, expected_value=None):
        guardian_file = topology.standalone.dbdir + '/db/guardian'
>       assert(os.path.exists(guardian_file))
E       assert <function exists at 0x7fd425a60050>('/var/lib/dirsrv/slapd-standalone/db/db/guardian')
E        +  where <function exists at 0x7fd425a60050> = <module 'posixpath' from '/usr/lib64/python2.7/posixpath.pyc'>.exists
E        +    where <module 'posixpath' from '/usr/lib64/python2.7/posixpath.pyc'> = os.path

<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48906_test.py>:164: AssertionError
----------------------------- Captured stderr call -----------------------------
INFO:lib389:###################################
INFO:lib389:###
INFO:lib389:### Check that after stop
INFO:lib389:### - dse.ldif contains DBLOCK_LDAP_UPDATE
INFO:lib389:### - guardian contains DBLOCK_LDAP_UPDATE
INFO:lib389:### Check that edit dse+restart
INFO:lib389:### - monitor contains DBLOCK_EDIT_UPDATE
INFO:lib389:### - configured contains DBLOCK_EDIT_UPDATE
INFO:lib389:### Check that after stop
INFO:lib389:### - dse.ldif contains DBLOCK_EDIT_UPDATE
INFO:lib389:### - guardian contains DBLOCK_EDIT_UPDATE
INFO:lib389:###
INFO:lib389:###################################
________________________ test_ticket48906_dblock_robust ________________________

topology = <tickets.ticket48906_test.TopologyStandalone object at 0x7fd4104a3590>

    def test_ticket48906_dblock_robust(topology):
        topology.standalone.log.info('###################################')
        topology.standalone.log.info('###')
        topology.standalone.log.info('### Check that the following values are rejected')
        topology.standalone.log.info('### - negative value')
        topology.standalone.log.info('### - insuffisant value')
        topology.standalone.log.info('### - invalid value')
        topology.standalone.log.info('### Check that minimum value is accepted')
        topology.standalone.log.info('###')
        topology.standalone.log.info('###################################')
        topology.standalone.start(timeout=10)
>       _check_monitored_value(topology, DBLOCK_EDIT_UPDATE)

<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48906_test.py>:291:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

topology = <tickets.ticket48906_test.TopologyStandalone object at 0x7fd4104a3590>
expected_value = '40000'

    def _check_monitored_value(topology, expected_value):
        entries = topology.standalone.search_s(ldbm_monitor, ldap.SCOPE_BASE, '(objectclass=*)')
>       assert(entries[0].hasValue(DBLOCK_ATTR_MONITOR) and entries[0].getValue(DBLOCK_ATTR_MONITOR) == expected_value)
E       assert (True and '20000' == '40000'
E        +  where True = <bound method Entry.hasValue of dn: cn=database,cn=monitor,cn=ldbm database,cn...pd-db-txn-region-wait-rate: 0\nobjectClass: top\nobjectClass: extensibleObject\n\n>('nsslapd-db-configured-locks')
E        +    where <bound method Entry.hasValue of dn: cn=database,cn=monitor,cn=ldbm database,cn...pd-db-txn-region-wait-rate: 0\nobjectClass: top\nobjectClass: extensibleObject\n\n> = dn: cn=database,cn=monitor,cn=ldbm database,cn=plugins,cn=config\ncn: database\n...apd-db-txn-region-wait-rate: 0\nobjectClass: top\nobjectClass: extensibleObject\n\n.hasValue
E         - 20000
E         ?  ^
E         + 40000
E         ?  ^)

<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48906_test.py>:144: AssertionError
----------------------------- Captured stderr call -----------------------------
INFO:lib389:###################################
INFO:lib389:###
INFO:lib389:### Check that the following values are rejected
INFO:lib389:### - negative value
INFO:lib389:### - insuffisant value
INFO:lib389:### - invalid value
INFO:lib389:### Check that minimum value is accepted
INFO:lib389:###
INFO:lib389:###################################
INFO:lib389:open(): Connecting to uri ldap://localhost.localdomain:38931/
INFO:lib389:open(): bound as cn=Directory Manager
_____________________________ test_dynamic_plugins _____________________________

topology = <test_dynamic_plugins.TopologyStandalone object at 0x7fd414a16290>

    def test_dynamic_plugins(topology):
        """
        Test Dynamic Plugins - exercise each plugin and its main features, while
        changing the configuration without restarting the server.

        Need to test: functionality, stability, and stress.  These tests need to run
        with replication disabled, and with replication setup with a second instance.
        Then test if replication is working, and we have same entries on each side.

        Functionality - Make sure that as configuration changes are made they take
        effect immediately.  Cross plugin interaction (e.g. automember/memberOf)
        needs to tested, as well as plugin tasks.  Need to test plugin config
        validation(dependencies, etc).

        Memory Corruption - Restart the plugins many times, and in different orders
        and test functionality, and stability.  This will excerise the internal
        plugin linked lists, dse callbacks, and task handlers.

        Stress - Put the server under load that will trigger multiple plugins(MO, RI, DNA, etc)
        Restart various plugins while these operations are going on.  Perform this
        test 5 times(stress_max_run).
'"""' REPLICA_PORT = 33334 RUV_FILTER = ''\''(&(nsuniqueid=ffffffff-ffffffff-ffffffff-ffffffff)(objectclass=nstombstone))'\''' master_maxcsn = 0 replica_maxcsn = 0 msg = \' '(no' 'replication)'\''' replication_run = False stress_max_runs = 5 '#' First enable dynamic plugins try: 'topology.standalone.modify_s(DN_CONFIG,' '[(ldap.MOD_REPLACE,' ''\''nsslapd-dynamic-plugins'\'',' ''\''on'\'')])' except ldap.LDAPError as e: 'ldap.fatal('\''Failed' to enable dynamic 'plugin!'\''' + 'e.message['\''desc'\''])' assert False '#' Test that critical plugins can be updated even though the change might not be applied try: 'topology.standalone.modify_s(DN_LDBM,' '[(ldap.MOD_REPLACE,' ''\''description'\'',' ''\''test'\'')])' except ldap.LDAPError as e: 'ldap.fatal('\''Failed' to apply change to critical 'plugin'\''' + 'e.message['\''desc'\''])' assert False while 1: '#' '#' First run the tests with replication disabled, then rerun them with replication set up '#' '############################################################################' '#' Test plugin functionality '############################################################################' 'log.info('\''####################################################################'\'')' 'log.info('\''Testing' Dynamic Plugins 'Functionality'\''' + msg + ''\''...'\'')' 'log.info('\''####################################################################\n'\'')' 'plugin_tests.test_all_plugins(topology.standalone)' 'log.info('\''####################################################################'\'')' 'log.info('\''Successfully' Tested Dynamic Plugins 'Functionality'\''' + msg + ''\''.'\'')' 'log.info('\''################################################################## ##\n'\'')' '############################################################################' '#' Test the stability by exercising the internal lists, callabcks, and task handlers '############################################################################' 'log.info('\''####################################################################'\'')' 'log.info('\''Testing' Dynamic Plugins for Memory 'Corruption'\''' + msg + ''\''...'\'')' 'log.info('\''####################################################################\n'\'')' prev_plugin_test = None prev_prev_plugin_test = None for plugin_test in plugin_tests.func_tests: '#' '#' Restart the plugin several times '(and' prev 'plugins)' - work that linked list '#' 'plugin_test(topology.standalone,' '"restart")' if prev_prev_plugin_test: 'prev_prev_plugin_test(topology.standalone,' '"restart")' 'plugin_test(topology.standalone,' '"restart")' if prev_plugin_test: 'prev_plugin_test(topology.standalone,' '"restart")' 'plugin_test(topology.standalone,' '"restart")' '#' Now run the functional test 'plugin_test(topology.standalone)' '#' Set the previous tests if prev_plugin_test: prev_prev_plugin_test = prev_plugin_test prev_plugin_test = plugin_test 'log.info('\''####################################################################'\'')' 'log.info('\''Successfully' Tested Dynamic Plugins for Memory 'Corruption'\''' + msg + ''\''.'\'')' 'log.info('\''####################################################################\n'\'')' '############################################################################' '#' Stress two plugins while restarting it, and while restarting other plugins. '#' The goal is to not crash, and have the plugins work after stressing them. 
            ############################################################################
            log.info('####################################################################')
            log.info('Stressing Dynamic Plugins' + msg + '...')
            log.info('####################################################################\n')
            stress_tests.configureMO(topology.standalone)
            stress_tests.configureRI(topology.standalone)
            stress_count = 0
            while stress_count < stress_max_runs:
                log.info('####################################################################')
                log.info('Running stress test' + msg + '.  Run (%d/%d)...' % (stress_count + 1, stress_max_runs))
                log.info('####################################################################\n')
                try:
                    # Launch three new threads to add a bunch of users
                    add_users = stress_tests.AddUsers(topology.standalone, 'employee', True)
                    add_users.start()
                    add_users2 = stress_tests.AddUsers(topology.standalone, 'entry', True)
                    add_users2.start()
                    add_users3 = stress_tests.AddUsers(topology.standalone, 'person', True)
                    add_users3.start()
                    time.sleep(1)
                    # While we are adding users restart the MO plugin and an idle plugin
                    topology.standalone.plugins.disable(name=PLUGIN_MEMBER_OF)
                    topology.standalone.plugins.enable(name=PLUGIN_MEMBER_OF)
                    time.sleep(1)
                    topology.standalone.plugins.disable(name=PLUGIN_MEMBER_OF)
                    time.sleep(1)
                    topology.standalone.plugins.enable(name=PLUGIN_MEMBER_OF)
                    topology.standalone.plugins.disable(name=PLUGIN_LINKED_ATTRS)
                    topology.standalone.plugins.enable(name=PLUGIN_LINKED_ATTRS)
                    time.sleep(1)
                    topology.standalone.plugins.disable(name=PLUGIN_MEMBER_OF)
                    topology.standalone.plugins.enable(name=PLUGIN_MEMBER_OF)
                    time.sleep(2)
                    topology.standalone.plugins.disable(name=PLUGIN_MEMBER_OF)
                    time.sleep(1)
                    topology.standalone.plugins.enable(name=PLUGIN_MEMBER_OF)
                    topology.standalone.plugins.disable(name=PLUGIN_LINKED_ATTRS)
                    topology.standalone.plugins.enable(name=PLUGIN_LINKED_ATTRS)
                    topology.standalone.plugins.disable(name=PLUGIN_MEMBER_OF)
                    time.sleep(1)
                    topology.standalone.plugins.enable(name=PLUGIN_MEMBER_OF)
                    topology.standalone.plugins.disable(name=PLUGIN_MEMBER_OF)
                    topology.standalone.plugins.enable(name=PLUGIN_MEMBER_OF)
                    # Wait for the 'adding' threads to complete
                    add_users.join()
                    add_users2.join()
                    add_users3.join()
                    # Now launch three threads to delete the users
                    del_users = stress_tests.DelUsers(topology.standalone, 'employee')
                    del_users.start()
                    del_users2 = stress_tests.DelUsers(topology.standalone, 'entry')
                    del_users2.start()
                    del_users3 = stress_tests.DelUsers(topology.standalone, 'person')
                    del_users3.start()
                    time.sleep(1)
                    # Restart both the MO, RI plugins during these deletes, and an idle plugin
                    topology.standalone.plugins.disable(name=PLUGIN_REFER_INTEGRITY)
                    topology.standalone.plugins.disable(name=PLUGIN_MEMBER_OF)
                    topology.standalone.plugins.enable(name=PLUGIN_MEMBER_OF)
                    topology.standalone.plugins.enable(name=PLUGIN_REFER_INTEGRITY)
                    time.sleep(1)
                    topology.standalone.plugins.disable(name=PLUGIN_REFER_INTEGRITY)
                    time.sleep(1)
                    topology.standalone.plugins.disable(name=PLUGIN_MEMBER_OF)
                    time.sleep(1)
                    topology.standalone.plugins.enable(name=PLUGIN_MEMBER_OF)
                    time.sleep(1)
                    topology.standalone.plugins.enable(name=PLUGIN_REFER_INTEGRITY)
                    topology.standalone.plugins.disable(name=PLUGIN_LINKED_ATTRS)
                    topology.standalone.plugins.enable(name=PLUGIN_LINKED_ATTRS)
                    topology.standalone.plugins.disable(name=PLUGIN_REFER_INTEGRITY)
                    topology.standalone.plugins.disable(name=PLUGIN_MEMBER_OF)
                    topology.standalone.plugins.enable(name=PLUGIN_MEMBER_OF)
                    topology.standalone.plugins.enable(name=PLUGIN_REFER_INTEGRITY)
                    time.sleep(2)
                    topology.standalone.plugins.disable(name=PLUGIN_REFER_INTEGRITY)
                    time.sleep(1)
                    topology.standalone.plugins.disable(name=PLUGIN_MEMBER_OF)
                    time.sleep(1)
                    topology.standalone.plugins.enable(name=PLUGIN_MEMBER_OF)
                    time.sleep(1)
                    topology.standalone.plugins.enable(name=PLUGIN_REFER_INTEGRITY)
                    topology.standalone.plugins.disable(name=PLUGIN_LINKED_ATTRS)
                    topology.standalone.plugins.enable(name=PLUGIN_LINKED_ATTRS)
                    # Wait for the 'deleting' threads to complete
                    del_users.join()
                    del_users2.join()
                    del_users3.join()
                    # Now make sure both the MO and RI plugins still work correctly
                    plugin_tests.func_tests[8](topology.standalone)  # RI plugin
                    plugin_tests.func_tests[5](topology.standalone)  # MO plugin
                    # Cleanup the stress tests
                    stress_tests.cleanup(topology.standalone)
                except:
                    log.info('Stress test failed!')
                    repl_fail(replica_inst)
                stress_count += 1
                log.info('####################################################################')
                log.info('Successfully Stressed Dynamic Plugins' + msg + '.  Completed (%d/%d)' % (stress_count, stress_max_runs))
                log.info('####################################################################\n')

            if replication_run:
                # We're done.
                break
            else:
                #
                # Enable replication and run everything one more time
                #
                log.info('Setting up replication, and rerunning the tests...\n')
                # Create replica instance
                replica_inst = DirSrv(verbose=False)
                args_instance[SER_HOST] = LOCALHOST
                args_instance[SER_PORT] = REPLICA_PORT
                args_instance[SER_SERVERID_PROP] = 'replica'
                args_instance[SER_CREATION_SUFFIX] = DEFAULT_SUFFIX
                args_replica_inst = args_instance.copy()
                replica_inst.allocate(args_replica_inst)
                replica_inst.create()
                replica_inst.open()
                try:
                    topology.standalone.replica.enableReplication(suffix=DEFAULT_SUFFIX,
                                                                  role=REPLICAROLE_MASTER,
                                                                  replicaId=1)
                    replica_inst.replica.enableReplication(suffix=DEFAULT_SUFFIX,
                                                           role=REPLICAROLE_CONSUMER,
                                                           replicaId=65535)
                    properties = {RA_NAME: r'to_replica',
                                  RA_BINDDN: defaultProperties[REPLICATION_BIND_DN],
                                  RA_BINDPW: defaultProperties[REPLICATION_BIND_PW],
                                  RA_METHOD: defaultProperties[REPLICATION_BIND_METHOD],
                                  RA_TRANSPORT_PROT: defaultProperties[REPLICATION_TRANSPORT]}
                    repl_agreement = topology.standalone.agreement.create(suffix=DEFAULT_SUFFIX,
                                                                          host=LOCALHOST,
                                                                          port=REPLICA_PORT,
                                                                          properties=properties)
                    if not repl_agreement:
                        log.fatal("Fail to create a replica agreement")
                        repl_fail(replica_inst)
                    topology.standalone.agreement.init(DEFAULT_SUFFIX, LOCALHOST, REPLICA_PORT)
                    topology.standalone.waitForReplInit(repl_agreement)
                except:
                    log.info('Failed to setup replication!')
>                   repl_fail(replica_inst)

<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/suites/dynamic-plugins/test_dynamic_plugins.py>:347:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

replica = <lib389.DirSrv instance at 0x7fd410a04128>

    def repl_fail(replica):
        # remove replica instance, and assert failure
        replica.delete()
>       assert False
E       assert False
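The report only shows the generic assert from repl_fail(): the bare except: above swallows whatever enableReplication()/agreement.init()/waitForReplInit() actually raised, and repl_fail() then deletes the instance and asserts False. A minimal sketch of a variant that keeps the root cause visible, illustrative only and not the test's code:

    import logging
    log = logging.getLogger(__name__)

    def repl_fail(replica):
        # Log the exception currently being handled (this helper is called
        # from inside an except block), then clean up and fail as before.
        log.exception('Replication setup failed')
        replica.delete()
        assert False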
<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/suites/dynamic-plugins/test_dynamic_plugins.py>:40: AssertionError ---------------------------- Captured stdout setup ----------------------------- OK group dirsrv exists OK user dirsrv exists ----------------------------- Captured stdout call ----------------------------- OK group dirsrv exists OK user dirsrv exists Instance slapd-passthru removed. OK group dirsrv exists OK user dirsrv exists Instance slapd-passthru removed. OK group dirsrv exists OK user dirsrv exists Instance slapd-replica removed. ----------------------------- Captured stderr call ----------------------------- INFO:test_dynamic_plugins:#################################################################### INFO:test_dynamic_plugins:Testing Dynamic Plugins Functionality '(no' 'replication)...' INFO:test_dynamic_plugins:#################################################################### INFO:plugin_tests:Testing Account Policy Plugin... INFO:plugin_tests:test_acctpolicy: PASS INFO:plugin_tests:Testing attribute uniqueness... INFO:plugin_tests:test_attruniq: PASS INFO:plugin_tests:Testing Auto Membership Plugin... INFO:plugin_tests:test_automember: PASS INFO:plugin_tests:Testing Distributed Numeric Assignment Plugin... INFO:plugin_tests:test_dna: PASS INFO:plugin_tests:Testing Linked Attributes... INFO:plugin_tests:test_linkedattrs: PASS INFO:plugin_tests:Testing MemberOf Plugin... INFO:plugin_tests:test_membe rof: PASS INFO:plugin_tests:Testing Managed Entries... INFO:plugin_tests:test_mep: PASS INFO:plugin_tests:Testing Pass Through Authentication... INFO:lib389:List backend with suffix=dc=pass2,dc=thru INFO:lib389:Creating a local backend INFO:lib389:List backend cn=PASS2,cn=ldbm database,cn=plugins,cn=config INFO:lib389:Found entry dn: cn=PASS2,cn=ldbm database,cn=plugins,cn=config cn: PASS2 nsslapd-cachememsize: 10485760 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-passthru/db/PASS2 nsslapd-dncachememsize: 10485760 nsslapd-readonly: off nsslapd-require-index: off nsslapd-suffix: dc=pass2,dc=thru objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance INFO:lib389:Entry dn: 'cn="dc=pass2,dc=thru",cn=mapping' tree,cn=config cn: dc=pass2,dc=thru nsslapd-backend: PASS2 nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree INFO:lib389:Found entry dn: 'cn=dc\3Dpass2\2Cdc\3Dthru,cn=mapping' tree,cn=config cn: dc=pass2,dc=thru nsslapd-backend: PASS2 nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree INFO:plugin_tests:test_passthru: PASS INFO:plugin_tests:Testing referential integrity postoperation... INFO:plugin_tests:test_referint: PASS INFO:plugin_tests:Testing Retro Changelog Plugin... INFO:plugin_tests:test_retrocl: PASS INFO:plugin_tests:Testing RootDN Access Control... INFO:plugin_tests:test_rootdn: PASS INFO:test_dynamic_plugins:#################################################################### INFO:test_dynamic_plugins:Successfully Tested Dynamic Plugins Functionality '(no' 'replication).' INFO:test_dynamic_plugins:#################################################################### INFO:test_dynamic_plugins:#################################################################### INFO:test_dynamic_plugins:Testing Dynamic Plugins for Memory Corruption '(no' 'replication)...' 
INFO:test_dynamic_plugins:#################################################################### INFO:plugin_tests:Testi ng Account Policy Plugin... INFO:plugin_tests:test_acctpolicy: PASS INFO:plugin_tests:Testing attribute uniqueness... INFO:plugin_tests:test_attruniq: PASS INFO:plugin_tests:Testing Auto Membership Plugin... INFO:plugin_tests:test_automember: PASS INFO:plugin_tests:Testing Distributed Numeric Assignment Plugin... INFO:plugin_tests:test_dna: PASS INFO:plugin_tests:Testing Linked Attributes... INFO:plugin_tests:test_linkedattrs: PASS INFO:plugin_tests:Testing MemberOf Plugin... INFO:plugin_tests:test_memberof: PASS INFO:plugin_tests:Testing Managed Entries... INFO:plugin_tests:test_mep: PASS INFO:plugin_tests:Testing Pass Through Authentication... INFO:lib389:List backend with suffix=dc=pass2,dc=thru INFO:lib389:Creating a local backend INFO:lib389:List backend cn=PASS2,cn=ldbm database,cn=plugins,cn=config INFO:lib389:Found entry dn: cn=PASS2,cn=ldbm database,cn=plugins,cn=config cn: PASS2 nsslapd-cachememsize: 10485760 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-passthru/db/PASS2 nsslapd-dncachememsize: 10485760 nsslapd-readonly: off nsslapd-require-index: off nsslapd-suffix: dc=pass2,dc=thru objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance INFO:lib389:Entry dn: 'cn="dc=pass2,dc=thru",cn=mapping' tree,cn=config cn: dc=pass2,dc=thru nsslapd-backend: PASS2 nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree INFO:lib389:Found entry dn: 'cn=dc\3Dpass2\2Cdc\3Dthru,cn=mapping' tree,cn=config cn: dc=pass2,dc=thru nsslapd-backend: PASS2 nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree INFO:plugin_tests:test_passthru: PASS INFO:plugin_tests:Testing referential integrity postoperation... INFO:plugin_tests:test_referint: PASS INFO:plugin_tests:Testing Retro Changelog Plugin... INFO:plugin_tests:test_retrocl: PASS INFO:plugin_tests:Testing RootDN Access Control... INFO:plugin_tests:test_rootdn: PASS INFO:test_dynamic_plugins:############################################################## ###### INFO:test_dynamic_plugins:Successfully Tested Dynamic Plugins for Memory Corruption '(no' 'replication).' INFO:test_dynamic_plugins:#################################################################### INFO:test_dynamic_plugins:#################################################################### INFO:test_dynamic_plugins:Stressing Dynamic Plugins '(no' 'replication)...' INFO:test_dynamic_plugins:#################################################################### INFO:test_dynamic_plugins:#################################################################### INFO:test_dynamic_plugins:Running stress test '(no' 'replication).' Run '(1/5)...' INFO:test_dynamic_plugins:#################################################################### INFO:stress_tests:AddUsers - Adding 250 entries '(employee)...' INFO:stress_tests:AddUsers - Adding 250 entries '(entry)...' INFO:stress_tests:AddUsers - Adding 250 entries '(person)...' INFO:stress_tests:AddUsers - Finished adding 250 entries '(person).' INFO:stress_tests:AddUsers - Finished adding 250 entries '(employee).' INFO:stress_tests:AddUsers - Finished adding 250 entries '(entry).' INFO:stress_tests:DelUsers - Deleting 250 entries '(employee)...' INFO:stress_tests:DelUsers - Deleting 250 entries '(entry)...' INFO:stress_tests:DelUsers - Deleting 250 entries '(person)...' 
INFO:stress_tests:DelUsers - Finished deleting 250 entries '(employee).' INFO:stress_tests:DelUsers - Finished deleting 250 entries '(entry).' INFO:stress_tests:DelUsers - Finished deleting 250 entries '(person).' INFO:plugin_tests:Testing referential integrity postoperation... INFO:plugin_tests:test_referint: PASS INFO:plugin_tests:Testing MemberOf Plugin... INFO:plugin_tests:test_memberof: PASS INFO:test_dynamic_plugins:#################################################################### INFO:test_dynamic_plugins:Successfully Stressed Dynamic Plugins '(no' 'replication).' Completed '(1/5)' INFO:test_dynamic_plugins:#################################################################### INFO:test_dynamic_p lugins:#################################################################### INFO:test_dynamic_plugins:Running stress test '(no' 'replication).' Run '(2/5)...' INFO:test_dynamic_plugins:#################################################################### INFO:stress_tests:AddUsers - Adding 250 entries '(employee)...' INFO:stress_tests:AddUsers - Adding 250 entries '(person)...' INFO:stress_tests:AddUsers - Adding 250 entries '(entry)...' INFO:stress_tests:AddUsers - Finished adding 250 entries '(person).' INFO:stress_tests:AddUsers - Finished adding 250 entries '(entry).' INFO:stress_tests:AddUsers - Finished adding 250 entries '(employee).' INFO:stress_tests:DelUsers - Deleting 250 entries '(employee)...' INFO:stress_tests:DelUsers - Deleting 250 entries '(entry)...' INFO:stress_tests:DelUsers - Deleting 250 entries '(person)...' INFO:stress_tests:DelUsers - Finished deleting 250 entries '(person).' INFO:stress_tests:DelUsers - Finished deleting 250 entries '(employee).' INFO:stress_tests:DelUsers - Finished deleting 250 entries '(entry).' INFO:plugin_tests:Testing referential integrity postoperation... INFO:plugin_tests:test_referint: PASS INFO:plugin_tests:Testing MemberOf Plugin... INFO:plugin_tests:test_memberof: PASS INFO:test_dynamic_plugins:#################################################################### INFO:test_dynamic_plugins:Successfully Stressed Dynamic Plugins '(no' 'replication).' Completed '(2/5)' INFO:test_dynamic_plugins:#################################################################### INFO:test_dynamic_plugins:#################################################################### INFO:test_dynamic_plugins:Running stress test '(no' 'replication).' Run '(3/5)...' INFO:test_dynamic_plugins:#################################################################### INFO:stress_tests:AddUsers - Adding 250 entries '(employee)...' INFO:stress_tests:AddUsers - Adding 250 entries '(entry)...' INFO:stress_tests:AddUsers - Adding 250 entries '(person)...' INFO:stress_tests:AddUsers - Finished adding 250 ent ries '(employee).' INFO:stress_tests:AddUsers - Finished adding 250 entries '(entry).' INFO:stress_tests:AddUsers - Finished adding 250 entries '(person).' INFO:stress_tests:DelUsers - Deleting 250 entries '(person)...' INFO:stress_tests:DelUsers - Deleting 250 entries '(entry)...' INFO:stress_tests:DelUsers - Deleting 250 entries '(employee)...' INFO:stress_tests:DelUsers - Finished deleting 250 entries '(entry).' INFO:stress_tests:DelUsers - Finished deleting 250 entries '(person).' INFO:stress_tests:DelUsers - Finished deleting 250 entries '(employee).' INFO:plugin_tests:Testing referential integrity postoperation... INFO:plugin_tests:test_referint: PASS INFO:plugin_tests:Testing MemberOf Plugin... 
INFO:plugin_tests:test_memberof: PASS INFO:test_dynamic_plugins:#################################################################### INFO:test_dynamic_plugins:Successfully Stressed Dynamic Plugins '(no' 'replication).' Completed '(3/5)' INFO:test_dynamic_plugins:#################################################################### INFO:test_dynamic_plugins:#################################################################### INFO:test_dynamic_plugins:Running stress test '(no' 'replication).' Run '(4/5)...' INFO:test_dynamic_plugins:#################################################################### INFO:stress_tests:AddUsers - Adding 250 entries '(entry)...' INFO:stress_tests:AddUsers - Adding 250 entries '(person)...' INFO:stress_tests:AddUsers - Adding 250 entries '(employee)...' INFO:stress_tests:AddUsers - Finished adding 250 entries '(employee).' INFO:stress_tests:AddUsers - Finished adding 250 entries '(entry).' INFO:stress_tests:AddUsers - Finished adding 250 entries '(person).' INFO:stress_tests:DelUsers - Deleting 250 entries '(employee)...' INFO:stress_tests:DelUsers - Deleting 250 entries '(entry)...' INFO:stress_tests:DelUsers - Deleting 250 entries '(person)...' INFO:stress_tests:DelUsers - Finished deleting 250 entries '(employee).' INFO:stress_tests:DelUsers - Finished deleting 250 entries '(person).' INFO:stress_tests:DelUsers - Finished deleting 250 entries '(entry).' INFO:plugin_tests:Testing referential integrity postoperation... INFO:plugin_tests:test_referint: PASS INFO:plugin_tests:Testing MemberOf Plugin... INFO:plugin_tests:test_memberof: PASS INFO:test_dynamic_plugins:#################################################################### INFO:test_dynamic_plugins:Successfully Stressed Dynamic Plugins '(no' 'replication).' Completed '(4/5)' INFO:test_dynamic_plugins:#################################################################### INFO:test_dynamic_plugins:#################################################################### INFO:test_dynamic_plugins:Running stress test '(no' 'replication).' Run '(5/5)...' INFO:test_dynamic_plugins:#################################################################### INFO:stress_tests:AddUsers - Adding 250 entries '(employee)...' INFO:stress_tests:AddUsers - Adding 250 entries '(person)...' INFO:stress_tests:AddUsers - Adding 250 entries '(entry)...' INFO:stress_tests:AddUsers - Finished adding 250 entries '(person).' INFO:stress_tests:AddUsers - Finished adding 250 entries '(employee).' INFO:stress_tests:AddUsers - Finished adding 250 entries '(entry).' INFO:stress_tests:DelUsers - Deleting 250 entries '(employee)...' INFO:stress_tests:DelUsers - Deleting 250 entries '(entry)...' INFO:stress_tests:DelUsers - Deleting 250 entries '(person)...' INFO:stress_tests:DelUsers - Finished deleting 250 entries '(person).' INFO:stress_tests:DelUsers - Finished deleting 250 entries '(entry).' INFO:stress_tests:DelUsers - Finished deleting 250 entries '(employee).' INFO:plugin_tests:Testing referential integrity postoperation... INFO:plugin_tests:test_referint: PASS INFO:plugin_tests:Testing MemberOf Plugin... INFO:plugin_tests:test_memberof: PASS INFO:test_dynamic_plugins:#################################################################### INFO:test_dynamic_plugins:Successfully Stressed Dynamic Plugins '(no' 'replication).' Completed '(5/5)' INFO:test_dynamic_plugins:############ ######################################################## INFO:test_dynamic_plugins:Setting up replication, and rerunning the tests... 
INFO:lib389:List backend with suffix=dc=example,dc=com INFO:lib389:Found entry dn: cn=replrepl,cn=config cn: bind dn pseudo user cn: replrepl objectClass: top objectClass: person sn: bind dn pseudo user userPassword: '{SSHA512}JV7exSg0ZOVtFUJs1Me7nkHfcAk+Scrghco05426bFodD5+k04fScbd6z455BXTjuqmFvReC2dPFqsj+diUbPKR2ZKghF0pm' INFO:lib389:List backend with suffix=dc=example,dc=com INFO:lib389:Found entry dn: cn=replrepl,cn=config cn: bind dn pseudo user cn: replrepl objectClass: top objectClass: person sn: bind dn pseudo user userPassword: '{SSHA512}vWC5GEutgGMU45/KDdNDLhz/glIDFixC0LmS/ROY82BQxsgSCPhntMOXf2Apl+yAZGBty+57SzJFCNyij8g0dTpAGpxurYkw' INFO:lib389:Starting total init 'cn=to_replica,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping' tree,cn=config INFO:test_dynamic_plugins:Failed to setup 'replication!' ____________________________ test_range_search_init ____________________________ topology = '<suites.memory_leaks.range_search_test.TopologyStandalone' object at '0x7fd410939710>' def 'test_range_search_init(topology):' ''\'''\'''\''' Enable retro cl, and valgrind. Since valgrind tests move the ns-slapd binary around 'it'\''s' important to always '"valgrind_disable"' before '"assert' 'False"ing,' otherwise we leave the wrong ns-slapd in place if there is a failure ''\'''\'''\''' 'log.info('\''Initializing' 'test_range_search...'\'')' 'topology.standalone.plugins.enable(name=PLUGIN_RETRO_CHANGELOG)' '#' First stop the instance 'topology.standalone.stop(timeout=30)' '#' Get the sbin directory so we know where to replace ''\''ns-slapd'\''' sbin_dir = 'get_sbin_dir(prefix=topology.standalone.prefix)' '#' Enable valgrind if not 'topology.standalone.has_asan():' '>' 'valgrind_enable(sbin_dir)' <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/suites/memory_leaks/range_search_test.py>:86: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ sbin_dir = ''\''/usr/sbin'\''' wrapper = ''\''<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/ns-slapd.valgrind'\'''> def 'valgrind_enable(sbin_dir,' 'wrapper=None):' ''\'''\'''\''' Copy the valgrind ns-slapd wrapper into the /sbin directory '(making' a backup of the original ns-slapd 'binary).' The script calling 'valgrind_enable()' must be run as the ''\''root'\''' user as selinux needs to be disabled for valgrind to work The server 'instance(s)' should be stopped prior to calling this function. Then after calling 'valgrind_enable():' - Start the server 'instance(s)' with a timeout of 60 '(valgrind' takes a while to 'startup)' - Run the tests - Stop the server - Get the results file - Run 'valgrind_check_file(result_file,' '"pattern",' '"pattern",' '...)' - Run 'valgrind_disable()' :param sbin_dir: the location of the ns-slapd binary '(e.g.' 
'/usr/sbin)' :param wrapper: The valgrind wrapper script for ns-slapd '(if' not set, a default wrapper is 'used)' :raise IOError: If there is a problem setting up the valgrind scripts :raise EnvironmentError: If script is not run as ''\''root'\''' ''\'''\'''\''' if 'os.geteuid()' '!=' 0: 'log.error('\''This' script must be run as root to use 'valgrind'\'')' raise EnvironmentError if not wrapper: '#' use the default ns-slapd wrapper wrapper = ''\''%s/%s'\''' % '(os.path.dirname(os.path.abspath(__file__)),' 'VALGRIND_WRAPPER)' nsslapd_orig = ''\''%s/ns-slapd'\''' % sbin_dir nsslapd_backup = ''\''%s/ns-slapd.original'\''' % sbin_dir if 'os.path.isfile(nsslapd_backup):' '#' There is a backup which means we never cleaned up from a previous '#' 'run(failed' 'test?)' if not 'filecmp.cmp(nsslapd_backup,' 'nsslapd_orig):' '#' Files are different sizes, we assume valgrind is already setup 'log.info('\''Valgrind' is already 'enabled.'\'')' return '#' Check both 'nsslapd'\''s' exist if not 'os.path.isfile(wrapper):' raise 'IOError('\''The' valgrind wrapper '(%s)' does not exist. 'file=%s'\''' % '(wra pper,' '__file__))' if not 'os.path.isfile(nsslapd_orig):' raise 'IOError('\''The' binary '(%s)' does not exist or is not 'accessible.'\''' % 'nsslapd_orig)' '#' Make a backup of the original ns-slapd and copy the wrapper into place try: 'shutil.copy2(nsslapd_orig,' 'nsslapd_backup)' except IOError as e: 'log.fatal('\''valgrind_enable():' failed to backup ns-slapd, error: '%s'\''' % 'e.strerror)' raise 'IOError('\''failed' to backup ns-slapd, error: '%s'\''' % 'e.strerror)' '#' Copy the valgrind wrapper into place try: 'shutil.copy2(wrapper,' 'nsslapd_orig)' except IOError as e: 'log.fatal('\''valgrind_enable():' failed to copy valgrind wrapper \' ''\''to' ns-slapd, error: '%s'\''' % 'e.strerror)' raise 'IOError('\''failed' to copy valgrind wrapper to ns-slapd, error: '%s'\''' % '>' 'e.strerror)' E IOError: failed to copy valgrind wrapper to ns-slapd, error: Text file busy <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/utils.py>:255: IOError ---------------------------- Captured stdout setup ----------------------------- OK group dirsrv exists OK user dirsrv exists ----------------------------- Captured stderr call ----------------------------- INFO:suites.memory_leaks.range_search_test:Initializing test_range_search... 'CRITICAL:lib389.utils:valgrind_enable():' failed to copy valgrind wrapper to ns-slapd, error: Text file busy ___________________________ test_multi_suffix_search ___________________________ topology = '<suites.paged_results.paged_results_test.TopologyStandalone' object at '0x7fd40bfa4410>' test_user = None, new_suffixes = None def 'test_multi_suffix_search(topology,' test_user, 'new_suffixes):' '"""Verify' that page result search returns empty cookie if there is no returned entry. :Feature: Simple paged results :Setup: Standalone instance, test user for binding, two suffixes with backends, one is inserted into another, 10 users for the search base within each suffix :Steps: 1. Bind as test user 2. Search through all 20 added users with a simple p aged control using page_size = 4 3. Wait some time logs to be updated 3. 
Check access log :Assert: All users should be found, the access log should contain the pr_cookie for each page request and it should be equal 0, except the last one should be equal -1 '"""' search_flt = 'r'\''(uid=test*)'\''' searchreq_attrlist = '['\''dn'\'',' ''\''sn'\'']' page_size = 4 users_num = 20 'log.info('\''Clear' the access 'log'\'')' 'topology.standalone.deleteAccessLogs()' users_list_1 = 'add_users(topology,' users_num / 2, 'NEW_SUFFIX_1)' users_list_2 = 'add_users(topology,' users_num / 2, 'NEW_SUFFIX_2)' try: 'log.info('\''Set' DM 'bind'\'')' 'topology.standalone.simple_bind_s(DN_DM,' 'PASSWORD)' req_ctrl = 'SimplePagedResultsControl(True,' size=page_size, 'cookie='\'''\'')' all_results = 'paged_search(topology,' NEW_SUFFIX_1, '[req_ctrl],' search_flt, 'searchreq_attrlist)' 'log.info('\''{}' 'results'\''.format(len(all_results)))' assert 'len(all_results)' == users_num 'log.info('\''Restart' the server to flush the 'logs'\'')' 'topology.standalone.restart(timeout=10)' access_log_lines = 'topology.standalone.ds_access_log.match('\''.*pr_cookie=.*'\'')' pr_cookie_list = '([line.rsplit('\''='\'',' '1)[-1]' for line in 'access_log_lines])' pr_cookie_list = '[int(pr_cookie)' for pr_cookie in 'pr_cookie_list]' 'log.info('\''Assert' that last pr_cookie == -1 and others pr_cookie == '0'\'')' pr_cookie_zeros = 'list(pr_cookie' == 0 for pr_cookie in 'pr_cookie_list[0:-1])' assert 'all(pr_cookie_zeros)' '>' assert 'pr_cookie_list[-1]' == -1 E IndexError: list index out of range <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/suites/paged_results/paged_results_test.py>:1198: IndexError ---------------------------- Captured stderr setup ----------------------------- INFO:suites.paged_results.paged_results_test:Adding suffix:o=test_parent and backend: parent_base INFO:lib389:List backend with suffix=o=test_parent INFO:lib389:Creating a local backend INFO:lib389:List backend cn=parent_ base,cn=ldbm database,cn=plugins,cn=config INFO:lib389:Found entry dn: cn=parent_base,cn=ldbm database,cn=plugins,cn=config cn: parent_base nsslapd-cachememsize: 10485760 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-standalone/db/parent_base nsslapd-dncachememsize: 10485760 nsslapd-readonly: off nsslapd-require-index: off nsslapd-suffix: o=test_parent objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance INFO:lib389:Entry dn: 'cn="o=test_parent",cn=mapping' tree,cn=config cn: o=test_parent nsslapd-backend: parent_base nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree INFO:lib389:Found entry dn: 'cn=o\3Dtest_parent,cn=mapping' tree,cn=config cn: o=test_parent nsslapd-backend: parent_base nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree INFO:suites.paged_results.paged_results_test:Adding suffix:ou=child,o=test_parent and backend: child_base INFO:lib389:List backend with suffix=ou=child,o=test_parent INFO:lib389:Creating a local backend INFO:lib389:List backend cn=child_base,cn=ldbm database,cn=plugins,cn=config INFO:lib389:Found entry dn: cn=child_base,cn=ldbm database,cn=plugins,cn=config cn: child_base nsslapd-cachememsize: 10485760 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-standalone/db/child_base nsslapd-dncachememsize: 10485760 nsslapd-readonly: off nsslapd-require-index: off nsslapd-suffix: ou=child,o=test_parent objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance 
INFO:lib389:Entry dn: 'cn="ou=child,o=test_parent",cn=mapping' tree,cn=config cn: ou=child,o=test_parent nsslapd-backend: child_base nsslapd-parent-suffix: o=test_parent nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree INFO:lib389:Found entry dn: 'cn=ou\3Dchild\2Co\3Dtest_parent,cn=mapping' tree,cn=config cn: ou=child,o=test_parent nsslapd-backend: child_base nsslapd-parent-suffix: o=test_parent nsslapd-state: backend obj ectClass: top objectClass: extensibleObject objectClass: nsMappingTree INFO:suites.paged_results.paged_results_test:Adding ACI to allow our test user to search ----------------------------- Captured stderr call ----------------------------- INFO:suites.paged_results.paged_results_test:Clear the access log INFO:suites.paged_results.paged_results_test:Adding 10 users INFO:suites.paged_results.paged_results_test:Adding 10 users INFO:suites.paged_results.paged_results_test:Set DM bind INFO:suites.paged_results.paged_results_test:Running simple paged result search with - search suffix: 'o=test_parent;' filter: '(uid=test*);' attr list '['\''dn'\'',' ''\''sn'\''];' page_size = '4;' controls: '[<ldap.controls.libldap.SimplePagedResultsControl' instance at '0x7fd410506200>].' INFO:suites.paged_results.paged_results_test:Getting page 0 INFO:suites.paged_results.paged_results_test:Getting page 1 INFO:suites.paged_results.paged_results_test:Getting page 2 INFO:suites.paged_results.paged_results_test:Getting page 3 INFO:suites.paged_results.paged_results_test:Getting page 4 INFO:suites.paged_results.paged_results_test:Getting page 5 INFO:suites.paged_results.paged_results_test:20 results INFO:suites.paged_results.paged_results_test:Restart the server to flush the logs INFO:suites.paged_results.paged_results_test:Assert that last pr_cookie == -1 and others pr_cookie == 0 INFO:suites.paged_results.paged_results_test:Remove added users INFO:suites.paged_results.paged_results_test:Deleting 10 users INFO:suites.paged_results.paged_results_test:Deleting 10 users ============== 32 failed, 484 passed, 5 error in 9544.66 seconds =============== ============================= test session starts ============================== platform linux2 -- Python 2.7.12, pytest-2.9.2, py-1.4.31, pluggy-0.3.1 -- /usr/bin/python2 cachedir: .cache rootdir: <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests,> inifile: plugins: sourceorder-0.5, multihost-1.0 collecting ... 
collected 520 items tickets/ticket1347760_test.py::test_ticket1347760 FAILED tickets/ticket365_test.py::test_ticket365 PASSED tickets/ticket47313_test.py::test_ticket47313_run PASSED tickets/ticket47384_test.py::test_ticket47384 PASSED tickets/ticket47431_test.py::test_ticket47431_0 PASSED tickets/ticket47431_test.py::test_ticket47431_1 FAILED tickets/ticket47431_test.py::test_ticket47431_2 PASSED tickets/ticket47431_test.py::test_ticket47431_3 PASSED tickets/ticket47462_test.py::test_ticket47462 FAILED tickets/ticket47490_test.py::test_ticket47490_init PASSED tickets/ticket47490_test.py::test_ticket47490_one PASSED tickets/ticket47490_test.py::test_ticket47490_two PASSED tickets/ticket47490_test.py::test_ticket47490_three PASSED tickets/ticket47490_test.py::test_ticket47490_four PASSED tickets/ticket47490_test.py::test_ticket47490_five PASSED tickets/ticket47490_test.py::test_ticket47490_six PASSED tickets/ticket47490_test.py::test_ticket47490_seven PASSED tickets/ticket47490_test.py::test_ticket47490_eight PASSED tickets/ticket47490_test.py::test_ticket47490_nine PASSED tickets/ticket47536_test.py::test_ticket47536 FAILED tickets/ticket47553_test.py::test_ticket47553 PASSED tickets/ticket47560_test.py::test_ticket47560 PASSED tickets/ticket47573_test.py::test_ticket47573_init PASSED tickets/ticket47573_test.py::test_ticket47573_one PASSED tickets/ticket47573_test.py::test_ticket47573_two PASSED tickets/ticket47573_test.py::test_ticket47573_three PASSED tickets/ticket47619_test.py::test_ticket47619_init FAILED tickets/ticket47619_test.py::test_ticket47619_create_index PASSED tickets/ticket47619_test.py::test_ticket47619_reindex PASSED tickets/ticket 47619_test.py::test_ticket47619_check_indexed_search PASSED tickets/ticket47640_test.py::test_ticket47640 PASSED tickets/ticket47653MMR_test.py::test_ticket47653_init PASSED tickets/ticket47653MMR_test.py::test_ticket47653_add PASSED tickets/ticket47653MMR_test.py::test_ticket47653_modify PASSED tickets/ticket47653_test.py::test_ticket47653_init PASSED tickets/ticket47653_test.py::test_ticket47653_add PASSED tickets/ticket47653_test.py::test_ticket47653_search PASSED tickets/ticket47653_test.py::test_ticket47653_modify PASSED tickets/ticket47653_test.py::test_ticket47653_delete PASSED tickets/ticket47669_test.py::test_ticket47669_init FAILED tickets/ticket47669_test.py::test_ticket47669_changelog_maxage FAILED tickets/ticket47669_test.py::test_ticket47669_changelog_triminterval FAILED tickets/ticket47669_test.py::test_ticket47669_changelog_compactdbinterval FAILED tickets/ticket47669_test.py::test_ticket47669_retrochangelog_maxage FAILED tickets/ticket47676_test.py::test_ticket47676_init PASSED tickets/ticket47676_test.py::test_ticket47676_skip_oc_at PASSED tickets/ticket47676_test.py::test_ticket47676_reject_action PASSED tickets/ticket47714_test.py::test_ticket47714_init PASSED tickets/ticket47714_test.py::test_ticket47714_run_0 PASSED tickets/ticket47714_test.py::test_ticket47714_run_1 PASSED tickets/ticket47721_test.py::test_ticket47721_init PASSED tickets/ticket47721_test.py::test_ticket47721_0 PASSED tickets/ticket47721_test.py::test_ticket47721_1 PASSED tickets/ticket47721_test.py::test_ticket47721_2 PASSED tickets/ticket47721_test.py::test_ticket47721_3 PASSED tickets/ticket47721_test.py::test_ticket47721_4 PASSED tickets/ticket47781_test.py::test_ticket47781 PASSED tickets/ticket47787_test.py::test_ticket47787_init PASSED tickets/ticket47787_test.py::test_ticket47787_2 PASSED tickets/ticket47808_test.py::test_ticket47808_run PASSED 
tickets/ticket47815_test.py::test_ticket47815 PASSED tickets/ticket47819_test.py::test_ticket47819 PASSED tickets/ticket47823_test.py::test_ticket47823_init FAILED tickets/tic ket47823_test.py::test_ticket47823_one_container_add PASSED tickets/ticket47823_test.py::test_ticket47823_one_container_mod PASSED tickets/ticket47823_test.py::test_ticket47823_one_container_modrdn PASSED tickets/ticket47823_test.py::test_ticket47823_multi_containers_add PASSED tickets/ticket47823_test.py::test_ticket47823_multi_containers_mod PASSED tickets/ticket47823_test.py::test_ticket47823_multi_containers_modrdn PASSED tickets/ticket47823_test.py::test_ticket47823_across_multi_containers_add PASSED tickets/ticket47823_test.py::test_ticket47823_across_multi_containers_mod PASSED tickets/ticket47823_test.py::test_ticket47823_across_multi_containers_modrdn PASSED tickets/ticket47823_test.py::test_ticket47823_invalid_config_1 FAILED tickets/ticket47823_test.py::test_ticket47823_invalid_config_2 FAILED tickets/ticket47823_test.py::test_ticket47823_invalid_config_3 FAILED tickets/ticket47823_test.py::test_ticket47823_invalid_config_4 FAILED tickets/ticket47823_test.py::test_ticket47823_invalid_config_5 FAILED tickets/ticket47823_test.py::test_ticket47823_invalid_config_6 FAILED tickets/ticket47823_test.py::test_ticket47823_invalid_config_7 FAILED tickets/ticket47828_test.py::test_ticket47828_init PASSED tickets/ticket47828_test.py::test_ticket47828_run_0 PASSED tickets/ticket47828_test.py::test_ticket47828_run_1 PASSED tickets/ticket47828_test.py::test_ticket47828_run_2 PASSED tickets/ticket47828_test.py::test_ticket47828_run_3 PASSED tickets/ticket47828_test.py::test_ticket47828_run_4 PASSED tickets/ticket47828_test.py::test_ticket47828_run_5 PASSED tickets/ticket47828_test.py::test_ticket47828_run_6 PASSED tickets/ticket47828_test.py::test_ticket47828_run_7 PASSED tickets/ticket47828_test.py::test_ticket47828_run_8 PASSED tickets/ticket47828_test.py::test_ticket47828_run_9 PASSED tickets/ticket47828_test.py::test_ticket47828_run_10 PASSED tickets/ticket47828_test.py::test_ticket47828_run_11 PASSED tickets/ticket47828_test.py::test_ticket47828_run_12 PASSED tickets/ticket47828_test.py::test_ticket47828_run_13 P ASSED tickets/ticket47828_test.py::test_ticket47828_run_14 PASSED tickets/ticket47828_test.py::test_ticket47828_run_15 PASSED tickets/ticket47828_test.py::test_ticket47828_run_16 PASSED tickets/ticket47828_test.py::test_ticket47828_run_17 PASSED tickets/ticket47828_test.py::test_ticket47828_run_18 PASSED tickets/ticket47828_test.py::test_ticket47828_run_19 PASSED tickets/ticket47828_test.py::test_ticket47828_run_20 PASSED tickets/ticket47828_test.py::test_ticket47828_run_21 PASSED tickets/ticket47828_test.py::test_ticket47828_run_22 PASSED tickets/ticket47828_test.py::test_ticket47828_run_23 PASSED tickets/ticket47828_test.py::test_ticket47828_run_24 PASSED tickets/ticket47828_test.py::test_ticket47828_run_25 PASSED tickets/ticket47828_test.py::test_ticket47828_run_26 PASSED tickets/ticket47828_test.py::test_ticket47828_run_27 PASSED tickets/ticket47828_test.py::test_ticket47828_run_28 PASSED tickets/ticket47828_test.py::test_ticket47828_run_29 PASSED tickets/ticket47828_test.py::test_ticket47828_run_30 PASSED tickets/ticket47828_test.py::test_ticket47828_run_31 PASSED tickets/ticket47829_test.py::test_ticket47829_init PASSED tickets/ticket47829_test.py::test_ticket47829_mod_active_user_1 PASSED tickets/ticket47829_test.py::test_ticket47829_mod_active_user_2 PASSED 
tickets/ticket47829_test.py::test_ticket47829_mod_active_user_3 PASSED tickets/ticket47829_test.py::test_ticket47829_mod_stage_user_1 PASSED tickets/ticket47829_test.py::test_ticket47829_mod_stage_user_2 PASSED tickets/ticket47829_test.py::test_ticket47829_mod_stage_user_3 PASSED tickets/ticket47829_test.py::test_ticket47829_mod_out_user_1 PASSED tickets/ticket47829_test.py::test_ticket47829_mod_out_user_2 PASSED tickets/ticket47829_test.py::test_ticket47829_mod_out_user_3 PASSED tickets/ticket47829_test.py::test_ticket47829_mod_active_user_modrdn_active_user_1 PASSED tickets/ticket47829_test.py::test_ticket47829_mod_active_user_modrdn_stage_user_1 PASSED tickets/ticket47829_test.py::test_ticket47829_mod_active_user_modrdn_out_user_1 PASSED tickets/tic ket47829_test.py::test_ticket47829_mod_modrdn_1 PASSED tickets/ticket47829_test.py::test_ticket47829_mod_stage_user_modrdn_active_user_1 PASSED tickets/ticket47829_test.py::test_ticket47829_mod_stage_user_modrdn_stage_user_1 PASSED tickets/ticket47829_test.py::test_ticket47829_indirect_active_group_1 PASSED tickets/ticket47829_test.py::test_ticket47829_indirect_active_group_2 PASSED tickets/ticket47829_test.py::test_ticket47829_indirect_active_group_3 PASSED tickets/ticket47829_test.py::test_ticket47829_indirect_active_group_4 PASSED tickets/ticket47833_test.py::test_ticket47829_init PASSED tickets/ticket47833_test.py::test_ticket47829_mod_stage_user_modrdn_stage_user_1 PASSED tickets/ticket47869MMR_test.py::test_ticket47869_init PASSED tickets/ticket47869MMR_test.py::test_ticket47869_check PASSED tickets/ticket47871_test.py::test_ticket47871_init FAILED tickets/ticket47871_test.py::test_ticket47871_1 PASSED tickets/ticket47871_test.py::test_ticket47871_2 PASSED tickets/ticket47900_test.py::test_ticket47900 PASSED tickets/ticket47910_test.py::test_ticket47910_logconv_start_end_positive PASSED tickets/ticket47910_test.py::test_ticket47910_logconv_start_end_negative PASSED tickets/ticket47910_test.py::test_ticket47910_logconv_start_end_invalid PASSED tickets/ticket47910_test.py::test_ticket47910_logconv_noaccesslogs PASSED tickets/ticket47920_test.py::test_ticket47920_init PASSED tickets/ticket47920_test.py::test_ticket47920_mod_readentry_ctrl PASSED tickets/ticket47921_test.py::test_ticket47921 PASSED tickets/ticket47927_test.py::test_ticket47927_init PASSED tickets/ticket47927_test.py::test_ticket47927_one PASSED tickets/ticket47927_test.py::test_ticket47927_two PASSED tickets/ticket47927_test.py::test_ticket47927_three PASSED tickets/ticket47927_test.py::test_ticket47927_four PASSED tickets/ticket47927_test.py::test_ticket47927_five PASSED tickets/ticket47927_test.py::test_ticket47927_six PASSED tickets/ticket47931_test.py::test_ticket47931 PASSED tickets/ticket47937_test.py::test_ticket47937 PASSED tickets/tick et47950_test.py::test_ticket47950 PASSED tickets/ticket47953_test.py::test_ticket47953 PASSED tickets/ticket47963_test.py::test_ticket47963 PASSED tickets/ticket47966_test.py::test_ticket47966 PASSED tickets/ticket47970_test.py::test_ticket47970 PASSED tickets/ticket47973_test.py::test_ticket47973 PASSED tickets/ticket47976_test.py::test_ticket47976_init PASSED tickets/ticket47976_test.py::test_ticket47976_1 PASSED tickets/ticket47976_test.py::test_ticket47976_2 PASSED tickets/ticket47976_test.py::test_ticket47976_3 PASSED tickets/ticket47980_test.py::test_ticket47980 PASSED tickets/ticket47981_test.py::test_ticket47981 PASSED tickets/ticket47988_test.py::test_ticket47988_init PASSED tickets/ticket47988_test.py::test_ticket47988_1 
PASSED tickets/ticket47988_test.py::test_ticket47988_2 PASSED tickets/ticket47988_test.py::test_ticket47988_3 PASSED tickets/ticket47988_test.py::test_ticket47988_4 PASSED tickets/ticket47988_test.py::test_ticket47988_5 PASSED tickets/ticket47988_test.py::test_ticket47988_6 PASSED tickets/ticket48005_test.py::test_ticket48005_setup PASSED tickets/ticket48005_test.py::test_ticket48005_memberof PASSED tickets/ticket48005_test.py::test_ticket48005_automember PASSED tickets/ticket48005_test.py::test_ticket48005_syntaxvalidate PASSED tickets/ticket48005_test.py::test_ticket48005_usn PASSED tickets/ticket48005_test.py::test_ticket48005_schemareload PASSED tickets/ticket48013_test.py::test_ticket48013 PASSED tickets/ticket48026_test.py::test_ticket48026 PASSED tickets/ticket48109_test.py::test_ticket48109 FAILED tickets/ticket48170_test.py::test_ticket48170 PASSED tickets/ticket48194_test.py::test_init PASSED tickets/ticket48194_test.py::test_run_0 PASSED tickets/ticket48194_test.py::test_run_1 PASSED tickets/ticket48194_test.py::test_run_2 PASSED tickets/ticket48194_test.py::test_run_3 PASSED tickets/ticket48194_test.py::test_run_4 PASSED tickets/ticket48194_test.py::test_run_5 PASSED tickets/ticket48194_test.py::test_run_6 PASSED tickets/ticket48194_test.py::test_run_7 PASSED tickets/ticket4 8194_test.py::test_run_8 PASSED tickets/ticket48194_test.py::test_run_9 PASSED tickets/ticket48194_test.py::test_run_10 PASSED tickets/ticket48194_test.py::test_run_11 PASSED tickets/ticket48212_test.py::test_ticket48212 PASSED tickets/ticket48214_test.py::test_ticket48214_run PASSED tickets/ticket48226_test.py::test_ticket48226_set_purgedelay PASSED tickets/ticket48226_test.py::test_ticket48226_1 PASSED tickets/ticket48228_test.py::test_ticket48228_test_global_policy PASSED tickets/ticket48228_test.py::test_ticket48228_test_subtree_policy PASSED tickets/ticket48233_test.py::test_ticket48233 PASSED tickets/ticket48234_test.py::test_ticket48234 PASSED tickets/ticket48252_test.py::test_ticket48252_setup PASSED tickets/ticket48252_test.py::test_ticket48252_run_0 PASSED tickets/ticket48252_test.py::test_ticket48252_run_1 PASSED tickets/ticket48265_test.py::test_ticket48265_test PASSED tickets/ticket48266_test.py::test_ticket48266_fractional ERROR tickets/ticket48266_test.py::test_ticket48266_check_repl_desc ERROR tickets/ticket48266_test.py::test_ticket48266_count_csn_evaluation ERROR tickets/ticket48270_test.py::test_ticket48270_init PASSED tickets/ticket48270_test.py::test_ticket48270_homeDirectory_indexed_cis FAILED tickets/ticket48270_test.py::test_ticket48270_homeDirectory_mixed_value PASSED tickets/ticket48270_test.py::test_ticket48270_extensible_search PASSED tickets/ticket48272_test.py::test_ticket48272 PASSED tickets/ticket48294_test.py::test_48294_init PASSED tickets/ticket48294_test.py::test_48294_run_0 PASSED tickets/ticket48294_test.py::test_48294_run_1 PASSED tickets/ticket48294_test.py::test_48294_run_2 PASSED tickets/ticket48295_test.py::test_48295_init PASSED tickets/ticket48295_test.py::test_48295_run PASSED tickets/ticket48312_test.py::test_ticket48312 PASSED tickets/ticket48325_test.py::test_ticket48325 PASSED tickets/ticket48342_test.py::test_ticket4026 ERROR tickets/ticket48354_test.py::test_ticket48354 PASSED tickets/ticket48362_test.py::test_ticket48362 PASSED tickets/ticket48366_test.py::test _ticket48366_init PASSED tickets/ticket48366_test.py::test_ticket48366_search_user PASSED tickets/ticket48366_test.py::test_ticket48366_search_dm PASSED 
tickets/ticket48370_test.py::test_ticket48370 PASSED tickets/ticket48383_test.py::test_ticket48383 FAILED tickets/ticket48497_test.py::test_ticket48497_init PASSED tickets/ticket48497_test.py::test_ticket48497_homeDirectory_mixed_value PASSED tickets/ticket48497_test.py::test_ticket48497_extensible_search PASSED tickets/ticket48497_test.py::test_ticket48497_homeDirectory_index_cfg PASSED tickets/ticket48497_test.py::test_ticket48497_homeDirectory_index_run FAILED tickets/ticket48637_test.py::test_ticket48637 PASSED tickets/ticket48665_test.py::test_ticket48665 PASSED tickets/ticket48745_test.py::test_ticket48745_init PASSED tickets/ticket48745_test.py::test_ticket48745_homeDirectory_indexed_cis FAILED tickets/ticket48745_test.py::test_ticket48745_homeDirectory_mixed_value PASSED tickets/ticket48745_test.py::test_ticket48745_extensible_search_after_index PASSED tickets/ticket48746_test.py::test_ticket48746_init PASSED tickets/ticket48746_test.py::test_ticket48746_homeDirectory_indexed_cis FAILED tickets/ticket48746_test.py::test_ticket48746_homeDirectory_mixed_value PASSED tickets/ticket48746_test.py::test_ticket48746_extensible_search_after_index PASSED tickets/ticket48746_test.py::test_ticket48746_homeDirectory_indexed_ces FAILED tickets/ticket48755_test.py::test_ticket48755 PASSED tickets/ticket48759_test.py::test_ticket48759 PASSED tickets/ticket48784_test.py::test_ticket48784 PASSED tickets/ticket48798_test.py::test_ticket48798 PASSED tickets/ticket48799_test.py::test_ticket48799 PASSED tickets/ticket48808_test.py::test_ticket48808 PASSED tickets/ticket48844_test.py::test_ticket48844_init PASSED tickets/ticket48844_test.py::test_ticket48844_bitwise_on PASSED tickets/ticket48844_test.py::test_ticket48844_bitwise_off PASSED tickets/ticket48891_test.py::test_ticket48891_setup PASSED tickets/ticket48893_test.py::test_ticket48893 PASSED tickets/ticket48896_test.py ::test_ticket48896 PASSED tickets/ticket48906_test.py::test_ticket48906_setup PASSED tickets/ticket48906_test.py::test_ticket48906_dblock_default PASSED tickets/ticket48906_test.py::test_ticket48906_dblock_ldap_update FAILED tickets/ticket48906_test.py::test_ticket48906_dblock_edit_update FAILED tickets/ticket48906_test.py::test_ticket48906_dblock_robust FAILED tickets/ticket48916_test.py::test_ticket48916 PASSED tickets/ticket48956_test.py::test_ticket48956 PASSED tickets/ticket548_test.py::test_ticket548_test_with_no_policy PASSED tickets/ticket548_test.py::test_ticket548_test_global_policy PASSED tickets/ticket548_test.py::test_ticket548_test_subtree_policy PASSED suites/acct_usability_plugin/acct_usability_test.py::test_acct_usability_init PASSED suites/acct_usability_plugin/acct_usability_test.py::test_acct_usability_ PASSED suites/acctpolicy_plugin/acctpolicy_test.py::test_acctpolicy_init PASSED suites/acctpolicy_plugin/acctpolicy_test.py::test_acctpolicy_ PASSED suites/acl/acl_test.py::test_aci_attr_subtype_targetattr[lang-ja] PASSED suites/acl/acl_test.py::test_aci_attr_subtype_targetattr[binary] PASSED suites/acl/acl_test.py::test_aci_attr_subtype_targetattr[phonetic] PASSED suites/acl/acl_test.py::test_mode_default_add_deny PASSED suites/acl/acl_test.py::test_mode_default_delete_deny PASSED suites/acl/acl_test.py::test_moddn_staging_prod[0-cn=staged user,dc=example,dc=com-cn=accounts,dc=example,dc=com-False] PASSED suites/acl/acl_test.py::test_moddn_staging_prod[1-cn=staged user,dc=example,dc=com-cn=accounts,dc=example,dc=com-False] PASSED suites/acl/acl_test.py::test_moddn_staging_prod[2-cn=staged 
user,dc=example,dc=com-cn=bad*,dc=example,dc=com-True] PASSED suites/acl/acl_test.py::test_moddn_staging_prod[3-cn=st*,dc=example,dc=com-cn=accounts,dc=example,dc=com-False] PASSED suites/acl/acl_test.py::test_moddn_staging_prod[4-cn=bad*,dc=example,dc=com-cn=accounts,dc=example,dc=com-True] PASSED suites/acl/acl_test.py::test_moddn_staging_prod[5-cn=st*,dc=example,dc=com-cn=ac*,dc=example,dc=com-False] PASSE D suites/acl/acl_test.py::test_moddn_staging_prod[6-None-cn=ac*,dc=example,dc=com-False] PASSED suites/acl/acl_test.py::test_moddn_staging_prod[7-cn=st*,dc=example,dc=com-None-False] PASSED suites/acl/acl_test.py::test_moddn_staging_prod[8-None-None-False] PASSED suites/acl/acl_test.py::test_moddn_staging_prod_9 PASSED suites/acl/acl_test.py::test_moddn_prod_staging PASSED suites/acl/acl_test.py::test_check_repl_M2_to_M1 PASSED suites/acl/acl_test.py::test_moddn_staging_prod_except PASSED suites/acl/acl_test.py::test_mode_default_ger_no_moddn PASSED suites/acl/acl_test.py::test_mode_default_ger_with_moddn PASSED suites/acl/acl_test.py::test_mode_switch_default_to_legacy PASSED suites/acl/acl_test.py::test_mode_legacy_ger_no_moddn1 PASSED suites/acl/acl_test.py::test_mode_legacy_ger_no_moddn2 PASSED suites/acl/acl_test.py::test_mode_legacy_ger_with_moddn PASSED suites/acl/acl_test.py::test_rdn_write_get_ger PASSED suites/acl/acl_test.py::test_rdn_write_modrdn_anonymous PASSED suites/attr_encryption/attr_encrypt_test.py::test_attr_encrypt_init PASSED suites/attr_encryption/attr_encrypt_test.py::test_attr_encrypt_ PASSED suites/attr_uniqueness_plugin/attr_uniqueness_test.py::test_attr_uniqueness_init PASSED suites/attr_uniqueness_plugin/attr_uniqueness_test.py::test_attr_uniqueness PASSED suites/automember_plugin/automember_test.py::test_automember_init PASSED suites/automember_plugin/automember_test.py::test_automember_ PASSED suites/basic/basic_test.py::test_basic_ops PASSED suites/basic/basic_test.py::test_basic_import_export PASSED suites/basic/basic_test.py::test_basic_backup PASSED suites/basic/basic_test.py::test_basic_acl PASSED suites/basic/basic_test.py::test_basic_searches PASSED suites/basic/basic_test.py::test_basic_referrals PASSED suites/basic/basic_test.py::test_basic_systemctl PASSED suites/basic/basic_test.py::test_basic_ldapagent PASSED suites/basic/basic_test.py::test_basic_dse PASSED suites/basic/basic_test.py::test_def_rootdse_attr[namingContexts] PASSED suites/basic/basic_test.py::test_def_roo tdse_attr[supportedLDAPVersion] PASSED suites/basic/basic_test.py::test_def_rootdse_attr[supportedControl] PASSED suites/basic/basic_test.py::test_def_rootdse_attr[supportedExtension] PASSED suites/basic/basic_test.py::test_def_rootdse_attr[supportedSASLMechanisms] PASSED suites/basic/basic_test.py::test_def_rootdse_attr[vendorName] PASSED suites/basic/basic_test.py::test_def_rootdse_attr[vendorVersion] PASSED suites/basic/basic_test.py::test_mod_def_rootdse_attr[namingContexts] PASSED suites/basic/basic_test.py::test_mod_def_rootdse_attr[supportedLDAPVersion] PASSED suites/basic/basic_test.py::test_mod_def_rootdse_attr[supportedControl] PASSED suites/basic/basic_test.py::test_mod_def_rootdse_attr[supportedExtension] PASSED suites/basic/basic_test.py::test_mod_def_rootdse_attr[supportedSASLMechanisms] PASSED suites/basic/basic_test.py::test_mod_def_rootdse_attr[vendorName] PASSED suites/basic/basic_test.py::test_mod_def_rootdse_attr[vendorVersion] PASSED suites/betxns/betxn_test.py::test_betxn_init PASSED suites/betxns/betxn_test.py::test_betxt_7bit PASSED 
suites/betxns/betxn_test.py::test_betxn_attr_uniqueness PASSED suites/betxns/betxn_test.py::test_betxn_memberof PASSED suites/chaining_plugin/chaining_test.py::test_chaining_init PASSED suites/chaining_plugin/chaining_test.py::test_chaining_ PASSED suites/clu/clu_test.py::test_clu_init PASSED suites/clu/clu_test.py::test_clu_pwdhash PASSED suites/clu/db2ldif_test.py::test_db2ldif_init PASSED suites/collation_plugin/collatation_test.py::test_collatation_init PASSED suites/collation_plugin/collatation_test.py::test_collatation_ PASSED suites/config/config_test.py::test_maxbersize_repl PASSED suites/config/config_test.py::test_config_listen_backport_size PASSED suites/config/config_test.py::test_config_deadlock_policy PASSED suites/cos_plugin/cos_test.py::test_cos_init PASSED suites/cos_plugin/cos_test.py::test_cos_ PASSED suites/deref_plugin/deref_test.py::test_deref_init PASSED suites/deref_plugin/deref_test.py::test_deref_ PASSED suites/disk_monitoring/disk_mon itor_test.py::test_disk_monitor_init PASSED suites/disk_monitoring/disk_monitor_test.py::test_disk_monitor_ PASSED suites/distrib_plugin/distrib_test.py::test_distrib_init PASSED suites/distrib_plugin/distrib_test.py::test_distrib_ PASSED suites/dna_plugin/dna_test.py::test_dna_init PASSED suites/dna_plugin/dna_test.py::test_dna_ PASSED suites/ds_logs/ds_logs_test.py::test_ds_logs_init PASSED suites/ds_logs/ds_logs_test.py::test_ds_logs_ PASSED suites/dynamic-plugins/test_dynamic_plugins.py::test_dynamic_plugins FAILED suites/filter/filter_test.py::test_filter_init PASSED suites/filter/filter_test.py::test_filter_escaped PASSED suites/filter/filter_test.py::test_filter_search_original_attrs PASSED suites/filter/rfc3673_all_oper_attrs_test.py::test_supported_features PASSED suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[-False-oper_attr_list0] PASSED suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[-False-oper_attr_list0-*] PASSED suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[-False-oper_attr_list0-objectClass] PASSED suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[-True-oper_attr_list1] PASSED suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[-True-oper_attr_list1-*] PASSED suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[-True-oper_attr_list1-objectClass] PASSED suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[ou=people,dc=example,dc=com-False-oper_attr_list2] PASSED suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[ou=people,dc=example,dc=com-False-oper_attr_list2-*] PASSED suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[ou=people,dc=example,dc=com-False-oper_attr_list2-objectClass] PASSED suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[ou=people,dc=example,dc=com-True-oper_attr_list3] PASSED suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[ou=people,dc=example,dc=com-True-oper_attr_list3-*] PASSED suites/filter/rfc3673_all_oper_attrs_test.py: :test_search_basic[ou=people,dc=example,dc=com-True-oper_attr_list3-objectClass] PASSED suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[uid=all_attrs_test,ou=people,dc=example,dc=com-False-oper_attr_list4] PASSED suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[uid=all_attrs_test,ou=people,dc=example,dc=com-False-oper_attr_list4-*] PASSED suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[uid=all_attrs_test,ou=people,dc=example,dc=com-False-oper_attr_list4-objectClass] PASSED 
suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[uid=all_attrs_test,ou=people,dc=example,dc=com-True-oper_attr_list5] PASSED suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[uid=all_attrs_test,ou=people,dc=example,dc=com-True-oper_attr_list5-*] PASSED suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[uid=all_attrs_test,ou=people,dc=example,dc=com-True-oper_attr_list5-objectClass] PASSED suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[cn=config-False-oper_attr_list6] PASSED suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[cn=config-False-oper_attr_list6-*] PASSED suites/filter/rfc3673_all_oper_attrs_test.py::test_search_basic[cn=config-False-oper_attr_list6-objectClass] PASSED suites/get_effective_rights/ger_test.py::test_ger_init PASSED suites/get_effective_rights/ger_test.py::test_ger_ PASSED suites/gssapi_repl/gssapi_repl_test.py::test_gssapi_repl PASSED suites/ldapi/ldapi_test.py::test_ldapi_init PASSED suites/ldapi/ldapi_test.py::test_ldapi_ PASSED suites/linkedattrs_plugin/linked_attrs_test.py::test_linked_attrs_init PASSED suites/linkedattrs_plugin/linked_attrs_test.py::test_linked_attrs_ PASSED suites/mapping_tree/mapping_tree_test.py::test_mapping_tree_init PASSED suites/mapping_tree/mapping_tree_test.py::test_mapping_tree_ PASSED suites/memberof_plugin/memberof_test.py::test_memberof_auto_add_oc PASSED suites/memory_leaks/range_search_test.py::test_range_search_init FAILED suites/memory_leaks/range_search_test.py::t est_range_search PASSED suites/memory_leaks/range_search_test.py::test_range_search ERROR suites/monitor/monitor_test.py::test_monitor_init PASSED suites/monitor/monitor_test.py::test_monitor_ PASSED suites/paged_results/paged_results_test.py::test_search_success[6-5] PASSED suites/paged_results/paged_results_test.py::test_search_success[5-5] PASSED suites/paged_results/paged_results_test.py::test_search_success[5-25] PASSED suites/paged_results/paged_results_test.py::test_search_limits_fail[50-200-cn=config,cn=ldbm database,cn=plugins,cn=config-nsslapd-idlistscanlimit-100-UNWILLING_TO_PERFORM] PASSED suites/paged_results/paged_results_test.py::test_search_limits_fail[5-15-cn=config-nsslapd-timelimit-20-UNAVAILABLE_CRITICAL_EXTENSION] PASSED suites/paged_results/paged_results_test.py::test_search_limits_fail[21-50-cn=config-nsslapd-sizelimit-20-SIZELIMIT_EXCEEDED] PASSED suites/paged_results/paged_results_test.py::test_search_limits_fail[21-50-cn=config-nsslapd-pagedsizelimit-5-SIZELIMIT_EXCEEDED] PASSED suites/paged_results/paged_results_test.py::test_search_limits_fail[5-50-cn=config,cn=ldbm database,cn=plugins,cn=config-nsslapd-lookthroughlimit-20-ADMINLIMIT_EXCEEDED] PASSED suites/paged_results/paged_results_test.py::test_search_sort_success PASSED suites/paged_results/paged_results_test.py::test_search_abandon PASSED suites/paged_results/paged_results_test.py::test_search_with_timelimit PASSED suites/paged_results/paged_results_test.py::test_search_dns_ip_aci[dns = "localhost.localdomain"] PASSED suites/paged_results/paged_results_test.py::test_search_dns_ip_aci[ip = "::1" or ip = "127.0.0.1"] PASSED suites/paged_results/paged_results_test.py::test_search_multiple_paging PASSED suites/paged_results/paged_results_test.py::test_search_invalid_cookie[1000] PASSED suites/paged_results/paged_results_test.py::test_search_invalid_cookie[-1] PASSED suites/paged_results/paged_results_test.py::test_search_abandon_with_zero_size PASSED 
suites/paged_results/paged_results_test.py::test_search_pagedsizelimit_success PASSE D suites/paged_results/paged_results_test.py::test_search_nspagedsizelimit[5-15-PASS] PASSED suites/paged_results/paged_results_test.py::test_search_nspagedsizelimit[15-5-SIZELIMIT_EXCEEDED] PASSED suites/paged_results/paged_results_test.py::test_search_paged_limits[conf_attr_values0-ADMINLIMIT_EXCEEDED] PASSED suites/paged_results/paged_results_test.py::test_search_paged_limits[conf_attr_values1-PASS] PASSED suites/paged_results/paged_results_test.py::test_search_paged_user_limits[conf_attr_values0-ADMINLIMIT_EXCEEDED] PASSED suites/paged_results/paged_results_test.py::test_search_paged_user_limits[conf_attr_values1-PASS] PASSED suites/paged_results/paged_results_test.py::test_ger_basic PASSED suites/paged_results/paged_results_test.py::test_multi_suffix_search FAILED suites/paged_results/paged_results_test.py::test_maxsimplepaged_per_conn_success[None] PASSED suites/paged_results/paged_results_test.py::test_maxsimplepaged_per_conn_success[-1] PASSED suites/paged_results/paged_results_test.py::test_maxsimplepaged_per_conn_success[1000] PASSED suites/paged_results/paged_results_test.py::test_maxsimplepaged_per_conn_failure[0] PASSED suites/paged_results/paged_results_test.py::test_maxsimplepaged_per_conn_failure[1] PASSED suites/pam_passthru_plugin/pam_test.py::test_pam_init PASSED suites/pam_passthru_plugin/pam_test.py::test_pam_ PASSED suites/passthru_plugin/passthru_test.py::test_passthru_init PASSED suites/passthru_plugin/passthru_test.py::test_passthru_ PASSED suites/password/password_test.py::test_password_init PASSED suites/password/password_test.py::test_password_delete_specific_password PASSED suites/password/pwdAdmin_test.py::test_pwdAdmin_init PASSED suites/password/pwdAdmin_test.py::test_pwdAdmin PASSED suites/password/pwdAdmin_test.py::test_pwdAdmin_config_validation PASSED suites/password/pwdPolicy_attribute_test.py::test_change_pwd[on-off-UNWILLING_TO_PERFORM] PASSED suites/password/pwdPolicy_attribute_test.py::test_change_pwd[off-off-UNWILLING_TO_PERFORM] PASSED suites/password/pwdPolicy_attribute _test.py::test_change_pwd[off-on-None] PASSED suites/password/pwdPolicy_attribute_test.py::test_change_pwd[on-on-None] PASSED suites/password/pwdPolicy_attribute_test.py::test_pwd_min_age PASSED suites/password/pwdPolicy_inherit_global_test.py::test_entry_has_no_restrictions[off-off] PASSED suites/password/pwdPolicy_inherit_global_test.py::test_entry_has_no_restrictions[on-off] PASSED suites/password/pwdPolicy_inherit_global_test.py::test_entry_has_no_restrictions[off-on] PASSED suites/password/pwdPolicy_inherit_global_test.py::test_entry_has_restrictions[cn=config] PASSED suites/password/pwdPolicy_inherit_global_test.py::test_entry_has_restrictions[cn="cn=nsPwPolicyEntry,ou=People,dc=example,dc=com",cn=nsPwPolicyContainer,ou=People,dc=example,dc=com] PASSED suites/password/pwdPolicy_syntax_test.py::test_pwdPolicy_syntax PASSED suites/password/pwdPolicy_warning_test.py::test_different_values[ ] PASSED suites/password/pwdPolicy_warning_test.py::test_different_values[junk123] PASSED suites/password/pwdPolicy_warning_test.py::test_different_values[on] PASSED suites/password/pwdPolicy_warning_test.py::test_different_values[off] PASSED suites/password/pwdPolicy_warning_test.py::test_expiry_time PASSED suites/password/pwdPolicy_warning_test.py::test_password_warning[passwordSendExpiringTime-off] PASSED suites/password/pwdPolicy_warning_test.py::test_password_warning[passwordWarning-3600] PASSED 
suites/password/pwdPolicy_warning_test.py::test_with_different_password_states PASSED suites/password/pwdPolicy_warning_test.py::test_default_behavior PASSED suites/password/pwdPolicy_warning_test.py::test_with_local_policy PASSED suites/password/pwp_history_test.py::test_pwp_history_test PASSED suites/posix_winsync_plugin/posix_winsync_test.py::test_posix_winsync_init PASSED suites/posix_winsync_plugin/posix_winsync_test.py::test_posix_winsync_ PASSED suites/psearch/psearch_test.py::test_psearch_init PASSED suites/psearch/psearch_test.py::test_psearch_ PASSED suites/referint_plugin/referint_test.py::test_referint_init PASSED su ites/referint_plugin/referint_test.py::test_referint_ PASSED suites/replication/cleanallruv_test.py::test_cleanallruv_init PASSED suites/replication/cleanallruv_test.py::test_cleanallruv_clean PASSED suites/replication/cleanallruv_test.py::test_cleanallruv_clean_restart PASSED suites/replication/cleanallruv_test.py::test_cleanallruv_clean_force PASSED suites/replication/cleanallruv_test.py::test_cleanallruv_abort PASSED suites/replication/cleanallruv_test.py::test_cleanallruv_abort_restart PASSED suites/replication/cleanallruv_test.py::test_cleanallruv_abort_certify PASSED suites/replication/cleanallruv_test.py::test_cleanallruv_stress_clean PASSED suites/replication/wait_for_async_feature_test.py::test_not_int_value PASSED suites/replication/wait_for_async_feature_test.py::test_multi_value PASSED suites/replication/wait_for_async_feature_test.py::test_value_check[waitfor_async_attr0] PASSED suites/replication/wait_for_async_feature_test.py::test_value_check[waitfor_async_attr1] PASSED suites/replication/wait_for_async_feature_test.py::test_value_check[waitfor_async_attr2] PASSED suites/replication/wait_for_async_feature_test.py::test_value_check[waitfor_async_attr3] PASSED suites/replication/wait_for_async_feature_test.py::test_behavior_with_value[waitfor_async_attr0] PASSED suites/replication/wait_for_async_feature_test.py::test_behavior_with_value[waitfor_async_attr1] PASSED suites/replication/wait_for_async_feature_test.py::test_behavior_with_value[waitfor_async_attr2] PASSED suites/replication/wait_for_async_feature_test.py::test_behavior_with_value[waitfor_async_attr3] PASSED suites/replsync_plugin/repl_sync_test.py::test_repl_sync_init PASSED suites/replsync_plugin/repl_sync_test.py::test_repl_sync_ PASSED suites/resource_limits/res_limits_test.py::test_res_limits_init PASSED suites/resource_limits/res_limits_test.py::test_res_limits_ PASSED suites/retrocl_plugin/retrocl_test.py::test_retrocl_init PASSED suites/retrocl_plugin/retrocl_test.py::test_retrocl_ PASSED suites/reverpwd_plugin/reverpwd_test.py::te st_reverpwd_init PASSED suites/reverpwd_plugin/reverpwd_test.py::test_reverpwd_ PASSED suites/roles_plugin/roles_test.py::test_roles_init PASSED suites/roles_plugin/roles_test.py::test_roles_ PASSED suites/rootdn_plugin/rootdn_plugin_test.py::test_rootdn_init PASSED suites/rootdn_plugin/rootdn_plugin_test.py::test_rootdn_access_specific_time PASSED suites/rootdn_plugin/rootdn_plugin_test.py::test_rootdn_access_day_of_week PASSED suites/rootdn_plugin/rootdn_plugin_test.py::test_rootdn_access_denied_ip PASSED suites/rootdn_plugin/rootdn_plugin_test.py::test_rootdn_access_denied_host PASSED suites/rootdn_plugin/rootdn_plugin_test.py::test_rootdn_access_allowed_ip PASSED suites/rootdn_plugin/rootdn_plugin_test.py::test_rootdn_access_allowed_host PASSED suites/rootdn_plugin/rootdn_plugin_test.py::test_rootdn_config_validate PASSED 
suites/sasl/sasl_test.py::test_sasl_init PASSED suites/sasl/sasl_test.py::test_sasl_ PASSED suites/schema/test_schema.py::test_schema_comparewithfiles PASSED suites/schema_reload_plugin/schema_reload_test.py::test_schema_reload_init PASSED suites/schema_reload_plugin/schema_reload_test.py::test_schema_reload_ PASSED suites/snmp/snmp_test.py::test_snmp_init PASSED suites/snmp/snmp_test.py::test_snmp_ PASSED suites/ssl/ssl_test.py::test_ssl_init PASSED suites/ssl/ssl_test.py::test_ssl_ PASSED suites/syntax_plugin/syntax_test.py::test_syntax_init PASSED suites/syntax_plugin/syntax_test.py::test_syntax_ PASSED suites/usn_plugin/usn_test.py::test_usn_init PASSED suites/usn_plugin/usn_test.py::test_usn_ PASSED suites/views_plugin/views_test.py::test_views_init PASSED suites/views_plugin/views_test.py::test_views_ PASSED suites/vlv/vlv_test.py::test_vlv_init PASSED suites/vlv/vlv_test.py::test_vlv_ PASSED suites/whoami_plugin/whoami_test.py::test_whoami_init PASSED suites/whoami_plugin/whoami_test.py::test_whoami_ PASSED ==================================== ERRORS ==================================== ________________ ERROR at setup of test_ticket48266_fractional _________________ request = <SubRequ est 'topology' for <Function 'test_ticket48266_fractional'>> @pytest.fixture(scope="module") def topology(request): global installation1_prefix if installation1_prefix: args_instance[SER_DEPLOYED_DIR] = installation1_prefix # Creating master 1... master1 = DirSrv(verbose=False) if installation1_prefix: args_instance[SER_DEPLOYED_DIR] = installation1_prefix args_instance[SER_HOST] = HOST_MASTER_1 args_instance[SER_PORT] = PORT_MASTER_1 args_instance[SER_SERVERID_PROP] = SERVERID_MASTER_1 args_instance[SER_CREATION_SUFFIX] = DEFAULT_SUFFIX args_master = args_instance.copy() master1.allocate(args_master) instance_master1 = master1.exists() if instance_master1: master1.delete() master1.create() master1.open() master1.replica.enableReplication(suffix=SUFFIX, role=REPLICAROLE_MASTER, replicaId=REPLICAID_MASTER_1) # Creating master 2... 
master2 = DirSrv(verbose=False) if installation1_prefix: args_instance[SER_DEPLOYED_DIR] = installation1_prefix args_instance[SER_HOST] = HOST_MASTER_2 args_instance[SER_PORT] = PORT_MASTER_2 args_instance[SER_SERVERID_PROP] = SERVERID_MASTER_2 args_instance[SER_CREATION_SUFFIX] = DEFAULT_SUFFIX args_master = args_instance.copy() master2.allocate(args_master) instance_master2 = master2.exists() if instance_master2: master2.delete() master2.create() master2.open() master2.replica.enableReplication(suffix=SUFFIX, role=REPLICAROLE_MASTER, replicaId=REPLICAID_MASTER_2) # # Create all the agreements # # Creating agreement from master 1 to master 2 properties = {RA_NAME: r'meTo_$host:$port', RA_BINDDN: defaultProperties[REPLICATION_BIND_DN], RA_BINDPW: defaultProperties[REPLICATION_BIND_PW], RA_METHOD: defaultProperties[REPLICATION_BIND_METHOD], RA_TRANSPORT_PROT: defaultProperties[REPLICATION_TRANSPORT]} m1_m2_agmt = master1.agreement.create(suffix=SUFFIX, host=master2.host, port=master2.port, properties=properties) if not m1_m2_agmt: log.fatal("Fail to create a master -> master replica agreement") sys.exit(1) log.debug("%s created" % m1_m2_agmt) # Creating agreement from master 2 to master 1 properties = {RA_NAME: r'meTo_$host:$port', RA_BINDDN: defaultProperties[REPLICATION_BIND_DN], RA_BINDPW: defaultProperties[REPLICATION_BIND_PW], RA_METHOD: defaultProperties[REPLICATION_BIND_METHOD], RA_TRANSPORT_PROT: defaultProperties[REPLICATION_TRANSPORT]} m2_m1_agmt = master2.agreement.create(suffix=SUFFIX, host=master1.host, port=master1.port, properties=properties) if not m2_m1_agmt: log.fatal("Fail to create a master -> master replica agreement") sys.exit(1) log.debug("%s created" % m2_m1_agmt) # Allow the replicas to get situated with the new agreements... 
time.sleep(5) # # Initialize all the agreements # master1.agreement.init(SUFFIX, HOST_MASTER_2, PORT_MASTER_2) > master1.waitForReplInit(m1_m2_agmt) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48266_test.py>:104: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:2177: in waitForReplInit return self.replica.wait_init(agmtdn) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/replica.py>:596: in wait_init done, haserror = self.check_init(agmtdn) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/replica.py>:548: in check_init agmtdn, ldap.SCOPE_BASE, "(objectclass=*)", attrlist) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:1574: in getEntry restype, obj = self.result(res) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:127: in inner objtype, data = f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:503: in result resp_type, resp_data, resp_msgid = self.result2(msgid,all,timeout) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:159: in inne r return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:507: in result2 resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all,timeout) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:514: in result3 resp_ctrl_classes=resp_ctrl_classes <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:521: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:159: in inner return f(*args, **kwargs) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv instance at 0x7fd4151f0440> func = <built-in method result4 of LDAP object at 0x7fd415e43468> args = (17, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None e = SERVER_DOWN({'desc': "Can't contact LDAP server"},) def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E SERVER_DOWN: {'desc': "Can't contact LDAP server"} /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:106: SERVER_DOWN ---------------------------- Captured stdout setup ----------------------------- OK group dirsrv exists OK user dirsrv exists OK group dirsrv exists OK us er dirsrv exists 
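The ticket48266 setup errors in this section all fail at the same point in the module fixture: master1.agreement.init() kicks off a total init and master1.waitForReplInit(m1_m2_agmt) then polls the agreement entry, raising SERVER_DOWN the moment the connection to the master drops. A minimal sketch of a poll that tolerates the dropped connection follows; it uses plain python-ldap rather than the lib389 helpers, and the agreement status attribute, success string, and uri/bind parameters are assumptions for illustration only, not the project's actual fix.

import time
import ldap

def wait_for_total_init(uri, bind_dn, bind_pw, agmt_dn, timeout=300):
    # Illustrative sketch only: poll a replication agreement entry until the
    # total init reports success, reconnecting if the server drops the
    # connection (the SERVER_DOWN seen in the traceback above).
    deadline = time.time() + timeout
    while time.time() < deadline:
        try:
            conn = ldap.initialize(uri)
            conn.simple_bind_s(bind_dn, bind_pw)
            entries = conn.search_s(agmt_dn, ldap.SCOPE_BASE,
                                    '(objectclass=*)',
                                    ['nsds5ReplicaLastInitStatus'])
            status = entries[0][1].get('nsds5ReplicaLastInitStatus', [''])[0]
            conn.unbind_s()
            # Assumed success marker, matching the "Total update succeeded"
            # message captured later in this log.
            if 'Total update succeeded' in status:
                return True
        except ldap.SERVER_DOWN:
            # Connection dropped while the consumer is being re-initialised;
            # retry instead of propagating the exception as the fixture does.
            pass
        time.sleep(2)
    return False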
---------------------------- Captured stderr setup ----------------------------- INFO:lib389:List backend with suffix=dc=example,dc=com INFO:lib389:Found entry dn: cn=replrepl,cn=config cn: bind dn pseudo user cn: replrepl objectClass: top objectClass: person sn: bind dn pseudo user userPassword: {SSHA512}/ecTMmFs5i6NRXNp+xUCcKLH/Jaog39xn+WYAD48Z3ZAFLOAL41fWMR3txL8mAi06R3LlnzlKibM+BOnYu+Notrol10Ax2ij INFO:lib389:List backend with suffix=dc=example,dc=com INFO:lib389:Found entry dn: cn=replrepl,cn=config cn: bind dn pseudo user cn: replrepl objectClass: top objectClass: person sn: bind dn pseudo user userPassword: {SSHA512}AaqPX6+DZ67BObb2+or3fL6vmwppkqQmuLKcL4eNeHm4Wi5wGHQyriWVm23w012f9LdsJigMh39kL/PTS2wRTOPcW/yTWsbK DEBUG:tickets.ticket48266_test:cn=meTo_$host:$port,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config created DEBUG:tickets.ticket48266_test:cn=meTo_$host:$port,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config created INFO:lib389:Starting total init cn=meTo_$host:$port,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config ______________ ERROR at setup of test_ticket48266_check_repl_desc ______________ request = <SubRequest 'topology' for <Function 'test_ticket48266_fractional'>> @pytest.fixture(scope="module") def topology(request): global installation1_prefix if installation1_prefix: args_instance[SER_DEPLOYED_DIR] = installation1_prefix # Creating master 1... master1 = DirSrv(verbose=False) if installation1_prefix: args_instance[SER_DEPLOYED_DIR] = installation1_prefix args_instance[SER_HOST] = HOST_MASTER_1 args_instance[SER_PORT] = PORT_MASTER_1 args_instance[SER_SERVERID_PROP] = SERVERID_MASTER_1 args_instance[SER_CREATION_SUFFIX] = DEFAULT_SUFFIX args_master = args_instance.copy() master1.allocate(args_master) instance_master1 = master1.exists() if instance_master1: master1.delete() master1.create() master1.open() master1.replica.enableReplication(suffix=SUFFIX, role=REPLICAROLE_MASTER, replicaId=REPLICAID_MASTER_1) # Creating master 2... 
m aster2 = DirSrv(verbose=False) if installation1_prefix: args_instance[SER_DEPLOYED_DIR] = installation1_prefix args_instance[SER_HOST] = HOST_MASTER_2 args_instance[SER_PORT] = PORT_MASTER_2 args_instance[SER_SERVERID_PROP] = SERVERID_MASTER_2 args_instance[SER_CREATION_SUFFIX] = DEFAULT_SUFFIX args_master = args_instance.copy() master2.allocate(args_master) instance_master2 = master2.exists() if instance_master2: master2.delete() master2.create() master2.open() master2.replica.enableReplication(suffix=SUFFIX, role=REPLICAROLE_MASTER, replicaId=REPLICAID_MASTER_2) # # Create all the agreements # # Creating agreement from master 1 to master 2 properties = {RA_NAME: r'meTo_$host:$port', RA_BINDDN: defaultProperties[REPLICATION_BIND_DN], RA_BINDPW: defaultProperties[REPLICATION_BIND_PW], RA_METHOD: defaultProperties[REPLICATION_BIND_METHOD], RA_TRANSPORT_PROT: defaultProperties[REPLICATION_TRANSPORT]} m1_m2_agmt = master1.agreement.create(suffix=SUFFIX, host=master2.host, port=master2.port, properties=properties) if not m1_m2_agmt: log.fatal("Fail to create a master -> master replica agreement") sys.exit(1) log.debug("%s created" % m1_m2_agmt) # Creating agreement from master 2 to master 1 properties = {RA_NAME: r'meTo_$host:$port', RA_BINDDN: defaultProperties[REPLICATION_BIND_DN], RA_BINDPW: defaultProperties[REPLICATION_BIND_PW], RA_METHOD: defaultProperties[REPLICATION_BIND_METHOD], RA_TRANSPORT_PROT: defaultProperties[REPLICATION_TRANSPORT]} m2_m1_agmt = master2.agreement.create(suffix=SUFFIX, host=master1.host, port=master1.port, properties=properties) if not m2_m1_agmt: log.fatal("Fail to create a master -> master replica agreement") sys.exit(1) log.debug("%s created" % m2_m1_agmt) # Allow the replicas to get situated with the new agreements... 
time.sleep(5) # # Initialize all the agreements # master1.agreement.init(SUFFIX, HOST_MASTER_2, PORT_MASTER_2) > master1.waitForReplInit(m1_m2_agmt) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket4 8266_test.py>:104: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:2177: in waitForReplInit return self.replica.wait_init(agmtdn) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/replica.py>:596: in wait_init done, haserror = self.check_init(agmtdn) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/replica.py>:548: in check_init agmtdn, ldap.SCOPE_BASE, "(objectclass=*)", attrlist) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:1574: in getEntry restype, obj = self.result(res) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:127: in inner objtype, data = f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:503: in result resp_type, resp_data, resp_msgid = self.result2(msgid,all,timeout) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:507: in result2 resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all,timeout) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:514: in result3 resp_ctrl_classes=resp_ctrl_classes <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:521: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/l ib389/lib389/__init__.py>:159: in inner return f(*args, **kwargs) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv instance at 0x7fd4151f0440> func = <built-in method result4 of LDAP object at 0x7fd415e43468> args = (17, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None e = SERVER_DOWN({'desc': "Can't contact LDAP server"},) def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E SERVER_DOWN: {'desc': "Can't contact LDAP server"} /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:106: SERVER_DOWN ___________ ERROR at setup of test_ticket48266_count_csn_evaluation ____________ request = <SubRequest 'topology' for <Function 'test_ticket48266_fractional'>> 
@pytest.fixture(scope="module") def topology(request): global installation1_prefix if installation1_prefix: args_instance[SER_DEPLOYED_DIR] = installation1_prefix # Creating master 1... master1 = DirSrv(verbose=False) if installation1_prefix: args_instance[SER_DEPLOYED_DIR] = installation1_prefix args_instance[SER_HOST] = HOST_MASTER_1 args_instance[SER_PORT] = PORT_MASTER_1 args_instance[SER_SERVERID_PROP] = SERVERID_MASTER_1 args_instance[SER_CREATION_SUFFIX] = DEFAULT_SUFFIX args_master = args_instance.copy() master1.allocate(args_master) instance_master1 = master1.exists() if instance_master1: master1.delete() master1.create() master1.open() master1.replica.enableReplication(suffix=SUFFIX, role=REPLICAROLE_MASTER, replicaId=REPLICAID_MASTER_1) # Creating master 2... master2 = DirSrv(verbose=False) if installation1_prefix: args_instance[SER_DEPLOYED_DIR] = installation1_prefix args_instance[SER_HOST] = HOST_MASTER_2 args_instance[SER_PORT] = PORT_MASTER_2 args_instance[SER_SERVERID_PROP] = SERVERID_MASTER_2 args_instance[SER_CREATION_SUFFIX] = DEFAULT_SUFFIX args_master = args_instance.copy() master2.allocate(args_master) instance_master2 = master2.exists() if instance_master2: master2.delete() master2.create() master2.open() master2.replica.enableReplication(suffix=SUFFIX, role=REPLICAROLE_MASTER, replicaId=REPLICAID_MASTER_2) # # Create all the agreements # # Creating agreement from master 1 to master 2 properties = {RA_NAME: r'meTo_$host:$port', RA_BINDDN: defaultProperties[REPLICATION_BIND_DN], RA_BINDPW: defaultProperties[REPLICATION_BIND_PW], RA_METHOD: defaultProperties[REPLICATION_BIND_METHOD], RA_TRANSPORT_PROT: defaultProperties[REPLICATION_TRANSPORT]} m1_m2_agmt = master1.agreement.create(suffix=SUFFIX, host=master2.host, port=master2.port, properties=properties) if not m1_m2_agmt: log.fatal("Fail to create a master -> master replica agreement") sys.exit(1) log.debug("%s created" % m1_m2_agmt) # Creating agreement from master 2 to master 1 properties = {RA_NAME: r'meTo_$host:$port', RA_BINDDN: defaultProperties[REPLICATION_BIND_DN], RA_BINDPW: defaultProperties[REPLICATION_BIND_PW], RA_METHOD: defaultProperties[REPLICATION_BIND_METHOD], RA_TRANSPORT_PROT: defaultProperties[REPLICATION_TRANSPORT]} m2_m1_agmt = master2.agreement.create(suffix=SUFFIX, host=master1.host, port=master1.port, properties=properties) if not m2_m1_agmt: log.fatal("Fail to create a master -> master replica agreement") sys.exit(1) log.debug("%s created" % m2_m1_agmt) # Allow the replicas to get situated with the new agreements... 
time.sleep(5) # # Initialize all the agreements # master1.agreement.init(SUFFIX, HOST_MASTER_2, PORT_MASTER_2) > master1.waitForReplInit(m1_m2_agmt) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48266_test.py>:104: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:2177: in waitForReplInit return self.replica.wait_init(agmtdn) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/replica.py>:596: in wait_init done, haserror = self.check_init(agmtdn) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/replica.py>:548: in check_init agmtdn, ldap.SCOPE_BASE, "(objectclass=*)", attrlist) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:1574: in getEntry restype, obj = self.result(res) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:127: in inner objtype, data = f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:503: in result resp_type, resp_data, resp_msgid = self.result2(msgid,all,timeout) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:507: in result2 resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all,timeout) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:514: in result3 resp_ctrl_classes=resp_ctrl_classes <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:521: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:159: in inner return f(*args, **kwargs) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv instance at 0x7fd4151f0440> func = <built-in method result4 of LDAP object at 0x7fd415e43468> args = (17, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None e = SERVER_DOWN({'desc': "Can't contact LDAP server"},) def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E SERVER_DOWN: {'desc': "Can't contact LDAP server"} /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:106: SERVER_DOWN ______________________ ERROR at setup of test_ticket4026 _______________________ request = <SubRequest 'topology' for <Function 'test_ticket4026'>> @pytest.fixture(scope="module") def 
topology(request): global installation1_prefix if installation1_prefix: args_instance[SER_DEPLOYED_DIR] = installation1_prefix # Creating master 1... master1 = DirSrv(verbose=False) if installation1_prefix: args_instance[SER_DEPLOYED_DIR] = installation1_prefix args_instance[SER_HOST] = HOST_MASTER_1 args_instance[SER_PORT] = PORT_MASTER_1 args_instance[SER_SERVERID_PROP] = SERVERID_MASTER_1 args_instance[SER_CREATION_SUFFIX] = DEFAULT_SUFFIX args_master = args_instance.copy() master1.allocate(args_master) instance_master1 = master1.exists() if instance_master1: master1.delete() master1.create() master1.open() master1.replica.enableReplication(suffix=SUFFIX, role=REPLICAROLE_MASTER, replicaId=REPLICAID_MASTER_1) # Creating master 2... master2 = DirSrv(verbose=False) if installation1_prefix: args_instance[SER_DEPLOYED_DIR] = installation1_prefix args_instan ce[SER_HOST] = HOST_MASTER_2 args_instance[SER_PORT] = PORT_MASTER_2 args_instance[SER_SERVERID_PROP] = SERVERID_MASTER_2 args_instance[SER_CREATION_SUFFIX] = DEFAULT_SUFFIX args_master = args_instance.copy() master2.allocate(args_master) instance_master2 = master2.exists() if instance_master2: master2.delete() master2.create() master2.open() master2.replica.enableReplication(suffix=SUFFIX, role=REPLICAROLE_MASTER, replicaId=REPLICAID_MASTER_2) # Creating master 3... master3 = DirSrv(verbose=False) if installation1_prefix: args_instance[SER_DEPLOYED_DIR] = installation1_prefix args_instance[SER_HOST] = HOST_MASTER_3 args_instance[SER_PORT] = PORT_MASTER_3 args_instance[SER_SERVERID_PROP] = SERVERID_MASTER_3 args_instance[SER_CREATION_SUFFIX] = DEFAULT_SUFFIX args_master = args_instance.copy() master3.allocate(args_master) instance_master3 = master3.exists() if instance_master3: master3.delete() master3.create() master3.open() master3.replica.enableReplication(suffix=SUFFIX, role=REPLICAROLE_MASTER, replicaId=REPLICAID_MASTER_3) # # Create all the agreements # # Creating agreement from master 1 to master 2 properties = {RA_BINDDN: defaultProperties[REPLICATION_BIND_DN], RA_BINDPW: defaultProperties[REPLICATION_BIND_PW], RA_METHOD: defaultProperties[REPLICATION_BIND_METHOD], RA_TRANSPORT_PROT: defaultProperties[REPLICATION_TRANSPORT]} m1_m2_agmt = master1.agreement.create(suffix=SUFFIX, host=master2.host, port=master2.port, properties=properties) if not m1_m2_agmt: log.fatal("Fail to create a master -> master replica agreement") sys.exit(1) log.debug("%s created" % m1_m2_agmt) # Creating agreement from master 1 to master 3 # properties = {RA_NAME: r'meTo_$host:$port', # RA_BINDDN: defaultProperties[REPLICATION_BIND_DN], # RA_BINDPW: defaultProperties[REPLICATION_BIND_PW], # RA_METHOD: defaultProperties[REPLICATION_BIND_METHOD], # RA_TRANSPORT_PROT: defaultProperties[REPLICATION_TRANSPORT]} # m1_m3_agmt = master1.agreement.create(suffix=SUFFIX, host=master3.host, port=master3.port, properties=properties) # if not m1 _m3_agmt: # log.fatal("Fail to create a master -> master replica agreement") # sys.exit(1) # log.debug("%s created" % m1_m3_agmt) # Creating agreement from master 2 to master 1 properties = {RA_BINDDN: defaultProperties[REPLICATION_BIND_DN], RA_BINDPW: defaultProperties[REPLICATION_BIND_PW], RA_METHOD: defaultProperties[REPLICATION_BIND_METHOD], RA_TRANSPORT_PROT: defaultProperties[REPLICATION_TRANSPORT]} m2_m1_agmt = master2.agreement.create(suffix=SUFFIX, host=master1.host, port=master1.port, properties=properties) if not m2_m1_agmt: log.fatal("Fail to create a master -> master replica agreement") sys.exit(1) log.debug("%s 
created" % m2_m1_agmt) # Creating agreement from master 2 to master 3 properties = {RA_BINDDN: defaultProperties[REPLICATION_BIND_DN], RA_BINDPW: defaultProperties[REPLICATION_BIND_PW], RA_METHOD: defaultProperties[REPLICATION_BIND_METHOD], RA_TRANSPORT_PROT: defaultProperties[REPLICATION_TRANSPORT]} m2_m3_agmt = master2.agreement.create(suffix=SUFFIX, host=master3.host, port=master3.port, properties=properties) if not m2_m3_agmt: log.fatal("Fail to create a master -> master replica agreement") sys.exit(1) log.debug("%s created" % m2_m3_agmt) # Creating agreement from master 3 to master 1 # properties = {RA_NAME: r'meTo_$host:$port', # RA_BINDDN: defaultProperties[REPLICATION_BIND_DN], # RA_BINDPW: defaultProperties[REPLICATION_BIND_PW], # RA_METHOD: defaultProperties[REPLICATION_BIND_METHOD], # RA_TRANSPORT_PROT: defaultProperties[REPLICATION_TRANSPORT]} # m3_m1_agmt = master3.agreement.create(suffix=SUFFIX, host=master1.host, port=master1.port, properties=properties) # if not m3_m1_agmt: # log.fatal("Fail to create a master -> master replica agreement") # sys.exit(1) # log.debug("%s created" % m3_m1_agmt) # Creating agreement from master 3 to master 2 properties = {RA_BINDDN: defaultProperties[REPLICATION_BIND_DN], RA_BINDPW: defaultProperties[REPLICATION_BIND_PW], RA_METHOD: defaultProperties[REPLICATION_BIND_METHOD], RA_TRANSPORT_PROT: defaultProperties[REPLICATION_TRANSPORT]} m3_m2_agmt = master3.agreement.create(suffix=SUFFIX, host=master2.host, port=master2.port, properties=properties) if not m3_m2_agmt: log.fatal("Fail to create a master -> master replica agreement") sys.exit(1) log.debug("%s created" % m3_m2_agmt) # Allow the replicas to get situated with the new agreements... time.sleep(5) # # Initialize all the agreements # master1.agreement.init(SUFFIX, HOST_MASTER_2, PORT_MASTER_2) master1.waitForReplInit(m1_m2_agmt) time.sleep(5) # just to be safe master2.agreement.init(SUFFIX, HOST_MASTER_3, PORT_MASTER_3) > master2.waitForReplInit(m2_m3_agmt) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48342_test.py>:171: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:2177: in waitForReplInit return self.replica.wait_init(agmtdn) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/replica.py>:596: in wait_init done, haserror = self.check_init(agmtdn) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/replica.py>:548: in check_init agmtdn, ldap.SCOPE_BASE, "(objectclass=*)", attrlist) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:1574: in getEntry restype, obj = self.result(res) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:127: in inner objtype, data = f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:503: in result resp_type, resp_data, resp_msgid = self.result2(msgid,all,timeout) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:507: in result2 resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all,timeout) 
<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:514: in result3 resp_ctrl_classes=resp_ctrl_classes <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:521: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/__init__.py>:159: in inner return f(*args, **kwargs) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv instance at 0x7fd41476ae18> func = <built-in method result4 of LDAP object at 0x7fd415cf5dc8> args = (21, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None e = SERVER_DOWN({'desc': "Can't contact LDAP server"},) def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E SERVER_DOWN: {'desc': "Can't contact LDAP server"} /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:106: SERVER_DOWN ---------------------------- Captured stdout setup ----------------------------- OK group dirsrv exists OK user dirsrv exists Instance slapd-master_2 removed. 
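[Editor's note] The ticket48342 failure above is a SERVER_DOWN raised while master2.waitForReplInit() polls the agreement entry, i.e. the instance stopped answering in the middle of the total init. A minimal, hedged sketch (plain python-ldap, not part of lib389; URI, credentials and timeout are placeholders) of waiting for an instance to accept binds again before polling further:

    import time
    import ldap

    def wait_for_bind(uri, binddn, password, timeout=60):
        """Retry a simple bind until the server answers or the timeout expires.
        Returns the live connection; re-raises ldap.SERVER_DOWN on timeout."""
        deadline = time.time() + timeout
        while True:
            try:
                conn = ldap.initialize(uri)
                conn.simple_bind_s(binddn, password)
                return conn
            except ldap.SERVER_DOWN:
                if time.time() >= deadline:
                    raise
                time.sleep(2)  # server not listening yet, back off and retry

    # Hypothetical usage before re-polling a replica init:
    # conn = wait_for_bind('ldap://localhost:38942', 'cn=Directory Manager', 'password')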
OK group dirsrv exists OK user dirsrv exists OK group dirsrv exists OK user dirsrv exists ('Update succeeded: status ', '0 Total update succ eeded') ---------------------------- Captured stderr setup ----------------------------- INFO:lib389:List backend with suffix=dc=example,dc=com INFO:lib389:Found entry dn: cn=replrepl,cn=config cn: bind dn pseudo user cn: replrepl objectClass: top objectClass: person sn: bind dn pseudo user userPassword: {SSHA512}9eNP/JN3T5BbuSmXd4mH30B092YbFrY9EGvjXepy1jcqC6hN5C6pf37nZaI3qnnpUXE1zH8UF1SA5I/D1U+0Wia91ccC39Lb INFO:lib389:List backend with suffix=dc=example,dc=com INFO:lib389:Found entry dn: cn=replrepl,cn=config cn: bind dn pseudo user cn: replrepl objectClass: top objectClass: person sn: bind dn pseudo user userPassword: {SSHA512}BO+TCDI/Ze/G1j1sKbzu2o4/Bb4tUsXhuO95rjOygP/tEwmhfrX0eHF1ZA6H2ltoU2inw9SISUR+4hwvvskVb2a672svxdld INFO:lib389:List backend with suffix=dc=example,dc=com INFO:lib389:Found entry dn: cn=replrepl,cn=config cn: bind dn pseudo user cn: replrepl objectClass: top objectClass: person sn: bind dn pseudo user userPassword: {SSHA512}z6/245iCbldFkSrBhHJRhc/5XGwZpL9J+OWcuybW9GU3bHio82qdtIj1XPbR4lvr3bXRwWDd0yb9SLwi4L1g/uVlOjpP7a9R DEBUG:tickets.ticket48342_test:cn=meTo_localhost.localdomain:38942,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config created DEBUG:tickets.ticket48342_test:cn=meTo_localhost.localdomain:38941,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config created DEBUG:tickets.ticket48342_test:cn=meTo_localhost.localdomain:38943,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config created DEBUG:tickets.ticket48342_test:cn=meTo_localhost.localdomain:38942,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config created INFO:lib389:Starting total init cn=meTo_localhost.localdomain:38942,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO:lib389:Starting total init cn=meTo_localhost.localdomain:38943,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config ____________________ ERROR at teardown of test_range_search ____________________ def fin(): standalone.delete() if not standalone.has_asan(): sbin_dir = standalone.get_sb in_dir() > valgrind_disable(sbin_dir) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/suites/memory_leaks/range_search_test.py>:61: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ sbin_dir = '/usr/sbin' def valgrind_disable(sbin_dir): ''' Restore the ns-slapd binary to its original state - the server instances are expected to be stopped. Note - selinux is enabled at the end of this process. :param sbin_dir - the location of the ns-slapd binary (e.g. 
/usr/sbin) :raise ValueError :raise EnvironmentError: If script is not run as 'root' ''' if os.geteuid() != 0: log.error('This script must be run as root to use valgrind') raise EnvironmentError nsslapd_orig = '%s/ns-slapd' % sbin_dir nsslapd_backup = '%s/ns-slapd.original' % sbin_dir # Restore the original ns-slapd try: shutil.copyfile(nsslapd_backup, nsslapd_orig) except IOError as e: log.fatal('valgrind_disable: failed to restore ns-slapd, error: %s' % e.strerror) > raise ValueError('failed to restore ns-slapd, error: %s' % e.strerror) E ValueError: failed to restore ns-slapd, error: Text file busy <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/utils.py>:288: ValueError ----------------------------- Captured stderr call ----------------------------- INFO:suites.memory_leaks.range_search_test:Running test_range_search... CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user1,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user2,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user3,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user4,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites .memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user5,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user6,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user7,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user8,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user9,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user10,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user11,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user12,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user13,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user14,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user15,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user16,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user17,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user ui 
d=user18,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user19,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user20,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user21,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user22,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user23,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user24,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user25,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user26,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user27,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user28,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user29,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user30,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user31,dc=example,dc=com: error Can't contact LDAP server CRITICAL:sui tes.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user32,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user33,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user34,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user35,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user36,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user37,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user38,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user39,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user40,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user41,dc=example,dc=com: error Can't contact LDAP 
server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user42,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user43,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user44,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user45,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user46,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user47,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user48,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user49,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user50,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user51,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user52,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user53,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user54,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user55,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user56,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user57,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user58,dc=example,dc=com: error Can't contact LDAP server CRIT ICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user59,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user60,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user61,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user62,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user63,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user64,dc=example,dc=com: error Can't contact LDAP server 
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user65,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user66,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user67,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user68,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user69,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user70,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user71,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user72,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user73,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user74,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user75,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user76,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user77,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user78,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user79,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user80,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user81,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user82,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user83,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user84,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user85,dc=example,dc=com: error Can't contact LDAP ser ver CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user86,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user87,dc=example,dc=com: error Can't contact LDAP server 
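[Editor's note] The valgrind_disable teardown above fails with "Text file busy", which is what shutil.copyfile/copy2 report when they open for writing a binary that is still being executed. The docstring already requires the instances to be stopped first; where that did not happen, one common workaround is to rename a fresh copy over the target instead of writing into the busy inode, since rename does not trigger ETXTBSY. A hedged sketch with placeholder paths, not lib389's implementation:

    import os
    import shutil

    def replace_binary(src, dst):
        """Install src at dst without opening dst for writing, so a still-running
        copy of dst (e.g. ns-slapd) cannot cause ETXTBSY / 'Text file busy'."""
        tmp = dst + '.tmp'
        shutil.copy2(src, tmp)   # copy contents and mode into a new inode
        os.rename(tmp, dst)      # atomically swap the directory entry

    # Hypothetical usage mirroring the restore step above:
    # replace_binary('/usr/sbin/ns-slapd.original', '/usr/sbin/ns-slapd')

This only papers over a teardown where the stop did not happen; stopping the instance before restoring the binary remains the intended flow.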
CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user88,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user89,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user90,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user91,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user92,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user93,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user94,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user95,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user96,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user97,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Failed to add test user uid=user98,dc=example,dc=com: error Can't contact LDAP server CRITICAL:suites.memory_leaks.range_search_test:test_range_search: Fa iled to add test user uid=user99,dc=example,dc=com: error Can't contact LDAP server INFO:suites.memory_leaks.range_search_test:test_range_search: PASSED --------------------------- Captured stdout teardown --------------------------- Instance slapd-standalone removed. --------------------------- Captured stderr teardown --------------------------- CRITICAL:lib389.utils:valgrind_disable: failed to restore ns-slapd, error: Text file busy =================================== FAILURES =================================== ______________________________ test_ticket1347760 ______________________________ topology = <tickets.ticket1347760_test.TopologyStandalone object at 0x7fd415b56210> def test_ticket1347760(topology): """ Prevent revealing the entry info to whom has no access rights. """ log.info('Testing Bug 1347760 - Information disclosure via repeated use of LDAP ADD operation, etc.') log.info('Disabling accesslog logbuffering') topology.standalone.modify_s(CONFIG_DN, [(ldap.MOD_REPLACE, 'nsslapd-accesslog-logbuffering', 'off')]) log.info('Bind as {%s,%s}' % (DN_DM, PASSWORD)) topology.standalone.simple_bind_s(DN_DM, PASSWORD) log.info('Adding ou=%s a bind user belongs to.' % BOU) topology.standalone.add_s(Entry((BINDOU, { 'objectclass': 'top organizationalunit'.split(), 'ou': BOU}))) log.info('Adding a bind user.') topology.standalone.add_s(Entry((BINDDN, {'objectclass': "top person organizationalPerson inetOrgPerson".split(), 'cn': 'bind user', 'sn': 'user', 'userPassword': BINDPW}))) log.info('Adding a test user.') topology.standalone.add_s(Entry((TESTDN, {'objectclass': "top person organizationalPerson inetOrgPerson".split(), 'cn': 'test user', 'sn': 'user', 'userPassword': TESTPW}))) log.info('Deleting aci in %s.' 
% DEFAULT_SUFFIX) topology.standalone.modify_s(DEFAULT_SUFFIX, [(ldap.MOD_DELETE, 'aci', None)]) log.info('Bind case 1. the bind user has no rights to read the entry itself, bind should be successful.') log.info('Bind as {%s,%s} who has no access rights.' % (BINDDN, BINDPW)) try: topology.standalone.sim ple_bind_s(BINDDN, BINDPW) except ldap.LDAPError as e: log.info('Desc ' + e.message['desc']) assert False file_path = os.path.join(topology.standalone.prefix, 'var/log/dirsrv/slapd-%s/access' % topology.standalone.serverid) > file_obj = open(file_path, "r") E IOError: [Errno 2] No such file or directory: '/usr/var/log/dirsrv/slapd-standalone/access' tickets/ticket1347760_test.py:236: IOError ---------------------------- Captured stdout setup ----------------------------- OK group dirsrv exists OK user dirsrv exists ----------------------------- Captured stderr call ----------------------------- INFO:tickets.ticket1347760_test:Testing Bug 1347760 - Information disclosure via repeated use of LDAP ADD operation, etc. INFO:tickets.ticket1347760_test:Disabling accesslog logbuffering INFO:tickets.ticket1347760_test:Bind as {cn=Directory Manager,password} INFO:tickets.ticket1347760_test:Adding ou=BOU a bind user belongs to. INFO:tickets.ticket1347760_test:Adding a bind user. INFO:tickets.ticket1347760_test:Adding a test user. INFO:tickets.ticket1347760_test:Deleting aci in dc=example,dc=com. INFO:tickets.ticket1347760_test:Bind case 1. the bind user has no rights to read the entry itself, bind should be successful. INFO:tickets.ticket1347760_test:Bind as {uid=buser123,ou=BOU,dc=example,dc=com,buser123} who has no access rights. ______________________________ test_ticket47431_1 ______________________________ topology = <tickets.ticket47431_test.TopologyStandalone object at 0x7fd4152d89d0> def test_ticket47431_1(topology): ''' nsslapd-pluginarg0: uid nsslapd-pluginarg1: mail nsslapd-pluginarg2: userpassword <== repeat 27 times nsslapd-pluginarg3: , nsslapd-pluginarg4: dc=example,dc=com The duplicated values are removed by str2entry_dupcheck as follows: [..] - str2entry_dupcheck: 27 duplicate values for attribute type nsslapd-pluginarg2 detected in entry cn=7-bit check,cn=plugins,cn=config. Extra values ignored. ''' log.info("Ticket 47431 - 1: Check 26 duplicate values are treated as one...") expected = "str2entry_dupcheck - . .. .cache duplicate values for attribute type nsslapd-pluginarg2 detected in entry cn=7-bit check,cn=plugins,cn=config." 
log.debug('modify_s %s' % DN_7BITPLUGIN) try: topology.standalone.modify_s(DN_7BITPLUGIN, [(ldap.MOD_REPLACE, 'nsslapd-pluginarg0', "uid"), (ldap.MOD_REPLACE, 'nsslapd-pluginarg1', "mail"), (ldap.MOD_REPLACE, 'nsslapd-pluginarg2', "userpassword"), (ldap.MOD_REPLACE, 'nsslapd-pluginarg3', ","), (ldap.MOD_REPLACE, 'nsslapd-pluginarg4', SUFFIX)]) except ValueError: log.error('modify failed: Some problem occured with a value that was provided') assert False arg2 = "nsslapd-pluginarg2: userpassword" topology.standalone.stop(timeout=10) dse_ldif = topology.standalone.confdir + '/dse.ldif' os.system('mv %s %s.47431' % (dse_ldif, dse_ldif)) os.system('sed -e "s/\\(%s\\)/\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1\\n\\1/" %s.47431 > %s' % (arg2, dse_ldif, dse_ldif)) topology.standalone.start(timeout=10) cmdline = 'egrep -i "%s" %s' % (expected, topology.standalone.errlog) p = os.popen(cmdline, "r") line = p.readline() if line == "": log.error('Expected error "%s" not logged in %s' % (expected, topology.standalone.errlog)) > assert False E assert False tickets/ticket47431_test.py:110: AssertionError ----------------------------- Captured stderr call ----------------------------- INFO:tickets.ticket47431_test:Ticket 47431 - 1: Check 26 duplicate values are treated as one... DEBUG:tickets.ticket47431_test:modify_s cn=7-bit check,cn=plugins,cn=config grep: /var/log/dirsrv/slapd-standalone/error: No such file or directory ERROR:tickets.ticket47431_test:Expected error "str2entry_dupcheck - . .. .cache duplicate values for attribute type nsslapd-pluginarg2 detected in entry cn=7-bit check,cn=plugins,cn=config." not logged in /var/log/dirsrv/slapd-standalone/error _______________________________ test_ticket47462 _______________________________ topology = <tickets.ticket47462_test.TopologyMaster1Mas ter2 object at 0x7fd41598fd90> def test_ticket47462(topology): """ Test that AES properly replaces DES during an update/restart, and that replication also works correctly. """ # # First set config as if it's an older version. Set DES to use # libdes-plugin, MMR to depend on DES, delete the existing AES plugin, # and set a DES password for the replication agreement. 
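[Editor's note] The test_ticket47431_1 setup above edits dse.ldif by shelling out to mv and a 27-fold sed substitution. A pure-Python sketch of the same edit (hypothetical helper; path, attribute line and repeat count are taken from the test) avoids the shell quoting entirely:

    import shutil

    def duplicate_line(path, needle, total):
        """Rewrite `path` so every line equal to `needle` appears `total` times,
        keeping a backup copy next to the original."""
        shutil.copy2(path, path + '.47431')          # backup, like the mv above
        with open(path) as f:
            lines = f.read().splitlines(True)
        out = []
        for line in lines:
            if line.rstrip('\n') == needle:
                out.extend([needle + '\n'] * total)  # expand the matching line
            else:
                out.append(line)
        with open(path, 'w') as f:
            f.writelines(out)

    # Hypothetical usage matching the sed invocation above:
    # duplicate_line(dse_ldif, 'nsslapd-pluginarg2: userpassword', 27)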
# # Add an extra attribute to the DES plugin args # try: topology.master1.modify_s(DES_PLUGIN, [(ldap.MOD_REPLACE, 'nsslapd-pluginEnabled', 'on')]) except ldap.LDAPError as e: log.fatal('Failed to enable DES plugin, error: ' + e.message['desc']) assert False try: topology.master1.modify_s(DES_PLUGIN, [(ldap.MOD_ADD, 'nsslapd-pluginarg2', 'description')]) except ldap.LDAPError as e: log.fatal('Failed to reset DES plugin, error: ' + e.message['desc']) assert False try: topology.master1.modify_s(MMR_PLUGIN, [(ldap.MOD_DELETE, 'nsslapd-plugin-depends-on-named', 'AES')]) except ldap.NO_SUCH_ATTRIBUTE: pass except ldap.LDAPError as e: log.fatal('Failed to reset MMR plugin, error: ' + e.message['desc']) assert False # # Delete the AES plugin # try: topology.master1.delete_s(AES_PLUGIN) except ldap.NO_SUCH_OBJECT: pass except ldap.LDAPError as e: log.fatal('Failed to delete AES plugin, error: ' + e.message['desc']) assert False # restart the server so we must use DES plugin topology.master1.restart(timeout=10) # # Get the agmt dn, and set the password # try: entry = topology.master1.search_s('cn=config', ldap.SCOPE_SUBTREE, 'objectclass=nsDS5ReplicationAgreement') if entry: agmt_dn = entry[0].dn log.info('Found agmt dn (%s)' % agmt_dn) else: log.fatal('No replication agreements!') assert False except ldap.LDAPError as e: log.fatal('Failed to search for replica credentials: ' + e.message['desc']) assert False try: properties = {RA_BINDPW: "password"} topology.master1.agreement.setProperties(None, agmt_dn, None, properties) log.info('Successfully modified replication agreement') except ValueError: log.error('Failed to update replica agreement: ' + AGMT_DN) asser t False # # Check replication works with the new DES password # try: topology.master1.add_s(Entry((USER1_DN, {'objectclass': "top person".split(), 'sn': 'sn', 'description': 'DES value to convert', 'cn': 'test_user'}))) loop = 0 ent = None while loop <= 10: try: ent = topology.master2.getEntry(USER1_DN, ldap.SCOPE_BASE, "(objectclass=*)") break except ldap.NO_SUCH_OBJECT: time.sleep(1) loop += 1 if not ent: log.fatal('Replication test failed fo user1!') assert False else: log.info('Replication test passed') except ldap.LDAPError as e: log.fatal('Failed to add test user: ' + e.message['desc']) assert False # # Add a backend (that has no entries) # try: topology.master1.backend.create("o=empty", {BACKEND_NAME: "empty"}) except ldap.LDAPError as e: log.fatal('Failed to create extra/empty backend: ' + e.message['desc']) assert False # # Run the upgrade... # > topology.master1.upgrade('online') tickets/ticket47462_test.py:269: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ ../../../lib389/lib389/__init__.py:2500: in upgrade DirSrvTools.runUpgrade(self.prefix, online) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ prefix = '/usr', online = True @staticmethod def runUpgrade(prefix, online=True): ''' Run "setup-ds.pl --update" We simply pass in one DirSrv isntance, and this will update all the instances that are in this prefix. 
For the update to work we must fix/adjust the permissions of the scripts in: /prefix/lib[64]/dirsrv/slapd-INSTANCE/ ''' if not prefix: prefix = '' # This is an RPM run - check if /lib exists, if not use /lib64 if os.path.isdir('/usr/lib/dirsrv'): libdir = '/usr/lib/dirsrv/' else: if os.path.isdir('/usr/lib64/dirsrv'): libdir = '/usr/lib64/dirsrv/' else: log.fatal('runUpgrade: failed to find slapd lib dir!') assert False else: # Standard prefix lib location if os.path.isdir('/usr/lib64/dirsrv'): libdir = '/usr/lib64/dirsrv/' else: libdir = '/lib/dirsrv/' # Gather all the instances so we can adjust the permissions, otherwise servers = [] path = prefix + '/etc/dirsrv' > for files in os.listdir(path): E OSError: [Errno 2] No such file or directory: '/usr/etc/dirsrv' ../../../lib389/lib389/tools.py:932: OSError ---------------------------- Captured stdout setup ----------------------------- OK group dirsrv exists OK user dirsrv exists OK group dirsrv exists OK user dirsrv exists ('Update succeeded: status ', '0 Total update succeeded') ---------------------------- Captured stderr setup ----------------------------- INFO:lib389:List backend with suffix=dc=example,dc=com INFO:lib389:Found entry dn: cn=replrepl,cn=config cn: bind dn pseudo user cn: replrepl objectClass: top objectClass: person sn: bind dn pseudo user userPassword: {SSHA512}mE9qv43agtT5lEz/TTqQv+2Ft4GvxreW8ceX9JrNVWsviG7H+bYTuN8rzhxruDhCzFLwowsrPTMeVRz+WFm+ZSjq7hUBtR4/ INFO:lib389:List backend with suffix=dc=example,dc=com INFO:lib389:Found entry dn: cn=replrepl,cn=config cn: bind dn pseudo user cn: replrepl objectClass: top objectClass: person sn: bind dn pseudo user userPassword: {SSHA512}G2NatsHbvZNupGcfcysPi4oH3D3yIJFdbCd+Sy9GNysCZgtxcNzg6cwZjlluW2F5/5crqYCowBYCNT6ZehQB08Cmhmga+XT8 DEBUG:tickets.ticket47462_test:cn=meTo_$host:$port,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config created INFO:lib389:Starting total init cn=meTo_$host:$port,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO:tickets.ticket47462_test:Replication is working. ----------------------------- Captured stderr call ----------------------------- INFO:tickets.ticket47462_test:Found agmt dn (cn=meTo_$host:$port,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config) INFO:tickets.ticket47462_test:Successfully modified replication agreement INFO:tickets.ticket47462_test:Replication test passed INFO:lib389:List backend with suffix=o=empty INFO:lib389:Creating a local backend INFO:lib389:List backend cn=empty,cn=ldbm database,cn=plugins,cn=config INFO:lib389:Found entry dn: cn=empty,cn=ldbm database,cn=plugins,cn=config cn: empty nsslapd-cachememsize: 10485760 nsslapd-cach esize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-master_1/db/empty nsslapd-dncachememsize: 10485760 nsslapd-readonly: off nsslapd-require-index: off nsslapd-suffix: o=empty objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance _______________________________ test_ticket47536 _______________________________ topology = <tickets.ticket47536_test.TopologyReplication object at 0x7fd415b65110> def test_ticket47536(topology): """ Set up 2way MMR: master_1 ----- startTLS -----> master_2 master_1 <-- TLS_clientAuth -- master_2 Check CA cert, Server-Cert and Key are retrieved as PEM from cert db when the server is started. First, the file names are not specified and the default names derived from the cert nicknames. Next, the file names are specified in the encryption config entries. 
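[Editor's note] Two of the failures above come from the same pattern: a build prefix of /usr glued onto FHS paths, giving /usr/var/log/dirsrv/slapd-standalone/access in test_ticket1347760 and /usr/etc/dirsrv in runUpgrade, neither of which exists on an RPM install. A hedged sketch (hypothetical helper, not lib389 API) that treats empty and /usr prefixes as the filesystem root before joining:

    import os

    def prefixed(prefix, relpath):
        """Join a build prefix with an FHS-relative path such as 'etc/dirsrv' or
        'var/log/dirsrv', treating '' and '/usr' (RPM installs) as the root.
        Callers should still check os.path.isdir() before listing the result."""
        if not prefix or prefix == '/usr':
            prefix = '/'
        return os.path.join(prefix, relpath)

    # Hypothetical usage:
    # confdir = prefixed('/usr', 'etc/dirsrv')        # -> /etc/dirsrv
    # accesslog_dir = prefixed('/usr', 'var/log/dirsrv')  # -> /var/log/dirsrv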
Each time add 5 entries to master 1 and 2 and check they are replicated. """ log.info("Ticket 47536 - Allow usage of OpenLDAP libraries that don't use NSS for crypto") create_keys_certs(topology) config_tls_agreements(topology) add_entry(topology.master1, 'master1', 'uid=m1user', 0, 5) add_entry(topology.master2, 'master2', 'uid=m2user', 0, 5) time.sleep(1) log.info('##### Searching for entries on master1...') entries = topology.master1.search_s(DEFAULT_SUFFIX, ldap.SCOPE_SUBTREE, '(uid=*)') assert 10 == len(entries) log.info('##### Searching for entries on master2...') entries = topology.master2.search_s(DEFAULT_SUFFIX, ldap.SCOPE_SUBTREE, '(uid=*)') > assert 10 == len(entries) E assert 10 == 5 E + where 5 = len([dn: uid=m2user0,dc=example,dc=com\ncn: master2 user0\nobjectClass: top\nobjectClass: person\nobjectClass: extensibleObjec...er2 user4\nobjectClass: top\nobjectClass: person\nobjectClass: extensibleObject\nsn: user4\nuid: uid=m2user4\nuid: m2user4\n\n]) tickets/ticket47536_test.py:494: AssertionError ---------------------------- Captured stdout setup ----------------------------- OK group dirsrv exists OK user dirsrv exists OK group dirsrv exists OK user dirsrv exists ('Update succeeded: status ', '0 Total up date succeeded') ---------------------------- Captured stderr setup ----------------------------- INFO:lib389:List backend with suffix=dc=example,dc=com INFO:lib389:Found entry dn: cn=replrepl,cn=config cn: bind dn pseudo user cn: replrepl objectClass: top objectClass: person sn: bind dn pseudo user userPassword: {SSHA512}4i908B6GLJzSpF8N7NvSIqTyO9RflHjGkkYn7naXDSOZW95jnjYd/kNlZ2CI70plZ5mK+EyRWGVXCcajcT3wIlldZJVj+8Tk INFO:lib389:List backend with suffix=dc=example,dc=com INFO:lib389:Found entry dn: cn=replrepl,cn=config cn: bind dn pseudo user cn: replrepl objectClass: top objectClass: person sn: bind dn pseudo user userPassword: {SSHA512}GBl4QZQv6FqBUZI2NhsPArjfcv6MzyB1l0rr/BndQK2EQHa8r74UOZhcRzYpM1OcWBqEOdeE8/X1ZWuFBhY2Jpe2RbvqHYo0 DEBUG:tickets.ticket47536_test:cn=meTo_localhost.localdomain:38942,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config created DEBUG:tickets.ticket47536_test:cn=meTo_localhost.localdomain:38941,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config created INFO:lib389:Starting total init cn=meTo_localhost.localdomain:38942,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO:tickets.ticket47536_test:Replication is working. ----------------------------- Captured stdout call ----------------------------- Is this a CA certificate [y/N]? Enter the path length constraint, enter to skip [<0 for unlimited path]: > Is this a critical extension [y/N]? 
pk12util: PKCS12 EXPORT SUCCESSFUL pk12util: PKCS12 IMPORT SUCCESSFUL ----------------------------- Captured stderr call ----------------------------- INFO:tickets.ticket47536_test:Ticket 47536 - Allow usage of OpenLDAP libraries that don't use NSS for crypto INFO:tickets.ticket47536_test: ######################### Creating SSL Keys and Certs ###################### INFO:tickets.ticket47536_test:##### shutdown master1 INFO:tickets.ticket47536_test:##### Creating a password file INFO:tickets.ticket47536_test:##### create the pin file INFO:tickets.ticket47536_test:##### Creating a noise file INFO:tickets.ticket4 7536_test:##### Create key3.db and cert8.db database (master1): ['certutil', '-N', '-d', '/etc/dirsrv/slapd-master_1', '-f', '/etc/dirsrv/slapd-master_1/pwdfile.txt'] INFO:tickets.ticket47536_test: OUT: INFO:tickets.ticket47536_test: ERR: INFO:tickets.ticket47536_test:##### Creating encryption key for CA (master1): ['certutil', '-G', '-d', '/etc/dirsrv/slapd-master_1', '-z', '/etc/dirsrv/slapd-master_1/noise.txt', '-f', '/etc/dirsrv/slapd-master_1/pwdfile.txt'] INFO:tickets.ticket47536_test: OUT: INFO:tickets.ticket47536_test: ERR: INFO:tickets.ticket47536_test:##### Creating self-signed CA certificate (master1) -- nickname CAcertificate Generating key. This may take a few moments... INFO:tickets.ticket47536_test:##### Creating Server certificate -- nickname Server-Cert1: ['certutil', '-S', '-n', 'Server-Cert1', '-s', 'CN=localhost.localdomain,OU=389 Directory Server', '-c', 'CAcertificate', '-t', ',,', '-m', '1001', '-v', '120', '-d', '/etc/dirsrv/slapd-master_1', '-z', '/etc/dirsrv/slapd-master_1/noise.txt', '-f', '/etc/dirsrv/slapd-master_1/pwdfile.txt'] INFO:tickets.ticket47536_test: OUT: INFO:tickets.ticket47536_test: ERR: INFO:tickets.ticket47536_test:##### Creating Server certificate -- nickname Server-Cert2: ['certutil', '-S', '-n', 'Server-Cert2', '-s', 'CN=localhost.localdomain,OU=390 Directory Server', '-c', 'CAcertificate', '-t', ',,', '-m', '1002', '-v', '120', '-d', '/etc/dirsrv/slapd-master_1', '-z', '/etc/dirsrv/slapd-master_1/noise.txt', '-f', '/etc/dirsrv/slapd-master_1/pwdfile.txt'] INFO:tickets.ticket47536_test: OUT: INFO:tickets.ticket47536_test: ERR: INFO:tickets.ticket47536_test:##### start master1 INFO:tickets.ticket47536_test:##### enable SSL in master1 with all ciphers INFO:tickets.ticket47536_test: ######################### Enabling SSL LDAPSPORT 41636 ###################### INFO:tickets.ticket47536_test:##### Check the cert db: ['certutil', '-L', '-d', '/etc/dirsrv/slapd-master_1'] INFO:tickets.ticket47536_test: OUT: INFO:tickets.ticket47536_test: INFO:tickets.ticket47536_test: Certifi cate Nickname Trust Attributes INFO:tickets.ticket47536_test: SSL,S/MIME,JAR/XPI INFO:tickets.ticket47536_test: INFO:tickets.ticket47536_test: CAcertificate CTu,u,u INFO:tickets.ticket47536_test: Server-Cert2 u,u,u INFO:tickets.ticket47536_test: Server-Cert1 u,u,u INFO:tickets.ticket47536_test: ERR: INFO:tickets.ticket47536_test:##### restart master1 INFO:tickets.ticket47536_test:##### Check PEM files of master1 (before setting nsslapd-extract-pemfiles INFO:tickets.ticket47536_test: ######################### Check PEM files (CAcertificate, Server-Cert1, Server-Cert1-Key) not in /etc/dirsrv/slapd-master_1 ###################### INFO:tickets.ticket47536_test:/etc/dirsrv/slapd-master_1/CAcertificate.pem is correctly not generated. INFO:tickets.ticket47536_test:/etc/dirsrv/slapd-master_1/Server-Cert1.pem is correctly not generated. 
INFO:tickets.ticket47536_test:/etc/dirsrv/slapd-master_1/Server-Cert1-Key.pem is correctly not generated. INFO:tickets.ticket47536_test:##### Set on to nsslapd-extract-pemfiles INFO:tickets.ticket47536_test:##### restart master1 INFO:tickets.ticket47536_test:##### Check PEM files of master1 (after setting nsslapd-extract-pemfiles INFO:tickets.ticket47536_test: ######################### Check PEM files (CAcertificate, Server-Cert1, Server-Cert1-Key) in /etc/dirsrv/slapd-master_1 ###################### INFO:tickets.ticket47536_test:/etc/dirsrv/slapd-master_1/CAcertificate.pem is successfully generated. INFO:tickets.ticket47536_test:/etc/dirsrv/slapd-master_1/Server-Cert1.pem is successfully generated. INFO:tickets.ticket47536_test:/etc/dirsrv/slapd-master_1/Server-Cert1-Key.pem is successfully generated. INFO:tickets.ticket47536_test:##### Extract PK12 file for master2: pk12util -o /tmp/Server-Cert2.pk12 -n "Server-Cert2" -d /etc/dirsrv/slapd-master_1 -w /etc/dirsrv/slapd-master_1/pwdfile.txt -k /etc/dirsrv/slapd-master_1/pwdfile.txt INFO:tickets.ticket47536_test:##### Check PK12 files INFO:tickets.ticket47536_test:/tmp/Server-Cert2.pk12 is successfully extracted. INFO:tickets.ticket47536_te st:##### stop master2 INFO:tickets.ticket47536_test:##### Initialize Cert DB for master2 INFO:tickets.ticket47536_test:##### Create key3.db and cert8.db database (master2): ['certutil', '-N', '-d', '/etc/dirsrv/slapd-master_2', '-f', '/etc/dirsrv/slapd-master_1/pwdfile.txt'] INFO:tickets.ticket47536_test: OUT: INFO:tickets.ticket47536_test: ERR: INFO:tickets.ticket47536_test:##### Import certs to master2 INFO:tickets.ticket47536_test:Importing CAcertificate INFO:tickets.ticket47536_test:##### Importing Server-Cert2 to master2: pk12util -i /tmp/Server-Cert2.pk12 -n "Server-Cert2" -d /etc/dirsrv/slapd-master_2 -w /etc/dirsrv/slapd-master_1/pwdfile.txt -k /etc/dirsrv/slapd-master_1/pwdfile.txt INFO:tickets.ticket47536_test:copy /etc/dirsrv/slapd-master_1/pin.txt to /etc/dirsrv/slapd-master_2/pin.txt INFO:tickets.ticket47536_test:##### start master2 INFO:tickets.ticket47536_test:##### enable SSL in master2 with all ciphers INFO:tickets.ticket47536_test: ######################### Enabling SSL LDAPSPORT 42636 ###################### INFO:tickets.ticket47536_test:##### restart master2 INFO:tickets.ticket47536_test:##### Check PEM files of master2 (before setting nsslapd-extract-pemfiles INFO:tickets.ticket47536_test: ######################### Check PEM files (CAcertificate, Server-Cert2, Server-Cert2-Key) not in /etc/dirsrv/slapd-master_2 ###################### INFO:tickets.ticket47536_test:/etc/dirsrv/slapd-master_2/CAcertificate.pem is correctly not generated. INFO:tickets.ticket47536_test:/etc/dirsrv/slapd-master_2/Server-Cert2.pem is correctly not generated. INFO:tickets.ticket47536_test:/etc/dirsrv/slapd-master_2/Server-Cert2-Key.pem is correctly not generated. INFO:tickets.ticket47536_test:##### Set on to nsslapd-extract-pemfiles INFO:tickets.ticket47536_test:##### restart master2 INFO:tickets.ticket47536_test:##### Check PEM files of master2 (after setting nsslapd-extract-pemfiles INFO:tickets.ticket47536_test: ######################### Check PEM files (CAcertificate, Server-Cert2, Server-Cert2-Key) in /etc/dirsrv /slapd-master_2 ###################### INFO:tickets.ticket47536_test:/etc/dirsrv/slapd-master_2/CAcertificate.pem is successfully generated. INFO:tickets.ticket47536_test:/etc/dirsrv/slapd-master_2/Server-Cert2.pem is successfully generated. 
INFO:tickets.ticket47536_test:/etc/dirsrv/slapd-master_2/Server-Cert2-Key.pem is successfully generated. INFO:tickets.ticket47536_test:##### restart master1 INFO:tickets.ticket47536_test: ######################### Creating SSL Keys and Certs Done ###################### INFO:tickets.ticket47536_test:######################### Configure SSL/TLS agreements ###################### INFO:tickets.ticket47536_test:######################## master1 -- startTLS -> master2 ##################### INFO:tickets.ticket47536_test:##################### master1 <- tls_clientAuth -- master2 ################## INFO:tickets.ticket47536_test:##### Update the agreement of master1 INFO:tickets.ticket47536_test:##### Add the cert to the repl manager on master1 INFO:tickets.ticket47536_test:##### master2 Server Cert in base64 format: MIICyjCCAbKgAwIBAgICA+owDQYJKoZIhvcNAQELBQAwETEPMA0GA1UEAxMGQ0FjZXJ0MB4XDTE2MTAyOTIyNDAzM1oXDTI2MTAyOTIyNDAzM1owPzEdMBsGA1UECxMUMzkwIERpcmVjdG9yeSBTZXJ2ZXIxHjAcBgNVBAMTFWxvY2FsaG9zdC5sb2NhbGRvbWFpbjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMrZm+30t6wqcSdSUkFUQp7QOzPA4ppptNN6+5VbpOhV6F5b+Sd6crL3LmrZBvjzR5CN0dFzXHB9tUeGTVHcMzgIVEmFwS5O/K9LBlrO2DpZPI0qO6RAc3PxLn++PMRIUBiRAYis796mvW0bmVuYvvX3gtXUdhuEZbszEdIpOWQXuwcNX9k68fKY37jER2LIdcVQNQgm0R2S0J1Y9YTScKeF0A+Wn3CI0Ce5Gtp9xnJC35+KtqUI8KoYaSYHjUw+1hYglm4Nf36HdebmYHgTe40KNsOsGRQDikwFW7ijEsXFZYy0P0PJUyb8zWAyCBrmIttoFT8kdfwKwBmF9Z7/kM0CAwEAATANBgkqhkiG9w0BAQsFAAOCAQEA05bqFI/yOGnwYATWPNDTnC176ZMM3OcywI1DNR99TRirQjcRTWtE4VPaJsbVoydz3h1yOXFuM15vXGTXDPSGPZFypmZUXmyLpRG9QWr5UtVGGzl/T8EJMtEs0XU62JNC4XphFXG/yPM3eUfK9h4MeG1dYtYRncrVFLZ1KGROz4QnLdD4UZ6L1yCULF9Cm7L6rNSW5LlxVbY5vtgrQCfVadLarY8N+LhnRUaG7mxAUesBGsXLm/ojghWi/Ch0b4nC92fkwa5jVHzeYjPy8wIBs/bfkBbO6IpaFSWZ5m3oWlLkmxSuPDyp8MAjRbDGS9T19Ac4dyzTkYl4nWJfx9ESWA== INFO:tickets.ticket47536_test :##### Replication manager on master1: cn=replrepl,cn=config INFO:tickets.ticket47536_test: ObjectClass: INFO:tickets.ticket47536_test: : top INFO:tickets.ticket47536_test: : person INFO:tickets.ticket47536_test:##### Modify the certmap.conf on master1 INFO:tickets.ticket47536_test:##### Update the agreement of master2 INFO:tickets.ticket47536_test: ######################### Configure SSL/TLS agreements Done ###################### INFO:tickets.ticket47536_test: ######################### Adding 5 entries to master1 ###################### INFO:tickets.ticket47536_test: ######################### Adding 5 entries to master2 ###################### INFO:tickets.ticket47536_test:##### Searching for entries on master1... INFO:tickets.ticket47536_test:##### Searching for entries on master2... 
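[Editor's note] The ticket47536 assertion above (10 == 5) compares entry counts on master2 only one second after the adds on master1, so a slow TLS replication session makes it flap. A hedged sketch (plain python-ldap, names and values are placeholders) of polling until the expected count arrives or a deadline passes, instead of a fixed sleep:

    import time
    import ldap

    def wait_for_count(conn, base, flt, expected, timeout=30):
        """Poll `conn` until a subtree search for `flt` under `base` returns at
        least `expected` entries; return the final count for the caller to assert."""
        deadline = time.time() + timeout
        count = 0
        while time.time() < deadline:
            count = len(conn.search_s(base, ldap.SCOPE_SUBTREE, flt))
            if count >= expected:
                break
            time.sleep(2)
        return count

    # Hypothetical usage in place of the fixed time.sleep(1):
    # assert wait_for_count(topology.master2, 'dc=example,dc=com', '(uid=*)', 10) == 10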
____________________________ test_ticket47619_init _____________________________ topology = Master[localhost.localdomain:38941] -> Consumer[localhost.localdomain:38961 def test_ticket47619_init(topology): """ Initialize the test environment """ topology.master.plugins.enable(name=PLUGIN_RETRO_CHANGELOG) #topology.master.plugins.enable(name=PLUGIN_MEMBER_OF) #topology.master.plugins.enable(name=PLUGIN_REFER_INTEGRITY) topology.master.stop(timeout=10) topology.master.start(timeout=10) topology.master.log.info("test_ticket47619_init topology %r" % (topology)) # the test case will check if a warning message is logged in the # error log of the supplier > topology.master.errorlog_file = open(topology.master.errlog, "r") E IOError: [Errno 2] No such file or directory: '/var/log/dirsrv/slapd-master_1/error' tickets/ticket47619_test.py:141: IOError ---------------------------- Captured stdout setup ----------------------------- OK group dirsrv exists OK user dirsrv exists OK group dirsrv exists OK user dirsrv exists ('Update succeeded: status ', '0 Total update succeeded') ---------------------------- Captured stderr setup ----------------------------- INFO:lib389:List backend with suffix=dc=example,dc=com INFO:lib389:Found entry dn: cn=replr epl,cn=config cn: bind dn pseudo user cn: replrepl objectClass: top objectClass: person sn: bind dn pseudo user userPassword: {SSHA512}TQExc/+oPZQzlnPgZjnRRqJNK14VgdNTQC8MZO5UHpjE4vXZij6XX6GxrWJLsii6eIBbDZWxWvQnWDAN33dCkBH1WqG5EI2q INFO:lib389:List backend with suffix=dc=example,dc=com INFO:lib389:Found entry dn: cn=replrepl,cn=config cn: bind dn pseudo user cn: replrepl objectClass: top objectClass: person sn: bind dn pseudo user userPassword: {SSHA512}NjuZ9tK8PgQ4S7q0rWsY3jYIC9mH5M9a0jKOKJjO3/GY75ZAK7EoLrrEWpg9Fk8+7PpVczdNKYHzbdDguZ4Sekq+67bVVU/r DEBUG:tickets.ticket47619_test:cn=meTo_$host:$port,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config created INFO:lib389:Starting total init cn=meTo_$host:$port,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO:tickets.ticket47619_test:Replication is working. 
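[Editor's note] test_ticket47619_init above opens topology.master.errlog immediately after a stop/start and gets IOError because /var/log/dirsrv/slapd-master_1/error does not exist yet (or the instance never came up). A small, hedged sketch of opening a log defensively, waiting briefly for the file to appear so that a genuinely wrong path still surfaces, just as a timeout instead of a race:

    import os
    import time

    def open_when_present(path, timeout=15):
        """Wait up to `timeout` seconds for `path` to exist, then open it read-only.
        Raises IOError naming the path if it never shows up."""
        deadline = time.time() + timeout
        while not os.path.exists(path):
            if time.time() >= deadline:
                raise IOError("log file never appeared: %s" % path)
            time.sleep(1)
        return open(path, "r")

    # Hypothetical usage:
    # errorlog_file = open_when_present(topology.master.errlog)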
----------------------------- Captured stderr call ----------------------------- INFO:lib389:test_ticket47619_init topology Master[localhost.localdomain:38941] -> Consumer[localhost.localdomain:38961 ____________________________ test_ticket47669_init _____________________________ topology = <tickets.ticket47669_test.TopologyStandalone object at 0x7fd41528f7d0> def test_ticket47669_init(topology): """ Add cn=changelog5,cn=config Enable cn=Retro Changelog Plugin,cn=plugins,cn=config """ log.info('Testing Ticket 47669 - Test duration syntax in the changelogs') # bind as directory manager topology.standalone.log.info("Bind as %s" % DN_DM) topology.standalone.simple_bind_s(DN_DM, PASSWORD) try: changelogdir = "%s/changelog" % topology.standalone.dbdir topology.standalone.add_s(Entry((CHANGELOG, {'objectclass': 'top extensibleObject'.split(), 'nsslapd-changelogdir': changelogdir}))) except ldap.LDAPError as e: log.error('Failed to add ' + CHANGELOG + ': error ' + e.message['desc']) assert False try: topology.standalone.modify_s(RETROCHANGELOG, [(ldap.MOD_REPLACE, 'nsslapd-pluginEnabled', 'on')]) except ldap.LDAPError as e: log.error('Failed to enable ' + RETROCHANGELOG + ': error ' + e.m essage['desc']) assert False # restart the server > topology.standalone.restart(timeout=10) tickets/ticket47669_test.py:103: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ ../../../lib389/lib389/__init__.py:1215: in restart self.start(timeout) ../../../lib389/lib389/__init__.py:1096: in start "dirsrv@%s" % self.serverid]) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ popenargs = (['/usr/bin/systemctl', 'start', 'dirsrv@standalone'],), kwargs = {} retcode = 1, cmd = ['/usr/bin/systemctl', 'start', 'dirsrv@standalone'] def check_call(*popenargs, **kwargs): """Run command with arguments. Wait for command to complete. If the exit code was zero then return, otherwise raise CalledProcessError. The CalledProcessError object will have the return code in the returncode attribute. The arguments are the same as for the Popen constructor. Example: check_call(["ls", "-l"]) """ retcode = call(*popenargs, **kwargs) if retcode: cmd = kwargs.get("args") if cmd is None: cmd = popenargs[0] > raise CalledProcessError(retcode, cmd) E CalledProcessError: Command '['/usr/bin/systemctl', 'start', 'dirsrv@standalone']' returned non-zero exit status 1 /usr/lib64/python2.7/subprocess.py:541: CalledProcessError ---------------------------- Captured stdout setup ----------------------------- OK group dirsrv exists OK user dirsrv exists ----------------------------- Captured stderr call ----------------------------- INFO:tickets.ticket47669_test:Testing Ticket 47669 - Test duration syntax in the changelogs INFO:lib389:Bind as cn=Directory Manager Job for dirsrv@standalone.service failed because the control process exited with error code. See "systemctl status dirsrv@standalone.service" and "journalctl -xe" for details. ______________________ test_ticket47669_changelog_maxage _______________________ topology = <tickets.ticket47669_test.TopologyStandalone object at 0x7fd41528f7d0> def test_ticket47669_changelog_maxage(topology): """ Test nsslapd-changelogmaxage in cn=changelog5, cn=config """ log.info('1. 
Test nsslapd-changelogmaxage in cn=changelog5,cn=config') # bind as directory manager topology.standalone.log.info("Bind as %s" % DN_DM) > topology.standalone.simple_bind_s(DN_DM, PASSWORD) tickets/ticket47669_test.py:159: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:223: in simple_bind_s resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all=1,timeout=self.timeout) ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:514: in result3 resp_ctrl_classes=resp_ctrl_classes ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:521: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv instance at 0x7fd414fe4200> func = <built-in method result4 of LDAP object at 0x7fd415cf1a08> args = (13, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None e = SERVER_DOWN({'desc': "Can't contact LDAP server"},) def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E SERVER_DOWN: {'desc': "Can't contact LDAP server"} /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:1 06: SERVER_DOWN ----------------------------- Captured stderr call ----------------------------- INFO:tickets.ticket47669_test:1. Test nsslapd-changelogmaxage in cn=changelog5,cn=config INFO:lib389:Bind as cn=Directory Manager ___________________ test_ticket47669_changelog_triminterval ____________________ topology = <tickets.ticket47669_test.TopologyStandalone object at 0x7fd41528f7d0> def test_ticket47669_changelog_triminterval(topology): """ Test nsslapd-changelogtrim-interval in cn=changelog5,cn=config """ log.info('2. 
Test nsslapd-changelogtrim-interval in cn=changelog5,cn=config') # bind as directory manager topology.standalone.log.info("Bind as %s" % DN_DM) > topology.standalone.simple_bind_s(DN_DM, PASSWORD) tickets/ticket47669_test.py:179: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:222: in simple_bind_s msgid = self.simple_bind(who,cred,serverctrls,clientctrls) ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:216: in simple_bind return self._ldap_call(self._l.simple_bind,who,cred,RequestControlTuples(serverctrls),RequestControlTuples(clientctrls)) ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv instance at 0x7fd414fe4200> func = <built-in method simple_bind of LDAP object at 0x7fd415cf1a08> args = ('cn=Directory Manager', 'password', None, None), kwargs = {} diagnostic_message_success = None e = SERVER_DOWN({'desc': "Can't contact LDAP server"},) def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class __.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E SERVER_DOWN: {'desc': "Can't contact LDAP server"} /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:106: SERVER_DOWN ----------------------------- Captured stderr call ----------------------------- INFO:tickets.ticket47669_test:2. Test nsslapd-changelogtrim-interval in cn=changelog5,cn=config INFO:lib389:Bind as cn=Directory Manager _________________ test_ticket47669_changelog_compactdbinterval _________________ topology = <tickets.ticket47669_test.TopologyStandalone object at 0x7fd41528f7d0> def test_ticket47669_changelog_compactdbinterval(topology): """ Test nsslapd-changelogcompactdb-interval in cn=changelog5,cn=config """ log.info('3. 
Test nsslapd-changelogcompactdb-interval in cn=changelog5,cn=config') # bind as directory manager topology.standalone.log.info("Bind as %s" % DN_DM) > topology.standalone.simple_bind_s(DN_DM, PASSWORD) tickets/ticket47669_test.py:199: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:222: in simple_bind_s msgid = self.simple_bind(who,cred,serverctrls,clientctrls) ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:216: in simple_bind return self._ldap_call(self._l.simple_bind,who,cred,RequestControlTuples(serverctrls),RequestControlTuples(clientctrls)) ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv instance at 0x7fd414fe4200> func = <built-in method simple_bind of LDAP object at 0x7fd415cf1a08> args = ('cn=Directory Manager', 'password', None, None), kwargs = {} diagnostic_messag e_success = None e = SERVER_DOWN({'desc': "Can't contact LDAP server"},) def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E SERVER_DOWN: {'desc': "Can't contact LDAP server"} /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:106: SERVER_DOWN ----------------------------- Captured stderr call ----------------------------- INFO:tickets.ticket47669_test:3. Test nsslapd-changelogcompactdb-interval in cn=changelog5,cn=config INFO:lib389:Bind as cn=Directory Manager ____________________ test_ticket47669_retrochangelog_maxage ____________________ topology = <tickets.ticket47669_test.TopologyStandalone object at 0x7fd41528f7d0> def test_ticket47669_retrochangelog_maxage(topology): """ Test nsslapd-changelogmaxage in cn=Retro Changelog Plugin,cn=plugins,cn=config """ log.info('4. 
Test nsslapd-changelogmaxage in cn=Retro Changelog Plugin,cn=plugins,cn=config') # bind as directory manager topology.standalone.log.info("Bind as %s" % DN_DM) > topology.standalone.simple_bind_s(DN_DM, PASSWORD) tickets/ticket47669_test.py:219: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:222: in simple_bind_s msgid = self.simple_bind(who,cred,serverctrls,clientctrls) ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:216: in simple_bind return self._ldap_call(self._l.simple_bind,who,cred,RequestControlTuples(serverctrls),Request ControlTuples(clientctrls)) ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv instance at 0x7fd414fe4200> func = <built-in method simple_bind of LDAP object at 0x7fd415cf1a08> args = ('cn=Directory Manager', 'password', None, None), kwargs = {} diagnostic_message_success = None e = SERVER_DOWN({'desc': "Can't contact LDAP server"},) def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E SERVER_DOWN: {'desc': "Can't contact LDAP server"} /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:106: SERVER_DOWN ----------------------------- Captured stderr call ----------------------------- INFO:tickets.ticket47669_test:4. Test nsslapd-changelogmaxage in cn=Retro Changelog Plugin,cn=plugins,cn=config INFO:lib389:Bind as cn=Directory Manager ____________________________ test_ticket47823_init _____________________________ topology = <tickets.ticket47823_test.TopologyStandalone object at 0x7fd4151fc490> def test_ticket47823_init(topology): """ """ # Enabled the plugins topology.standalone.plugins.enable(name=PLUGIN_ATTR_UNIQUENESS) topology.standalone.restart(timeout=120) topology.standalone.add_s(Entry((PROVISIONING_DN, {'objectclass': "top nscontainer".split(), 'cn': PROVISIONING_CN}))) topology.standalone.add_s(Entry((ACTIVE_DN, {'objectclass': "top nscontainer".split(), 'cn': ACTIVE_CN}))) topology.standalone.add_s(Entry((STAGE_DN, {'objectclass': "top nscontainer".split(), 'cn': STAGE_CN}))) topology.standalone.add_s(Ent ry((DELETE_DN, {'objectclass': "top nscontainer".split(), 'cn': DELETE_CN}))) > topology.standalone.errorlog_file = open(topology.standalone.errlog, "r") E IOError: [Errno 2] No such file or directory: '/var/log/dirsrv/slapd-standalone/error' tickets/ticket47823_test.py:477: IOError ---------------------------- Captured stdout setup ----------------------------- OK group dirsrv exists OK user dirsrv exists ______________________ test_ticket47823_invalid_config_1 _______________________ topology = <tickets.ticket47823_test.TopologyStandalone object at 0x7fd4151fc490> def test_ticket47823_invalid_config_1(topology): ''' Check that an invalid config is detected. 
No uniqueness enforced Using old config: arg0 is missing ''' _header(topology, "Invalid config (old): arg0 is missing") _config_file(topology, action='save') # create an invalid config without arg0 config = _build_config(topology, attr_name='cn', subtree_1=ACTIVE_DN, subtree_2=None, type_config='old', across_subtrees=False) del config.data['nsslapd-pluginarg0'] # replace 'cn' uniqueness entry try: topology.standalone.delete_s(config.dn) except ldap.NO_SUCH_OBJECT: pass topology.standalone.add_s(config) topology.standalone.getEntry(config.dn, ldap.SCOPE_BASE, "(objectclass=nsSlapdPlugin)", ALL_CONFIG_ATTRS) # Check the server did not restart topology.standalone.modify_s(DN_CONFIG, [(ldap.MOD_REPLACE, 'nsslapd-errorlog-level', '65536')]) try: > topology.standalone.restart(timeout=5) tickets/ticket47823_test.py:636: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ ../../../lib389/lib389/__init__.py:1215: in restart self.start(timeout) ../../../lib389/lib389/__init__.py:1096: in start "dirsrv@%s" % self.serverid]) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ popenargs = (['/usr/bin/systemctl', 'start', 'dirsrv@standalone'],), kwargs = {} retcode = 1, cmd = ['/usr/bin/systemctl', 'start', 'dirsrv@standalone'] def check_call(*popenargs, **kwargs): """Run command with arguments. Wait for command to c omplete. If the exit code was zero then return, otherwise raise CalledProcessError. The CalledProcessError object will have the return code in the returncode attribute. The arguments are the same as for the Popen constructor. Example: check_call(["ls", "-l"]) """ retcode = call(*popenargs, **kwargs) if retcode: cmd = kwargs.get("args") if cmd is None: cmd = popenargs[0] > raise CalledProcessError(retcode, cmd) E CalledProcessError: Command '['/usr/bin/systemctl', 'start', 'dirsrv@standalone']' returned non-zero exit status 1 /usr/lib64/python2.7/subprocess.py:541: CalledProcessError ----------------------------- Captured stderr call ----------------------------- INFO:lib389: ############################################### INFO:lib389:####### INFO:lib389:####### Invalid config (old): arg0 is missing INFO:lib389:####### INFO:lib389:############################################### Job for dirsrv@standalone.service failed because the control process exited with error code. See "systemctl status dirsrv@standalone.service" and "journalctl -xe" for details. ______________________ test_ticket47823_invalid_config_2 _______________________ topology = <tickets.ticket47823_test.TopologyStandalone object at 0x7fd4151fc490> def test_ticket47823_invalid_config_2(topology): ''' Check that an invalid config is detected. No uniqueness enforced Using old config: arg1 is missing ''' _header(topology, "Invalid config (old): arg1 is missing") _config_file(topology, action='save') # create an invalid config without arg0 > config = _build_config(topology, attr_name='cn', subtree_1=ACTIVE_DN, subtree_2=None, type_config='old', across_subtrees=False) tickets/ticket47823_test.py:672: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tickets/ticket47823_test.py:124: in _build_config config = _uniqueness_config_entry(topology, attr_name) tickets/ticket47823_test.py:112: in _uniqueness_config_entry 'nsslapd-pluginDescription']) ../../../lib389/lib389/__init__.py:1574: in getEntry restype, obj = self.result(res) .. 
/../../lib389/lib389/__init__.py:127: in inner objtype, data = f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:503: in result resp_type, resp_data, resp_msgid = self.result2(msgid,all,timeout) ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:507: in result2 resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all,timeout) ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:514: in result3 resp_ctrl_classes=resp_ctrl_classes ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:521: in result4 ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop) ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv instance at 0x7fd415907128> func = <built-in method result4 of LDAP object at 0x7fd415de00d0> args = (15, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None e = SERVER_DOWN({'desc': "Can't contact LDAP server"},) def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E SERVER_DOWN: {'desc': "Can't contact LDAP server"} /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:106: SERVER_DOWN ----------------------------- Captured stderr call ----------------------------- INFO:lib389: ####################### ######################## INFO:lib389:####### INFO:lib389:####### Invalid config (old): arg1 is missing INFO:lib389:####### INFO:lib389:############################################### ______________________ test_ticket47823_invalid_config_3 _______________________ topology = <tickets.ticket47823_test.TopologyStandalone object at 0x7fd4151fc490> def test_ticket47823_invalid_config_3(topology): ''' Check that an invalid config is detected. 
No uniqueness enforced Using old config: arg0 is missing ''' _header(topology, "Invalid config (old): arg0 is missing but new config attrname exists") _config_file(topology, action='save') # create an invalid config without arg0 > config = _build_config(topology, attr_name='cn', subtree_1=ACTIVE_DN, subtree_2=None, type_config='old', across_subtrees=False) tickets/ticket47823_test.py:723: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tickets/ticket47823_test.py:124: in _build_config config = _uniqueness_config_entry(topology, attr_name) tickets/ticket47823_test.py:112: in _uniqueness_config_entry 'nsslapd-pluginDescription']) ../../../lib389/lib389/__init__.py:1573: in getEntry res = self.search(*args, **kwargs) ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:594: in search return self.search_ext(base,scope,filterstr,attrlist,attrsonly,None,None) ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:586: in search_ext timeout,sizelimit, ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv instance at 0x7fd415907128> func = <built-in method search_ext of LDAP object at 0x7fd415de00d0> args = ('cn=attribute uniqueness,cn=plugins,cn=config', 0, '(objectclass=nsSlapdPlugin)', ['objectClass', 'cn', 'nsslapd-pluginPath', 'nsslapd-pluginInitfunc', 'nsslapd-pluginType', 'nsslapd-p luginEnabled', ...], 0, None, ...) kwargs = {}, diagnostic_message_success = None e = SERVER_DOWN({'desc': "Can't contact LDAP server"},) def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E SERVER_DOWN: {'desc': "Can't contact LDAP server"} /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:106: SERVER_DOWN ----------------------------- Captured stderr call ----------------------------- INFO:lib389: ############################################### INFO:lib389:####### INFO:lib389:####### Invalid config (old): arg0 is missing but new config attrname exists INFO:lib389:####### INFO:lib389:############################################### ______________________ test_ticket47823_invalid_config_4 _______________________ topology = <tickets.ticket47823_test.TopologyStandalone object at 0x7fd4151fc490> def test_ticket47823_invalid_config_4(topology): ''' Check that an invalid config is detected. 
No uniqueness enforced Using old config: arg1 is missing ''' _header(topology, "Invalid config (old): arg1 is missing but new config exist") _config_file(topology, action='save') # create an invalid config without arg0 > config = _build_config(topology, attr_name='cn', subtree_1=ACTIVE_DN, subtree_2=None, type_config='old', across_subtrees=False) tickets/ticket47823_test.py:776: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tickets/ticket47823_test.py:124: in _build_config config = _uniqueness_config_entry(topology, attr_name) tickets/ticket47823_test.py:112: in _uniqueness_config_entry 'nsslapd-pluginDescription']) ../../ ../lib389/lib389/__init__.py:1573: in getEntry res = self.search(*args, **kwargs) ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:594: in search return self.search_ext(base,scope,filterstr,attrlist,attrsonly,None,None) ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:586: in search_ext timeout,sizelimit, ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv instance at 0x7fd415907128> func = <built-in method search_ext of LDAP object at 0x7fd415de00d0> args = ('cn=attribute uniqueness,cn=plugins,cn=config', 0, '(objectclass=nsSlapdPlugin)', ['objectClass', 'cn', 'nsslapd-pluginPath', 'nsslapd-pluginInitfunc', 'nsslapd-pluginType', 'nsslapd-pluginEnabled', ...], 0, None, ...) kwargs = {}, diagnostic_message_success = None e = SERVER_DOWN({'desc': "Can't contact LDAP server"},) def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E SERVER_DOWN: {'desc': "Can't contact LDAP server"} /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:106: SERVER_DOWN ----------------------------- Captured stderr call ----------------------------- INFO:lib389: ############################################### INFO:lib389:####### INFO:lib389:####### Invalid config (old): arg1 is missing but new config exist INFO:lib389:####### INFO:lib389:############################################### ______________________ test_ticket47823_invalid_config_5 _______________________ topology = <tickets.ticket47823_test.TopologyStandalone object at 0x7fd4151fc490> def test_ticket47823_invalid_config_5(topology): ''' Check that an invalid config is detected. 
No uniqueness enforced Using new config: uniqueness-attribute-name is missing ''' _header(topology, "Invalid config (new): uniqueness-attribute-name is missing") _config_file(topology, action='save') # create an invalid config without arg0 > config = _build_config(topology, attr_name='cn', subtree_1=ACTIVE_DN, subtree_2=None, type_config='new', across_subtrees=False) tickets/ticket47823_test.py:828: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tickets/ticket47823_test.py:131: in _build_config config = _uniqueness_config_entry(topology, attr_name) tickets/ticket47823_test.py:112: in _uniqueness_config_entry 'nsslapd-pluginDescription']) ../../../lib389/lib389/__init__.py:1573: in getEntry res = self.search(*args, **kwargs) ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:594: in search return self.search_ext(base,scope,filterstr,attrlist,attrsonly,None,None) ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:586: in search_ext timeout,sizelimit, ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv instance at 0x7fd415907128> func = <built-in method search_ext of LDAP object at 0x7fd415de00d0> args = ('cn=attribute uniqueness,cn=plugins,cn=config', 0, '(objectclass=nsSlapdPlugin)', ['objectClass', 'cn', 'nsslapd-pluginPath', 'nsslapd-pluginInitfunc', 'nsslapd-pluginType', 'nsslapd-pluginEnabled', ...], 0, None, ...) kwargs = {}, diagnostic_message_success = None e = SERVER_DOWN({'desc': "Can't contact LDAP server"},) def _ldap_call(self,func,*args,** kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E SERVER_DOWN: {'desc': "Can't contact LDAP server"} /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:106: SERVER_DOWN ----------------------------- Captured stderr call ----------------------------- INFO:lib389: ############################################### INFO:lib389:####### INFO:lib389:####### Invalid config (new): uniqueness-attribute-name is missing INFO:lib389:####### INFO:lib389:############################################### ______________________ test_ticket47823_invalid_config_6 _______________________ topology = <tickets.ticket47823_test.TopologyStandalone object at 0x7fd4151fc490> def test_ticket47823_invalid_config_6(topology): ''' Check that an invalid config is detected. 
No uniqueness enforced Using new config: uniqueness-subtrees is missing ''' _header(topology, "Invalid config (new): uniqueness-subtrees is missing") _config_file(topology, action='save') # create an invalid config without arg0 > config = _build_config(topology, attr_name='cn', subtree_1=ACTIVE_DN, subtree_2=None, type_config='new', across_subtrees=False) tickets/ticket47823_test.py:879: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tickets/ticket47823_test.py:131: in _build_config config = _uniqueness_config_entry(topology, attr_name) tickets/ticket47823_test.py:112: in _uniqueness_config_entry 'nsslapd-pluginDescription']) ../../../lib389/lib389/__init__.py:1573: in getEntry res = self.search(*args, **kwargs) ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) /usr/lib64/pyth on2.7/site-packages/ldap/ldapobject.py:594: in search return self.search_ext(base,scope,filterstr,attrlist,attrsonly,None,None) ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:586: in search_ext timeout,sizelimit, ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv instance at 0x7fd415907128> func = <built-in method search_ext of LDAP object at 0x7fd415de00d0> args = ('cn=attribute uniqueness,cn=plugins,cn=config', 0, '(objectclass=nsSlapdPlugin)', ['objectClass', 'cn', 'nsslapd-pluginPath', 'nsslapd-pluginInitfunc', 'nsslapd-pluginType', 'nsslapd-pluginEnabled', ...], 0, None, ...) kwargs = {}, diagnostic_message_success = None e = SERVER_DOWN({'desc': "Can't contact LDAP server"},) def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1: self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E SERVER_DOWN: {'desc': "Can't contact LDAP server"} /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:106: SERVER_DOWN ----------------------------- Captured stderr call ----------------------------- INFO:lib389: ############################################### INFO:lib389:####### INFO:lib389:####### Invalid config (new): uniqueness-subtrees is missing INFO:lib389:####### INFO:lib389:############################################### ______________________ test_ticket47823_invalid_config_7 _______________________ topology = <tickets.ticket47823_test.TopologyStandalone object at 0x7fd4151fc490> def test_ticke t47823_invalid_config_7(topology): ''' Check that an invalid config is detected. 
No uniqueness enforced Using new config: uniqueness-subtrees is missing ''' _header(topology, "Invalid config (new): uniqueness-subtrees are invalid") _config_file(topology, action='save') # create an invalid config without arg0 > config = _build_config(topology, attr_name='cn', subtree_1="this_is dummy DN", subtree_2="an other=dummy DN", type_config='new', across_subtrees=False) tickets/ticket47823_test.py:930: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ tickets/ticket47823_test.py:131: in _build_config config = _uniqueness_config_entry(topology, attr_name) tickets/ticket47823_test.py:112: in _uniqueness_config_entry 'nsslapd-pluginDescription']) ../../../lib389/lib389/__init__.py:1573: in getEntry res = self.search(*args, **kwargs) ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:594: in search return self.search_ext(base,scope,filterstr,attrlist,attrsonly,None,None) ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:586: in search_ext timeout,sizelimit, ../../../lib389/lib389/__init__.py:159: in inner return f(*args, **kwargs) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <lib389.DirSrv instance at 0x7fd415907128> func = <built-in method search_ext of LDAP object at 0x7fd415de00d0> args = ('cn=attribute uniqueness,cn=plugins,cn=config', 0, '(objectclass=nsSlapdPlugin)', ['objectClass', 'cn', 'nsslapd-pluginPath', 'nsslapd-pluginInitfunc', 'nsslapd-pluginType', 'nsslapd-pluginEnabled', ...], 0, None, ...) kwargs = {}, diagnostic_message_success = None e = SERVER_DOWN({'desc': "Can't contact LDAP server"},) def _ldap_call(self,func,*args,**kwargs): """ Wrapper method mainly for serializing calls into OpenLDAP libs and trace logs """ self._ldap_object_lock.acquire() if __debug__: if self._trace_level>=1 : self._trace_file.write('*** %s %s - %s\n%s\n' % ( repr(self), self._uri, '.'.join((self.__class__.__name__,func.__name__)), pprint.pformat((args,kwargs)) )) if self._trace_level>=9: traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file) diagnostic_message_success = None try: try: > result = func(*args,**kwargs) E SERVER_DOWN: {'desc': "Can't contact LDAP server"} /usr/lib64/python2.7/site-packages/ldap/ldapobject.py:106: SERVER_DOWN ----------------------------- Captured stderr call ----------------------------- INFO:lib389: ############################################### INFO:lib389:####### INFO:lib389:####### Invalid config (new): uniqueness-subtrees are invalid INFO:lib389:####### INFO:lib389:############################################### ____________________________ test_ticket47871_init _____________________________ topology = Master[localhost.localdomain:38941] -> Consumer[localhost.localdomain:38961 def test_ticket47871_init(topology): """ Initialize the test environment """ topology.master.plugins.enable(name=PLUGIN_RETRO_CHANGELOG) mod = [(ldap.MOD_REPLACE, 'nsslapd-changelogmaxage', "10s"), # 10 second triming (ldap.MOD_REPLACE, 'nsslapd-changelog-trim-interval', "5s")] topology.master.modify_s("cn=%s,%s" % (PLUGIN_RETRO_CHANGELOG, DN_PLUGIN), mod) #topology.master.plugins.enable(name=PLUGIN_MEMBER_OF) #topology.master.plugins.enable(name=PLUGIN_REFER_INTEGRITY) topology.master.stop(timeout=10) topology.master.start(timeout=10) topology.master.log.info("test_ticket47871_init topology %r" % (topology)) # the test case will check 
if a warning message is logged in the # error log of the supplier > topology.master.errorlog_file = open(topology.master.errlog, "r") E IOError: [Errno 2] No such file or directory: '/var/log/dirsrv/slapd-master_1/error' tickets/ticket47871_test.py:147: IOError ---------------------------- Captured stdout setup ----------------------------- OK group dirsrv exists OK user dirsrv exists OK group dirsrv exists OK user dirsrv exists ('Update succeeded: status ' , '0 Total update succeeded') ---------------------------- Captured stderr setup ----------------------------- INFO:lib389:List backend with suffix=dc=example,dc=com INFO:lib389:Found entry dn: cn=replrepl,cn=config cn: bind dn pseudo user cn: replrepl objectClass: top objectClass: person sn: bind dn pseudo user userPassword: {SSHA512}ExX81be84URksIaqLWqKsoBAFQwBAiHltdNM8jhnaOGwIKzOExXuKuYzHtP+fMBy+ObdxOiJoY2XvJnFAYYfm3QWuNKWs7QN INFO:lib389:List backend with suffix=dc=example,dc=com INFO:lib389:Found entry dn: cn=replrepl,cn=config cn: bind dn pseudo user cn: replrepl objectClass: top objectClass: person sn: bind dn pseudo user userPassword: {SSHA512}PJRtPwGd3e+KDL3ErsG5KTyo1qq+tvNLdJSKxlWUfmviHbGK8sMyeob38mTsYNhWXReDZpvnchh2WuoJXxwLWXpBzSOz0Htp DEBUG:tickets.ticket47871_test:cn=meTo_$host:$port,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config created INFO:lib389:Starting total init cn=meTo_$host:$port,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config INFO:tickets.ticket47871_test:Replication is working. ----------------------------- Captured stderr call ----------------------------- INFO:lib389:test_ticket47871_init topology Master[localhost.localdomain:38941] -> Consumer[localhost.localdomain:38961 _______________________________ test_ticket48109 _______________________________ topology = <tickets.ticket48109_test.TopologyStandalone object at 0x7fd415228c10> def test_ticket48109(topology): ''' Set SubStr lengths to cn=uid,cn=index,... 
objectClass: extensibleObject nsIndexType: sub nsSubStrBegin: 2 nsSubStrEnd: 2 ''' log.info('Test case 0') # add substr setting to UID_INDEX try: topology.standalone.modify_s(UID_INDEX, [(ldap.MOD_ADD, 'objectClass', 'extensibleObject'), (ldap.MOD_ADD, 'nsIndexType', 'sub'), (ldap.MOD_ADD, 'nsSubStrBegin', '2'), (ldap.MOD_ADD, 'nsSubStrEnd', '2')]) except ldap.LDAPError as e: log.error('Failed to add substr lengths: error ' + e.message['desc']) assert False # restart the server to apply the indexing topology.standalone.restart(timeout=10) # add a test user UID = 'auser0' USER_DN = 'uid=%s,%s' % (UID, SUFFIX) try: topology.standalone.add_s(Entry((USER_DN, { 'objectclass': 'top person organizationalPerson inetOrgPerson'.split(), 'cn': 'a user0', 'sn': 'user0', 'givenname': 'a', 'mail': UID}))) except ldap.LDAPError as e: log.error('Failed to add ' + USER_DN + ': error ' + e.message['desc']) assert False entries = topology.standalone.search_s(SUFFIX, ldap.SCOPE_SUBTREE, '(uid=a*)') assert len(entries) == 1 # restart the server to check the access log topology.standalone.restart(timeout=10) cmdline = 'egrep %s %s | egrep "uid=a\*"' % (SUFFIX, topology.standalone.accesslog) p = os.popen(cmdline, "r") l0 = p.readline() if l0 == "": log.error('Search with "(uid=a*)" is not logged in ' + topology.standalone.accesslog) > assert False E assert False <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48109_test.py>:121: AssertionError ---------------------------- Captured stdout setup ----------------------------- OK group dirsrv exists OK user dirsrv exists ----------------------------- Captured stderr call ----------------------------- INFO:tickets.ticket48109_test:Test case 0 ERROR:tickets.ticket48109_test:Search with "(uid=a*)" is not logged in /var/log/dirsrv/slapd-standalone/access __________________ test_ticket48270_homeDirectory_indexed_cis __________________ topology = <tickets.ticket48270_test.TopologyStandalone object at 0x7fd415217790> def test_ticket48270_homeDirectory_indexed_cis(topology): log.info("\n\nindex homeDirectory in caseIgnoreIA5Match and caseExactIA5Match") try: ent = topology.standalone.getEntry(HOMEDIRECTORY_INDEX, ldap.SCOPE_BASE) except ldap.NO_SUCH_OBJECT: topology.standalone.add_s(Entry((HOMEDIRECTORY_INDEX, { 'objectclass': "top nsIndex".split(), 'cn': HOMEDIRECTORY_CN, 'nsSystemIndex': 'false', 'nsIndexType': 'eq'}))) #log.info("attach debugger") #time.sleep(60) IGNORE_MR_NAME='caseIgnoreIA5Match' EXACT_MR_NAME='caseExactIA5Match' mod = [(ldap.MOD_REPLACE, MATCHINGRULE, (IGNORE_MR_NAME, EXACT_MR_NAME))] topology.standalone.modify_s(HOMEDIRECTORY_INDEX, mod) #topology.standalone.stop(timeout=10) log.info("successfully checked that filter with exact mr , a filter with lowercase eq is failing") #assert topology.standalone.db2index(bename=DEFAULT_BENAME, suffixes=None, attrs=['homeDirectory']) #topology.standalone.start(timeout=10) args = {TASK_WAIT: True} topology.standalone.tasks.reindex(suffix=SUFFIX, attrname='homeDirectory', args=args) log.info("Check indexing succeeded with a specified matching rule") file_path = os.path.join(topology.standalone.prefix, "var/log/dirsrv/slapd-%s/errors" % topology.standalone.serverid) > file_obj = open(file_path, "r") E IOError: [Errno 2] No such file or directory: '/usr/var/log/dirsrv/slapd-standalone/errors' <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48270_test.py>:100: IOError 
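The '/usr/var/log/dirsrv/slapd-standalone/errors' path in the ticket48270_test failure above comes from joining the instance prefix with a relative log path: os.path.join() with a relative second component simply appends it, so a prefix of '/usr' yields '/usr/var/log/...', which does not exist on hosts whose instance logs live directly under /var/log. A minimal sketch of that behaviour and of a lookup that tolerates both layouts (the helper name and the fallback are illustrative only, not part of lib389):

    import os

    def find_instance_errors_log(prefix, serverid):
        # Hypothetical helper, for illustration only.
        # os.path.join('/usr', 'var/log/...') == '/usr/var/log/...'
        candidates = [
            os.path.join(prefix or '/', 'var/log/dirsrv/slapd-%s/errors' % serverid),
            '/var/log/dirsrv/slapd-%s/errors' % serverid,
        ]
        for path in candidates:
            if os.path.isfile(path):
                return path
        return None

Which of the two candidate locations actually exists depends on how the instance was built; the sketch only shows why the prefixed path came out the way it did.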
----------------------------- Captured stderr call ----------------------------- INFO:tickets.ticket48270_test: index homeDirectory in caseIgnoreIA5Match and caseExactIA5Match INFO:tickets.ticket48270_test:successfully checked that filter with exact mr , a filter with lowercase eq is failing INFO:lib389:List backend with suffix=dc=example,dc=com INFO:lib389:Index task index_homeDirectory_10302016_013453 completed successfully INFO:tickets.ticket48270_test:Check indexing succeeded with a specified matching rule _______________________________ test_ticket48383 _______________________________ topology = <tickets.ticket48383_test.TopologyStandalone object at 0x7fd4104d2150> def test_ticket48383(topology): """ This test case will check that we re-alloc buffer sizes on import.c We achieve this by setting the servers dbcachesize to a stupid small value and adding huge objects to ds. Then when we run db2index, either: data stress suites tickets tmp If we are not using the re-alloc code, it will FAIL (Bad) data stress suites tickets tmp If we re-alloc properly, it all works regardless. """ topology.sta ndalone.config.set('nsslapd-maxbersize', '200000000') topology.standalone.restart() # Create some stupid huge objects / attributes in DS. # seeAlso is indexed by default. Lets do that! # This will take a while ... data = [random.choice(string.letters) for x in xrange(10000000)] s = "".join(data) # This was here for an iteration test. i = 1 USER_DN = 'uid=user%s,ou=people,%s' % (i, DEFAULT_SUFFIX) padding = ['%s' % n for n in range(400)] user = Entry((USER_DN, { 'objectclass': 'top posixAccount person extensibleObject'.split(), 'uid': 'user%s' % (i), 'cn': 'user%s' % (i), 'uidNumber': '%s' % (i), 'gidNumber': '%s' % (i), 'homeDirectory': '/home/user%s' % (i), 'description': 'user description', 'sn' : s , 'padding' : padding , })) try: topology.standalone.add_s(user) except ldap.LDAPError as e: log.fatal('test 48383: Failed to user%s: error %s ' % (i, e.message['desc'])) assert False # Set the dbsize really low. try: topology.standalone.modify_s(DEFAULT_BENAME, [(ldap.MOD_REPLACE, 'nsslapd-cachememsize', '1')]) except ldap.LDAPError as e: log.fatal('Failed to change nsslapd-cachememsize ' + e.message['desc']) ## Does ds try and set a minimum possible value for this? ## Yes: [16/Feb/2016:16:39:18 +1000] - WARNING: cache too small, increasing to 500K bytes # Given the formula, by default, this means DS will make the buffsize 400k # So an object with a 1MB attribute should break indexing # stop the server topology.standalone.stop(timeout=30) # Now export and import the DB. It's easier than db2index ... 
topology.standalone.db2ldif(bename=DEFAULT_BENAME, suffixes=[DEFAULT_SUFFIX], excludeSuffixes=[], encrypt=False, \ repl_data=True, outputfile='%s/ldif/%s.ldif' % (topology.standalone.dbdir,SERVERID_STANDALONE )) result = topology.standalone.ldif2db(DEFAULT_BENAME, None, None, False, '%s/ldif/%s.ldif' % (topology.standalone.dbdir,SERVERID_STANDALONE )) > assert(result) E assert False <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48383_test.py>:123: AssertionError ---------------------------- Captured stdout setup ----------------------------- OK group dirsrv exists OK user dirsrv exists ----------------------------- Captured stdout call ----------------------------- OK group dirsrv exists OK user dirsrv exists Exported ldif file: /var/lib/dirsrv/slapd-standalone/db/ldif/standalone.ldif OK group dirsrv exists OK user dirsrv exists ----------------------------- Captured stderr call ----------------------------- CRITICAL:tickets.ticket48383_test:Failed to change nsslapd-cachememsize No such object INFO:lib389:Running script: /usr/sbin/db2ldif -Z standalone -n userRoot -s dc=example,dc=com -a /var/lib/dirsrv/slapd-standalone/db/ldif/standalone.ldif -r [30/Oct/2016:01:46:17.520373120 +0200] - DEBUG - ldbm_back_start - userRoot: entry cache size: 10485760 B; db size: 10321920 B [30/Oct/2016:01:46:17.939192514 +0200] - DEBUG - ldbm_back_start - total cache size: 20971520 B; [30/Oct/2016:01:46:17.983696414 +0200] - DEBUG - ldbm_back_start - Total entry cache size: 20971520 B; dbcache size: 10000000 B; available memory size: 2143031296 B; [30/Oct/2016:01:46:18.006396738 +0200] - NOTICE - dblayer_start - Detected Disorderly Shutdown last time Directory Server was running, recovering database. 
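The 'Failed to change nsslapd-cachememsize No such object' message above is consistent with modify_s() being given the bare backend name (DEFAULT_BENAME, which the db2ldif command line above shows resolving to userRoot) rather than the backend's config entry DN. Assuming that is what happened, the modify would need the full ldbm config entry, along these lines (illustrative only, not a change to the test):

    import ldap

    # The backend's tunables live on its entry under cn=ldbm database,cn=plugins,cn=config,
    # not on the bare backend name.
    backend_dn = 'cn=userRoot,cn=ldbm database,cn=plugins,cn=config'
    mod = [(ldap.MOD_REPLACE, 'nsslapd-cachememsize', '1')]
    # topology.standalone.modify_s(backend_dn, mod)   # with an open DirSrv connection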
ldiffile: /var/lib/dirsrv/slapd-standalone/db/ldif/standalone.ldif [30/Oct/2016:01:46:18.936350117 +0200] - ERR - ldbm_back_ldbm2ldif - db2ldif: can't open /var/lib/dirsrv/slapd-standalone/db/ldif/standalone.ldif: 2 (No such file or directory) [30/Oct/2016:01:46:19.436065901 +0200] - INFO - dblayer_pre_close - Waiting for 4 database threads to stop [30/Oct/2016:01:46:19.871891563 +0200] - INFO - dblayer_pre_close - All database threads now stopped ERROR:lib389:ldif2db: Can't find file: /var/lib/dirsrv/slapd-standalone/db/ldif/standalone.ldif ___________________ test_ticket48497_homeDirectory_index_run ___________________ topology = <tickets.ticket48497_test.TopologyStandalone object at 0x7fd41049f650> def test_ticket48497_homeDirectory_index_run(topology): args = {TASK_WAIT: Tru e} topology.standalone.tasks.reindex(suffix=SUFFIX, attrname='homeDirectory', args=args) log.info("Check indexing succeeded with a specified matching rule") file_path = os.path.join(topology.standalone.prefix, "var/log/dirsrv/slapd-%s/errors" % topology.standalone.serverid) > file_obj = open(file_path, "r") E IOError: [Errno 2] No such file or directory: '/usr/var/log/dirsrv/slapd-standalone/errors' <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48497_test.py>:139: IOError ----------------------------- Captured stderr call ----------------------------- INFO:lib389:List backend with suffix=dc=example,dc=com INFO:lib389:Index task index_homeDirectory_10302016_014648 completed successfully INFO:tickets.ticket48497_test:Check indexing succeeded with a specified matching rule __________________ test_ticket48745_homeDirectory_indexed_cis __________________ topology = <tickets.ticket48745_test.TopologyStandalone object at 0x7fd414b5d3d0> def test_ticket48745_homeDirectory_indexed_cis(topology): log.info("\n\nindex homeDirectory in caseIgnoreIA5Match and caseExactIA5Match") try: ent = topology.standalone.getEntry(HOMEDIRECTORY_INDEX, ldap.SCOPE_BASE) except ldap.NO_SUCH_OBJECT: topology.standalone.add_s(Entry((HOMEDIRECTORY_INDEX, { 'objectclass': "top nsIndex".split(), 'cn': HOMEDIRECTORY_CN, 'nsSystemIndex': 'false', 'nsIndexType': 'eq'}))) #log.info("attach debugger") #time.sleep(60) IGNORE_MR_NAME='caseIgnoreIA5Match' EXACT_MR_NAME='caseExactIA5Match' mod = [(ldap.MOD_REPLACE, MATCHINGRULE, (IGNORE_MR_NAME, EXACT_MR_NAME))] topology.standalone.modify_s(HOMEDIRECTORY_INDEX, mod) #topology.standalone.stop(timeout=10) log.info("successfully checked that filter with exact mr , a filter with lowercase eq is failing") #assert topology.standalone.db2index(bename=DEFAULT_BENAME, suffixes=None, attrs=['homeDirectory']) #topology.standalone.start(timeout=10) args = {TASK_WAIT: True} topology.standalone.tasks.reindex(suffix=SUFFIX, attrname='homeDirectory', args=args) log.info("Check indexing succeeded with a specified matching rule") file_path = os.path.join(topology.standalone.prefix, "var/log/dirsrv/slapd-%s/errors" % topology.standalone.serverid) > file_obj = open(file_path, "r") E IOError: [Errno 2] No such file or directory: '/usr/var/log/dirsrv/slapd-standalone/errors' <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48745_test.py>:110: IOError ----------------------------- Captured stderr call ----------------------------- INFO:tickets.ticket48745_test: index homeDirectory in caseIgnoreIA5Match and caseExactIA5Match INFO:tickets.ticket48745_test:successfully checked that filter with 
exact mr , a filter with lowercase eq is failing INFO:lib389:List backend with suffix=dc=example,dc=com INFO:lib389:Index task index_homeDirectory_10302016_014817 completed successfully INFO:tickets.ticket48745_test:Check indexing succeeded with a specified matching rule __________________ test_ticket48746_homeDirectory_indexed_cis __________________ topology = <tickets.ticket48746_test.TopologyStandalone object at 0x7fd410970790> def test_ticket48746_homeDirectory_indexed_cis(topology): log.info("\n\nindex homeDirectory in caseIgnoreIA5Match and caseExactIA5Match") try: ent = topology.standalone.getEntry(HOMEDIRECTORY_INDEX, ldap.SCOPE_BASE) except ldap.NO_SUCH_OBJECT: topology.standalone.add_s(Entry((HOMEDIRECTORY_INDEX, { 'objectclass': "top nsIndex".split(), 'cn': HOMEDIRECTORY_CN, 'nsSystemIndex': 'false', 'nsIndexType': 'eq'}))) #log.info("attach debugger") #time.sleep(60) IGNORE_MR_NAME='caseIgnoreIA5Match' EXACT_MR_NAME='caseExactIA5Match' mod = [(ldap.MOD_REPLACE, MATCHINGRULE, (IGNORE_MR_NAME, EXACT_MR_NAME))] topology.standalone.modify_s(HOMEDIRECTORY_INDEX, mod) #topology.standalone.stop(timeout=10) log.info("successfully checked that filter with exact mr , a filter with lowercase eq is failing") #assert topology.standalone.db2index(bename=DEFAULT_BENAME, suffixes=None, attrs=['homeDirectory']) #to pology.standalone.start(timeout=10) args = {TASK_WAIT: True} topology.standalone.tasks.reindex(suffix=SUFFIX, attrname='homeDirectory', args=args) log.info("Check indexing succeeded with a specified matching rule") file_path = os.path.join(topology.standalone.prefix, "var/log/dirsrv/slapd-%s/errors" % topology.standalone.serverid) > file_obj = open(file_path, "r") E IOError: [Errno 2] No such file or directory: '/usr/var/log/dirsrv/slapd-standalone/errors' <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48746_test.py>:108: IOError ----------------------------- Captured stderr call ----------------------------- INFO:tickets.ticket48746_test: index homeDirectory in caseIgnoreIA5Match and caseExactIA5Match INFO:tickets.ticket48746_test:successfully checked that filter with exact mr , a filter with lowercase eq is failing INFO:lib389:List backend with suffix=dc=example,dc=com INFO:lib389:Index task index_homeDirectory_10302016_014853 completed successfully INFO:tickets.ticket48746_test:Check indexing succeeded with a specified matching rule __________________ test_ticket48746_homeDirectory_indexed_ces __________________ topology = <tickets.ticket48746_test.TopologyStandalone object at 0x7fd410970790> def test_ticket48746_homeDirectory_indexed_ces(topology): log.info("\n\nindex homeDirectory in caseExactIA5Match, this would trigger the crash") try: ent = topology.standalone.getEntry(HOMEDIRECTORY_INDEX, ldap.SCOPE_BASE) except ldap.NO_SUCH_OBJECT: topology.standalone.add_s(Entry((HOMEDIRECTORY_INDEX, { 'objectclass': "top nsIndex".split(), 'cn': HOMEDIRECTORY_CN, 'nsSystemIndex': 'false', 'nsIndexType': 'eq'}))) # log.info("attach debugger") # time.sleep(60) EXACT_MR_NAME='caseExactIA5Match' mod = [(ldap.MOD_REPLACE, MATCHINGRULE, (EXACT_MR_NAME))] topology.standalone.modify_s(HOMEDIRECTORY_INDEX, mod) #topology.standalone.stop(timeout=10) log.info("successfully checked that filter with exact mr , a filter with lowercase eq is failing") #assert top ology.standalone.db2index(bename=DEFAULT_BENAME, suffixes=None, attrs=['homeDirectory']) #topology.standalone.start(timeout=10) args = {TASK_WAIT: True} 
topology.standalone.tasks.reindex(suffix=SUFFIX, attrname='homeDirectory', args=args) log.info("Check indexing succeeded with a specified matching rule") file_path = os.path.join(topology.standalone.prefix, "var/log/dirsrv/slapd-%s/errors" % topology.standalone.serverid) > file_obj = open(file_path, "r") E IOError: [Errno 2] No such file or directory: '/usr/var/log/dirsrv/slapd-standalone/errors' <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48746_test.py>:172: IOError ----------------------------- Captured stderr call ----------------------------- INFO:tickets.ticket48746_test: index homeDirectory in caseExactIA5Match, this would trigger the crash INFO:tickets.ticket48746_test:successfully checked that filter with exact mr , a filter with lowercase eq is failing INFO:lib389:List backend with suffix=dc=example,dc=com INFO:lib389:Index task index_homeDirectory_10302016_014855 completed successfully INFO:tickets.ticket48746_test:Check indexing succeeded with a specified matching rule _____________________ test_ticket48906_dblock_ldap_update ______________________ topology = <tickets.ticket48906_test.TopologyStandalone object at 0x7fd4104a3590> def test_ticket48906_dblock_ldap_update(topology): topology.standalone.log.info('###################################') topology.standalone.log.info('###') topology.standalone.log.info('### Check that after ldap update') topology.standalone.log.info('### - monitor contains DEFAULT') topology.standalone.log.info('### - configured contains DBLOCK_LDAP_UPDATE') topology.standalone.log.info('### - After stop dse.ldif contains DBLOCK_LDAP_UPDATE') topology.standalone.log.info('### - After stop guardian contains DEFAULT') topology.standalone.log.info('### In fact guardian should differ from config to recreate the env') topology.standalone.log.info('### Check th at after restart (DBenv recreated)') topology.standalone.log.info('### - monitor contains DBLOCK_LDAP_UPDATE ') topology.standalone.log.info('### - configured contains DBLOCK_LDAP_UPDATE') topology.standalone.log.info('### - dse.ldif contains DBLOCK_LDAP_UPDATE') topology.standalone.log.info('###') topology.standalone.log.info('###################################') topology.standalone.modify_s(ldbm_config, [(ldap.MOD_REPLACE, DBLOCK_ATTR_CONFIG, DBLOCK_LDAP_UPDATE)]) _check_monitored_value(topology, DBLOCK_DEFAULT) _check_configured_value(topology, attr=DBLOCK_ATTR_CONFIG, expected_value=DBLOCK_LDAP_UPDATE, required=True) topology.standalone.stop(timeout=10) _check_dse_ldif_value(topology, attr=DBLOCK_ATTR_CONFIG, expected_value=DBLOCK_LDAP_UPDATE) > _check_guardian_value(topology, attr=DBLOCK_ATTR_GUARDIAN, expected_value=DBLOCK_DEFAULT) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48906_test.py>:218: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topology = <tickets.ticket48906_test.TopologyStandalone object at 0x7fd4104a3590> attr = 'locks', expected_value = '10000' def _check_guardian_value(topology, attr=DBLOCK_ATTR_CONFIG, expected_value=None): guardian_file = topology.standalone.dbdir + '/db/guardian' > assert(os.path.exists(guardian_file)) E assert <function exists at 0x7fd425a60050>('/var/lib/dirsrv/slapd-standalone/db/db/guardian') E + where <function exists at 0x7fd425a60050> = <module 'posixpath' from '/usr/lib64/python2.7/posixpath.pyc'>.exists E + where <module 'posixpath' from 
'/usr/lib64/python2.7/posixpath.pyc'> = os.path <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48906_test.py>:164: AssertionError ----------------------------- Captured stderr call ----------------------------- INFO:lib389:################################### INFO:lib389:### INFO:lib389:### Check that after ldap update INFO:lib389:### - monitor contai ns DEFAULT INFO:lib389:### - configured contains DBLOCK_LDAP_UPDATE INFO:lib389:### - After stop dse.ldif contains DBLOCK_LDAP_UPDATE INFO:lib389:### - After stop guardian contains DEFAULT INFO:lib389:### In fact guardian should differ from config to recreate the env INFO:lib389:### Check that after restart (DBenv recreated) INFO:lib389:### - monitor contains DBLOCK_LDAP_UPDATE INFO:lib389:### - configured contains DBLOCK_LDAP_UPDATE INFO:lib389:### - dse.ldif contains DBLOCK_LDAP_UPDATE INFO:lib389:### INFO:lib389:################################### _____________________ test_ticket48906_dblock_edit_update ______________________ topology = <tickets.ticket48906_test.TopologyStandalone object at 0x7fd4104a3590> def test_ticket48906_dblock_edit_update(topology): topology.standalone.log.info('###################################') topology.standalone.log.info('###') topology.standalone.log.info('### Check that after stop') topology.standalone.log.info('### - dse.ldif contains DBLOCK_LDAP_UPDATE') topology.standalone.log.info('### - guardian contains DBLOCK_LDAP_UPDATE') topology.standalone.log.info('### Check that edit dse+restart') topology.standalone.log.info('### - monitor contains DBLOCK_EDIT_UPDATE') topology.standalone.log.info('### - configured contains DBLOCK_EDIT_UPDATE') topology.standalone.log.info('### Check that after stop') topology.standalone.log.info('### - dse.ldif contains DBLOCK_EDIT_UPDATE') topology.standalone.log.info('### - guardian contains DBLOCK_EDIT_UPDATE') topology.standalone.log.info('###') topology.standalone.log.info('###################################') topology.standalone.stop(timeout=10) _check_dse_ldif_value(topology, attr=DBLOCK_ATTR_CONFIG, expected_value=DBLOCK_LDAP_UPDATE) > _check_guardian_value(topology, attr=DBLOCK_ATTR_GUARDIAN, expected_value=DBLOCK_LDAP_UPDATE) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48906_test.py>:243: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topology = <tickets.ticket48906_test.TopologyStandalone object at 0x7fd4104a3590> attr = 'locks', expected_value = '20000' def _check_guardian_value(topology, attr=DBLOCK_ATTR_CONFIG, expected_value=None): guardian_file = topology.standalone.dbdir + '/db/guardian' > assert(os.path.exists(guardian_file)) E assert <function exists at 0x7fd425a60050>('/var/lib/dirsrv/slapd-standalone/db/db/guardian') E + where <function exists at 0x7fd425a60050> = <module 'posixpath' from '/usr/lib64/python2.7/posixpath.pyc'>.exists E + where <module 'posixpath' from '/usr/lib64/python2.7/posixpath.pyc'> = os.path <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48906_test.py>:164: AssertionError ----------------------------- Captured stderr call ----------------------------- INFO:lib389:################################### INFO:lib389:### INFO:lib389:### Check that after stop INFO:lib389:### - dse.ldif contains DBLOCK_LDAP_UPDATE INFO:lib389:### - guardian contains DBLOCK_LDAP_UPDATE INFO:lib389:### Check that 
edit dse+restart INFO:lib389:### - monitor contains DBLOCK_EDIT_UPDATE INFO:lib389:### - configured contains DBLOCK_EDIT_UPDATE INFO:lib389:### Check that after stop INFO:lib389:### - dse.ldif contains DBLOCK_EDIT_UPDATE INFO:lib389:### - guardian contains DBLOCK_EDIT_UPDATE INFO:lib389:### INFO:lib389:################################### ________________________ test_ticket48906_dblock_robust ________________________ topology = <tickets.ticket48906_test.TopologyStandalone object at 0x7fd4104a3590> def test_ticket48906_dblock_robust(topology): topology.standalone.log.info('###################################') topology.standalone.log.info('###') topology.standalone.log.info('### Check that the following values are rejected') topology.standalone.log.info('### - negative value') topology.standalone.log.info('### - insuffisant value') topology.standalone.log.info('### - invalid value') topology.standalone.log.info('### Check that minimum value is accepted' ) topology.standalone.log.info('###') topology.standalone.log.info('###################################') topology.standalone.start(timeout=10) > _check_monitored_value(topology, DBLOCK_EDIT_UPDATE) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48906_test.py>:291: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ topology = <tickets.ticket48906_test.TopologyStandalone object at 0x7fd4104a3590> expected_value = '40000' def _check_monitored_value(topology, expected_value): entries = topology.standalone.search_s(ldbm_monitor, ldap.SCOPE_BASE, '(objectclass=*)') > assert(entries[0].hasValue(DBLOCK_ATTR_MONITOR) and entries[0].getValue(DBLOCK_ATTR_MONITOR) == expected_value) E assert (True and '20000' == '40000' E + where True = <bound method Entry.hasValue of dn: cn=database,cn=monitor,cn=ldbm database,cn...pd-db-txn-region-wait-rate: 0\nobjectClass: top\nobjectClass: extensibleObject\n\n>('nsslapd-db-configured-locks') E + where <bound method Entry.hasValue of dn: cn=database,cn=monitor,cn=ldbm database,cn...pd-db-txn-region-wait-rate: 0\nobjectClass: top\nobjectClass: extensibleObject\n\n> = dn: cn=database,cn=monitor,cn=ldbm database,cn=plugins,cn=config\ncn: database\n...apd-db-txn-region-wait-rate: 0\nobjectClass: top\nobjectClass: extensibleObject\n\n.hasValue E - 20000 E ? ^ E + 40000 E ? ^) <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/tickets/ticket48906_test.py>:144: AssertionError ----------------------------- Captured stderr call ----------------------------- INFO:lib389:################################### INFO:lib389:### INFO:lib389:### Check that the following values are rejected INFO:lib389:### - negative value INFO:lib389:### - insuffisant value INFO:lib389:### - invalid value INFO:lib389:### Check that minimum value is accepted INFO:lib389:### INFO:lib389:################################### INFO:lib389:open(): Connecting to uri ldap://localh ost.localdomain:38931/ INFO:lib389:open(): bound as cn=Directory Manager _____________________________ test_dynamic_plugins _____________________________ topology = <test_dynamic_plugins.TopologyStandalone object at 0x7fd414a16290> def test_dynamic_plugins(topology): """ Test Dynamic Plugins - exercise each plugin and its main features, while changing the configuration without restarting the server. Need to test: functionality, stability, and stress. 
These tests need to run with replication disabled, and with replication setup with a second instance. Then test if replication is working, and we have same entries on each side. Functionality - Make sure that as configuration changes are made they take effect immediately. Cross plugin interaction (e.g. automember/memberOf) needs to tested, as well as plugin tasks. Need to test plugin config validation(dependencies, etc). Memory Corruption - Restart the plugins many times, and in different orders and test functionality, and stability. This will excerise the internal plugin linked lists, dse callbacks, and task handlers. Stress - Put the server under load that will trigger multiple plugins(MO, RI, DNA, etc) Restart various plugins while these operations are going on. Perform this test 5 times(stress_max_run). """ REPLICA_PORT = 33334 RUV_FILTER = '(&(nsuniqueid=ffffffff-ffffffff-ffffffff-ffffffff)(objectclass=nstombstone))' master_maxcsn = 0 replica_maxcsn = 0 msg = ' (no replication)' replication_run = False stress_max_runs = 5 # First enable dynamic plugins try: topology.standalone.modify_s(DN_CONFIG, [(ldap.MOD_REPLACE, 'nsslapd-dynamic-plugins', 'on')]) except ldap.LDAPError as e: ldap.fatal('Failed to enable dynamic plugin!' + e.message['desc']) assert False # Test that critical plugins can be updated even though the change might not be applied try: topology.standalone.modify_s(DN_LDBM, [(ldap.MOD_REPLACE, 'description', 'test')]) except ldap.LDAPError as e: ldap.fatal('Failed to apply change to critical plugin' + e.message['desc']) assert False while 1: # # First run the tests with replication disabled, then rerun them with replication set up # ############################################################################ # Test plugin functionality ############################################################################ log.info('####################################################################') log.info('Testing Dynamic Plugins Functionality' + msg + '...') log.info('####################################################################\n') plugin_tests.test_all_plugins(topology.standalone) log.info('####################################################################') log.info('Successfully Tested Dynamic Plugins Functionality' + msg + '.') log.info('####################################################################\n') ############################################################################ # Test the stability by exercising the internal lists, callabcks, and task handlers ############################################################################ log.info('####################################################################') log.info('Testing Dynamic Plugins for Memory Corruption' + msg + '...') log.info('####################################################################\n') prev_plugin_test = None prev_prev_plugin_test = None for plugin_test in plugin_tests.func_tests: # # Restart the plugin several times (and prev plugins) - work that linked list # plugin_test(topology.standalone, "restart") if prev_prev_plugin_test: prev_prev_plugin_test(topology.standalone, "restart") plugin_test(topology.standalone, "restart") if prev_plugin_test: prev_plugin_test(topology.standalone, "restart") plugin_test(topology.standalone, "restart") # Now run the functional test plugin_test(topology.standalone) # Set the previous tests if prev_plugin_test: prev_prev_plugin_test = prev_plugin_test prev_plugin_test = plugin_test 
log.info('####################################################################') log.info('Successfully Tested Dynamic Plugins for Memory Corruption' + msg + '.') log.info('####################################################################\n') ############################################################################ # Stress two plugins while restarting it, and while restarting other plugins. # The goal is to not crash, and have the plugins work after stressing them. ############################################################################ log.info('####################################################################') log.info('Stressing Dynamic Plugins' + msg + '...') log.info('####################################################################\n') stress_tests.configureMO(topology.standalone) stress_tests.configureRI(topology.standalone) stress_count = 0 while stress_count < stress_max_runs: log.info('####################################################################') log.info('Running stress test' + msg + '. Run (%d/%d)...' % (stress_count + 1, stress_max_runs)) log.info('####################################################################\n') try: # Launch three new threads to add a bunch of users add_users = stress_tests.AddUsers(topology.standalone, 'employee', True) add_users.start() add_users2 = stress_tests.AddUsers(topology.standalone, 'entry', True) add_users2.start() add_users3 = stress_tests.AddUsers(topology.standalone, 'person', True) add_users3.start() time.sleep(1) # While we are adding users restart the MO plugin and an idle plugin topology.standalone.plugins.disable(name=PLUGIN_MEMBER_OF) topology.standalone.plugins.enable(name=PLUGIN_MEMBER_OF) time.sleep(1) topology.standalone.plugins.disable(name=PLUGIN_MEMBER_OF) time.sleep(1) topology.standalone.plugins.enable(name=PLUGIN_MEMBER_OF) topology.standalone.plugins.disable(name=PLUGIN_LINKED_ATTRS) topology.standalone.plugins.enable(name=PLUGIN_LINKED_ATTRS) time.sleep(1) topology.standalone.plugins.disable(name=PLUGIN_MEMBER_OF) topology.standalone.plugins.enable(name=PLUGIN_MEMBER_OF) time.sleep(2) topology.standalone.plugins.disable(name=PLUGIN_MEMBER_OF) time.sleep(1) topol ogy.standalone.plugins.enable(name=PLUGIN_MEMBER_OF) topology.standalone.plugins.disable(name=PLUGIN_LINKED_ATTRS) topology.standalone.plugins.enable(name=PLUGIN_LINKED_ATTRS) topology.standalone.plugins.disable(name=PLUGIN_MEMBER_OF) time.sleep(1) topology.standalone.plugins.enable(name=PLUGIN_MEMBER_OF) topology.standalone.plugins.disable(name=PLUGIN_MEMBER_OF) topology.standalone.plugins.enable(name=PLUGIN_MEMBER_OF) # Wait for the 'adding' threads to complete add_users.join() add_users2.join() add_users3.join() # Now launch three threads to delete the users del_users = stress_tests.DelUsers(topology.standalone, 'employee') del_users.start() del_users2 = stress_tests.DelUsers(topology.standalone, 'entry') del_users2.start() del_users3 = stress_tests.DelUsers(topology.standalone, 'person') del_users3.start() time.sleep(1) # Restart both the MO, RI plugins during these deletes, and an idle plugin topology.standalone.plugins.disable(name=PLUGIN_REFER_INTEGRITY) topology.standalone.plugins.disable(name=PLUGIN_MEMBER_OF) topology.standalone.plugins.enable(name=PLUGIN_MEMBER_OF) topology.standalone.plugins.enable(name=PLUGIN_REFER_INTEGRITY) time.sleep(1) topology.standalone.plugins.disable(name=PLUGIN_REFER_INTEGRITY) time.sleep(1) topology.standalone.plugins.disable(name=PLUGIN_MEMBER_OF) time.sleep(1) 
topology.standalone.plugins.enable(name=PLUGIN_MEMBER_OF) time.sleep(1) topology.standalone.plugins.enable(name=PLUGIN_REFER_INTEGRITY) topology.standalone.plugins.disable(name=PLUGIN_LINKED_ATTRS) topology.standalone.plugins.enable(name=PLUGIN_LINKED_ATTRS) topology.standalone.plugins.disable(name=PLUGIN_REFER_INTEGRITY) topology.standalone.plugins.disable(name=PLUGIN_MEMBER_OF) topology.standalone.plugins.enable(name=PLUGIN_MEMBER_OF) topology.standalone.plugins.enable(name=PLUGIN_REFER_INTEGRITY) time.sleep(2) topology.standalone.plugins.disable(name=PLUGIN_REFER_INTEGRITY) time.sleep(1) topology.standalone.plugins.disable(name=PLUGIN_MEMBER_OF) time.sleep(1) topology.standalone.plugins.enable(name=PLUGIN_MEMBER_OF) time.sleep(1) topology.standalone.plugins.enable(name=PLUGIN_REFER_INTEGRITY) topology.standalone.plugins.disable(name=PLUGIN_LINKED_ATTRS) topology.standalone.plugins.enable(name=PLUGIN_LINKED_ATTRS) # Wait for the 'deleting' threads to complete del_users.join() del_users2.join() del_users3.join() # Now make sure both the MO and RI plugins still work correctly plugin_tests.func_tests[8](topology.standalone) # RI plugin plugin_tests.func_tests[5](topology.standalone) # MO plugin # Cleanup the stress tests stress_tests.cleanup(topology.standalone) except: log.info('Stress test failed!') repl_fail(replica_inst) stress_count += 1 log.info('####################################################################') log.info('Successfully Stressed Dynamic Plugins' + msg + '. Completed (%d/%d)' % (stress_count, stress_max_runs)) log.info('####################################################################\n') if replication_run: # We're done. break else: # # Enable replication and run everything one more time # log.info('Setting up replication, and rerunning the tests...\n') # Create replica instance replica_inst = DirSrv(verbose=False) args_instance[SER_HOST] = LOCALHOST args_instance[SER_PORT] = REPLICA_PORT args_instance[SER_SERVERID_PROP] = 'replica' args_instance[SER_CREATION_SUFFIX] = DEFAULT_SUFFIX args_replica_inst = args_instance.copy() replica_inst.allocate(args_replica_inst) replica_inst.create() replica_inst.open() try: topology.standalone.replica.enableReplication(suffix=DEFAULT_SUFFIX, role=REPLICAROLE_MASTER, replicaId=1) replica_inst.replica.enableReplication(suffix=DEFAULT_SUFFIX, role=REPLICAROLE_CONSUMER, replicaId=65535) properties = {RA_NAME: r'to_replica', RA_BINDDN: defaultProperties[REPLICATION_BIND_DN], RA_BINDPW: defaultProperties[REPLICATION_BIND_PW], RA_METHOD: defaultProperties[REPLICATION_BIND_METHOD], RA_TRANSPORT_PROT: defaultProperties[REPLICATION_TRANSPORT]} repl_agreement = topology.standalone.agreement.create(suffix=DEFAULT_SUFFIX, host=LOCALHOST, port=REPLICA_PORT, properties=properties) if not repl_agreement: log.fatal("Fail to create a replica agreement") repl_fail(replica_inst) topology.standalone.agreement.init(DEFAULT_SUFFIX, LOCALHOST, REPLICA_PORT) topology.standalone.waitForReplInit(repl_agreement) except: log.info('Failed to setup replication!') > repl_fail(replica_inst)
<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/suites/dynamic-plugins/test_dynamic_plugins.py>:347:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
replica = <lib389.DirSrv instance at 0x7fd410a04128>
def repl_fail(replica): # remove replica instance, and assert failure replica.delete() > assert False
E assert False
<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/suites/dynamic-plugins/test_dynamic_plugins.py>:40: AssertionError
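A side note on debuggability: the bare except: around the replication setup swallows the real exception, so the nightly log only shows "Failed to setup replication!" with no cause. A small, hypothetical variant of that block (not what the test currently does, and reusing the names from the quoted code) would record the underlying error before tearing the replica down:

    import traceback
    try:
        # ... replication setup as quoted above ...
        topology.standalone.agreement.init(DEFAULT_SUFFIX, LOCALHOST, REPLICA_PORT)
        topology.standalone.waitForReplInit(repl_agreement)
    except Exception as e:
        # Surface the underlying error before the replica instance is deleted,
        # so the nightly log records more than "Failed to setup replication!".
        log.error('Failed to setup replication: %s' % str(e))
        log.error(traceback.format_exc())
        repl_fail(replica_inst)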
---------------------------- Captured stdout setup -----------------------------
OK group dirsrv exists
OK user dirsrv exists
----------------------------- Captured stdout call -----------------------------
OK group dirsrv exists
OK user dirsrv exists
Instance slapd-passthru removed.
OK group dirsrv exists
OK user dirsrv exists
Instance slapd-passthru removed.
OK group dirsrv exists
OK user dirsrv exists
Instance slapd-replica removed.
----------------------------- Captured stderr call -----------------------------
INFO:test_dynamic_plugins:####################################################################
INFO:test_dynamic_plugins:Testing Dynamic Plugins Functionality (no replication)...
INFO:test_dynamic_plugins:####################################################################
INFO:plugin_tests:Testing Account Policy Plugin...
INFO:plugin_tests:test_acctpolicy: PASS
INFO:plugin_tests:Testing attribute uniqueness...
INFO:plugin_tests:test_attruniq: PASS
INFO:plugin_tests:Testing Auto Membership Plugin...
INFO:plugin_tests:test_automember: PASS
INFO:plugin_tests:Testing Distributed Numeric Assignment Plugin...
INFO:plugin_tests:test_dna: PASS
INFO:plugin_tests:Testing Linked Attributes...
INFO:plugin_tests:test_linkedattrs: PASS
INFO:plugin_tests:Testing MemberOf Plugin...
INFO:plugin_tests:test_memberof: PASS
INFO:plugin_tests:Testing Managed Entries...
INFO:plugin_tests:test_mep: PASS
INFO:plugin_tests:Testing Pass Through Authentication...
INFO:lib389:List backend with suffix=dc=pass2,dc=thru
INFO:lib389:Creating a local backend
INFO:lib389:List backend cn=PASS2,cn=ldbm database,cn=plugins,cn=config
INFO:lib389:Found entry dn: cn=PASS2,cn=ldbm database,cn=plugins,cn=config cn: PASS2 nsslapd-cachememsize: 10485760 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-passthru/db/PASS2 nsslapd-dncachememsize: 10485760 nsslapd-readonly: off nsslapd-require-index: off nsslapd-suffix: dc=pass2,dc=thru objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance
INFO:lib389:Entry dn: cn="dc=pass2,dc=thru",cn=mapping tree,cn=config cn: dc=pass2,dc=thru nsslapd-backend: PASS2 nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree
INFO:lib389:Found entry dn: cn=dc\3Dpass2\2Cdc\3Dthru,cn=mapping tree,cn=config cn: dc=pass2,dc=thru nsslapd-backend: PASS2 nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree
INFO:plugin_tests:test_passthru: PASS
INFO:plugin_tests:Testing referential integrity postoperation...
INFO:plugin_tests:test_referint: PASS
INFO:plugin_tests:Testing Retro Changelog Plugin...
INFO:plugin_tests:test_retrocl: PASS
INFO:plugin_tests:Testing RootDN Access Control...
INFO:plugin_tests:test_rootdn: PASS
INFO:test_dynamic_plugins:####################################################################
INFO:test_dynamic_plugins:Successfully Tested Dynamic Plugins Functionality (no replication).
INFO:test_dynamic_plugins:####################################################################
INFO:test_dynamic_plugins:####################################################################
INFO:test_dynamic_plugins:Testing Dynamic Plugins for Memory Corruption (no replication)...
INFO:test_dynamic_plugins:####################################################################
INFO:plugin_tests:Testing Account Policy Plugin...
INFO:plugin_tests:test_acctpolicy: PASS
INFO:plugin_tests:Testing attribute uniqueness...
INFO:plugin_tests:test_attruniq: PASS
INFO:plugin_tests:Testing Auto Membership Plugin...
INFO:plugin_tests:test_automember: PASS
INFO:plugin_tests:Testing Distributed Numeric Assignment Plugin...
INFO:plugin_tests:test_dna: PASS
INFO:plugin_tests:Testing Linked Attributes...
INFO:plugin_tests:test_linkedattrs: PASS
INFO:plugin_tests:Testing MemberOf Plugin...
INFO:plugin_tests:test_memberof: PASS
INFO:plugin_tests:Testing Managed Entries...
INFO:plugin_tests:test_mep: PASS
INFO:plugin_tests:Testing Pass Through Authentication...
INFO:lib389:List backend with suffix=dc=pass2,dc=thru
INFO:lib389:Creating a local backend
INFO:lib389:List backend cn=PASS2,cn=ldbm database,cn=plugins,cn=config
INFO:lib389:Found entry dn: cn=PASS2,cn=ldbm database,cn=plugins,cn=config cn: PASS2 nsslapd-cachememsize: 10485760 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-passthru/db/PASS2 nsslapd-dncachememsize: 10485760 nsslapd-readonly: off nsslapd-require-index: off nsslapd-suffix: dc=pass2,dc=thru objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance
INFO:lib389:Entry dn: cn="dc=pass2,dc=thru",cn=mapping tree,cn=config cn: dc=pass2,dc=thru nsslapd-backend: PASS2 nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree
INFO:lib389:Found entry dn: cn=dc\3Dpass2\2Cdc\3Dthru,cn=mapping tree,cn=config cn: dc=pass2,dc=thru nsslapd-backend: PASS2 nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree
INFO:plugin_tests:test_passthru: PASS
INFO:plugin_tests:Testing referential integrity postoperation...
INFO:plugin_tests:test_referint: PASS
INFO:plugin_tests:Testing Retro Changelog Plugin...
INFO:plugin_tests:test_retrocl: PASS
INFO:plugin_tests:Testing RootDN Access Control...
INFO:plugin_tests:test_rootdn: PASS
INFO:test_dynamic_plugins:####################################################################
INFO:test_dynamic_plugins:Successfully Tested Dynamic Plugins for Memory Corruption (no replication).
INFO:test_dynamic_plugins:####################################################################
INFO:test_dynamic_plugins:####################################################################
INFO:test_dynamic_plugins:Stressing Dynamic Plugins (no replication)...
INFO:test_dynamic_plugins:####################################################################
INFO:test_dynamic_plugins:####################################################################
INFO:test_dynamic_plugins:Running stress test (no replication). Run (1/5)...
INFO:test_dynamic_plugins:####################################################################
INFO:stress_tests:AddUsers - Adding 250 entries (employee)...
INFO:stress_tests:AddUsers - Adding 250 entries (entry)...
INFO:stress_tests:AddUsers - Adding 250 entries (person)...
INFO:stress_tests:AddUsers - Finished adding 250 entries (person).
INFO:stress_tests:AddUsers - Finished adding 250 entries (employee).
INFO:stress_tests:AddUsers - Finished adding 250 entries (entry).
INFO:stress_tests:DelUsers - Deleting 250 entries (employee)...
INFO:stress_tests:DelUsers - Deleting 250 entries (entry)...
INFO:stress_tests:DelUsers - Deleting 250 entries (person)...
INFO:stress_tests:DelUsers - Finished deleting 250 entries (employee).
INFO:stress_tests:DelUsers - Finished deleting 250 entries (entry).
INFO:stress_tests:DelUsers - Finished deleting 250 entries (person).
INFO:plugin_tests:Testing referential integrity postoperation...
INFO:plugin_tests:test_referint: PASS
INFO:plugin_tests:Testing MemberOf Plugin...
INFO:plugin_tests:test_memberof: PASS
INFO:test_dynamic_plugins:####################################################################
INFO:test_dynamic_plugins:Successfully Stressed Dynamic Plugins (no replication). Completed (1/5)
INFO:test_dynamic_plugins:####################################################################
INFO:test_dynamic_plugins:####################################################################
INFO:test_dynamic_plugins:Running stress test (no replication). Run (2/5)...
INFO:test_dynamic_plugins:####################################################################
INFO:stress_tests:AddUsers - Adding 250 entries (employee)...
INFO:stress_tests:AddUsers - Adding 250 entries (person)...
INFO:stress_tests:AddUsers - Adding 250 entries (entry)...
INFO:stress_tests:AddUsers - Finished adding 250 entries (person).
INFO:stress_tests:AddUsers - Finished adding 250 entries (entry).
INFO:stress_tests:AddUsers - Finished adding 250 entries (employee).
INFO:stress_tests:DelUsers - Deleting 250 entries (employee)...
INFO:stress_tests:DelUsers - Deleting 250 entries (entry)...
INFO:stress_tests:DelUsers - Deleting 250 entries (person)...
INFO:stress_tests:DelUsers - Finished deleting 250 entries (person).
INFO:stress_tests:DelUsers - Finished deleting 250 entries (employee).
INFO:stress_tests:DelUsers - Finished deleting 250 entries (entry).
INFO:plugin_tests:Testing referential integrity postoperation...
INFO:plugin_tests:test_referint: PASS
INFO:plugin_tests:Testing MemberOf Plugin...
INFO:plugin_tests:test_memberof: PASS
INFO:test_dynamic_plugins:####################################################################
INFO:test_dynamic_plugins:Successfully Stressed Dynamic Plugins (no replication). Completed (2/5)
INFO:test_dynamic_plugins:####################################################################
INFO:test_dynamic_plugins:####################################################################
INFO:test_dynamic_plugins:Running stress test (no replication). Run (3/5)...
INFO:test_dynamic_plugins:####################################################################
INFO:stress_tests:AddUsers - Adding 250 entries (employee)...
INFO:stress_tests:AddUsers - Adding 250 entries (entry)...
INFO:stress_tests:AddUsers - Adding 250 entries (person)...
INFO:stress_tests:AddUsers - Finished adding 250 entries (employee).
INFO:stress_tests:AddUsers - Finished adding 250 entries (entry).
INFO:stress_tests:AddUsers - Finished adding 250 entries (person).
INFO:stress_tests:DelUsers - Deleting 250 entries (person)...
INFO:stress_tests:DelUsers - Deleting 250 entries (entry)...
INFO:stress_tests:DelUsers - Deleting 250 entries (employee)...
INFO:stress_tests:DelUsers - Finished deleting 250 entries (entry).
INFO:stress_tests:DelUsers - Finished deleting 250 entries (person).
INFO:stress_tests:DelUsers - Finished deleting 250 entries (employee).
INFO:plugin_tests:Testing referential integrity postoperation...
INFO:plugin_tests:test_referint: PASS
INFO:plugin_tests:Testing MemberOf Plugin...
INFO:plugin_tests:test_memberof: PASS
INFO:test_dynamic_plugins:####################################################################
INFO:test_dynamic_plugins:Successfully Stressed Dynamic Plugins (no replication). Completed (3/5)
INFO:test_dynamic_plugins:####################################################################
INFO:test_dynamic_plugins:####################################################################
INFO:test_dynamic_plugins:Running stress test (no replication). Run (4/5)...
INFO:test_dynamic_plugins:####################################################################
INFO:stress_tests:AddUsers - Adding 250 entries (entry)...
INFO:stress_tests:AddUsers - Adding 250 entries (person)...
INFO:stress_tests:AddUsers - Adding 250 entries (employee)...
INFO:stress_tests:AddUsers - Finished adding 250 entries (employee).
INFO:stress_tests:AddUsers - Finished adding 250 entries (entry).
INFO:stress_tests:AddUsers - Finished adding 250 entries (person).
INFO:stress_tests:DelUsers - Deleting 250 entries (employee)...
INFO:stress_tests:DelUsers - Deleting 250 entries (entry)...
INFO:stress_tests:DelUsers - Deleting 250 entries (person)...
INFO:stress_tests:DelUsers - Finished deleting 250 entries (employee).
INFO:stress_tests:DelUsers - Finished deleting 250 entries (person).
INFO:stress_tests:DelUsers - Finished deleting 250 entries (entry).
INFO:plugin_tests:Testing referential integrity postoperation...
INFO:plugin_tests:test_referint: PASS
INFO:plugin_tests:Testing MemberOf Plugin...
INFO:plugin_tests:test_memberof: PASS
INFO:test_dynamic_plugins:####################################################################
INFO:test_dynamic_plugins:Successfully Stressed Dynamic Plugins (no replication). Completed (4/5)
INFO:test_dynamic_plugins:####################################################################
INFO:test_dynamic_plugins:####################################################################
INFO:test_dynamic_plugins:Running stress test (no replication). Run (5/5)...
INFO:test_dynamic_plugins:####################################################################
INFO:stress_tests:AddUsers - Adding 250 entries (employee)...
INFO:stress_tests:AddUsers - Adding 250 entries (person)...
INFO:stress_tests:AddUsers - Adding 250 entries (entry)...
INFO:stress_tests:AddUsers - Finished adding 250 entries (person).
INFO:stress_tests:AddUsers - Finished adding 250 entries (employee).
INFO:stress_tests:AddUsers - Finished adding 250 entries (entry).
INFO:stress_tests:DelUsers - Deleting 250 entries (employee)...
INFO:stress_tests:DelUsers - Deleting 250 entries (entry)...
INFO:stress_tests:DelUsers - Deleting 250 entries (person)...
INFO:stress_tests:DelUsers - Finished deleting 250 entries (person).
INFO:stress_tests:DelUsers - Finished deleting 250 entries (entry).
INFO:stress_tests:DelUsers - Finished deleting 250 entries (employee).
INFO:plugin_tests:Testing referential integrity postoperation...
INFO:plugin_tests:test_referint: PASS
INFO:plugin_tests:Testing MemberOf Plugin...
INFO:plugin_tests:test_memberof: PASS
INFO:test_dynamic_plugins:####################################################################
INFO:test_dynamic_plugins:Successfully Stressed Dynamic Plugins (no replication). Completed (5/5)
INFO:test_dynamic_plugins:####################################################################
INFO:test_dynamic_plugins:Setting up replication, and rerunning the tests...
INFO:lib389:List backend with suffix=dc=example,dc=com
INFO:lib389:Found entry dn: cn=replrepl,cn=config cn: bind dn pseudo user cn: replrepl objectClass: top objectClass: person sn: bind dn pseudo user userPassword: {SSHA512}JV7exSg0ZOVtFUJs1Me7nkHfcAk+Scrghco05426bFodD5+k04fScbd6z455BXTjuqmFvReC2dPFqsj+diUbPKR2ZKghF0pm
INFO:lib389:List backend with suffix=dc=example,dc=com
INFO:lib389:Found entry dn: cn=replrepl,cn=config cn: bind dn pseudo user cn: replrepl objectClass: top objectClass: person sn: bind dn pseudo user userPassword: {SSHA512}vWC5GEutgGMU45/KDdNDLhz/glIDFixC0LmS/ROY82BQxsgSCPhntMOXf2Apl+yAZGBty+57SzJFCNyij8g0dTpAGpxurYkw
INFO:lib389:Starting total init cn=to_replica,cn=replica,cn=dc\3Dexample\2Cdc\3Dcom,cn=mapping tree,cn=config
INFO:test_dynamic_plugins:Failed to setup replication!
____________________________ test_range_search_init ____________________________
topology = <suites.memory_leaks.range_search_test.TopologyStandalone object at 0x7fd410939710>
def test_range_search_init(topology): ''' Enable retro cl, and valgrind. Since valgrind tests move the ns-slapd binary around, it's important to always "valgrind_disable" before "assert False"ing, otherwise we leave the wrong ns-slapd in place if there is a failure ''' log.info('Initializing test_range_search...') topology.standalone.plugins.enable(name=PLUGIN_RETRO_CHANGELOG) # First stop the instance topology.standalone.stop(timeout=30) # Get the sbin directory so we know where to replace 'ns-slapd' sbin_dir = get_sbin_dir(prefix=topology.standalone.prefix) # Enable valgrind if not topology.standalone.has_asan(): > valgrind_enable(sbin_dir)
<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/suites/memory_leaks/range_search_test.py>:86:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
sbin_dir = '/usr/sbin'
wrapper = '<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/ns-slapd.valgrind'>
def valgrind_enable(sbin_dir, wrapper=None): ''' Copy the valgrind ns-slapd wrapper into the /sbin directory (making a backup of the original ns-slapd binary). The script calling valgrind_enable() must be run as the 'root' user, as selinux needs to be disabled for valgrind to work. The server instance(s) should be stopped prior to calling this function. Then after calling valgrind_enable(): - Start the server instance(s) with a timeout of 60 (valgrind takes a while to startup) - Run the tests - Stop the server - Get the results file - Run valgrind_check_file(result_file, "pattern", "pattern", ...) - Run valgrind_disable() :param sbin_dir: the location of the ns-slapd binary (e.g. /usr/sbin) :param wrapper: The valgrind wrapper script for ns-slapd (if not set, a default wrapper is used) :raise IOError: If there is a problem setting up the valgrind scripts :raise EnvironmentError: If script is not run as 'root' ''' if os.geteuid() != 0: log.error('This script must be run as root to use valgrind') raise EnvironmentError if not wrapper: # use the default ns-slapd wrapper wrapper = '%s/%s' % (os.path.dirname(os.path.abspath(__file__)), VALGRIND_WRAPPER) nsslapd_orig = '%s/ns-slapd' % sbin_dir nsslapd_backup = '%s/ns-slapd.original' % sbin_dir if os.path.isfile(nsslapd_backup): # There is a backup which means we never cleaned up from a previous # run (failed test?)
if not filecmp.cmp(nsslapd_backup, nsslapd_orig): # Files are different sizes, we assume valgrind is already setup log.info('Valgrind is already enabled.') return # Check both nsslapd's exist if not os.path.isfile(wrapper): raise IOError('The valgrind wrapper (%s) does not exist. file=%s' % (wrapper, __file__)) if not os.path.isfile(nsslapd_orig): raise IOError('The binary (%s) does not exist or is not accessible.' % nsslapd_orig) # Make a backup of the original ns-slapd and copy the wrapper into place try: shutil.copy2(nsslapd_orig, nsslapd_backup) except IOError as e: log.fatal('valgrind_enable(): failed to backup ns-slapd, error: %s' % e.strerror) raise IOError('failed to backup ns-slapd, error: %s' % e.strerror) # Copy the valgrind wrapper into place try: shutil.copy2(wrapper, nsslapd_orig) except IOError as e: log.fatal('valgrind_enable(): failed to copy valgrind wrapper ' 'to ns-slapd, error: %s' % e.strerror) raise IOError('failed to copy valgrind wrapper to ns-slapd, error: %s' % > e.strerror)
E IOError: failed to copy valgrind wrapper to ns-slapd, error: Text file busy
<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/lib389/lib389/utils.py>:255: IOError
---------------------------- Captured stdout setup -----------------------------
OK group dirsrv exists
OK user dirsrv exists
----------------------------- Captured stderr call -----------------------------
INFO:suites.memory_leaks.range_search_test:Initializing test_range_search...
CRITICAL:lib389.utils:valgrind_enable(): failed to copy valgrind wrapper to ns-slapd, error: Text file busy
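The "Text file busy" (EBUSY) error above is what shutil.copy2() reports when the target binary is still being executed, i.e. an ns-slapd process was apparently still running when valgrind_enable() tried to swap the wrapper in. A defensive guard along these lines could turn that into a clearer failure; this is only a sketch, not part of lib389, and it assumes the psmisc fuser utility is installed:

    import subprocess

    def assert_ns_slapd_not_in_use(binary='/usr/sbin/ns-slapd'):
        # 'fuser -s' exits 0 when some process still has the file open or mapped;
        # refuse to overwrite the binary in that case rather than hitting EBUSY
        # ("Text file busy") halfway through the copy.
        if subprocess.call(['fuser', '-s', binary]) == 0:
            raise RuntimeError('%s is still in use - stop all instances before '
                               'enabling valgrind' % binary)

Calling such a check right before the shutil.copy2() calls would make the root cause obvious in the nightly log.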
___________________________ test_multi_suffix_search ___________________________
topology = <suites.paged_results.paged_results_test.TopologyStandalone object at 0x7fd40bfa4410>
test_user = None, new_suffixes = None
def test_multi_suffix_search(topology, test_user, new_suffixes): """Verify that page result search returns empty cookie if there is no returned entry. :Feature: Simple paged results :Setup: Standalone instance, test user for binding, two suffixes with backends, one is inserted into another, 10 users for the search base within each suffix :Steps: 1. Bind as test user 2. Search through all 20 added users with a simple paged control using page_size = 4 3. Wait some time for the logs to be updated 4. Check access log :Assert: All users should be found, the access log should contain the pr_cookie for each page request and it should be equal to 0, except the last one, which should be equal to -1 """ search_flt = r'(uid=test*)' searchreq_attrlist = ['dn', 'sn'] page_size = 4 users_num = 20 log.info('Clear the access log') topology.standalone.deleteAccessLogs() users_list_1 = add_users(topology, users_num / 2, NEW_SUFFIX_1) users_list_2 = add_users(topology, users_num / 2, NEW_SUFFIX_2) try: log.info('Set DM bind') topology.standalone.simple_bind_s(DN_DM, PASSWORD) req_ctrl = SimplePagedResultsControl(True, size=page_size, cookie='') all_results = paged_search(topology, NEW_SUFFIX_1, [req_ctrl], search_flt, searchreq_attrlist) log.info('{} results'.format(len(all_results))) assert len(all_results) == users_num log.info('Restart the server to flush the logs') topology.standalone.restart(timeout=10) access_log_lines = topology.standalone.ds_access_log.match('.*pr_cookie=.*') pr_cookie_list = ([line.rsplit('=', 1)[-1] for line in access_log_lines]) pr_cookie_list = [int(pr_cookie) for pr_cookie in pr_cookie_list] log.info('Assert that last pr_cookie == -1 and others pr_cookie == 0') pr_cookie_zeros = list(pr_cookie == 0 for pr_cookie in pr_cookie_list[0:-1]) assert all(pr_cookie_zeros) > assert pr_cookie_list[-1] == -1
E IndexError: list index out of range
<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-NIGHTLY/ws/source/ds/dirsrvtests/tests/suites/paged_results/paged_results_test.py>:1198: IndexError
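The IndexError means pr_cookie_list ended up empty, i.e. the ds_access_log.match() call found no pr_cookie lines at all, possibly because the access log had not been written or flushed yet when it was searched. A hypothetical hardening of the final assertions (same logic as the quoted code, but failing with an explicit message on an empty match) would look roughly like:

    # Guard against an empty match result so a missing or unflushed access log
    # fails with a clear message instead of an IndexError on pr_cookie_list[-1].
    access_log_lines = topology.standalone.ds_access_log.match('.*pr_cookie=.*')
    assert access_log_lines, 'no pr_cookie entries found in the access log'
    pr_cookie_list = [int(line.rsplit('=', 1)[-1]) for line in access_log_lines]
    assert all(c == 0 for c in pr_cookie_list[:-1])
    assert pr_cookie_list[-1] == -1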
---------------------------- Captured stderr setup -----------------------------
INFO:suites.paged_results.paged_results_test:Adding suffix:o=test_parent and backend: parent_base
INFO:lib389:List backend with suffix=o=test_parent
INFO:lib389:Creating a local backend
INFO:lib389:List backend cn=parent_base,cn=ldbm database,cn=plugins,cn=config
INFO:lib389:Found entry dn: cn=parent_base,cn=ldbm database,cn=plugins,cn=config cn: parent_base nsslapd-cachememsize: 10485760 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-standalone/db/parent_base nsslapd-dncachememsize: 10485760 nsslapd-readonly: off nsslapd-require-index: off nsslapd-suffix: o=test_parent objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance
INFO:lib389:Entry dn: cn="o=test_parent",cn=mapping tree,cn=config cn: o=test_parent nsslapd-backend: parent_base nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree
INFO:lib389:Found entry dn: cn=o\3Dtest_parent,cn=mapping tree,cn=config cn: o=test_parent nsslapd-backend: parent_base nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree
INFO:suites.paged_results.paged_results_test:Adding suffix:ou=child,o=test_parent and backend: child_base
INFO:lib389:List backend with suffix=ou=child,o=test_parent
INFO:lib389:Creating a local backend
INFO:lib389:List backend cn=child_base,cn=ldbm database,cn=plugins,cn=config
INFO:lib389:Found entry dn: cn=child_base,cn=ldbm database,cn=plugins,cn=config cn: child_base nsslapd-cachememsize: 10485760 nsslapd-cachesize: -1 nsslapd-directory: /var/lib/dirsrv/slapd-standalone/db/child_base nsslapd-dncachememsize: 10485760 nsslapd-readonly: off nsslapd-require-index: off nsslapd-suffix: ou=child,o=test_parent objectClass: top objectClass: extensibleObject objectClass: nsBackendInstance
INFO:lib389:Entry dn: cn="ou=child,o=test_parent",cn=mapping tree,cn=config cn: ou=child,o=test_parent nsslapd-backend: child_base nsslapd-parent-suffix: o=test_parent nsslapd-state: backend objectclass: top objectclass: extensibleObject objectclass: nsMappingTree
INFO:lib389:Found entry dn: cn=ou\3Dchild\2Co\3Dtest_parent,cn=mapping tree,cn=config cn: ou=child,o=test_parent nsslapd-backend: child_base nsslapd-parent-suffix: o=test_parent nsslapd-state: backend objectClass: top objectClass: extensibleObject objectClass: nsMappingTree
INFO:suites.paged_results.paged_results_test:Adding ACI to allow our test user to search
----------------------------- Captured stderr call -----------------------------
INFO:suites.paged_results.paged_results_test:Clear the access log
INFO:suites.paged_results.paged_results_test:Adding 10 users
INFO:suites.paged_results.paged_results_test:Adding 10 users
INFO:suites.paged_results.paged_results_test:Set DM bind
INFO:suites.paged_results.paged_results_test:Running simple paged result search with - search suffix: o=test_parent; filter: (uid=test*); attr list ['dn', 'sn']; page_size = 4; controls: [<ldap.controls.libldap.SimplePagedResultsControl instance at 0x7fd410506200>].
INFO:suites.paged_results.paged_results_test:Getting page 0
INFO:suites.paged_results.paged_results_test:Getting page 1
INFO:suites.paged_results.paged_results_test:Getting page 2
INFO:suites.paged_results.paged_results_test:Getting page 3
INFO:suites.paged_results.paged_results_test:Getting page 4
INFO:suites.paged_results.paged_results_test:Getting page 5
INFO:suites.paged_results.paged_results_test:20 results
INFO:suites.paged_results.paged_results_test:Restart the server to flush the logs
INFO:suites.paged_results.paged_results_test:Assert that last pr_cookie == -1 and others pr_cookie == 0
INFO:suites.paged_results.paged_results_test:Remove added users
INFO:suites.paged_results.paged_results_test:Deleting 10 users
INFO:suites.paged_results.paged_results_test:Deleting 10 users
============== 32 failed, 484 passed, 5 error in 9544.66 seconds ===============
+ MSG=FAILED
+ RC=1
+ sudo /usr/sbin/sendmail mreynolds@xxxxxxxxxx
+ exit 1
Build step 'Execute shell' marked build as failure
_______________________________________________
389-devel mailing list -- 389-devel@xxxxxxxxxxxxxxxxxxxxxxx
To unsubscribe send an email to 389-devel-leave@xxxxxxxxxxxxxxxxxxxxxxx