See <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-COMMIT/162/>

------------------------------------------
[...truncated 4682 lines...]
+ '[' 0 -ne 0 ']'
+ sudo sysctl -w fs.suid_dumpable=1
fs.suid_dumpable = 1
+ sudo chmod 777 /etc/sysconfig/dirsrv.systemd
+ sudo echo LimitCORE=infinity
+ sudo systemctl daemon-reload
+ echo Run the lib389 tests...
Run the lib389 tests...
+ cd ../../dirsrvtests/tests
++ date
+ DATE='Mon Feb 6 16:47:32 CET 2017'
++ sudo PYTHONPATH=/usr/lib64/python2.7:/usr/lib64/python2.7/plat-linux2:/usr/lib64/python2.7/site-packages:/usr/lib/python2.7/site-packages:<http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-COMMIT/ws/source/lib389/> py.test -v -x suites/basic/basic_test.py
+ RESULT='============================= test session starts ==============================
platform linux2 -- Python 2.7.13, pytest-2.9.2, py-1.4.32, pluggy-0.3.1 -- /usr/bin/python2
cachedir: suites/basic/.cache
rootdir: <http://vm-058-081.abc.idm.lab.eng.brq.redhat.com:8080/job/389-DS-COMMIT/ws/source/ds/dirsrvtests/tests/suites/basic,> inifile:
plugins: sourceorder-0.5, multihost-1.1
collecting ... collected 23 items

suites/basic/basic_test.py::test_basic_ops PASSED
suites/basic/basic_test.py::test_basic_import_export FAILEDInstance slapd-standalone_1 removed.

=================================== FAILURES ===================================
___________________________ test_basic_import_export ___________________________

topology_st = <lib389.topologies.TopologyMain object at 0x7f1e3acaf550>
import_example_ldif = None

    def test_basic_import_export(topology_st, import_example_ldif):
        """Test online and offline LDIF imports & exports"""

        log.info('Running test_basic_import_export...')

        tmp_dir = '/tmp'

        #
        # Test online/offline LDIF imports
        #

        # Generate a test ldif (50k entries)
        ldif_dir = topology_st.standalone.get_ldif_dir()
        import_ldif = ldif_dir + '/basic_import.ldif'
        try:
            topology_st.standalone.buildLDIF(50000, import_ldif)
        except OSError as e:
            log.fatal('test_basic_import_export: failed to create test ldif,\
                      error: %s - %s' % (e.errno, e.strerror))
            assert False

        # Online
        try:
            topology_st.standalone.tasks.importLDIF(suffix=DEFAULT_SUFFIX,
                                                    input_file=import_ldif,
                                                    args={TASK_WAIT: True})
        except ValueError:
            log.fatal('test_basic_import_export: Online import failed')
            assert False

        # Offline
        if not topology_st.standalone.ldif2db(DEFAULT_BENAME, None, None, None, import_ldif):
            log.fatal('test_basic_import_export: Offline import failed')
            assert False

        #
        # Test online and offline LDIF export
        #

        # Online export
        export_ldif = ldif_dir + '/export.ldif'
        exportTask = Tasks(topology_st.standalone)
        try:
            args = {TASK_WAIT: True}
            exportTask.exportLDIF(DEFAULT_SUFFIX, None, export_ldif, args)
        except ValueError:
            log.fatal('test_basic_import_export: Online export failed')
            assert False

        # Offline export
        if not topology_st.standalone.db2ldif(DEFAULT_BENAME, (DEFAULT_SUFFIX,),
                                              None, None, None, export_ldif):
            log.fatal('test_basic_import_export: Failed to run offline db2ldif')
            assert False

        #
        # Cleanup - Import the Example LDIF for the other tests in this suite
        #
        ldif = '%s/Example.ldif' % get_data_dir(topology_st.standalone.prefix)
        import_ldif = topology_st.standalone.get_ldif_dir() + "/Example.ldif"
        shutil.copyfile(ldif, import_ldif)
        try:
            topology_st.standalone.tasks.importLDIF(suffix=DEFAULT_SUFFIX,
                                                    input_file=import_ldif,
>                                                   args={TASK_WAIT: True})

suites/basic/basic_test.py:271:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../../lib389/lib389/tasks.py:105: in importLDIF
    self.conn.add_s(entry)
../../../lib389/lib389/__init__.py:155: in inner
    return f(ent.dn, ent.toTupleList(), *args[2:])
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:210: in add_s
    return self.result(msgid,all=1,timeout=self.timeout)
../../../lib389/lib389/__init__.py:127: in inner
    objtype, data = f(*args, **kwargs)
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:503: in result
    resp_type, resp_data, resp_msgid = self.result2(msgid,all,timeout)
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:507: in result2
    resp_type, resp_data, resp_msgid, resp_ctrls = self.result3(msgid,all,timeout)
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:514: in result3
    resp_ctrl_classes=resp_ctrl_classes
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:521: in result4
    ldap_result = self._ldap_call(self._l.result4,msgid,all,timeout,add_ctrls,add_intermediates,add_extop)
../../../lib389/lib389/__init__.py:159: in inner
    return f(*args, **kwargs)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _

self = <lib389.DirSrv object at 0x7f1e3aee3310>
func = <built-in method result4 of LDAP object at 0x7f1e3ad6b3a0>
args = (23, 1, -1, 0, 0, 0), kwargs = {}, diagnostic_message_success = None
e = SERVER_DOWN({'desc': "Can't contact LDAP server"},)

    def _ldap_call(self,func,*args,**kwargs):
        """
        Wrapper method mainly for serializing calls into OpenLDAP libs
        and trace logs
        """
        self._ldap_object_lock.acquire()
        if __debug__:
            if self._trace_level>=1:
                self._trace_file.write('*** %s %s - %s\n%s\n' % (
                    repr(self),
                    self._uri,
                    '.'.join((self.__class__.__name__,func.__name__)),
                    pprint.pformat((args,kwargs))
                ))
                if self._trace_level>=9:
                    traceback.print_stack(limit=self._trace_stack_limit,file=self._trace_file)
        diagnostic_message_success = None
        try:
            try:
>               result = func(*args,**kwargs)
E               SERVER_DOWN: {'desc': "Can't contact LDAP server"}

/usr/lib64/python2.7/site-packages/ldap/ldapobject.py:106: SERVER_DOWN
----------------------------- Captured stdout call -----------------------------
OK group dirsrv exists
OK user dirsrv exists
OK group dirsrv exists
OK user dirsrv exists
----------------------------- Captured stderr call -----------------------------
INFO:dirsrvtests.tests.suites.basic.basic_test:Running test_basic_import_export...
INFO:lib389:Import task import_02062017_164942 for file /var/lib/dirsrv/slapd-standalone_1/ldif/basic_import.ldif completed successfully
INFO:lib389:Running script: /usr/sbin/ns-slapd ldif2db -D /etc/dirsrv/slapd-standalone_1 -n userRoot -i /var/lib/dirsrv/slapd-standalone_1/ldif/basic_import.ldif
[06/Feb/2017:16:50:16.621763715 +0100] - INFO - dblayer_instance_start - Import is running with nsslapd-db-private-import-mem on; No other process is allowed to access the database
[06/Feb/2017:16:50:16.886135323 +0100] - INFO - check_and_set_import_cache - pagesize: 4096, pages: 2039149, procpages: 4635
[06/Feb/2017:16:50:16.955269419 +0100] - INFO - check_and_set_import_cache - Import allocates 2923928KB import cache.
[06/Feb/2017:16:50:17.264552818 +0100] - INFO - import_main_offline - import userRoot: Beginning import job...
[06/Feb/2017:16:50:17.325808202 +0100] - INFO - import_main_offline - import userRoot: Index buffering enabled with bucket size 100
[06/Feb/2017:16:50:18.128876735 +0100] - INFO - import_producer - import userRoot: Processing file "/var/lib/dirsrv/slapd-standalone_1/ldif/basic_import.ldif"
[06/Feb/2017:16:50:30.775339620 +0100] - INFO - import_producer - import userRoot: Finished scanning file "/var/lib/dirsrv/slapd-standalone_1/ldif/basic_import.ldif" (50006 entries)
[06/Feb/2017:16:50:32.188911971 +0100] - INFO - import_monitor_threads - import userRoot: Workers finished; cleaning up...
[06/Feb/2017:16:50:32.709071142 +0100] - INFO - import_monitor_threads - import userRoot: Workers cleaned up.
[06/Feb/2017:16:50:32.786803810 +0100] - INFO - import_main_offline - import userRoot: Cleaning up producer thread...
[06/Feb/2017:16:50:32.854767387 +0100] - INFO - import_main_offline - import userRoot: Indexing complete. Post-processing...
[06/Feb/2017:16:50:32.914331745 +0100] - INFO - import_main_offline - import userRoot: Generating numsubordinates (this may take several minutes to complete)...
[06/Feb/2017:16:50:33.045391185 +0100] - INFO - import_main_offline - import userRoot: Generating numSubordinates complete.
[06/Feb/2017:16:50:33.110617005 +0100] - INFO - ldbm_get_nonleaf_ids - import userRoot: Gathering ancestorid non-leaf IDs...
[06/Feb/2017:16:50:33.174568246 +0100] - INFO - ldbm_get_nonleaf_ids - import userRoot: Finished gathering ancestorid non-leaf IDs.
[06/Feb/2017:16:50:33.311934543 +0100] - INFO - ldbm_ancestorid_new_idl_create_index - import userRoot: Creating ancestorid index (new idl)...
[06/Feb/2017:16:50:33.616574505 +0100] - INFO - ldbm_ancestorid_new_idl_create_index - import userRoot: Created ancestorid index (new idl).
[06/Feb/2017:16:50:33.684557039 +0100] - INFO - import_main_offline - import userRoot: Flushing caches...
[06/Feb/2017:16:50:33.741608959 +0100] - INFO - import_main_offline - import userRoot: Closing files...
[06/Feb/2017:16:50:42.992568199 +0100] - INFO - dblayer_pre_close - All database threads now stopped
[06/Feb/2017:16:50:43.057060737 +0100] - INFO - import_main_offline - import userRoot: Import complete. Processed 50006 entries in 26 seconds. (1923.31 entries/sec)
INFO:lib389:Export task export_02062017_165045 for file /var/lib/dirsrv/slapd-standalone_1/ldif/export.ldif completed successfully
INFO:lib389:Running script: /usr/sbin/ns-slapd db2ldif -D /etc/dirsrv/slapd-standalone_1 -n userRoot -s dc=example,dc=com -a /var/lib/dirsrv/slapd-standalone_1/ldif/export.ldif
ldiffile: /var/lib/dirsrv/slapd-standalone_1/ldif/export.ldif
[06/Feb/2017:16:51:09.654731372 +0100] - INFO - export_one_entry - export userRoot: Processed 1000 entries (1%).
[06/Feb/2017:16:51:09.846346173 +0100] - INFO - export_one_entry - export userRoot: Processed 2000 entries (3%).
[06/Feb/2017:16:51:10.591479004 +0100] - INFO - export_one_entry - export userRoot: Processed 3000 entries (5%).
[06/Feb/2017:16:51:10.841175051 +0100] - INFO - export_one_entry - export userRoot: Processed 4000 entries (7%).
[06/Feb/2017:16:51:11.067759180 +0100] - INFO - export_one_entry - export userRoot: Processed 5000 entries (9%).
[06/Feb/2017:16:51:11.305169153 +0100] - INFO - export_one_entry - export userRoot: Processed 6000 entries (11%).
[06/Feb/2017:16:51:11.525668320 +0100] - INFO - export_one_entry - export userRoot: Processed 7000 entries (13%).
[06/Feb/2017:16:51:11.756032720 +0100] - INFO - export_one_entry - export userRoot: Processed 8000 entries (15%).
[06/Feb/2017:16:51:12.461277100 +0100] - INFO - export_one_entry - export userRoot: Processed 9000 entries (17%).
[06/Feb/2017:16:51:13.884864558 +0100] - INFO - export_one_entry - export userRoot: Processed 10000 entries (19%).
[06/Feb/2017:16:51:14.123328214 +0100] - INFO - export_one_entry - export userRoot: Processed 11000 entries (21%).
[06/Feb/2017:16:51:14.338841408 +0100] - INFO - export_one_entry - export userRoot: Processed 12000 entries (23%).
[06/Feb/2017:16:51:15.000739150 +0100] - INFO - export_one_entry - export userRoot: Processed 13000 entries (25%).
[06/Feb/2017:16:51:15.226037382 +0100] - INFO - export_one_entry - export userRoot: Processed 14000 entries (27%).
[06/Feb/2017:16:51:15.860356722 +0100] - INFO - export_one_entry - export userRoot: Processed 15000 entries (29%).
[06/Feb/2017:16:51:16.648809224 +0100] - INFO - export_one_entry - export userRoot: Processed 16000 entries (31%).
[06/Feb/2017:16:51:16.891541212 +0100] - INFO - export_one_entry - export userRoot: Processed 17000 entries (33%).
[06/Feb/2017:16:51:17.099683982 +0100] - INFO - export_one_entry - export userRoot: Processed 18000 entries (35%).
[06/Feb/2017:16:51:17.781286374 +0100] - INFO - export_one_entry - export userRoot: Processed 19000 entries (37%).
[06/Feb/2017:16:51:17.993620846 +0100] - INFO - export_one_entry - export userRoot: Processed 20000 entries (39%).
[06/Feb/2017:16:51:21.002541592 +0100] - INFO - export_one_entry - export userRoot: Processed 21000 entries (41%).
[06/Feb/2017:16:51:21.689119958 +0100] - INFO - export_one_entry - export userRoot: Processed 22000 entries (43%).
[06/Feb/2017:16:51:21.948886985 +0100] - INFO - export_one_entry - export userRoot: Processed 23000 entries (45%).
[06/Feb/2017:16:51:22.171054429 +0100] - INFO - export_one_entry - export userRoot: Processed 24000 entries (47%).
[06/Feb/2017:16:51:22.381002484 +0100] - INFO - export_one_entry - export userRoot: Processed 25000 entries (49%).
[06/Feb/2017:16:51:22.963952011 +0100] - INFO - export_one_entry - export userRoot: Processed 26000 entries (51%).
[06/Feb/2017:16:51:23.712415842 +0100] - INFO - export_one_entry - export userRoot: Processed 27000 entries (53%).
[06/Feb/2017:16:51:23.943837090 +0100] - INFO - export_one_entry - export userRoot: Processed 28000 entries (55%).
[06/Feb/2017:16:51:24.231410191 +0100] - INFO - export_one_entry - export userRoot: Processed 29000 entries (57%).
[06/Feb/2017:16:51:24.432873430 +0100] - INFO - export_one_entry - export userRoot: Processed 30000 entries (59%).
[06/Feb/2017:16:51:24.633771155 +0100] - INFO - export_one_entry - export userRoot: Processed 31000 entries (61%).
[06/Feb/2017:16:51:24.840913858 +0100] - INFO - export_one_entry - export userRoot: Processed 32000 entries (63%).
[06/Feb/2017:16:51:25.523853170 +0100] - INFO - export_one_entry - export userRoot: Processed 33000 entries (65%).
[06/Feb/2017:16:51:25.754798036 +0100] - INFO - export_one_entry - export userRoot: Processed 34000 entries (67%).
[06/Feb/2017:16:51:25.969361611 +0100] - INFO - export_one_entry - export userRoot: Processed 35000 entries (69%).
[06/Feb/2017:16:51:26.278006410 +0100] - INFO - export_one_entry - export userRoot: Processed 36000 entries (71%).
[06/Feb/2017:16:51:26.531160165 +0100] - INFO - export_one_entry - export userRoot: Processed 37000 entries (73%).
[06/Feb/2017:16:51:26.765456456 +0100] - INFO - export_one_entry - export userRoot: Processed 38000 entries (75%).
[06/Feb/2017:16:51:26.989992874 +0100] - INFO - export_one_entry - export userRoot: Processed 39000 entries (77%).
[06/Feb/2017:16:51:27.615325203 +0100] - INFO - export_one_entry - export userRoot: Processed 40000 entries (79%).
[06/Feb/2017:16:51:27.849178025 +0100] - INFO - export_one_entry - export userRoot: Processed 41000 entries (81%).
[06/Feb/2017:16:51:28.055264452 +0100] - INFO - export_one_entry - export userRoot: Processed 42000 entries (83%).
[06/Feb/2017:16:51:28.253373256 +0100] - INFO - export_one_entry - export userRoot: Processed 43000 entries (85%).
[06/Feb/2017:16:51:28.453406334 +0100] - INFO - export_one_entry - export userRoot: Processed 44000 entries (87%).
[06/Feb/2017:16:51:29.322384380 +0100] - INFO - export_one_entry - export userRoot: Processed 45000 entries (89%).
[06/Feb/2017:16:51:30.604566457 +0100] - INFO - export_one_entry - export userRoot: Processed 46000 entries (91%).
[06/Feb/2017:16:51:30.808779498 +0100] - INFO - export_one_entry - export userRoot: Processed 47000 entries (93%).
[06/Feb/2017:16:51:31.511660500 +0100] - INFO - export_one_entry - export userRoot: Processed 48000 entries (95%).
[06/Feb/2017:16:51:31.779959056 +0100] - INFO - export_one_entry - export userRoot: Processed 49000 entries (97%).
[06/Feb/2017:16:51:32.016080153 +0100] - INFO - export_one_entry - export userRoot: Processed 50000 entries (99%).
[06/Feb/2017:16:51:32.574360353 +0100] - INFO - ldbm_back_ldbm2ldif - export userRoot: Processed 50006 entries (100%).
[06/Feb/2017:16:51:32.660686365 +0100] - INFO - dblayer_pre_close - All database threads now stopped

!!!!!!!!!!!!!!!!!!!! Interrupted: stopping after 1 failures !!!!!!!!!!!!!!!!!!!!
===================== 1 failed, 1 passed in 246.24 seconds ====================='
+ '[' 2 -ne 0 ']'
+ echo CI Tests 'FAILED!'
CI Tests FAILED!
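[Editor's note: the SERVER_DOWN above is raised from the cleanup importLDIF() call at basic_test.py:271, which runs right after the offline db2ldif export, so the test is issuing an online import task while the instance can no longer be contacted. A minimal sketch of guarding that cleanup step is below. It reuses only the lib389 names already shown in the test (topology_st, DEFAULT_SUFFIX, DEFAULT_BENAME, TASK_WAIT, export_ldif, import_ldif) plus DirSrv.restart(); whether a plain restart before the online import is sufficient here is an assumption, and this is an illustration, not a committed fix.]

    # Sketch only: bring the instance back up between the offline export and
    # the cleanup online import, so the import task has a live connection.
    standalone = topology_st.standalone

    # Offline export (ns-slapd db2ldif), as shown in the captured stderr above.
    if not standalone.db2ldif(DEFAULT_BENAME, (DEFAULT_SUFFIX,),
                              None, None, None, export_ldif):
        log.fatal('test_basic_import_export: Failed to run offline db2ldif')
        assert False

    # Assumption: restarting the instance here is enough to restore the
    # LDAP connection used by the task API.
    standalone.restart()

    # Cleanup online import, same call that raised SERVER_DOWN in this run.
    standalone.tasks.importLDIF(suffix=DEFAULT_SUFFIX,
                                input_file=import_ldif,
                                args={TASK_WAIT: True})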
+ MSG=FAILED
+ RC=1
+ '[' 1 -ne 0 ']'
+ sudo /usr/sbin/sendmail mreynolds@xxxxxxxxxx
+ exit 1
Build step 'Execute shell' marked build as failure
_______________________________________________
389-devel mailing list -- 389-devel@xxxxxxxxxxxxxxxxxxxxxxx
To unsubscribe send an email to 389-devel-leave@xxxxxxxxxxxxxxxxxxxxxxx