[ 9.879475] scsi 1:0:22:0: enclosure level(0x0000), connector name( C3 ) [ 9.886279] scsi 1:0:22:0: serial_number( 7SHPL9TW) [ 9.891767] scsi 1:0:22:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 9.912024] mpt3sas_cm0: detecting: handle(0x0072), sas_address(0x5000cca2525fb942), phy(21) [ 9.920464] mpt3sas_cm0: REPORT_LUNS: handle(0x0072), retries(0) [ 9.926599] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0072), lun(0) [ 9.944464] scsi 1:0:23:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 9.952764] scsi 1:0:23:0: SSP: handle(0x0072), sas_addr(0x5000cca2525fb942), phy(21), device_name(0x5000cca2525fb943) [ 9.963448] scsi 1:0:23:0: enclosure logical id(0x5000ccab04037180), slot(30) [ 9.970667] scsi 1:0:23:0: enclosure level(0x0000), connector name( C3 ) [ 9.977457] scsi 1:0:23:0: serial_number( 7SHPMZHW) [ 9.982944] scsi 1:0:23:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 10.012617] mpt3sas_cm0: detecting: handle(0x0073), sas_address(0x5000cca2525e22e6), phy(22) [ 10.021052] mpt3sas_cm0: REPORT_LUNS: handle(0x0073), retries(0) [ 10.027184] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0073), lun(0) [ 10.033814] scsi 1:0:24:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 10.042120] scsi 1:0:24:0: SSP: handle(0x0073), sas_addr(0x5000cca2525e22e6), phy(22), device_name(0x5000cca2525e22e7) [ 10.052804] scsi 1:0:24:0: enclosure logical id(0x5000ccab04037180), slot(31) [ 10.060024] scsi 1:0:24:0: enclosure level(0x0000), connector name( C3 ) [ 10.066833] scsi 1:0:24:0: serial_number( 7SHNSXKW) [ 10.072326] scsi 1:0:24:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 10.092033] mpt3sas_cm0: detecting: handle(0x0074), sas_address(0x5000cca2525fb5be), phy(23) [ 10.100470] mpt3sas_cm0: REPORT_LUNS: handle(0x0074), retries(0) [ 10.106611] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0074), lun(0) [ 10.148418] scsi 1:0:25:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 10.156702] scsi 1:0:25:0: SSP: handle(0x0074), sas_addr(0x5000cca2525fb5be), phy(23), device_name(0x5000cca2525fb5bf) [ 10.167389] scsi 1:0:25:0: enclosure logical id(0x5000ccab04037180), slot(32) [ 10.174610] scsi 1:0:25:0: enclosure level(0x0000), connector name( C3 ) [ 10.181397] scsi 1:0:25:0: serial_number( 7SHPMS7W) [ 10.186884] scsi 1:0:25:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 10.210035] mpt3sas_cm0: detecting: handle(0x0075), sas_address(0x5000cca2525eb77e), phy(24) [ 10.218475] mpt3sas_cm0: REPORT_LUNS: handle(0x0075), retries(0) [ 10.224608] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0075), lun(0) [ 10.231244] scsi 1:0:26:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 10.239553] scsi 1:0:26:0: SSP: handle(0x0075), sas_addr(0x5000cca2525eb77e), phy(24), device_name(0x5000cca2525eb77f) [ 10.250237] scsi 1:0:26:0: enclosure logical id(0x5000ccab04037180), slot(33) [ 10.257454] scsi 1:0:26:0: enclosure level(0x0000), connector name( C3 ) [ 10.264260] scsi 1:0:26:0: serial_number( 7SHP2UAW) [ 10.269749] scsi 1:0:26:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 10.290043] mpt3sas_cm0: detecting: handle(0x0076), sas_address(0x5000cca2525e113a), phy(25) [ 10.298480] mpt3sas_cm0: REPORT_LUNS: handle(0x0076), retries(0) [ 10.304624] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0076), lun(0) [ 10.311472] scsi 1:0:27:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 10.319785] scsi 1:0:27:0: SSP: handle(0x0076), sas_addr(0x5000cca2525e113a), phy(25), 
device_name(0x5000cca2525e113b) [ 10.330475] scsi 1:0:27:0: enclosure logical id(0x5000ccab04037180), slot(34) [ 10.337692] scsi 1:0:27:0: enclosure level(0x0000), connector name( C3 ) [ 10.344498] scsi 1:0:27:0: serial_number( 7SHNRS2W) [ 10.349986] scsi 1:0:27:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 10.370037] mpt3sas_cm0: detecting: handle(0x0077), sas_address(0x5000cca2526014fa), phy(26) [ 10.378477] mpt3sas_cm0: REPORT_LUNS: handle(0x0077), retries(0) [ 10.384643] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0077), lun(0) [ 10.391254] scsi 1:0:28:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 10.399570] scsi 1:0:28:0: SSP: handle(0x0077), sas_addr(0x5000cca2526014fa), phy(26), device_name(0x5000cca2526014fb) [ 10.410253] scsi 1:0:28:0: enclosure logical id(0x5000ccab04037180), slot(35) [ 10.417473] scsi 1:0:28:0: enclosure level(0x0000), connector name( C3 ) [ 10.424276] scsi 1:0:28:0: serial_number( 7SHPV2VW) [ 10.429765] scsi 1:0:28:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 10.452040] mpt3sas_cm0: detecting: handle(0x0078), sas_address(0x5000cca252598786), phy(27) [ 10.460481] mpt3sas_cm0: REPORT_LUNS: handle(0x0078), retries(0) [ 10.466613] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0078), lun(0) [ 10.473260] scsi 1:0:29:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 10.481560] scsi 1:0:29:0: SSP: handle(0x0078), sas_addr(0x5000cca252598786), phy(27), device_name(0x5000cca252598787) [ 10.492250] scsi 1:0:29:0: enclosure logical id(0x5000ccab04037180), slot(36) [ 10.499470] scsi 1:0:29:0: enclosure level(0x0000), connector name( C3 ) [ 10.506275] scsi 1:0:29:0: serial_number( 7SHL7BRW) [ 10.511761] scsi 1:0:29:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 10.535039] mpt3sas_cm0: detecting: handle(0x0079), sas_address(0x5000cca2525f5366), phy(28) [ 10.543476] mpt3sas_cm0: REPORT_LUNS: handle(0x0079), retries(0) [ 10.549642] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0079), lun(0) [ 10.556254] scsi 1:0:30:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 10.564556] scsi 1:0:30:0: SSP: handle(0x0079), sas_addr(0x5000cca2525f5366), phy(28), device_name(0x5000cca2525f5367) [ 10.575244] scsi 1:0:30:0: enclosure logical id(0x5000ccab04037180), slot(37) [ 10.582463] scsi 1:0:30:0: enclosure level(0x0000), connector name( C3 ) [ 10.589274] scsi 1:0:30:0: serial_number( 7SHPE66W) [ 10.594765] scsi 1:0:30:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 10.622825] mpt3sas_cm0: detecting: handle(0x007a), sas_address(0x5000cca2525e263e), phy(29) [ 10.631262] mpt3sas_cm0: REPORT_LUNS: handle(0x007a), retries(0) [ 10.637394] mpt3sas_cm0: TEST_UNIT_READY: handle(0x007a), lun(0) [ 10.643989] scsi 1:0:31:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 10.652293] scsi 1:0:31:0: SSP: handle(0x007a), sas_addr(0x5000cca2525e263e), phy(29), device_name(0x5000cca2525e263f) [ 10.662980] scsi 1:0:31:0: enclosure logical id(0x5000ccab04037180), slot(38) [ 10.670197] scsi 1:0:31:0: enclosure level(0x0000), connector name( C3 ) [ 10.677002] scsi 1:0:31:0: serial_number( 7SHNT4GW) [ 10.682491] scsi 1:0:31:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 10.705041] mpt3sas_cm0: detecting: handle(0x007b), sas_address(0x5000cca2525f6082), phy(30) [ 10.713475] mpt3sas_cm0: REPORT_LUNS: handle(0x007b), retries(0) [ 10.723431] mpt3sas_cm0: TEST_UNIT_READY: handle(0x007b), lun(0) [ 10.732342] scsi 1:0:32:0: Direct-Access HGST 
HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 10.740647] scsi 1:0:32:0: SSP: handle(0x007b), sas_addr(0x5000cca2525f6082), phy(30), device_name(0x5000cca2525f6083) [ 10.751329] scsi 1:0:32:0: enclosure logical id(0x5000ccab04037180), slot(39) [ 10.758548] scsi 1:0:32:0: enclosure level(0x0000), connector name( C3 ) [ 10.765354] scsi 1:0:32:0: serial_number( 7SHPG28W) [ 10.770840] scsi 1:0:32:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 10.795056] mpt3sas_cm0: detecting: handle(0x007c), sas_address(0x5000cca2525ec83e), phy(31) [ 10.803490] mpt3sas_cm0: REPORT_LUNS: handle(0x007c), retries(0) [ 10.809653] mpt3sas_cm0: TEST_UNIT_READY: handle(0x007c), lun(0) [ 10.816252] scsi 1:0:33:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 10.824552] scsi 1:0:33:0: SSP: handle(0x007c), sas_addr(0x5000cca2525ec83e), phy(31), device_name(0x5000cca2525ec83f) [ 10.835241] scsi 1:0:33:0: enclosure logical id(0x5000ccab04037180), slot(40) [ 10.842461] scsi 1:0:33:0: enclosure level(0x0000), connector name( C3 ) [ 10.849265] scsi 1:0:33:0: serial_number( 7SHP3XXW) [ 10.854753] scsi 1:0:33:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 10.877045] mpt3sas_cm0: detecting: handle(0x007d), sas_address(0x5000cca2525ec01a), phy(32) [ 10.885479] mpt3sas_cm0: REPORT_LUNS: handle(0x007d), retries(0) [ 10.891641] mpt3sas_cm0: TEST_UNIT_READY: handle(0x007d), lun(0) [ 10.898245] scsi 1:0:34:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 10.906541] scsi 1:0:34:0: SSP: handle(0x007d), sas_addr(0x5000cca2525ec01a), phy(32), device_name(0x5000cca2525ec01b) [ 10.917231] scsi 1:0:34:0: enclosure logical id(0x5000ccab04037180), slot(41) [ 10.924450] scsi 1:0:34:0: enclosure level(0x0000), connector name( C3 ) [ 10.931254] scsi 1:0:34:0: serial_number( 7SHP3D3W) [ 10.936743] scsi 1:0:34:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 10.963054] mpt3sas_cm0: detecting: handle(0x007e), sas_address(0x5000cca2525ec55a), phy(33) [ 10.971491] mpt3sas_cm0: REPORT_LUNS: handle(0x007e), retries(0) [ 10.977824] mpt3sas_cm0: TEST_UNIT_READY: handle(0x007e), lun(0) [ 10.985415] scsi 1:0:35:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 10.993749] scsi 1:0:35:0: SSP: handle(0x007e), sas_addr(0x5000cca2525ec55a), phy(33), device_name(0x5000cca2525ec55b) [ 11.004436] scsi 1:0:35:0: enclosure logical id(0x5000ccab04037180), slot(42) [ 11.011656] scsi 1:0:35:0: enclosure level(0x0000), connector name( C3 ) [ 11.018461] scsi 1:0:35:0: serial_number( 7SHP3RYW) [ 11.023948] scsi 1:0:35:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 11.044054] mpt3sas_cm0: detecting: handle(0x007f), sas_address(0x5000cca2525fd4a2), phy(34) [ 11.052493] mpt3sas_cm0: REPORT_LUNS: handle(0x007f), retries(0) [ 11.058632] mpt3sas_cm0: TEST_UNIT_READY: handle(0x007f), lun(0) [ 11.065390] scsi 1:0:36:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 11.080944] scsi 1:0:36:0: SSP: handle(0x007f), sas_addr(0x5000cca2525fd4a2), phy(34), device_name(0x5000cca2525fd4a3) [ 11.091634] scsi 1:0:36:0: enclosure logical id(0x5000ccab04037180), slot(43) [ 11.098853] scsi 1:0:36:0: enclosure level(0x0000), connector name( C3 ) [ 11.105661] scsi 1:0:36:0: serial_number( 7SHPPU0W) [ 11.111145] scsi 1:0:36:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 11.131054] mpt3sas_cm0: detecting: handle(0x0080), sas_address(0x5000cca2525eb5f6), phy(35) [ 11.139488] mpt3sas_cm0: REPORT_LUNS: handle(0x0080), 
retries(0) [ 11.145650] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0080), lun(0) [ 11.152338] scsi 1:0:37:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 11.160646] scsi 1:0:37:0: SSP: handle(0x0080), sas_addr(0x5000cca2525eb5f6), phy(35), device_name(0x5000cca2525eb5f7) [ 11.171335] scsi 1:0:37:0: enclosure logical id(0x5000ccab04037180), slot(44) [ 11.178553] scsi 1:0:37:0: enclosure level(0x0000), connector name( C3 ) [ 11.185358] scsi 1:0:37:0: serial_number( 7SHP2R5W) [ 11.190847] scsi 1:0:37:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 11.211056] mpt3sas_cm0: detecting: handle(0x0081), sas_address(0x5000cca2525ebeb2), phy(36) [ 11.219496] mpt3sas_cm0: REPORT_LUNS: handle(0x0081), retries(0) [ 11.225633] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0081), lun(0) [ 11.232233] scsi 1:0:38:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 11.240537] scsi 1:0:38:0: SSP: handle(0x0081), sas_addr(0x5000cca2525ebeb2), phy(36), device_name(0x5000cca2525ebeb3) [ 11.251227] scsi 1:0:38:0: enclosure logical id(0x5000ccab04037180), slot(45) [ 11.258446] scsi 1:0:38:0: enclosure level(0x0000), connector name( C3 ) [ 11.265250] scsi 1:0:38:0: serial_number( 7SHP396W) [ 11.270739] scsi 1:0:38:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 11.293054] mpt3sas_cm0: detecting: handle(0x0082), sas_address(0x5000cca2525f291a), phy(37) [ 11.301491] mpt3sas_cm0: REPORT_LUNS: handle(0x0082), retries(0) [ 11.307654] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0082), lun(0) [ 11.314407] scsi 1:0:39:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 11.322746] scsi 1:0:39:0: SSP: handle(0x0082), sas_addr(0x5000cca2525f291a), phy(37), device_name(0x5000cca2525f291b) [ 11.333433] scsi 1:0:39:0: enclosure logical id(0x5000ccab04037180), slot(46) [ 11.340652] scsi 1:0:39:0: enclosure level(0x0000), connector name( C3 ) [ 11.347457] scsi 1:0:39:0: serial_number( 7SHPABWW) [ 11.352944] scsi 1:0:39:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 11.373069] mpt3sas_cm0: detecting: handle(0x0083), sas_address(0x5000cca252602c0e), phy(38) [ 11.381503] mpt3sas_cm0: REPORT_LUNS: handle(0x0083), retries(0) [ 11.387645] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0083), lun(0) [ 11.394246] scsi 1:0:40:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 11.402548] scsi 1:0:40:0: SSP: handle(0x0083), sas_addr(0x5000cca252602c0e), phy(38), device_name(0x5000cca252602c0f) [ 11.413235] scsi 1:0:40:0: enclosure logical id(0x5000ccab04037180), slot(47) [ 11.420455] scsi 1:0:40:0: enclosure level(0x0000), connector name( C3 ) [ 11.427259] scsi 1:0:40:0: serial_number( 7SHPWMHW) [ 11.432748] scsi 1:0:40:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 11.455071] mpt3sas_cm0: detecting: handle(0x0084), sas_address(0x5000cca2525e7cfe), phy(39) [ 11.463507] mpt3sas_cm0: REPORT_LUNS: handle(0x0084), retries(0) [ 11.469641] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0084), lun(0) [ 11.476236] scsi 1:0:41:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 11.484533] scsi 1:0:41:0: SSP: handle(0x0084), sas_addr(0x5000cca2525e7cfe), phy(39), device_name(0x5000cca2525e7cff) [ 11.495217] scsi 1:0:41:0: enclosure logical id(0x5000ccab04037180), slot(48) [ 11.502437] scsi 1:0:41:0: enclosure level(0x0000), connector name( C3 ) [ 11.509241] scsi 1:0:41:0: serial_number( 7SHNYXKW) [ 11.514729] scsi 1:0:41:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 11.537061] mpt3sas_cm0: 
detecting: handle(0x0085), sas_address(0x5000cca2525f6a32), phy(40) [ 11.545498] mpt3sas_cm0: REPORT_LUNS: handle(0x0085), retries(0) [ 11.551973] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0085), lun(0) [ 11.570231] scsi 1:0:42:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 11.578526] scsi 1:0:42:0: SSP: handle(0x0085), sas_addr(0x5000cca2525f6a32), phy(40), device_name(0x5000cca2525f6a33) [ 11.589208] scsi 1:0:42:0: enclosure logical id(0x5000ccab04037180), slot(49) [ 11.596427] scsi 1:0:42:0: enclosure level(0x0000), connector name( C3 ) [ 11.603232] scsi 1:0:42:0: serial_number( 7SHPGR8W) [ 11.608721] scsi 1:0:42:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 11.631070] mpt3sas_cm0: detecting: handle(0x0086), sas_address(0x5000cca2525f7f26), phy(41) [ 11.639507] mpt3sas_cm0: REPORT_LUNS: handle(0x0086), retries(0) [ 11.645641] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0086), lun(0) [ 11.652244] scsi 1:0:43:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 11.660565] scsi 1:0:43:0: SSP: handle(0x0086), sas_addr(0x5000cca2525f7f26), phy(41), device_name(0x5000cca2525f7f27) [ 11.671249] scsi 1:0:43:0: enclosure logical id(0x5000ccab04037180), slot(50) [ 11.678469] scsi 1:0:43:0: enclosure level(0x0000), connector name( C3 ) [ 11.685275] scsi 1:0:43:0: serial_number( 7SHPJ3JW) [ 11.690762] scsi 1:0:43:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 11.711072] mpt3sas_cm0: detecting: handle(0x0087), sas_address(0x5000cca2525eb4b2), phy(42) [ 11.719510] mpt3sas_cm0: REPORT_LUNS: handle(0x0087), retries(0) [ 11.725669] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0087), lun(0) [ 11.732371] scsi 1:0:44:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 11.740677] scsi 1:0:44:0: SSP: handle(0x0087), sas_addr(0x5000cca2525eb4b2), phy(42), device_name(0x5000cca2525eb4b3) [ 11.751366] scsi 1:0:44:0: enclosure logical id(0x5000ccab04037180), slot(51) [ 11.758586] scsi 1:0:44:0: enclosure level(0x0000), connector name( C3 ) [ 11.765391] scsi 1:0:44:0: serial_number( 7SHP2MKW) [ 11.770877] scsi 1:0:44:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 11.791067] mpt3sas_cm0: detecting: handle(0x0088), sas_address(0x5000cca2525e1f9e), phy(43) [ 11.799508] mpt3sas_cm0: REPORT_LUNS: handle(0x0088), retries(0) [ 11.805646] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0088), lun(0) [ 11.812246] scsi 1:0:45:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 11.820543] scsi 1:0:45:0: SSP: handle(0x0088), sas_addr(0x5000cca2525e1f9e), phy(43), device_name(0x5000cca2525e1f9f) [ 11.831230] scsi 1:0:45:0: enclosure logical id(0x5000ccab04037180), slot(52) [ 11.838450] scsi 1:0:45:0: enclosure level(0x0000), connector name( C3 ) [ 11.845255] scsi 1:0:45:0: serial_number( 7SHNSPTW) [ 11.850744] scsi 1:0:45:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 11.871662] mpt3sas_cm0: detecting: handle(0x0089), sas_address(0x5000cca2525e52fe), phy(44) [ 11.880100] mpt3sas_cm0: REPORT_LUNS: handle(0x0089), retries(0) [ 11.886256] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0089), lun(0) [ 11.892848] scsi 1:0:46:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 11.901148] scsi 1:0:46:0: SSP: handle(0x0089), sas_addr(0x5000cca2525e52fe), phy(44), device_name(0x5000cca2525e52ff) [ 11.911833] scsi 1:0:46:0: enclosure logical id(0x5000ccab04037180), slot(53) [ 11.919052] scsi 1:0:46:0: enclosure level(0x0000), connector name( C3 ) [ 11.925860] scsi 1:0:46:0: serial_number( 7SHNW3VW) [ 
11.931346] scsi 1:0:46:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 11.951075] mpt3sas_cm0: detecting: handle(0x008a), sas_address(0x5000cca2525f4e72), phy(45) [ 11.959520] mpt3sas_cm0: REPORT_LUNS: handle(0x008a), retries(0) [ 11.965681] mpt3sas_cm0: TEST_UNIT_READY: handle(0x008a), lun(0) [ 11.972270] scsi 1:0:47:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 11.980568] scsi 1:0:47:0: SSP: handle(0x008a), sas_addr(0x5000cca2525f4e72), phy(45), device_name(0x5000cca2525f4e73) [ 11.991258] scsi 1:0:47:0: enclosure logical id(0x5000ccab04037180), slot(54) [ 11.998477] scsi 1:0:47:0: enclosure level(0x0000), connector name( C3 ) [ 12.005281] scsi 1:0:47:0: serial_number( 7SHPDVZW) [ 12.010767] scsi 1:0:47:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 12.033091] mpt3sas_cm0: detecting: handle(0x008b), sas_address(0x5000cca2525fd49a), phy(46) [ 12.041529] mpt3sas_cm0: REPORT_LUNS: handle(0x008b), retries(0) [ 12.047688] mpt3sas_cm0: TEST_UNIT_READY: handle(0x008b), lun(0) [ 12.054320] scsi 1:0:48:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 12.062620] scsi 1:0:48:0: SSP: handle(0x008b), sas_addr(0x5000cca2525fd49a), phy(46), device_name(0x5000cca2525fd49b) [ 12.073307] scsi 1:0:48:0: enclosure logical id(0x5000ccab04037180), slot(55) [ 12.080527] scsi 1:0:48:0: enclosure level(0x0000), connector name( C3 ) [ 12.087331] scsi 1:0:48:0: serial_number( 7SHPPTYW) [ 12.092817] scsi 1:0:48:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 12.115074] mpt3sas_cm0: detecting: handle(0x008c), sas_address(0x5000cca2525e787a), phy(47) [ 12.123509] mpt3sas_cm0: REPORT_LUNS: handle(0x008c), retries(0) [ 12.129669] mpt3sas_cm0: TEST_UNIT_READY: handle(0x008c), lun(0) [ 12.136257] scsi 1:0:49:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 12.144554] scsi 1:0:49:0: SSP: handle(0x008c), sas_addr(0x5000cca2525e787a), phy(47), device_name(0x5000cca2525e787b) [ 12.155244] scsi 1:0:49:0: enclosure logical id(0x5000ccab04037180), slot(56) [ 12.162463] scsi 1:0:49:0: enclosure level(0x0000), connector name( C3 ) [ 12.169269] scsi 1:0:49:0: serial_number( 7SHNYM7W) [ 12.174756] scsi 1:0:49:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 12.202979] mpt3sas_cm0: detecting: handle(0x008d), sas_address(0x5000cca2525ca19a), phy(48) [ 12.211416] mpt3sas_cm0: REPORT_LUNS: handle(0x008d), retries(0) [ 12.217550] mpt3sas_cm0: TEST_UNIT_READY: handle(0x008d), lun(0) [ 12.273401] scsi 1:0:50:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 12.281695] scsi 1:0:50:0: SSP: handle(0x008d), sas_addr(0x5000cca2525ca19a), phy(48), device_name(0x5000cca2525ca19b) [ 12.292379] scsi 1:0:50:0: enclosure logical id(0x5000ccab04037180), slot(57) [ 12.299598] scsi 1:0:50:0: enclosure level(0x0000), connector name( C3 ) [ 12.306389] scsi 1:0:50:0: serial_number( 7SHMY83W) [ 12.311872] scsi 1:0:50:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 12.349544] mpt3sas_cm0: detecting: handle(0x008e), sas_address(0x5000cca2525ffb8a), phy(49) [ 12.357984] mpt3sas_cm0: REPORT_LUNS: handle(0x008e), retries(0) [ 12.364163] mpt3sas_cm0: TEST_UNIT_READY: handle(0x008e), lun(0) [ 12.371027] scsi 1:0:51:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 12.379344] scsi 1:0:51:0: SSP: handle(0x008e), sas_addr(0x5000cca2525ffb8a), phy(49), device_name(0x5000cca2525ffb8b) [ 12.390029] scsi 1:0:51:0: enclosure logical id(0x5000ccab04037180), slot(58) [ 
12.397248] scsi 1:0:51:0: enclosure level(0x0000), connector name( C3 ) [ 12.404052] scsi 1:0:51:0: serial_number( 7SHPTDAW) [ 12.409541] scsi 1:0:51:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 12.432103] mpt3sas_cm0: detecting: handle(0x008f), sas_address(0x5000cca2525f266a), phy(50) [ 12.440542] mpt3sas_cm0: REPORT_LUNS: handle(0x008f), retries(0) [ 12.446685] mpt3sas_cm0: TEST_UNIT_READY: handle(0x008f), lun(0) [ 12.453503] scsi 1:0:52:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 12.461817] scsi 1:0:52:0: SSP: handle(0x008f), sas_addr(0x5000cca2525f266a), phy(50), device_name(0x5000cca2525f266b) [ 12.472502] scsi 1:0:52:0: enclosure logical id(0x5000ccab04037180), slot(59) [ 12.479722] scsi 1:0:52:0: enclosure level(0x0000), connector name( C3 ) [ 12.486527] scsi 1:0:52:0: serial_number( 7SHPA6AW) [ 12.492015] scsi 1:0:52:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 12.535558] mpt3sas_cm0: expander_add: handle(0x005b), parent(0x0058), sas_addr(0x5000ccab040371fb), phys(68) [ 12.554902] mpt3sas_cm0: detecting: handle(0x0090), sas_address(0x5000cca2525eacc2), phy(42) [ 12.563352] mpt3sas_cm0: REPORT_LUNS: handle(0x0090), retries(0) [ 12.569509] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0090), lun(0) [ 12.576175] scsi 1:0:53:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 12.584481] scsi 1:0:53:0: SSP: handle(0x0090), sas_addr(0x5000cca2525eacc2), phy(42), device_name(0x5000cca2525eacc3) [ 12.595166] scsi 1:0:53:0: enclosure logical id(0x5000ccab04037180), slot(1) [ 12.602298] scsi 1:0:53:0: enclosure level(0x0000), connector name( C3 ) [ 12.609101] scsi 1:0:53:0: serial_number( 7SHP235W) [ 12.614590] scsi 1:0:53:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 12.637095] mpt3sas_cm0: detecting: handle(0x0091), sas_address(0x5000cca2525f8152), phy(43) [ 12.645532] mpt3sas_cm0: REPORT_LUNS: handle(0x0091), retries(0) [ 12.651665] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0091), lun(0) [ 12.658264] scsi 1:0:54:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 12.666562] scsi 1:0:54:0: SSP: handle(0x0091), sas_addr(0x5000cca2525f8152), phy(43), device_name(0x5000cca2525f8153) [ 12.677249] scsi 1:0:54:0: enclosure logical id(0x5000ccab04037180), slot(3) [ 12.684381] scsi 1:0:54:0: enclosure level(0x0000), connector name( C3 ) [ 12.691185] scsi 1:0:54:0: serial_number( 7SHPJ80W) [ 12.696673] scsi 1:0:54:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 12.719681] mpt3sas_cm0: detecting: handle(0x0092), sas_address(0x5000cca2525ef83a), phy(44) [ 12.728119] mpt3sas_cm0: REPORT_LUNS: handle(0x0092), retries(0) [ 12.734251] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0092), lun(0) [ 12.740857] scsi 1:0:55:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 12.749158] scsi 1:0:55:0: SSP: handle(0x0092), sas_addr(0x5000cca2525ef83a), phy(44), device_name(0x5000cca2525ef83b) [ 12.759845] scsi 1:0:55:0: enclosure logical id(0x5000ccab04037180), slot(4) [ 12.766979] scsi 1:0:55:0: enclosure level(0x0000), connector name( C3 ) [ 12.773786] scsi 1:0:55:0: serial_number( 7SHP73ZW) [ 12.779270] scsi 1:0:55:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 12.799089] mpt3sas_cm0: detecting: handle(0x0093), sas_address(0x5000cca2525e72aa), phy(45) [ 12.807527] mpt3sas_cm0: REPORT_LUNS: handle(0x0093), retries(0) [ 12.813698] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0093), lun(0) [ 12.820484] scsi 1:0:56:0: Direct-Access HGST 
HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 12.829509] scsi 1:0:56:0: SSP: handle(0x0093), sas_addr(0x5000cca2525e72aa), phy(45), device_name(0x5000cca2525e72ab) [ 12.840196] scsi 1:0:56:0: enclosure logical id(0x5000ccab04037180), slot(5) [ 12.847329] scsi 1:0:56:0: enclosure level(0x0000), connector name( C3 ) [ 12.854134] scsi 1:0:56:0: serial_number( 7SHNY77W) [ 12.859621] scsi 1:0:56:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 12.882095] mpt3sas_cm0: detecting: handle(0x0094), sas_address(0x5000cca2525d3c8a), phy(46) [ 12.890532] mpt3sas_cm0: REPORT_LUNS: handle(0x0094), retries(0) [ 12.896670] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0094), lun(0) [ 12.903291] scsi 1:0:57:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 12.911597] scsi 1:0:57:0: SSP: handle(0x0094), sas_addr(0x5000cca2525d3c8a), phy(46), device_name(0x5000cca2525d3c8b) [ 12.922281] scsi 1:0:57:0: enclosure logical id(0x5000ccab04037180), slot(6) [ 12.929413] scsi 1:0:57:0: enclosure level(0x0000), connector name( C3 ) [ 12.936216] scsi 1:0:57:0: serial_number( 7SHN8KZW) [ 12.941705] scsi 1:0:57:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 12.964105] mpt3sas_cm0: detecting: handle(0x0095), sas_address(0x5000cca2525fae0e), phy(47) [ 12.972543] mpt3sas_cm0: REPORT_LUNS: handle(0x0095), retries(0) [ 12.978676] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0095), lun(0) [ 12.985254] scsi 1:0:58:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 12.993553] scsi 1:0:58:0: SSP: handle(0x0095), sas_addr(0x5000cca2525fae0e), phy(47), device_name(0x5000cca2525fae0f) [ 13.004242] scsi 1:0:58:0: enclosure logical id(0x5000ccab04037180), slot(7) [ 13.011375] scsi 1:0:58:0: enclosure level(0x0000), connector name( C3 ) [ 13.018179] scsi 1:0:58:0: serial_number( 7SHPM7BW) [ 13.023667] scsi 1:0:58:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 13.049097] mpt3sas_cm0: detecting: handle(0x0096), sas_address(0x5000cca2525efdae), phy(48) [ 13.057532] mpt3sas_cm0: REPORT_LUNS: handle(0x0096), retries(0) [ 13.063687] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0096), lun(0) [ 13.073156] scsi 1:0:59:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 13.083763] scsi 1:0:59:0: SSP: handle(0x0096), sas_addr(0x5000cca2525efdae), phy(48), device_name(0x5000cca2525efdaf) [ 13.094448] scsi 1:0:59:0: enclosure logical id(0x5000ccab04037180), slot(8) [ 13.101580] scsi 1:0:59:0: enclosure level(0x0000), connector name( C3 ) [ 13.108385] scsi 1:0:59:0: serial_number( 7SHP7H7W) [ 13.113871] scsi 1:0:59:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 13.228484] mpt3sas_cm0: detecting: handle(0x0097), sas_address(0x5000cca2525fa302), phy(49) [ 13.236918] mpt3sas_cm0: REPORT_LUNS: handle(0x0097), retries(0) [ 13.243322] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0097), lun(0) [ 13.256804] scsi 1:0:60:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 13.265098] scsi 1:0:60:0: SSP: handle(0x0097), sas_addr(0x5000cca2525fa302), phy(49), device_name(0x5000cca2525fa303) [ 13.275785] scsi 1:0:60:0: enclosure logical id(0x5000ccab04037180), slot(9) [ 13.282917] scsi 1:0:60:0: enclosure level(0x0000), connector name( C3 ) [ 13.289707] scsi 1:0:60:0: serial_number( 7SHPLHKW) [ 13.295191] scsi 1:0:60:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 13.316118] mpt3sas_cm0: detecting: handle(0x0098), sas_address(0x5000cca2525fb4be), phy(50) [ 13.324558] mpt3sas_cm0: REPORT_LUNS: handle(0x0098), 
retries(0) [ 13.330725] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0098), lun(0) [ 13.337397] scsi 1:0:61:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 13.345697] scsi 1:0:61:0: SSP: handle(0x0098), sas_addr(0x5000cca2525fb4be), phy(50), device_name(0x5000cca2525fb4bf) [ 13.356387] scsi 1:0:61:0: enclosure logical id(0x5000ccab04037180), slot(10) [ 13.363607] scsi 1:0:61:0: enclosure level(0x0000), connector name( C3 ) [ 13.370410] scsi 1:0:61:0: serial_number( 7SHPMP5W) [ 13.375900] scsi 1:0:61:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 13.398625] mpt3sas_cm0: expander_add: handle(0x00da), parent(0x0002), sas_addr(0x5000ccab040371bd), phys(49) [ 13.419014] mpt3sas_cm0: detecting: handle(0x00de), sas_address(0x5000ccab040371bc), phy(48) [ 13.427450] mpt3sas_cm0: REPORT_LUNS: handle(0x00de), retries(0) [ 13.435324] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00de), lun(0) [ 13.442184] scsi 1:0:62:0: Enclosure HGST H4060-J 2033 PQ: 0 ANSI: 6 [ 13.450673] scsi 1:0:62:0: set ignore_delay_remove for handle(0x00de) [ 13.457114] scsi 1:0:62:0: SES: handle(0x00de), sas_addr(0x5000ccab040371bc), phy(48), device_name(0x0000000000000000) [ 13.467800] scsi 1:0:62:0: enclosure logical id(0x5000ccab04037180), slot(60) [ 13.475018] scsi 1:0:62:0: enclosure level(0x0000), connector name( C2 ) [ 13.481822] scsi 1:0:62:0: serial_number(USWSJ03918EZ0028 ) [ 13.487659] scsi 1:0:62:0: qdepth(1), tagged(0), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 13.513530] mpt3sas_cm0: expander_add: handle(0x00dc), parent(0x00da), sas_addr(0x5000ccab040371bf), phys(68) [ 13.534517] mpt3sas_cm0: detecting: handle(0x00df), sas_address(0x5000cca2525f2a25), phy(0) [ 13.542870] mpt3sas_cm0: REPORT_LUNS: handle(0x00df), retries(0) [ 13.548998] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00df), lun(0) [ 13.555809] scsi 1:0:63:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 13.564123] scsi 1:0:63:0: SSP: handle(0x00df), sas_addr(0x5000cca2525f2a25), phy(0), device_name(0x5000cca2525f2a27) [ 13.574724] scsi 1:0:63:0: enclosure logical id(0x5000ccab04037180), slot(0) [ 13.581855] scsi 1:0:63:0: enclosure level(0x0000), connector name( C2 ) [ 13.588663] scsi 1:0:63:0: serial_number( 7SHPAG1W) [ 13.594148] scsi 1:0:63:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 13.614122] mpt3sas_cm0: detecting: handle(0x00e0), sas_address(0x5000cca2525e977d), phy(1) [ 13.622472] mpt3sas_cm0: REPORT_LUNS: handle(0x00e0), retries(0) [ 13.628632] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00e0), lun(0) [ 13.655932] scsi 1:0:64:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 13.664219] scsi 1:0:64:0: SSP: handle(0x00e0), sas_addr(0x5000cca2525e977d), phy(1), device_name(0x5000cca2525e977f) [ 13.674816] scsi 1:0:64:0: enclosure logical id(0x5000ccab04037180), slot(2) [ 13.681949] scsi 1:0:64:0: enclosure level(0x0000), connector name( C2 ) [ 13.688738] scsi 1:0:64:0: serial_number( 7SHP0P8W) [ 13.694223] scsi 1:0:64:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 13.714119] mpt3sas_cm0: detecting: handle(0x00e1), sas_address(0x5000cca2525ed2bd), phy(2) [ 13.722472] mpt3sas_cm0: REPORT_LUNS: handle(0x00e1), retries(0) [ 13.728617] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00e1), lun(0) [ 13.735337] scsi 1:0:65:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 13.743641] scsi 1:0:65:0: SSP: handle(0x00e1), sas_addr(0x5000cca2525ed2bd), phy(2), device_name(0x5000cca2525ed2bf) [ 13.754241] scsi 1:0:65:0: enclosure logical 
id(0x5000ccab04037180), slot(11) [ 13.761461] scsi 1:0:65:0: enclosure level(0x0000), connector name( C2 ) [ 13.768264] scsi 1:0:65:0: serial_number( 7SHP4MLW) [ 13.773754] scsi 1:0:65:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 13.796138] mpt3sas_cm0: detecting: handle(0x00e2), sas_address(0x5000cca2525ec049), phy(3) [ 13.804485] mpt3sas_cm0: REPORT_LUNS: handle(0x00e2), retries(0) [ 13.810617] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00e2), lun(0) [ 13.817238] scsi 1:0:66:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 13.825540] scsi 1:0:66:0: SSP: handle(0x00e2), sas_addr(0x5000cca2525ec049), phy(3), device_name(0x5000cca2525ec04b) [ 13.836142] scsi 1:0:66:0: enclosure logical id(0x5000ccab04037180), slot(12) [ 13.843361] scsi 1:0:66:0: enclosure level(0x0000), connector name( C2 ) [ 13.850166] scsi 1:0:66:0: serial_number( 7SHP3DHW) [ 13.855653] scsi 1:0:66:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 13.878127] mpt3sas_cm0: detecting: handle(0x00e3), sas_address(0x5000cca2525ff611), phy(4) [ 13.886474] mpt3sas_cm0: REPORT_LUNS: handle(0x00e3), retries(0) [ 13.892637] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00e3), lun(0) [ 13.899409] scsi 1:0:67:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 13.907720] scsi 1:0:67:0: SSP: handle(0x00e3), sas_addr(0x5000cca2525ff611), phy(4), device_name(0x5000cca2525ff613) [ 13.918320] scsi 1:0:67:0: enclosure logical id(0x5000ccab04037180), slot(13) [ 13.925540] scsi 1:0:67:0: enclosure level(0x0000), connector name( C2 ) [ 13.932344] scsi 1:0:67:0: serial_number( 7SHPT11W) [ 13.937833] scsi 1:0:67:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 13.959124] mpt3sas_cm0: detecting: handle(0x00e4), sas_address(0x5000cca2526016ed), phy(5) [ 13.967477] mpt3sas_cm0: REPORT_LUNS: handle(0x00e4), retries(0) [ 13.973660] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00e4), lun(0) [ 14.005489] scsi 1:0:68:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 14.013799] scsi 1:0:68:0: SSP: handle(0x00e4), sas_addr(0x5000cca2526016ed), phy(5), device_name(0x5000cca2526016ef) [ 14.024396] scsi 1:0:68:0: enclosure logical id(0x5000ccab04037180), slot(14) [ 14.031616] scsi 1:0:68:0: enclosure level(0x0000), connector name( C2 ) [ 14.038407] scsi 1:0:68:0: serial_number( 7SHPV6WW) [ 14.043899] scsi 1:0:68:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 14.064130] mpt3sas_cm0: detecting: handle(0x00e5), sas_address(0x5000cca2525f4871), phy(6) [ 14.072485] mpt3sas_cm0: REPORT_LUNS: handle(0x00e5), retries(0) [ 14.078627] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00e5), lun(0) [ 14.085235] scsi 1:0:69:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 14.093540] scsi 1:0:69:0: SSP: handle(0x00e5), sas_addr(0x5000cca2525f4871), phy(6), device_name(0x5000cca2525f4873) [ 14.104140] scsi 1:0:69:0: enclosure logical id(0x5000ccab04037180), slot(15) [ 14.111360] scsi 1:0:69:0: enclosure level(0x0000), connector name( C2 ) [ 14.118161] scsi 1:0:69:0: serial_number( 7SHPDGLW) [ 14.123650] scsi 1:0:69:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 14.146130] mpt3sas_cm0: detecting: handle(0x00e6), sas_address(0x5000cca2525f568d), phy(7) [ 14.154483] mpt3sas_cm0: REPORT_LUNS: handle(0x00e6), retries(0) [ 14.160652] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00e6), lun(0) [ 14.167273] scsi 1:0:70:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 14.175580] scsi 1:0:70:0: SSP: handle(0x00e6), 
sas_addr(0x5000cca2525f568d), phy(7), device_name(0x5000cca2525f568f) [ 14.186180] scsi 1:0:70:0: enclosure logical id(0x5000ccab04037180), slot(16) [ 14.193399] scsi 1:0:70:0: enclosure level(0x0000), connector name( C2 ) [ 14.200206] scsi 1:0:70:0: serial_number( 7SHPEDRW) [ 14.205692] scsi 1:0:70:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 14.228132] mpt3sas_cm0: detecting: handle(0x00e7), sas_address(0x5000cca2525f6c25), phy(8) [ 14.236481] mpt3sas_cm0: REPORT_LUNS: handle(0x00e7), retries(0) [ 14.242622] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00e7), lun(0) [ 14.254153] scsi 1:0:71:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 14.262443] scsi 1:0:71:0: SSP: handle(0x00e7), sas_addr(0x5000cca2525f6c25), phy(8), device_name(0x5000cca2525f6c27) [ 14.273040] scsi 1:0:71:0: enclosure logical id(0x5000ccab04037180), slot(17) [ 14.280259] scsi 1:0:71:0: enclosure level(0x0000), connector name( C2 ) [ 14.287050] scsi 1:0:71:0: serial_number( 7SHPGV9W) [ 14.292533] scsi 1:0:71:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 14.315135] mpt3sas_cm0: detecting: handle(0x00e8), sas_address(0x5000cca2525ed401), phy(9) [ 14.323488] mpt3sas_cm0: REPORT_LUNS: handle(0x00e8), retries(0) [ 14.329628] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00e8), lun(0) [ 14.336258] scsi 1:0:72:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 14.344556] scsi 1:0:72:0: SSP: handle(0x00e8), sas_addr(0x5000cca2525ed401), phy(9), device_name(0x5000cca2525ed403) [ 14.355158] scsi 1:0:72:0: enclosure logical id(0x5000ccab04037180), slot(18) [ 14.362378] scsi 1:0:72:0: enclosure level(0x0000), connector name( C2 ) [ 14.369182] scsi 1:0:72:0: serial_number( 7SHP4R6W) [ 14.374671] scsi 1:0:72:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 14.397136] mpt3sas_cm0: detecting: handle(0x00e9), sas_address(0x5000cca2525e0405), phy(10) [ 14.405570] mpt3sas_cm0: REPORT_LUNS: handle(0x00e9), retries(0) [ 14.411737] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00e9), lun(0) [ 14.418352] scsi 1:0:73:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 14.426667] scsi 1:0:73:0: SSP: handle(0x00e9), sas_addr(0x5000cca2525e0405), phy(10), device_name(0x5000cca2525e0407) [ 14.437355] scsi 1:0:73:0: enclosure logical id(0x5000ccab04037180), slot(19) [ 14.444575] scsi 1:0:73:0: enclosure level(0x0000), connector name( C2 ) [ 14.451380] scsi 1:0:73:0: serial_number( 7SHNPVUW) [ 14.456868] scsi 1:0:73:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 14.478140] mpt3sas_cm0: detecting: handle(0x00ea), sas_address(0x5000cca2525ea9e5), phy(11) [ 14.486577] mpt3sas_cm0: REPORT_LUNS: handle(0x00ea), retries(0) [ 14.493255] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00ea), lun(0) [ 14.499876] scsi 1:0:74:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 14.508186] scsi 1:0:74:0: SSP: handle(0x00ea), sas_addr(0x5000cca2525ea9e5), phy(11), device_name(0x5000cca2525ea9e7) [ 14.518875] scsi 1:0:74:0: enclosure logical id(0x5000ccab04037180), slot(20) [ 14.526095] scsi 1:0:74:0: enclosure level(0x0000), connector name( C2 ) [ 14.532904] scsi 1:0:74:0: serial_number( 7SHP1X8W) [ 14.538397] scsi 1:0:74:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 14.558139] mpt3sas_cm0: detecting: handle(0x00eb), sas_address(0x5000cca2525f1d39), phy(12) [ 14.566584] mpt3sas_cm0: REPORT_LUNS: handle(0x00eb), retries(0) [ 14.572745] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00eb), lun(0) [ 14.579363] 
scsi 1:0:75:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 14.587665] scsi 1:0:75:0: SSP: handle(0x00eb), sas_addr(0x5000cca2525f1d39), phy(12), device_name(0x5000cca2525f1d3b) [ 14.598352] scsi 1:0:75:0: enclosure logical id(0x5000ccab04037180), slot(21) [ 14.605569] scsi 1:0:75:0: enclosure level(0x0000), connector name( C2 ) [ 14.612376] scsi 1:0:75:0: serial_number( 7SHP9LBW) [ 14.617862] scsi 1:0:75:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 14.640140] mpt3sas_cm0: detecting: handle(0x00ec), sas_address(0x5000cca2525ea499), phy(13) [ 14.648580] mpt3sas_cm0: REPORT_LUNS: handle(0x00ec), retries(0) [ 14.654746] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00ec), lun(0) [ 14.661569] scsi 1:0:76:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 14.669878] scsi 1:0:76:0: SSP: handle(0x00ec), sas_addr(0x5000cca2525ea499), phy(13), device_name(0x5000cca2525ea49b) [ 14.680565] scsi 1:0:76:0: enclosure logical id(0x5000ccab04037180), slot(22) [ 14.687784] scsi 1:0:76:0: enclosure level(0x0000), connector name( C2 ) [ 14.694592] scsi 1:0:76:0: serial_number( 7SHP1KAW) [ 14.700079] scsi 1:0:76:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 14.725144] mpt3sas_cm0: detecting: handle(0x00ed), sas_address(0x5000cca2525fba05), phy(14) [ 14.733584] mpt3sas_cm0: REPORT_LUNS: handle(0x00ed), retries(0) [ 14.739718] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00ed), lun(0) [ 14.789076] scsi 1:0:77:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 14.797376] scsi 1:0:77:0: SSP: handle(0x00ed), sas_addr(0x5000cca2525fba05), phy(14), device_name(0x5000cca2525fba07) [ 14.808063] scsi 1:0:77:0: enclosure logical id(0x5000ccab04037180), slot(23) [ 14.815283] scsi 1:0:77:0: enclosure level(0x0000), connector name( C2 ) [ 14.822073] scsi 1:0:77:0: serial_number( 7SHPN12W) [ 14.827558] scsi 1:0:77:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 14.856154] mpt3sas_cm0: detecting: handle(0x00ee), sas_address(0x5000cca2525e121d), phy(15) [ 14.864595] mpt3sas_cm0: REPORT_LUNS: handle(0x00ee), retries(0) [ 14.870737] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00ee), lun(0) [ 14.877356] scsi 1:0:78:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 14.885656] scsi 1:0:78:0: SSP: handle(0x00ee), sas_addr(0x5000cca2525e121d), phy(15), device_name(0x5000cca2525e121f) [ 14.896346] scsi 1:0:78:0: enclosure logical id(0x5000ccab04037180), slot(24) [ 14.903563] scsi 1:0:78:0: enclosure level(0x0000), connector name( C2 ) [ 14.910369] scsi 1:0:78:0: serial_number( 7SHNRTXW) [ 14.915855] scsi 1:0:78:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 14.938146] mpt3sas_cm0: detecting: handle(0x00ef), sas_address(0x5000cca2525e98f5), phy(16) [ 14.946585] mpt3sas_cm0: REPORT_LUNS: handle(0x00ef), retries(0) [ 14.952725] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00ef), lun(0) [ 14.959338] scsi 1:0:79:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 14.967638] scsi 1:0:79:0: SSP: handle(0x00ef), sas_addr(0x5000cca2525e98f5), phy(16), device_name(0x5000cca2525e98f7) [ 14.978326] scsi 1:0:79:0: enclosure logical id(0x5000ccab04037180), slot(25) [ 14.985543] scsi 1:0:79:0: enclosure level(0x0000), connector name( C2 ) [ 14.992349] scsi 1:0:79:0: serial_number( 7SHP0T9W) [ 14.997838] scsi 1:0:79:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 15.021146] mpt3sas_cm0: detecting: handle(0x00f0), sas_address(0x5000cca2525f8175), phy(17) [ 15.029585] 
mpt3sas_cm0: REPORT_LUNS: handle(0x00f0), retries(0) [ 15.035715] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00f0), lun(0) [ 15.042329] scsi 1:0:80:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 15.050627] scsi 1:0:80:0: SSP: handle(0x00f0), sas_addr(0x5000cca2525f8175), phy(17), device_name(0x5000cca2525f8177) [ 15.061309] scsi 1:0:80:0: enclosure logical id(0x5000ccab04037180), slot(26) [ 15.068531] scsi 1:0:80:0: enclosure level(0x0000), connector name( C2 ) [ 15.075336] scsi 1:0:80:0: serial_number( 7SHPJ89W) [ 15.080822] scsi 1:0:80:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 15.103149] mpt3sas_cm0: detecting: handle(0x00f1), sas_address(0x5000cca2525fb01d), phy(18) [ 15.111581] mpt3sas_cm0: REPORT_LUNS: handle(0x00f1), retries(0) [ 15.117715] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00f1), lun(0) [ 15.124332] scsi 1:0:81:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 15.132636] scsi 1:0:81:0: SSP: handle(0x00f1), sas_addr(0x5000cca2525fb01d), phy(18), device_name(0x5000cca2525fb01f) [ 15.143326] scsi 1:0:81:0: enclosure logical id(0x5000ccab04037180), slot(27) [ 15.150545] scsi 1:0:81:0: enclosure level(0x0000), connector name( C2 ) [ 15.157349] scsi 1:0:81:0: serial_number( 7SHPMBMW) [ 15.162835] scsi 1:0:81:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 15.186155] mpt3sas_cm0: detecting: handle(0x00f2), sas_address(0x5000cca2525ed549), phy(19) [ 15.194593] mpt3sas_cm0: REPORT_LUNS: handle(0x00f2), retries(0) [ 15.200727] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00f2), lun(0) [ 15.207334] scsi 1:0:82:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 15.215630] scsi 1:0:82:0: SSP: handle(0x00f2), sas_addr(0x5000cca2525ed549), phy(19), device_name(0x5000cca2525ed54b) [ 15.226320] scsi 1:0:82:0: enclosure logical id(0x5000ccab04037180), slot(28) [ 15.233539] scsi 1:0:82:0: enclosure level(0x0000), connector name( C2 ) [ 15.240345] scsi 1:0:82:0: serial_number( 7SHP4TVW) [ 15.245831] scsi 1:0:82:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 15.266153] mpt3sas_cm0: detecting: handle(0x00f3), sas_address(0x5000cca2525fa035), phy(20) [ 15.274590] mpt3sas_cm0: REPORT_LUNS: handle(0x00f3), retries(0) [ 15.280733] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00f3), lun(0) [ 15.287347] scsi 1:0:83:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 15.295655] scsi 1:0:83:0: SSP: handle(0x00f3), sas_addr(0x5000cca2525fa035), phy(20), device_name(0x5000cca2525fa037) [ 15.306340] scsi 1:0:83:0: enclosure logical id(0x5000ccab04037180), slot(29) [ 15.313560] scsi 1:0:83:0: enclosure level(0x0000), connector name( C2 ) [ 15.320366] scsi 1:0:83:0: serial_number( 7SHPL9TW) [ 15.325854] scsi 1:0:83:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 15.349158] mpt3sas_cm0: detecting: handle(0x00f4), sas_address(0x5000cca2525fb941), phy(21) [ 15.357593] mpt3sas_cm0: REPORT_LUNS: handle(0x00f4), retries(0) [ 15.363731] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00f4), lun(0) [ 15.370345] scsi 1:0:84:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 15.378645] scsi 1:0:84:0: SSP: handle(0x00f4), sas_addr(0x5000cca2525fb941), phy(21), device_name(0x5000cca2525fb943) [ 15.389335] scsi 1:0:84:0: enclosure logical id(0x5000ccab04037180), slot(30) [ 15.396555] scsi 1:0:84:0: enclosure level(0x0000), connector name( C2 ) [ 15.403359] scsi 1:0:84:0: serial_number( 7SHPMZHW) [ 15.408847] scsi 1:0:84:0: qdepth(254), tagged(1), simple(0), ordered(0), 
scsi_level(7), cmd_que(1) [ 15.429747] mpt3sas_cm0: detecting: handle(0x00f5), sas_address(0x5000cca2525e22e5), phy(22) [ 15.438185] mpt3sas_cm0: REPORT_LUNS: handle(0x00f5), retries(0) [ 15.444315] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00f5), lun(0) [ 15.450935] scsi 1:0:85:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 15.459241] scsi 1:0:85:0: SSP: handle(0x00f5), sas_addr(0x5000cca2525e22e5), phy(22), device_name(0x5000cca2525e22e7) [ 15.469927] scsi 1:0:85:0: enclosure logical id(0x5000ccab04037180), slot(31) [ 15.477147] scsi 1:0:85:0: enclosure level(0x0000), connector name( C2 ) [ 15.483951] scsi 1:0:85:0: serial_number( 7SHNSXKW) [ 15.489442] scsi 1:0:85:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 15.509164] mpt3sas_cm0: detecting: handle(0x00f6), sas_address(0x5000cca2525fb5bd), phy(23) [ 15.517601] mpt3sas_cm0: REPORT_LUNS: handle(0x00f6), retries(0) [ 15.523788] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00f6), lun(0) [ 15.530401] scsi 1:0:86:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 15.538705] scsi 1:0:86:0: SSP: handle(0x00f6), sas_addr(0x5000cca2525fb5bd), phy(23), device_name(0x5000cca2525fb5bf) [ 15.549394] scsi 1:0:86:0: enclosure logical id(0x5000ccab04037180), slot(32) [ 15.556613] scsi 1:0:86:0: enclosure level(0x0000), connector name( C2 ) [ 15.563421] scsi 1:0:86:0: serial_number( 7SHPMS7W) [ 15.568909] scsi 1:0:86:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 15.589160] mpt3sas_cm0: detecting: handle(0x00f7), sas_address(0x5000cca2525eb77d), phy(24) [ 15.597596] mpt3sas_cm0: REPORT_LUNS: handle(0x00f7), retries(0) [ 15.603763] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00f7), lun(0) [ 15.610376] scsi 1:0:87:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 15.618679] scsi 1:0:87:0: SSP: handle(0x00f7), sas_addr(0x5000cca2525eb77d), phy(24), device_name(0x5000cca2525eb77f) [ 15.629363] scsi 1:0:87:0: enclosure logical id(0x5000ccab04037180), slot(33) [ 15.636582] scsi 1:0:87:0: enclosure level(0x0000), connector name( C2 ) [ 15.643386] scsi 1:0:87:0: serial_number( 7SHP2UAW) [ 15.648875] scsi 1:0:87:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 15.671164] mpt3sas_cm0: detecting: handle(0x00f8), sas_address(0x5000cca2525e1139), phy(25) [ 15.679601] mpt3sas_cm0: REPORT_LUNS: handle(0x00f8), retries(0) [ 15.685766] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00f8), lun(0) [ 15.729390] scsi 1:0:88:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 15.737813] scsi 1:0:88:0: SSP: handle(0x00f8), sas_addr(0x5000cca2525e1139), phy(25), device_name(0x5000cca2525e113b) [ 15.748500] scsi 1:0:88:0: enclosure logical id(0x5000ccab04037180), slot(34) [ 15.755717] scsi 1:0:88:0: enclosure level(0x0000), connector name( C2 ) [ 15.762509] scsi 1:0:88:0: serial_number( 7SHNRS2W) [ 15.767994] scsi 1:0:88:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 15.788166] mpt3sas_cm0: detecting: handle(0x00f9), sas_address(0x5000cca2526014f9), phy(26) [ 15.796605] mpt3sas_cm0: REPORT_LUNS: handle(0x00f9), retries(0) [ 15.802735] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00f9), lun(0) [ 15.810510] scsi 1:0:89:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 15.818854] scsi 1:0:89:0: SSP: handle(0x00f9), sas_addr(0x5000cca2526014f9), phy(26), device_name(0x5000cca2526014fb) [ 15.829544] scsi 1:0:89:0: enclosure logical id(0x5000ccab04037180), slot(35) [ 15.836763] scsi 1:0:89:0: enclosure level(0x0000), connector name( C2 ) [ 
15.843567] scsi 1:0:89:0: serial_number( 7SHPV2VW) [ 15.849054] scsi 1:0:89:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 15.873233] mpt3sas_cm0: detecting: handle(0x00fa), sas_address(0x5000cca252598785), phy(27) [ 15.881670] mpt3sas_cm0: REPORT_LUNS: handle(0x00fa), retries(0) [ 15.887834] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00fa), lun(0) [ 15.894455] scsi 1:0:90:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 15.902803] scsi 1:0:90:0: SSP: handle(0x00fa), sas_addr(0x5000cca252598785), phy(27), device_name(0x5000cca252598787) [ 15.913490] scsi 1:0:90:0: enclosure logical id(0x5000ccab04037180), slot(36) [ 15.920710] scsi 1:0:90:0: enclosure level(0x0000), connector name( C2 ) [ 15.927515] scsi 1:0:90:0: serial_number( 7SHL7BRW) [ 15.933003] scsi 1:0:90:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 15.953179] mpt3sas_cm0: detecting: handle(0x00fb), sas_address(0x5000cca2525f5365), phy(28) [ 15.961615] mpt3sas_cm0: REPORT_LUNS: handle(0x00fb), retries(0) [ 15.967778] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00fb), lun(0) [ 15.974542] scsi 1:0:91:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 15.982847] scsi 1:0:91:0: SSP: handle(0x00fb), sas_addr(0x5000cca2525f5365), phy(28), device_name(0x5000cca2525f5367) [ 15.993529] scsi 1:0:91:0: enclosure logical id(0x5000ccab04037180), slot(37) [ 16.000749] scsi 1:0:91:0: enclosure level(0x0000), connector name( C2 ) [ 16.007553] scsi 1:0:91:0: serial_number( 7SHPE66W) [ 16.013042] scsi 1:0:91:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 16.033172] mpt3sas_cm0: detecting: handle(0x00fc), sas_address(0x5000cca2525e263d), phy(29) [ 16.041609] mpt3sas_cm0: REPORT_LUNS: handle(0x00fc), retries(0) [ 16.047767] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00fc), lun(0) [ 16.054436] scsi 1:0:92:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 16.062741] scsi 1:0:92:0: SSP: handle(0x00fc), sas_addr(0x5000cca2525e263d), phy(29), device_name(0x5000cca2525e263f) [ 16.073428] scsi 1:0:92:0: enclosure logical id(0x5000ccab04037180), slot(38) [ 16.080647] scsi 1:0:92:0: enclosure level(0x0000), connector name( C2 ) [ 16.087455] scsi 1:0:92:0: serial_number( 7SHNT4GW) [ 16.092941] scsi 1:0:92:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 16.127183] mpt3sas_cm0: detecting: handle(0x00fd), sas_address(0x5000cca2525f6081), phy(30) [ 16.135618] mpt3sas_cm0: REPORT_LUNS: handle(0x00fd), retries(0) [ 16.142279] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00fd), lun(0) [ 16.186312] scsi 1:0:93:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 16.194635] scsi 1:0:93:0: SSP: handle(0x00fd), sas_addr(0x5000cca2525f6081), phy(30), device_name(0x5000cca2525f6083) [ 16.205323] scsi 1:0:93:0: enclosure logical id(0x5000ccab04037180), slot(39) [ 16.212540] scsi 1:0:93:0: enclosure level(0x0000), connector name( C2 ) [ 16.219331] scsi 1:0:93:0: serial_number( 7SHPG28W) [ 16.224817] scsi 1:0:93:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 16.253189] mpt3sas_cm0: detecting: handle(0x00fe), sas_address(0x5000cca2525ec83d), phy(31) [ 16.261629] mpt3sas_cm0: REPORT_LUNS: handle(0x00fe), retries(0) [ 16.267770] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00fe), lun(0) [ 16.274376] scsi 1:0:94:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 16.282685] scsi 1:0:94:0: SSP: handle(0x00fe), sas_addr(0x5000cca2525ec83d), phy(31), device_name(0x5000cca2525ec83f) [ 16.293367] scsi 1:0:94:0: 
enclosure logical id(0x5000ccab04037180), slot(40) [ 16.300589] scsi 1:0:94:0: enclosure level(0x0000), connector name( C2 ) [ 16.307392] scsi 1:0:94:0: serial_number( 7SHP3XXW) [ 16.312881] scsi 1:0:94:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 16.342179] mpt3sas_cm0: detecting: handle(0x00ff), sas_address(0x5000cca2525ec019), phy(32) [ 16.350634] mpt3sas_cm0: REPORT_LUNS: handle(0x00ff), retries(0) [ 16.356755] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00ff), lun(0) [ 16.363537] scsi 1:0:95:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 16.372787] scsi 1:0:95:0: SSP: handle(0x00ff), sas_addr(0x5000cca2525ec019), phy(32), device_name(0x5000cca2525ec01b) [ 16.383469] scsi 1:0:95:0: enclosure logical id(0x5000ccab04037180), slot(41) [ 16.390689] scsi 1:0:95:0: enclosure level(0x0000), connector name( C2 ) [ 16.397496] scsi 1:0:95:0: serial_number( 7SHP3D3W) [ 16.402982] scsi 1:0:95:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 16.423183] mpt3sas_cm0: detecting: handle(0x0100), sas_address(0x5000cca2525ec559), phy(33) [ 16.431618] mpt3sas_cm0: REPORT_LUNS: handle(0x0100), retries(0) [ 16.437783] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0100), lun(0) [ 16.444556] scsi 1:0:96:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 16.452863] scsi 1:0:96:0: SSP: handle(0x0100), sas_addr(0x5000cca2525ec559), phy(33), device_name(0x5000cca2525ec55b) [ 16.463552] scsi 1:0:96:0: enclosure logical id(0x5000ccab04037180), slot(42) [ 16.470772] scsi 1:0:96:0: enclosure level(0x0000), connector name( C2 ) [ 16.477577] scsi 1:0:96:0: serial_number( 7SHP3RYW) [ 16.483064] scsi 1:0:96:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 16.503185] mpt3sas_cm0: detecting: handle(0x0101), sas_address(0x5000cca2525fd4a1), phy(34) [ 16.511624] mpt3sas_cm0: REPORT_LUNS: handle(0x0101), retries(0) [ 16.517799] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0101), lun(0) [ 16.524587] scsi 1:0:97:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 16.532892] scsi 1:0:97:0: SSP: handle(0x0101), sas_addr(0x5000cca2525fd4a1), phy(34), device_name(0x5000cca2525fd4a3) [ 16.543582] scsi 1:0:97:0: enclosure logical id(0x5000ccab04037180), slot(43) [ 16.550801] scsi 1:0:97:0: enclosure level(0x0000), connector name( C2 ) [ 16.557607] scsi 1:0:97:0: serial_number( 7SHPPU0W) [ 16.563099] scsi 1:0:97:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 16.583186] mpt3sas_cm0: detecting: handle(0x0102), sas_address(0x5000cca2525eb5f5), phy(35) [ 16.591629] mpt3sas_cm0: REPORT_LUNS: handle(0x0102), retries(0) [ 16.597771] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0102), lun(0) [ 16.604392] scsi 1:0:98:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 16.612692] scsi 1:0:98:0: SSP: handle(0x0102), sas_addr(0x5000cca2525eb5f5), phy(35), device_name(0x5000cca2525eb5f7) [ 16.623378] scsi 1:0:98:0: enclosure logical id(0x5000ccab04037180), slot(44) [ 16.630598] scsi 1:0:98:0: enclosure level(0x0000), connector name( C2 ) [ 16.637402] scsi 1:0:98:0: serial_number( 7SHP2R5W) [ 16.642889] scsi 1:0:98:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 16.666186] mpt3sas_cm0: detecting: handle(0x0103), sas_address(0x5000cca2525ebeb1), phy(36) [ 16.674621] mpt3sas_cm0: REPORT_LUNS: handle(0x0103), retries(0) [ 16.680754] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0103), lun(0) [ 16.689115] scsi 1:0:99:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 16.697422] scsi 1:0:99:0: 
SSP: handle(0x0103), sas_addr(0x5000cca2525ebeb1), phy(36), device_name(0x5000cca2525ebeb3) [ 16.708106] scsi 1:0:99:0: enclosure logical id(0x5000ccab04037180), slot(45) [ 16.715323] scsi 1:0:99:0: enclosure level(0x0000), connector name( C2 ) [ 16.722128] scsi 1:0:99:0: serial_number( 7SHP396W) [ 16.727617] scsi 1:0:99:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 16.750191] mpt3sas_cm0: detecting: handle(0x0104), sas_address(0x5000cca2525f2919), phy(37) [ 16.758628] mpt3sas_cm0: REPORT_LUNS: handle(0x0104), retries(0) [ 16.764805] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0104), lun(0) [ 16.783723] scsi 1:0:100:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 16.792107] scsi 1:0:100:0: SSP: handle(0x0104), sas_addr(0x5000cca2525f2919), phy(37), device_name(0x5000cca2525f291b) [ 16.802879] scsi 1:0:100:0: enclosure logical id(0x5000ccab04037180), slot(46) [ 16.810183] scsi 1:0:100:0: enclosure level(0x0000), connector name( C2 ) [ 16.817060] scsi 1:0:100:0: serial_number( 7SHPABWW) [ 16.822631] scsi 1:0:100:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 16.845209] mpt3sas_cm0: detecting: handle(0x0105), sas_address(0x5000cca252602c0d), phy(38) [ 16.853644] mpt3sas_cm0: REPORT_LUNS: handle(0x0105), retries(0) [ 16.859778] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0105), lun(0) [ 16.879826] scsi 1:0:101:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 16.888194] scsi 1:0:101:0: SSP: handle(0x0105), sas_addr(0x5000cca252602c0d), phy(38), device_name(0x5000cca252602c0f) [ 16.898968] scsi 1:0:101:0: enclosure logical id(0x5000ccab04037180), slot(47) [ 16.906272] scsi 1:0:101:0: enclosure level(0x0000), connector name( C2 ) [ 16.913151] scsi 1:0:101:0: serial_number( 7SHPWMHW) [ 16.918721] scsi 1:0:101:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 16.939195] mpt3sas_cm0: detecting: handle(0x0106), sas_address(0x5000cca2525e7cfd), phy(39) [ 16.947635] mpt3sas_cm0: REPORT_LUNS: handle(0x0106), retries(0) [ 16.953767] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0106), lun(0) [ 16.960380] scsi 1:0:102:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 16.968765] scsi 1:0:102:0: SSP: handle(0x0106), sas_addr(0x5000cca2525e7cfd), phy(39), device_name(0x5000cca2525e7cff) [ 16.979535] scsi 1:0:102:0: enclosure logical id(0x5000ccab04037180), slot(48) [ 16.986839] scsi 1:0:102:0: enclosure level(0x0000), connector name( C2 ) [ 16.993733] scsi 1:0:102:0: serial_number( 7SHNYXKW) [ 16.999308] scsi 1:0:102:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 17.023198] mpt3sas_cm0: detecting: handle(0x0107), sas_address(0x5000cca2525f6a31), phy(40) [ 17.031635] mpt3sas_cm0: REPORT_LUNS: handle(0x0107), retries(0) [ 17.037794] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0107), lun(0) [ 17.045237] scsi 1:0:103:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 17.053623] scsi 1:0:103:0: SSP: handle(0x0107), sas_addr(0x5000cca2525f6a31), phy(40), device_name(0x5000cca2525f6a33) [ 17.064393] scsi 1:0:103:0: enclosure logical id(0x5000ccab04037180), slot(49) [ 17.071697] scsi 1:0:103:0: enclosure level(0x0000), connector name( C2 ) [ 17.078594] scsi 1:0:103:0: serial_number( 7SHPGR8W) [ 17.084173] scsi 1:0:103:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 17.104197] mpt3sas_cm0: detecting: handle(0x0108), sas_address(0x5000cca2525f7f25), phy(41) [ 17.112636] mpt3sas_cm0: REPORT_LUNS: handle(0x0108), retries(0) [ 17.118778] mpt3sas_cm0: 
TEST_UNIT_READY: handle(0x0108), lun(0) [ 17.125418] scsi 1:0:104:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 17.133823] scsi 1:0:104:0: SSP: handle(0x0108), sas_addr(0x5000cca2525f7f25), phy(41), device_name(0x5000cca2525f7f27) [ 17.144595] scsi 1:0:104:0: enclosure logical id(0x5000ccab04037180), slot(50) [ 17.151902] scsi 1:0:104:0: enclosure level(0x0000), connector name( C2 ) [ 17.158793] scsi 1:0:104:0: serial_number( 7SHPJ3JW) [ 17.164367] scsi 1:0:104:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 17.191206] mpt3sas_cm0: detecting: handle(0x0109), sas_address(0x5000cca2525eb4b1), phy(42) [ 17.199658] mpt3sas_cm0: REPORT_LUNS: handle(0x0109), retries(0) [ 17.205822] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0109), lun(0) [ 17.212648] scsi 1:0:105:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 17.221083] scsi 1:0:105:0: SSP: handle(0x0109), sas_addr(0x5000cca2525eb4b1), phy(42), device_name(0x5000cca2525eb4b3) [ 17.231852] scsi 1:0:105:0: enclosure logical id(0x5000ccab04037180), slot(51) [ 17.239160] scsi 1:0:105:0: enclosure level(0x0000), connector name( C2 ) [ 17.246053] scsi 1:0:105:0: serial_number( 7SHP2MKW) [ 17.251624] scsi 1:0:105:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 17.274209] mpt3sas_cm0: detecting: handle(0x010a), sas_address(0x5000cca2525e1f9d), phy(43) [ 17.282646] mpt3sas_cm0: REPORT_LUNS: handle(0x010a), retries(0) [ 17.288782] mpt3sas_cm0: TEST_UNIT_READY: handle(0x010a), lun(0) [ 17.295399] scsi 1:0:106:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 17.303786] scsi 1:0:106:0: SSP: handle(0x010a), sas_addr(0x5000cca2525e1f9d), phy(43), device_name(0x5000cca2525e1f9f) [ 17.314563] scsi 1:0:106:0: enclosure logical id(0x5000ccab04037180), slot(52) [ 17.321867] scsi 1:0:106:0: enclosure level(0x0000), connector name( C2 ) [ 17.328758] scsi 1:0:106:0: serial_number( 7SHNSPTW) [ 17.334334] scsi 1:0:106:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 17.354793] mpt3sas_cm0: detecting: handle(0x010b), sas_address(0x5000cca2525e52fd), phy(44) [ 17.363231] mpt3sas_cm0: REPORT_LUNS: handle(0x010b), retries(0) [ 17.369394] mpt3sas_cm0: TEST_UNIT_READY: handle(0x010b), lun(0) [ 17.376080] scsi 1:0:107:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 17.384470] scsi 1:0:107:0: SSP: handle(0x010b), sas_addr(0x5000cca2525e52fd), phy(44), device_name(0x5000cca2525e52ff) [ 17.395242] scsi 1:0:107:0: enclosure logical id(0x5000ccab04037180), slot(53) [ 17.402549] scsi 1:0:107:0: enclosure level(0x0000), connector name( C2 ) [ 17.409437] scsi 1:0:107:0: serial_number( 7SHNW3VW) [ 17.415014] scsi 1:0:107:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 17.435209] mpt3sas_cm0: detecting: handle(0x010c), sas_address(0x5000cca2525f4e71), phy(45) [ 17.443651] mpt3sas_cm0: REPORT_LUNS: handle(0x010c), retries(0) [ 17.449804] mpt3sas_cm0: TEST_UNIT_READY: handle(0x010c), lun(0) [ 17.456405] scsi 1:0:108:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 17.464795] scsi 1:0:108:0: SSP: handle(0x010c), sas_addr(0x5000cca2525f4e71), phy(45), device_name(0x5000cca2525f4e73) [ 17.475567] scsi 1:0:108:0: enclosure logical id(0x5000ccab04037180), slot(54) [ 17.482871] scsi 1:0:108:0: enclosure level(0x0000), connector name( C2 ) [ 17.489763] scsi 1:0:108:0: serial_number( 7SHPDVZW) [ 17.495339] scsi 1:0:108:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 17.515209] mpt3sas_cm0: 
detecting: handle(0x010d), sas_address(0x5000cca2525fd499), phy(46) [ 17.523648] mpt3sas_cm0: REPORT_LUNS: handle(0x010d), retries(0) [ 17.529788] mpt3sas_cm0: TEST_UNIT_READY: handle(0x010d), lun(0) [ 17.536397] scsi 1:0:109:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 17.544789] scsi 1:0:109:0: SSP: handle(0x010d), sas_addr(0x5000cca2525fd499), phy(46), device_name(0x5000cca2525fd49b) [ 17.555562] scsi 1:0:109:0: enclosure logical id(0x5000ccab04037180), slot(55) [ 17.562866] scsi 1:0:109:0: enclosure level(0x0000), connector name( C2 ) [ 17.569763] scsi 1:0:109:0: serial_number( 7SHPPTYW) [ 17.575335] scsi 1:0:109:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 17.598208] mpt3sas_cm0: detecting: handle(0x010e), sas_address(0x5000cca2525e7879), phy(47) [ 17.606649] mpt3sas_cm0: REPORT_LUNS: handle(0x010e), retries(0) [ 17.612788] mpt3sas_cm0: TEST_UNIT_READY: handle(0x010e), lun(0) [ 17.619434] scsi 1:0:110:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 17.627828] scsi 1:0:110:0: SSP: handle(0x010e), sas_addr(0x5000cca2525e7879), phy(47), device_name(0x5000cca2525e787b) [ 17.638599] scsi 1:0:110:0: enclosure logical id(0x5000ccab04037180), slot(56) [ 17.645906] scsi 1:0:110:0: enclosure level(0x0000), connector name( C2 ) [ 17.652798] scsi 1:0:110:0: serial_number( 7SHNYM7W) [ 17.658370] scsi 1:0:110:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 17.678216] mpt3sas_cm0: detecting: handle(0x010f), sas_address(0x5000cca2525ca199), phy(48) [ 17.686653] mpt3sas_cm0: REPORT_LUNS: handle(0x010f), retries(0) [ 17.692792] mpt3sas_cm0: TEST_UNIT_READY: handle(0x010f), lun(0) [ 17.699410] scsi 1:0:111:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 17.707796] scsi 1:0:111:0: SSP: handle(0x010f), sas_addr(0x5000cca2525ca199), phy(48), device_name(0x5000cca2525ca19b) [ 17.718568] scsi 1:0:111:0: enclosure logical id(0x5000ccab04037180), slot(57) [ 17.725874] scsi 1:0:111:0: enclosure level(0x0000), connector name( C2 ) [ 17.732765] scsi 1:0:111:0: serial_number( 7SHMY83W) [ 17.738341] scsi 1:0:111:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 17.758213] mpt3sas_cm0: detecting: handle(0x0110), sas_address(0x5000cca2525ffb89), phy(49) [ 17.766648] mpt3sas_cm0: REPORT_LUNS: handle(0x0110), retries(0) [ 17.772782] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0110), lun(0) [ 17.779564] scsi 1:0:112:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 17.787965] scsi 1:0:112:0: SSP: handle(0x0110), sas_addr(0x5000cca2525ffb89), phy(49), device_name(0x5000cca2525ffb8b) [ 17.798736] scsi 1:0:112:0: enclosure logical id(0x5000ccab04037180), slot(58) [ 17.806043] scsi 1:0:112:0: enclosure level(0x0000), connector name( C2 ) [ 17.812936] scsi 1:0:112:0: serial_number( 7SHPTDAW) [ 17.818510] scsi 1:0:112:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 17.842215] mpt3sas_cm0: detecting: handle(0x0111), sas_address(0x5000cca2525f2669), phy(50) [ 17.850647] mpt3sas_cm0: REPORT_LUNS: handle(0x0111), retries(0) [ 17.856779] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0111), lun(0) [ 17.863397] scsi 1:0:113:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 17.871788] scsi 1:0:113:0: SSP: handle(0x0111), sas_addr(0x5000cca2525f2669), phy(50), device_name(0x5000cca2525f266b) [ 17.882562] scsi 1:0:113:0: enclosure logical id(0x5000ccab04037180), slot(59) [ 17.889869] scsi 1:0:113:0: enclosure level(0x0000), connector name( C2 ) [ 17.896761] scsi 1:0:113:0: 
serial_number( 7SHPA6AW) [ 17.902335] scsi 1:0:113:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 17.925711] mpt3sas_cm0: expander_add: handle(0x00dd), parent(0x00da), sas_addr(0x5000ccab040371ff), phys(68) [ 17.946147] mpt3sas_cm0: detecting: handle(0x0112), sas_address(0x5000cca2525eacc1), phy(42) [ 17.954580] mpt3sas_cm0: REPORT_LUNS: handle(0x0112), retries(0) [ 17.960701] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0112), lun(0) [ 17.967324] scsi 1:0:114:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 17.975719] scsi 1:0:114:0: SSP: handle(0x0112), sas_addr(0x5000cca2525eacc1), phy(42), device_name(0x5000cca2525eacc3) [ 17.986488] scsi 1:0:114:0: enclosure logical id(0x5000ccab04037180), slot(1) [ 17.993708] scsi 1:0:114:0: enclosure level(0x0000), connector name( C2 ) [ 18.000597] scsi 1:0:114:0: serial_number( 7SHP235W) [ 18.006172] scsi 1:0:114:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 18.026222] mpt3sas_cm0: detecting: handle(0x0113), sas_address(0x5000cca2525f8151), phy(43) [ 18.034662] mpt3sas_cm0: REPORT_LUNS: handle(0x0113), retries(0) [ 18.040793] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0113), lun(0) [ 18.047431] scsi 1:0:115:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 18.055824] scsi 1:0:115:0: SSP: handle(0x0113), sas_addr(0x5000cca2525f8151), phy(43), device_name(0x5000cca2525f8153) [ 18.066596] scsi 1:0:115:0: enclosure logical id(0x5000ccab04037180), slot(3) [ 18.073815] scsi 1:0:115:0: enclosure level(0x0000), connector name( C2 ) [ 18.080711] scsi 1:0:115:0: serial_number( 7SHPJ80W) [ 18.086282] scsi 1:0:115:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 18.106812] mpt3sas_cm0: detecting: handle(0x0114), sas_address(0x5000cca2525ef839), phy(44) [ 18.115250] mpt3sas_cm0: REPORT_LUNS: handle(0x0114), retries(0) [ 18.121390] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0114), lun(0) [ 18.128000] scsi 1:0:116:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 18.136391] scsi 1:0:116:0: SSP: handle(0x0114), sas_addr(0x5000cca2525ef839), phy(44), device_name(0x5000cca2525ef83b) [ 18.147163] scsi 1:0:116:0: enclosure logical id(0x5000ccab04037180), slot(4) [ 18.154383] scsi 1:0:116:0: enclosure level(0x0000), connector name( C2 ) [ 18.161277] scsi 1:0:116:0: serial_number( 7SHP73ZW) [ 18.166848] scsi 1:0:116:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 18.189227] mpt3sas_cm0: detecting: handle(0x0115), sas_address(0x5000cca2525e72a9), phy(45) [ 18.197662] mpt3sas_cm0: REPORT_LUNS: handle(0x0115), retries(0) [ 18.203835] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0115), lun(0) [ 18.210577] scsi 1:0:117:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 18.225366] scsi 1:0:117:0: SSP: handle(0x0115), sas_addr(0x5000cca2525e72a9), phy(45), device_name(0x5000cca2525e72ab) [ 18.236138] scsi 1:0:117:0: enclosure logical id(0x5000ccab04037180), slot(5) [ 18.243356] scsi 1:0:117:0: enclosure level(0x0000), connector name( C2 ) [ 18.250250] scsi 1:0:117:0: serial_number( 7SHNY77W) [ 18.255824] scsi 1:0:117:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 18.278226] mpt3sas_cm0: detecting: handle(0x0116), sas_address(0x5000cca2525d3c89), phy(46) [ 18.286663] mpt3sas_cm0: REPORT_LUNS: handle(0x0116), retries(0) [ 18.292829] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0116), lun(0) [ 18.299537] scsi 1:0:118:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 18.319935] scsi 1:0:118:0: SSP: 
handle(0x0116), sas_addr(0x5000cca2525d3c89), phy(46), device_name(0x5000cca2525d3c8b) [ 18.330710] scsi 1:0:118:0: enclosure logical id(0x5000ccab04037180), slot(6) [ 18.337930] scsi 1:0:118:0: enclosure level(0x0000), connector name( C2 ) [ 18.344822] scsi 1:0:118:0: serial_number( 7SHN8KZW) [ 18.350395] scsi 1:0:118:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 18.370231] mpt3sas_cm0: detecting: handle(0x0117), sas_address(0x5000cca2525fae0d), phy(47) [ 18.378669] mpt3sas_cm0: REPORT_LUNS: handle(0x0117), retries(0) [ 18.384834] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0117), lun(0) [ 18.411287] scsi 1:0:119:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 18.419660] scsi 1:0:119:0: SSP: handle(0x0117), sas_addr(0x5000cca2525fae0d), phy(47), device_name(0x5000cca2525fae0f) [ 18.430432] scsi 1:0:119:0: enclosure logical id(0x5000ccab04037180), slot(7) [ 18.437652] scsi 1:0:119:0: enclosure level(0x0000), connector name( C2 ) [ 18.444526] scsi 1:0:119:0: serial_number( 7SHPM7BW) [ 18.450099] scsi 1:0:119:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 18.470263] mpt3sas_cm0: detecting: handle(0x0118), sas_address(0x5000cca2525efdad), phy(48) [ 18.478701] mpt3sas_cm0: REPORT_LUNS: handle(0x0118), retries(0) [ 18.484863] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0118), lun(0) [ 18.491674] scsi 1:0:120:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 18.500073] scsi 1:0:120:0: SSP: handle(0x0118), sas_addr(0x5000cca2525efdad), phy(48), device_name(0x5000cca2525efdaf) [ 18.510845] scsi 1:0:120:0: enclosure logical id(0x5000ccab04037180), slot(8) [ 18.518062] scsi 1:0:120:0: enclosure level(0x0000), connector name( C2 ) [ 18.524957] scsi 1:0:120:0: serial_number( 7SHP7H7W) [ 18.530539] scsi 1:0:120:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 18.554234] mpt3sas_cm0: detecting: handle(0x0119), sas_address(0x5000cca2525fa301), phy(49) [ 18.562674] mpt3sas_cm0: REPORT_LUNS: handle(0x0119), retries(0) [ 18.568840] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0119), lun(0) [ 18.575693] scsi 1:0:121:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 18.584095] scsi 1:0:121:0: SSP: handle(0x0119), sas_addr(0x5000cca2525fa301), phy(49), device_name(0x5000cca2525fa303) [ 18.594869] scsi 1:0:121:0: enclosure logical id(0x5000ccab04037180), slot(9) [ 18.602089] scsi 1:0:121:0: enclosure level(0x0000), connector name( C2 ) [ 18.608978] scsi 1:0:121:0: serial_number( 7SHPLHKW) [ 18.614554] scsi 1:0:121:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 18.651238] mpt3sas_cm0: detecting: handle(0x011a), sas_address(0x5000cca2525fb4bd), phy(50) [ 18.659676] mpt3sas_cm0: REPORT_LUNS: handle(0x011a), retries(0) [ 18.665820] mpt3sas_cm0: TEST_UNIT_READY: handle(0x011a), lun(0) [ 18.750803] scsi 1:0:122:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 18.759178] scsi 1:0:122:0: SSP: handle(0x011a), sas_addr(0x5000cca2525fb4bd), phy(50), device_name(0x5000cca2525fb4bf) [ 18.769948] scsi 1:0:122:0: enclosure logical id(0x5000ccab04037180), slot(10) [ 18.777255] scsi 1:0:122:0: enclosure level(0x0000), connector name( C2 ) [ 18.784131] scsi 1:0:122:0: serial_number( 7SHPMP5W) [ 18.789703] scsi 1:0:122:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 18.815750] mpt3sas_cm0: expander_add: handle(0x0017), parent(0x0009), sas_addr(0x5000ccab0405db7d), phys(49) [ 18.836307] mpt3sas_cm0: detecting: handle(0x001b), sas_address(0x5000ccab0405db7c), 
phy(48) [ 18.844749] mpt3sas_cm0: REPORT_LUNS: handle(0x001b), retries(0) [ 18.851187] mpt3sas_cm0: TEST_UNIT_READY: handle(0x001b), lun(0) [ 18.858040] scsi 1:0:123:0: Enclosure HGST H4060-J 2033 PQ: 0 ANSI: 6 [ 18.866618] scsi 1:0:123:0: set ignore_delay_remove for handle(0x001b) [ 18.873146] scsi 1:0:123:0: SES: handle(0x001b), sas_addr(0x5000ccab0405db7c), phy(48), device_name(0x0000000000000000) [ 18.883917] scsi 1:0:123:0: enclosure logical id(0x5000ccab0405db00), slot(60) [ 18.891224] scsi 1:0:123:0: enclosure level(0x0000), connector name( C1 ) [ 18.898118] scsi 1:0:123:0: serial_number(USWSJ03918EZ0069 ) [ 18.904038] scsi 1:0:123:0: qdepth(1), tagged(0), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 18.928605] mpt3sas_cm0: expander_add: handle(0x0019), parent(0x0017), sas_addr(0x5000ccab0405db79), phys(68) [ 18.949726] mpt3sas_cm0: detecting: handle(0x001c), sas_address(0x5000cca252550a76), phy(0) [ 18.958079] mpt3sas_cm0: REPORT_LUNS: handle(0x001c), retries(0) [ 18.964939] mpt3sas_cm0: TEST_UNIT_READY: handle(0x001c), lun(0) [ 18.972854] scsi 1:0:124:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 18.981251] scsi 1:0:124:0: SSP: handle(0x001c), sas_addr(0x5000cca252550a76), phy(0), device_name(0x5000cca252550a77) [ 18.991933] scsi 1:0:124:0: enclosure logical id(0x5000ccab0405db00), slot(0) [ 18.999152] scsi 1:0:124:0: enclosure level(0x0000), connector name( C1 ) [ 19.006043] scsi 1:0:124:0: serial_number( 7SHHSVGG) [ 19.011620] scsi 1:0:124:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 19.034230] mpt3sas_cm0: detecting: handle(0x001d), sas_address(0x5000cca25253eb32), phy(1) [ 19.042578] mpt3sas_cm0: REPORT_LUNS: handle(0x001d), retries(0) [ 19.048713] mpt3sas_cm0: TEST_UNIT_READY: handle(0x001d), lun(0) [ 19.055363] scsi 1:0:125:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 19.063753] scsi 1:0:125:0: SSP: handle(0x001d), sas_addr(0x5000cca25253eb32), phy(1), device_name(0x5000cca25253eb33) [ 19.074441] scsi 1:0:125:0: enclosure logical id(0x5000ccab0405db00), slot(2) [ 19.081661] scsi 1:0:125:0: enclosure level(0x0000), connector name( C1 ) [ 19.088554] scsi 1:0:125:0: serial_number( 7SHH4RDG) [ 19.094127] scsi 1:0:125:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 19.114232] mpt3sas_cm0: detecting: handle(0x001e), sas_address(0x5000cca26b950bb6), phy(2) [ 19.122582] mpt3sas_cm0: REPORT_LUNS: handle(0x001e), retries(0) [ 19.128741] mpt3sas_cm0: TEST_UNIT_READY: handle(0x001e), lun(0) [ 19.135446] scsi 1:0:126:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 19.146152] scsi 1:0:126:0: SSP: handle(0x001e), sas_addr(0x5000cca26b950bb6), phy(2), device_name(0x5000cca26b950bb7) [ 19.156839] scsi 1:0:126:0: enclosure logical id(0x5000ccab0405db00), slot(11) [ 19.164144] scsi 1:0:126:0: enclosure level(0x0000), connector name( C1 ) [ 19.171038] scsi 1:0:126:0: serial_number( 1SJMZ22Z) [ 19.176611] scsi 1:0:126:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 19.199239] mpt3sas_cm0: detecting: handle(0x001f), sas_address(0x5000cca25253f3be), phy(3) [ 19.207591] mpt3sas_cm0: REPORT_LUNS: handle(0x001f), retries(0) [ 19.213727] mpt3sas_cm0: TEST_UNIT_READY: handle(0x001f), lun(0) [ 19.234740] scsi 1:0:127:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 19.243128] scsi 1:0:127:0: SSP: handle(0x001f), sas_addr(0x5000cca25253f3be), phy(3), device_name(0x5000cca25253f3bf) [ 19.253813] scsi 1:0:127:0: enclosure logical id(0x5000ccab0405db00), slot(12) 
[ 19.261117] scsi 1:0:127:0: enclosure level(0x0000), connector name( C1 ) [ 19.267995] scsi 1:0:127:0: serial_number( 7SHH591G) [ 19.273567] scsi 1:0:127:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 19.312240] mpt3sas_cm0: detecting: handle(0x0020), sas_address(0x5000cca26a2ac3da), phy(4) [ 19.320593] mpt3sas_cm0: REPORT_LUNS: handle(0x0020), retries(0) [ 19.326724] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0020), lun(0) [ 19.333344] scsi 1:0:128:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 19.341734] scsi 1:0:128:0: SSP: handle(0x0020), sas_addr(0x5000cca26a2ac3da), phy(4), device_name(0x5000cca26a2ac3db) [ 19.352423] scsi 1:0:128:0: enclosure logical id(0x5000ccab0405db00), slot(13) [ 19.359730] scsi 1:0:128:0: enclosure level(0x0000), connector name( C1 ) [ 19.366621] scsi 1:0:128:0: serial_number( 2TGSJ30D) [ 19.372194] scsi 1:0:128:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 19.395241] mpt3sas_cm0: detecting: handle(0x0021), sas_address(0x5000cca25254102a), phy(5) [ 19.403588] mpt3sas_cm0: REPORT_LUNS: handle(0x0021), retries(0) [ 19.409719] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0021), lun(0) [ 19.416340] scsi 1:0:129:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 19.424743] scsi 1:0:129:0: SSP: handle(0x0021), sas_addr(0x5000cca25254102a), phy(5), device_name(0x5000cca25254102b) [ 19.435425] scsi 1:0:129:0: enclosure logical id(0x5000ccab0405db00), slot(14) [ 19.442731] scsi 1:0:129:0: enclosure level(0x0000), connector name( C1 ) [ 19.449623] scsi 1:0:129:0: serial_number( 7SHH75RG) [ 19.455197] scsi 1:0:129:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 19.475280] mpt3sas_cm0: detecting: handle(0x0022), sas_address(0x5000cca25254534a), phy(6) [ 19.483627] mpt3sas_cm0: REPORT_LUNS: handle(0x0022), retries(0) [ 19.489787] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0022), lun(0) [ 19.496536] scsi 1:0:130:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 19.504934] scsi 1:0:130:0: SSP: handle(0x0022), sas_addr(0x5000cca25254534a), phy(6), device_name(0x5000cca25254534b) [ 19.515621] scsi 1:0:130:0: enclosure logical id(0x5000ccab0405db00), slot(15) [ 19.522928] scsi 1:0:130:0: enclosure level(0x0000), connector name( C1 ) [ 19.529817] scsi 1:0:130:0: serial_number( 7SHHBN9G) [ 19.535394] scsi 1:0:130:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 19.555247] mpt3sas_cm0: detecting: handle(0x0023), sas_address(0x5000cca2525430c6), phy(7) [ 19.563598] mpt3sas_cm0: REPORT_LUNS: handle(0x0023), retries(0) [ 19.569746] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0023), lun(0) [ 19.595611] scsi 1:0:131:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 19.605795] scsi 1:0:131:0: SSP: handle(0x0023), sas_addr(0x5000cca2525430c6), phy(7), device_name(0x5000cca2525430c7) [ 19.616477] scsi 1:0:131:0: enclosure logical id(0x5000ccab0405db00), slot(16) [ 19.623784] scsi 1:0:131:0: enclosure level(0x0000), connector name( C1 ) [ 19.630661] scsi 1:0:131:0: serial_number( 7SHH9B1G) [ 19.636232] scsi 1:0:131:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 19.683250] mpt3sas_cm0: detecting: handle(0x0024), sas_address(0x5000cca25254385e), phy(8) [ 19.691599] mpt3sas_cm0: REPORT_LUNS: handle(0x0024), retries(0) [ 19.697761] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0024), lun(0) [ 19.705623] scsi 1:0:132:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 19.716476] scsi 1:0:132:0: SSP: handle(0x0024), 
sas_addr(0x5000cca25254385e), phy(8), device_name(0x5000cca25254385f) [ 19.727164] scsi 1:0:132:0: enclosure logical id(0x5000ccab0405db00), slot(17) [ 19.734468] scsi 1:0:132:0: enclosure level(0x0000), connector name( C1 ) [ 19.741362] scsi 1:0:132:0: serial_number( 7SHH9VRG) [ 19.746935] scsi 1:0:132:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 19.794295] mpt3sas_cm0: detecting: handle(0x0025), sas_address(0x5000cca25253f30e), phy(9) [ 19.802648] mpt3sas_cm0: REPORT_LUNS: handle(0x0025), retries(0) [ 19.808792] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0025), lun(0) [ 19.815553] scsi 1:0:133:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 19.823953] scsi 1:0:133:0: SSP: handle(0x0025), sas_addr(0x5000cca25253f30e), phy(9), device_name(0x5000cca25253f30f) [ 19.834640] scsi 1:0:133:0: enclosure logical id(0x5000ccab0405db00), slot(18) [ 19.841948] scsi 1:0:133:0: enclosure level(0x0000), connector name( C1 ) [ 19.848839] scsi 1:0:133:0: serial_number( 7SHH57MG) [ 19.854412] scsi 1:0:133:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 19.874253] mpt3sas_cm0: detecting: handle(0x0026), sas_address(0x5000cca252545f66), phy(10) [ 19.882694] mpt3sas_cm0: REPORT_LUNS: handle(0x0026), retries(0) [ 19.888831] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0026), lun(0) [ 19.895602] scsi 1:0:134:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 19.904210] scsi 1:0:134:0: SSP: handle(0x0026), sas_addr(0x5000cca252545f66), phy(10), device_name(0x5000cca252545f67) [ 19.914981] scsi 1:0:134:0: enclosure logical id(0x5000ccab0405db00), slot(19) [ 19.922289] scsi 1:0:134:0: enclosure level(0x0000), connector name( C1 ) [ 19.929183] scsi 1:0:134:0: serial_number( 7SHHDG9G) [ 19.934755] scsi 1:0:134:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 19.963256] mpt3sas_cm0: detecting: handle(0x0027), sas_address(0x5000cca266daa4e6), phy(11) [ 19.971695] mpt3sas_cm0: REPORT_LUNS: handle(0x0027), retries(0) [ 19.977839] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0027), lun(0) [ 19.984445] scsi 1:0:135:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 19.992836] scsi 1:0:135:0: SSP: handle(0x0027), sas_addr(0x5000cca266daa4e6), phy(11), device_name(0x5000cca266daa4e7) [ 20.003610] scsi 1:0:135:0: enclosure logical id(0x5000ccab0405db00), slot(20) [ 20.010917] scsi 1:0:135:0: enclosure level(0x0000), connector name( C1 ) [ 20.017807] scsi 1:0:135:0: serial_number( 7JKW7MYK) [ 20.023381] scsi 1:0:135:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 20.043254] mpt3sas_cm0: detecting: handle(0x0028), sas_address(0x5000cca26a25167e), phy(12) [ 20.051689] mpt3sas_cm0: REPORT_LUNS: handle(0x0028), retries(0) [ 20.057850] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0028), lun(0) [ 20.064457] scsi 1:0:136:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 20.072850] scsi 1:0:136:0: SSP: handle(0x0028), sas_addr(0x5000cca26a25167e), phy(12), device_name(0x5000cca26a25167f) [ 20.083621] scsi 1:0:136:0: enclosure logical id(0x5000ccab0405db00), slot(21) [ 20.090928] scsi 1:0:136:0: enclosure level(0x0000), connector name( C1 ) [ 20.097820] scsi 1:0:136:0: serial_number( 2TGND9JD) [ 20.103394] scsi 1:0:136:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 20.123257] mpt3sas_cm0: detecting: handle(0x0029), sas_address(0x5000cca25253edaa), phy(13) [ 20.131694] mpt3sas_cm0: REPORT_LUNS: handle(0x0029), retries(0) [ 20.137823] mpt3sas_cm0: TEST_UNIT_READY: 
handle(0x0029), lun(0) [ 20.144426] scsi 1:0:137:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 20.152818] scsi 1:0:137:0: SSP: handle(0x0029), sas_addr(0x5000cca25253edaa), phy(13), device_name(0x5000cca25253edab) [ 20.163593] scsi 1:0:137:0: enclosure logical id(0x5000ccab0405db00), slot(22) [ 20.170899] scsi 1:0:137:0: enclosure level(0x0000), connector name( C1 ) [ 20.177788] scsi 1:0:137:0: serial_number( 7SHH4WHG) [ 20.183365] scsi 1:0:137:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 20.204257] mpt3sas_cm0: detecting: handle(0x002a), sas_address(0x5000cca266d491a2), phy(14) [ 20.212694] mpt3sas_cm0: REPORT_LUNS: handle(0x002a), retries(0) [ 20.218822] mpt3sas_cm0: TEST_UNIT_READY: handle(0x002a), lun(0) [ 20.225451] scsi 1:0:138:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 20.233835] scsi 1:0:138:0: SSP: handle(0x002a), sas_addr(0x5000cca266d491a2), phy(14), device_name(0x5000cca266d491a3) [ 20.244609] scsi 1:0:138:0: enclosure logical id(0x5000ccab0405db00), slot(23) [ 20.251916] scsi 1:0:138:0: enclosure level(0x0000), connector name( C1 ) [ 20.258806] scsi 1:0:138:0: serial_number( 7JKSX22K) [ 20.264383] scsi 1:0:138:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 20.284260] mpt3sas_cm0: detecting: handle(0x002b), sas_address(0x5000cca26b9a709a), phy(15) [ 20.292697] mpt3sas_cm0: REPORT_LUNS: handle(0x002b), retries(0) [ 20.298833] mpt3sas_cm0: TEST_UNIT_READY: handle(0x002b), lun(0) [ 20.468617] scsi 1:0:139:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 20.477001] scsi 1:0:139:0: SSP: handle(0x002b), sas_addr(0x5000cca26b9a709a), phy(15), device_name(0x5000cca26b9a709b) [ 20.487778] scsi 1:0:139:0: enclosure logical id(0x5000ccab0405db00), slot(24) [ 20.495082] scsi 1:0:139:0: enclosure level(0x0000), connector name( C1 ) [ 20.501974] scsi 1:0:139:0: serial_number( 1SJRY0YZ) [ 20.507547] scsi 1:0:139:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 20.533272] mpt3sas_cm0: detecting: handle(0x002c), sas_address(0x5000cca25253f832), phy(16) [ 20.541707] mpt3sas_cm0: REPORT_LUNS: handle(0x002c), retries(0) [ 20.548005] mpt3sas_cm0: TEST_UNIT_READY: handle(0x002c), lun(0) [ 20.728121] scsi 1:0:140:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 20.736500] scsi 1:0:140:0: SSP: handle(0x002c), sas_addr(0x5000cca25253f832), phy(16), device_name(0x5000cca25253f833) [ 20.747272] scsi 1:0:140:0: enclosure logical id(0x5000ccab0405db00), slot(25) [ 20.754578] scsi 1:0:140:0: enclosure level(0x0000), connector name( C1 ) [ 20.761455] scsi 1:0:140:0: serial_number( 7SHH5L7G) [ 20.767025] scsi 1:0:140:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 20.787273] mpt3sas_cm0: detecting: handle(0x002d), sas_address(0x5000cca26a2ab23e), phy(17) [ 20.795705] mpt3sas_cm0: REPORT_LUNS: handle(0x002d), retries(0) [ 20.801867] mpt3sas_cm0: TEST_UNIT_READY: handle(0x002d), lun(0) [ 20.808471] scsi 1:0:141:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 20.816856] scsi 1:0:141:0: SSP: handle(0x002d), sas_addr(0x5000cca26a2ab23e), phy(17), device_name(0x5000cca26a2ab23f) [ 20.827632] scsi 1:0:141:0: enclosure logical id(0x5000ccab0405db00), slot(26) [ 20.834937] scsi 1:0:141:0: enclosure level(0x0000), connector name( C1 ) [ 20.841828] scsi 1:0:141:0: serial_number( 2TGSGXND) [ 20.847404] scsi 1:0:141:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 20.867275] mpt3sas_cm0: detecting: handle(0x002e), 
sas_address(0x5000cca26b9b9696), phy(18) [ 20.875710] mpt3sas_cm0: REPORT_LUNS: handle(0x002e), retries(0) [ 20.881974] mpt3sas_cm0: TEST_UNIT_READY: handle(0x002e), lun(0) [ 20.889131] scsi 1:0:142:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 20.897525] scsi 1:0:142:0: SSP: handle(0x002e), sas_addr(0x5000cca26b9b9696), phy(18), device_name(0x5000cca26b9b9697) [ 20.908294] scsi 1:0:142:0: enclosure logical id(0x5000ccab0405db00), slot(27) [ 20.915601] scsi 1:0:142:0: enclosure level(0x0000), connector name( C1 ) [ 20.922492] scsi 1:0:142:0: serial_number( 1SJSKLWZ) [ 20.928065] scsi 1:0:142:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 20.948277] mpt3sas_cm0: detecting: handle(0x002f), sas_address(0x5000cca252559472), phy(19) [ 20.956713] mpt3sas_cm0: REPORT_LUNS: handle(0x002f), retries(0) [ 20.962861] mpt3sas_cm0: TEST_UNIT_READY: handle(0x002f), lun(0) [ 20.975815] scsi 1:0:143:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 20.984197] scsi 1:0:143:0: SSP: handle(0x002f), sas_addr(0x5000cca252559472), phy(19), device_name(0x5000cca252559473) [ 20.994972] scsi 1:0:143:0: enclosure logical id(0x5000ccab0405db00), slot(28) [ 21.002277] scsi 1:0:143:0: enclosure level(0x0000), connector name( C1 ) [ 21.009156] scsi 1:0:143:0: serial_number( 7SHJ21AG) [ 21.014735] scsi 1:0:143:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 21.046290] mpt3sas_cm0: detecting: handle(0x0030), sas_address(0x5000cca25253f94e), phy(20) [ 21.054727] mpt3sas_cm0: REPORT_LUNS: handle(0x0030), retries(0) [ 21.060874] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0030), lun(0) [ 21.067593] scsi 1:0:144:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 21.077404] scsi 1:0:144:0: SSP: handle(0x0030), sas_addr(0x5000cca25253f94e), phy(20), device_name(0x5000cca25253f94f) [ 21.088177] scsi 1:0:144:0: enclosure logical id(0x5000ccab0405db00), slot(29) [ 21.095481] scsi 1:0:144:0: enclosure level(0x0000), connector name( C1 ) [ 21.102376] scsi 1:0:144:0: serial_number( 7SHH5NJG) [ 21.107947] scsi 1:0:144:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 21.138284] mpt3sas_cm0: detecting: handle(0x0031), sas_address(0x5000cca25253e69a), phy(21) [ 21.146726] mpt3sas_cm0: REPORT_LUNS: handle(0x0031), retries(0) [ 21.152865] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0031), lun(0) [ 21.162001] scsi 1:0:145:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 21.170392] scsi 1:0:145:0: SSP: handle(0x0031), sas_addr(0x5000cca25253e69a), phy(21), device_name(0x5000cca25253e69b) [ 21.181164] scsi 1:0:145:0: enclosure logical id(0x5000ccab0405db00), slot(30) [ 21.188468] scsi 1:0:145:0: enclosure level(0x0000), connector name( C1 ) [ 21.195359] scsi 1:0:145:0: serial_number( 7SHH4DXG) [ 21.200935] scsi 1:0:145:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 21.231883] mpt3sas_cm0: detecting: handle(0x0032), sas_address(0x5000cca252543cc2), phy(22) [ 21.240318] mpt3sas_cm0: REPORT_LUNS: handle(0x0032), retries(0) [ 21.246449] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0032), lun(0) [ 21.253048] scsi 1:0:146:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 21.261436] scsi 1:0:146:0: SSP: handle(0x0032), sas_addr(0x5000cca252543cc2), phy(22), device_name(0x5000cca252543cc3) [ 21.272208] scsi 1:0:146:0: enclosure logical id(0x5000ccab0405db00), slot(31) [ 21.279514] scsi 1:0:146:0: enclosure level(0x0000), connector name( C1 ) [ 21.286404] scsi 1:0:146:0: serial_number( 7SHHA4TG) [ 
21.291980] scsi 1:0:146:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 21.322287] mpt3sas_cm0: detecting: handle(0x0033), sas_address(0x5000cca26a24fcde), phy(23) [ 21.330737] mpt3sas_cm0: REPORT_LUNS: handle(0x0033), retries(0) [ 21.336859] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0033), lun(0) [ 21.343466] scsi 1:0:147:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 21.351852] scsi 1:0:147:0: SSP: handle(0x0033), sas_addr(0x5000cca26a24fcde), phy(23), device_name(0x5000cca26a24fcdf) [ 21.362622] scsi 1:0:147:0: enclosure logical id(0x5000ccab0405db00), slot(32) [ 21.369927] scsi 1:0:147:0: enclosure level(0x0000), connector name( C1 ) [ 21.376819] scsi 1:0:147:0: serial_number( 2TGNALMD) [ 21.382393] scsi 1:0:147:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 21.402285] mpt3sas_cm0: detecting: handle(0x0034), sas_address(0x5000cca252543bce), phy(24) [ 21.410725] mpt3sas_cm0: REPORT_LUNS: handle(0x0034), retries(0) [ 21.416860] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0034), lun(0) [ 21.423461] scsi 1:0:148:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 21.431850] scsi 1:0:148:0: SSP: handle(0x0034), sas_addr(0x5000cca252543bce), phy(24), device_name(0x5000cca252543bcf) [ 21.442625] scsi 1:0:148:0: enclosure logical id(0x5000ccab0405db00), slot(33) [ 21.449932] scsi 1:0:148:0: enclosure level(0x0000), connector name( C1 ) [ 21.456822] scsi 1:0:148:0: serial_number( 7SHHA2UG) [ 21.462396] scsi 1:0:148:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 21.482289] mpt3sas_cm0: detecting: handle(0x0035), sas_address(0x5000cca252551266), phy(25) [ 21.490723] mpt3sas_cm0: REPORT_LUNS: handle(0x0035), retries(0) [ 21.496861] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0035), lun(0) [ 21.505027] scsi 1:0:149:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 21.513524] scsi 1:0:149:0: SSP: handle(0x0035), sas_addr(0x5000cca252551266), phy(25), device_name(0x5000cca252551267) [ 21.524294] scsi 1:0:149:0: enclosure logical id(0x5000ccab0405db00), slot(34) [ 21.531599] scsi 1:0:149:0: enclosure level(0x0000), connector name( C1 ) [ 21.538491] scsi 1:0:149:0: serial_number( 7SHHTBVG) [ 21.544064] scsi 1:0:149:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 21.572298] mpt3sas_cm0: detecting: handle(0x0036), sas_address(0x5000cca252555fca), phy(26) [ 21.580737] mpt3sas_cm0: REPORT_LUNS: handle(0x0036), retries(0) [ 21.586878] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0036), lun(0) [ 21.620949] scsi 1:0:150:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 21.629344] scsi 1:0:150:0: SSP: handle(0x0036), sas_addr(0x5000cca252555fca), phy(26), device_name(0x5000cca252555fcb) [ 21.640116] scsi 1:0:150:0: enclosure logical id(0x5000ccab0405db00), slot(35) [ 21.647423] scsi 1:0:150:0: enclosure level(0x0000), connector name( C1 ) [ 21.654315] scsi 1:0:150:0: serial_number( 7SHHYJMG) [ 21.659889] scsi 1:0:150:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 21.698909] mpt3sas_cm0: detecting: handle(0x0037), sas_address(0x5000cca252559f7e), phy(27) [ 21.707342] mpt3sas_cm0: REPORT_LUNS: handle(0x0037), retries(0) [ 21.713482] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0037), lun(0) [ 21.720293] scsi 1:0:151:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 21.733357] scsi 1:0:151:0: SSP: handle(0x0037), sas_addr(0x5000cca252559f7e), phy(27), device_name(0x5000cca252559f7f) [ 21.744130] scsi 1:0:151:0: enclosure logical 
id(0x5000ccab0405db00), slot(36) [ 21.751434] scsi 1:0:151:0: enclosure level(0x0000), connector name( C1 ) [ 21.758326] scsi 1:0:151:0: serial_number( 7SHJ2T4G) [ 21.763899] scsi 1:0:151:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 21.786332] mpt3sas_cm0: detecting: handle(0x0038), sas_address(0x5000cca26c244bce), phy(28) [ 21.794772] mpt3sas_cm0: REPORT_LUNS: handle(0x0038), retries(0) [ 21.800904] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0038), lun(0) [ 21.807711] scsi 1:0:152:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 21.816098] scsi 1:0:152:0: SSP: handle(0x0038), sas_addr(0x5000cca26c244bce), phy(28), device_name(0x5000cca26c244bcf) [ 21.826870] scsi 1:0:152:0: enclosure logical id(0x5000ccab0405db00), slot(37) [ 21.834177] scsi 1:0:152:0: enclosure level(0x0000), connector name( C1 ) [ 21.841068] scsi 1:0:152:0: serial_number( 1DGMYU2Z) [ 21.846644] scsi 1:0:152:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 21.867296] mpt3sas_cm0: detecting: handle(0x0039), sas_address(0x5000cca26a2aa10e), phy(29) [ 21.875731] mpt3sas_cm0: REPORT_LUNS: handle(0x0039), retries(0) [ 21.881865] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0039), lun(0) [ 21.888654] scsi 1:0:153:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 21.897047] scsi 1:0:153:0: SSP: handle(0x0039), sas_addr(0x5000cca26a2aa10e), phy(29), device_name(0x5000cca26a2aa10f) [ 21.907819] scsi 1:0:153:0: enclosure logical id(0x5000ccab0405db00), slot(38) [ 21.915126] scsi 1:0:153:0: enclosure level(0x0000), connector name( C1 ) [ 21.922020] scsi 1:0:153:0: serial_number( 2TGSET5D) [ 21.927591] scsi 1:0:153:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 21.950304] mpt3sas_cm0: detecting: handle(0x003a), sas_address(0x5000cca25254e236), phy(30) [ 21.958743] mpt3sas_cm0: REPORT_LUNS: handle(0x003a), retries(0) [ 21.964884] mpt3sas_cm0: TEST_UNIT_READY: handle(0x003a), lun(0) [ 21.982927] scsi 1:0:154:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 21.991500] scsi 1:0:154:0: SSP: handle(0x003a), sas_addr(0x5000cca25254e236), phy(30), device_name(0x5000cca25254e237) [ 22.002273] scsi 1:0:154:0: enclosure logical id(0x5000ccab0405db00), slot(39) [ 22.009577] scsi 1:0:154:0: enclosure level(0x0000), connector name( C1 ) [ 22.016471] scsi 1:0:154:0: serial_number( 7SHHP5BG) [ 22.022043] scsi 1:0:154:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 22.069313] mpt3sas_cm0: detecting: handle(0x003b), sas_address(0x5000cca25254df96), phy(31) [ 22.077749] mpt3sas_cm0: REPORT_LUNS: handle(0x003b), retries(0) [ 22.084709] mpt3sas_cm0: TEST_UNIT_READY: handle(0x003b), lun(0) [ 22.109363] scsi 1:0:155:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 22.117809] scsi 1:0:155:0: SSP: handle(0x003b), sas_addr(0x5000cca25254df96), phy(31), device_name(0x5000cca25254df97) [ 22.128584] scsi 1:0:155:0: enclosure logical id(0x5000ccab0405db00), slot(40) [ 22.135889] scsi 1:0:155:0: enclosure level(0x0000), connector name( C1 ) [ 22.142780] scsi 1:0:155:0: serial_number( 7SHHNZYG) [ 22.148353] scsi 1:0:155:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 22.183319] mpt3sas_cm0: detecting: handle(0x003c), sas_address(0x5000cca25254e9d2), phy(32) [ 22.191760] mpt3sas_cm0: REPORT_LUNS: handle(0x003c), retries(0) [ 22.197889] mpt3sas_cm0: TEST_UNIT_READY: handle(0x003c), lun(0) [ 22.239975] scsi 1:0:156:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 22.248362] scsi 
1:0:156:0: SSP: handle(0x003c), sas_addr(0x5000cca25254e9d2), phy(32), device_name(0x5000cca25254e9d3) [ 22.259132] scsi 1:0:156:0: enclosure logical id(0x5000ccab0405db00), slot(41) [ 22.266440] scsi 1:0:156:0: enclosure level(0x0000), connector name( C1 ) [ 22.273330] scsi 1:0:156:0: serial_number( 7SHHPP2G) [ 22.278903] scsi 1:0:156:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 22.400322] mpt3sas_cm0: detecting: handle(0x003d), sas_address(0x5000cca26a24008a), phy(33) [ 22.408763] mpt3sas_cm0: REPORT_LUNS: handle(0x003d), retries(0) [ 22.414903] mpt3sas_cm0: TEST_UNIT_READY: handle(0x003d), lun(0) [ 22.421525] scsi 1:0:157:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 22.429905] scsi 1:0:157:0: SSP: handle(0x003d), sas_addr(0x5000cca26a24008a), phy(33), device_name(0x5000cca26a24008b) [ 22.440679] scsi 1:0:157:0: enclosure logical id(0x5000ccab0405db00), slot(42) [ 22.447983] scsi 1:0:157:0: enclosure level(0x0000), connector name( C1 ) [ 22.454877] scsi 1:0:157:0: serial_number( 2TGMTTPD) [ 22.460450] scsi 1:0:157:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 22.483317] mpt3sas_cm0: detecting: handle(0x003e), sas_address(0x5000cca26a24b9ea), phy(34) [ 22.491758] mpt3sas_cm0: REPORT_LUNS: handle(0x003e), retries(0) [ 22.497891] mpt3sas_cm0: TEST_UNIT_READY: handle(0x003e), lun(0) [ 22.506984] scsi 1:0:158:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 22.515363] scsi 1:0:158:0: SSP: handle(0x003e), sas_addr(0x5000cca26a24b9ea), phy(34), device_name(0x5000cca26a24b9eb) [ 22.526134] scsi 1:0:158:0: enclosure logical id(0x5000ccab0405db00), slot(43) [ 22.533438] scsi 1:0:158:0: enclosure level(0x0000), connector name( C1 ) [ 22.540331] scsi 1:0:158:0: serial_number( 2TGN64DD) [ 22.545905] scsi 1:0:158:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 22.572373] mpt3sas_cm0: detecting: handle(0x003f), sas_address(0x5000cca26a25aed6), phy(35) [ 22.580812] mpt3sas_cm0: REPORT_LUNS: handle(0x003f), retries(0) [ 22.586962] mpt3sas_cm0: TEST_UNIT_READY: handle(0x003f), lun(0) [ 22.593574] scsi 1:0:159:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 22.601988] scsi 1:0:159:0: SSP: handle(0x003f), sas_addr(0x5000cca26a25aed6), phy(35), device_name(0x5000cca26a25aed7) [ 22.612759] scsi 1:0:159:0: enclosure logical id(0x5000ccab0405db00), slot(44) [ 22.620066] scsi 1:0:159:0: enclosure level(0x0000), connector name( C1 ) [ 22.626956] scsi 1:0:159:0: serial_number( 2TGNRG1D) [ 22.632530] scsi 1:0:159:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 22.652320] mpt3sas_cm0: detecting: handle(0x0040), sas_address(0x5000cca266d32b6a), phy(36) [ 22.660761] mpt3sas_cm0: REPORT_LUNS: handle(0x0040), retries(0) [ 22.666902] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0040), lun(0) [ 22.674761] scsi 1:0:160:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 22.683145] scsi 1:0:160:0: SSP: handle(0x0040), sas_addr(0x5000cca266d32b6a), phy(36), device_name(0x5000cca266d32b6b) [ 22.693916] scsi 1:0:160:0: enclosure logical id(0x5000ccab0405db00), slot(45) [ 22.701222] scsi 1:0:160:0: enclosure level(0x0000), connector name( C1 ) [ 22.708113] scsi 1:0:160:0: serial_number( 7JKS46JK) [ 22.713687] scsi 1:0:160:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 22.736429] mpt3sas_cm0: detecting: handle(0x0041), sas_address(0x5000cca26b9bf886), phy(37) [ 22.744866] mpt3sas_cm0: REPORT_LUNS: handle(0x0041), retries(0) [ 22.751007] 
mpt3sas_cm0: TEST_UNIT_READY: handle(0x0041), lun(0) [ 22.757858] scsi 1:0:161:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 22.766243] scsi 1:0:161:0: SSP: handle(0x0041), sas_addr(0x5000cca26b9bf886), phy(37), device_name(0x5000cca26b9bf887) [ 22.777013] scsi 1:0:161:0: enclosure logical id(0x5000ccab0405db00), slot(46) [ 22.784318] scsi 1:0:161:0: enclosure level(0x0000), connector name( C1 ) [ 22.791211] scsi 1:0:161:0: serial_number( 1SJST42Z) [ 22.796785] scsi 1:0:161:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 22.819333] mpt3sas_cm0: detecting: handle(0x0042), sas_address(0x5000cca26b9b24ca), phy(38) [ 22.827773] mpt3sas_cm0: REPORT_LUNS: handle(0x0042), retries(0) [ 22.833946] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0042), lun(0) [ 22.840765] scsi 1:0:162:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 22.849150] scsi 1:0:162:0: SSP: handle(0x0042), sas_addr(0x5000cca26b9b24ca), phy(38), device_name(0x5000cca26b9b24cb) [ 22.859921] scsi 1:0:162:0: enclosure logical id(0x5000ccab0405db00), slot(47) [ 22.867225] scsi 1:0:162:0: enclosure level(0x0000), connector name( C1 ) [ 22.874119] scsi 1:0:162:0: serial_number( 1SJSA0YZ) [ 22.879693] scsi 1:0:162:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 22.902325] mpt3sas_cm0: detecting: handle(0x0043), sas_address(0x5000cca26a21d742), phy(39) [ 22.910768] mpt3sas_cm0: REPORT_LUNS: handle(0x0043), retries(0) [ 22.916934] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0043), lun(0) [ 22.923724] scsi 1:0:163:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 22.932114] scsi 1:0:163:0: SSP: handle(0x0043), sas_addr(0x5000cca26a21d742), phy(39), device_name(0x5000cca26a21d743) [ 22.942890] scsi 1:0:163:0: enclosure logical id(0x5000ccab0405db00), slot(48) [ 22.950194] scsi 1:0:163:0: enclosure level(0x0000), connector name( C1 ) [ 22.957087] scsi 1:0:163:0: serial_number( 2TGLLYED) [ 22.962661] scsi 1:0:163:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 22.985328] mpt3sas_cm0: detecting: handle(0x0044), sas_address(0x5000cca26a27af5e), phy(40) [ 22.993771] mpt3sas_cm0: REPORT_LUNS: handle(0x0044), retries(0) [ 22.999938] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0044), lun(0) [ 23.006708] scsi 1:0:164:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 23.015105] scsi 1:0:164:0: SSP: handle(0x0044), sas_addr(0x5000cca26a27af5e), phy(40), device_name(0x5000cca26a27af5f) [ 23.025875] scsi 1:0:164:0: enclosure logical id(0x5000ccab0405db00), slot(49) [ 23.033179] scsi 1:0:164:0: enclosure level(0x0000), connector name( C1 ) [ 23.040072] scsi 1:0:164:0: serial_number( 2TGPUL5D) [ 23.045645] scsi 1:0:164:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 23.068333] mpt3sas_cm0: detecting: handle(0x0045), sas_address(0x5000cca2525552e6), phy(41) [ 23.076773] mpt3sas_cm0: REPORT_LUNS: handle(0x0045), retries(0) [ 23.082912] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0045), lun(0) [ 23.142655] scsi 1:0:165:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 23.151033] scsi 1:0:165:0: SSP: handle(0x0045), sas_addr(0x5000cca2525552e6), phy(41), device_name(0x5000cca2525552e7) [ 23.161806] scsi 1:0:165:0: enclosure logical id(0x5000ccab0405db00), slot(50) [ 23.169111] scsi 1:0:165:0: enclosure level(0x0000), connector name( C1 ) [ 23.176004] scsi 1:0:165:0: serial_number( 7SHHXP0G) [ 23.181578] scsi 1:0:165:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 23.227333] 
mpt3sas_cm0: detecting: handle(0x0046), sas_address(0x5000cca26a26dff2), phy(42) [ 23.235774] mpt3sas_cm0: REPORT_LUNS: handle(0x0046), retries(0) [ 23.241931] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0046), lun(0) [ 23.248548] scsi 1:0:166:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 23.256933] scsi 1:0:166:0: SSP: handle(0x0046), sas_addr(0x5000cca26a26dff2), phy(42), device_name(0x5000cca26a26dff3) [ 23.267707] scsi 1:0:166:0: enclosure logical id(0x5000ccab0405db00), slot(51) [ 23.275011] scsi 1:0:166:0: enclosure level(0x0000), connector name( C1 ) [ 23.281905] scsi 1:0:166:0: serial_number( 2TGPBSYD) [ 23.287479] scsi 1:0:166:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 23.307329] mpt3sas_cm0: detecting: handle(0x0047), sas_address(0x5000cca26b9c5d52), phy(43) [ 23.315772] mpt3sas_cm0: REPORT_LUNS: handle(0x0047), retries(0) [ 23.321910] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0047), lun(0) [ 23.328636] scsi 1:0:167:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 23.337013] scsi 1:0:167:0: SSP: handle(0x0047), sas_addr(0x5000cca26b9c5d52), phy(43), device_name(0x5000cca26b9c5d53) [ 23.347789] scsi 1:0:167:0: enclosure logical id(0x5000ccab0405db00), slot(52) [ 23.355093] scsi 1:0:167:0: enclosure level(0x0000), connector name( C1 ) [ 23.361987] scsi 1:0:167:0: serial_number( 1SJSZV5Z) [ 23.367561] scsi 1:0:167:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 23.390963] mpt3sas_cm0: detecting: handle(0x0048), sas_address(0x5000cca26b9602c6), phy(44) [ 23.399405] mpt3sas_cm0: REPORT_LUNS: handle(0x0048), retries(0) [ 23.405550] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0048), lun(0) [ 23.421839] scsi 1:0:168:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 23.430254] scsi 1:0:168:0: SSP: handle(0x0048), sas_addr(0x5000cca26b9602c6), phy(44), device_name(0x5000cca26b9602c7) [ 23.441026] scsi 1:0:168:0: enclosure logical id(0x5000ccab0405db00), slot(53) [ 23.448333] scsi 1:0:168:0: enclosure level(0x0000), connector name( C1 ) [ 23.455227] scsi 1:0:168:0: serial_number( 1SJNHJ4Z) [ 23.460798] scsi 1:0:168:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 23.483345] mpt3sas_cm0: detecting: handle(0x0049), sas_address(0x5000cca252544a02), phy(45) [ 23.491788] mpt3sas_cm0: REPORT_LUNS: handle(0x0049), retries(0) [ 23.497955] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0049), lun(0) [ 23.521496] scsi 1:0:169:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 23.529884] scsi 1:0:169:0: SSP: handle(0x0049), sas_addr(0x5000cca252544a02), phy(45), device_name(0x5000cca252544a03) [ 23.540660] scsi 1:0:169:0: enclosure logical id(0x5000ccab0405db00), slot(54) [ 23.547968] scsi 1:0:169:0: enclosure level(0x0000), connector name( C1 ) [ 23.554862] scsi 1:0:169:0: serial_number( 7SHHB14G) [ 23.560434] scsi 1:0:169:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 23.580344] mpt3sas_cm0: detecting: handle(0x004a), sas_address(0x5000cca252559f9e), phy(46) [ 23.588787] mpt3sas_cm0: REPORT_LUNS: handle(0x004a), retries(0) [ 23.594926] mpt3sas_cm0: TEST_UNIT_READY: handle(0x004a), lun(0) [ 23.609780] scsi 1:0:170:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 23.618175] scsi 1:0:170:0: SSP: handle(0x004a), sas_addr(0x5000cca252559f9e), phy(46), device_name(0x5000cca252559f9f) [ 23.628950] scsi 1:0:170:0: enclosure logical id(0x5000ccab0405db00), slot(55) [ 23.636257] scsi 1:0:170:0: enclosure level(0x0000), connector name( C1 ) [ 23.643151] scsi 
1:0:170:0: serial_number( 7SHJ2TDG) [ 23.648723] scsi 1:0:170:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 23.678375] mpt3sas_cm0: detecting: handle(0x004b), sas_address(0x5000cca25255571e), phy(47) [ 23.686816] mpt3sas_cm0: REPORT_LUNS: handle(0x004b), retries(0) [ 23.692956] mpt3sas_cm0: TEST_UNIT_READY: handle(0x004b), lun(0) [ 23.707569] scsi 1:0:171:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 23.715945] scsi 1:0:171:0: SSP: handle(0x004b), sas_addr(0x5000cca25255571e), phy(47), device_name(0x5000cca25255571f) [ 23.726723] scsi 1:0:171:0: enclosure logical id(0x5000ccab0405db00), slot(56) [ 23.734028] scsi 1:0:171:0: enclosure level(0x0000), connector name( C1 ) [ 23.740921] scsi 1:0:171:0: serial_number( 7SHHXYRG) [ 23.746494] scsi 1:0:171:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 23.766347] mpt3sas_cm0: detecting: handle(0x004c), sas_address(0x5000cca26b9bf57e), phy(48) [ 23.774786] mpt3sas_cm0: REPORT_LUNS: handle(0x004c), retries(0) [ 23.780923] mpt3sas_cm0: TEST_UNIT_READY: handle(0x004c), lun(0) [ 23.805687] scsi 1:0:172:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 23.814070] scsi 1:0:172:0: SSP: handle(0x004c), sas_addr(0x5000cca26b9bf57e), phy(48), device_name(0x5000cca26b9bf57f) [ 23.824840] scsi 1:0:172:0: enclosure logical id(0x5000ccab0405db00), slot(57) [ 23.832147] scsi 1:0:172:0: enclosure level(0x0000), connector name( C1 ) [ 23.839039] scsi 1:0:172:0: serial_number( 1SJSSXUZ) [ 23.844613] scsi 1:0:172:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 23.867347] mpt3sas_cm0: detecting: handle(0x004d), sas_address(0x5000cca252555372), phy(49) [ 23.875788] mpt3sas_cm0: REPORT_LUNS: handle(0x004d), retries(0) [ 23.881952] mpt3sas_cm0: TEST_UNIT_READY: handle(0x004d), lun(0) [ 23.888580] scsi 1:0:173:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 23.896952] scsi 1:0:173:0: SSP: handle(0x004d), sas_addr(0x5000cca252555372), phy(49), device_name(0x5000cca252555373) [ 23.907722] scsi 1:0:173:0: enclosure logical id(0x5000ccab0405db00), slot(58) [ 23.915026] scsi 1:0:173:0: enclosure level(0x0000), connector name( C1 ) [ 23.921920] scsi 1:0:173:0: serial_number( 7SHHXR4G) [ 23.927494] scsi 1:0:173:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 23.947349] mpt3sas_cm0: detecting: handle(0x004e), sas_address(0x5000cca25253eefe), phy(50) [ 23.955786] mpt3sas_cm0: REPORT_LUNS: handle(0x004e), retries(0) [ 23.961956] mpt3sas_cm0: TEST_UNIT_READY: handle(0x004e), lun(0) [ 23.968592] scsi 1:0:174:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 23.976965] scsi 1:0:174:0: SSP: handle(0x004e), sas_addr(0x5000cca25253eefe), phy(50), device_name(0x5000cca25253eeff) [ 23.987734] scsi 1:0:174:0: enclosure logical id(0x5000ccab0405db00), slot(59) [ 23.995039] scsi 1:0:174:0: enclosure level(0x0000), connector name( C1 ) [ 24.001933] scsi 1:0:174:0: serial_number( 7SHH4Z7G) [ 24.007506] scsi 1:0:174:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1) [ 24.029845] mpt3sas_cm0: expander_add: handle(0x001a), parent(0x0017), sas_addr(0x5000ccab0405db7b), phys(68) [ 24.050568] mpt3sas_cm0: detecting: handle(0x004f), sas_address(0x5000cca26b9cbb06), phy(42) [ 24.059024] mpt3sas_cm0: REPORT_LUNS: handle(0x004f), retries(0) [ 24.065149] mpt3sas_cm0: TEST_UNIT_READY: handle(0x004f), lun(0) [ 24.071795] scsi 1:0:175:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6 [ 24.080191] scsi 1:0:175:0: 
SSP: handle(0x004f), sas_addr(0x5000cca26b9cbb06), phy(42), device_name(0x5000cca26b9cbb07)
[ 24.090967] scsi 1:0:175:0: enclosure logical id(0x5000ccab0405db00), slot(1)
[ 24.098184] scsi 1:0:175:0: enclosure level(0x0000), connector name( C1 )
[ 24.105078] scsi 1:0:175:0: serial_number( 1SJT62MZ)
[ 24.110649] scsi 1:0:175:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 24.147349] mpt3sas_cm0: detecting: handle(0x0050), sas_address(0x5000cca252544476), phy(43)
[ 24.155805] mpt3sas_cm0: REPORT_LUNS: handle(0x0050), retries(0)
[ 24.161950] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0050), lun(0)
[ 24.168678] scsi 1:0:176:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 24.184932] scsi 1:0:176:0: SSP: handle(0x0050), sas_addr(0x5000cca252544476), phy(43), device_name(0x5000cca252544477)
[ 24.195704] scsi 1:0:176:0: enclosure logical id(0x5000ccab0405db00), slot(3)
[ 24.202924] scsi 1:0:176:0: enclosure level(0x0000), connector name( C1 )
[ 24.209815] scsi 1:0:176:0: serial_number( 7SHHANPG)
[ 24.215389] scsi 1:0:176:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 24.235948] mpt3sas_cm0: detecting: handle(0x0051), sas_address(0x5000cca26a26173e), phy(44)
[ 24.244384] mpt3sas_cm0: REPORT_LUNS: handle(0x0051), retries(0)
[ 24.250543] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0051), lun(0)
[ 24.275589] scsi 1:0:177:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 24.283977] scsi 1:0:177:0: SSP: handle(0x0051), sas_addr(0x5000cca26a26173e), phy(44), device_name(0x5000cca26a26173f)
[ 24.294751] scsi 1:0:177:0: enclosure logical id(0x5000ccab0405db00), slot(4)
[ 24.301968] scsi 1:0:177:0: enclosure level(0x0000), connector name( C1 )
[ 24.308861] scsi 1:0:177:0: serial_number( 2TGNYDLD)
[ 24.314435] scsi 1:0:177:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 24.334358] mpt3sas_cm0: detecting: handle(0x0052), sas_address(0x5000cca252544cb6), phy(45)
[ 24.342796] mpt3sas_cm0: REPORT_LUNS: handle(0x0052), retries(0)
[ 24.348954] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0052), lun(0)
[ 24.355768] scsi 1:0:178:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 24.364145] scsi 1:0:178:0: SSP: handle(0x0052), sas_addr(0x5000cca252544cb6), phy(45), device_name(0x5000cca252544cb7)
[ 24.374919] scsi 1:0:178:0: enclosure logical id(0x5000ccab0405db00), slot(5)
[ 24.382136] scsi 1:0:178:0: enclosure level(0x0000), connector name( C1 )
[ 24.389029] scsi 1:0:178:0: serial_number( 7SHHB6RG)
[ 24.394602] scsi 1:0:178:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 24.446367] mpt3sas_cm0: detecting: handle(0x0053), sas_address(0x5000cca26c238692), phy(46)
[ 24.454805] mpt3sas_cm0: REPORT_LUNS: handle(0x0053), retries(0)
[ 24.460952] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0053), lun(0)
[ 24.706352] scsi 1:0:179:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 24.714726] scsi 1:0:179:0: SSP: handle(0x0053), sas_addr(0x5000cca26c238692), phy(46), device_name(0x5000cca26c238693)
[ 24.725502] scsi 1:0:179:0: enclosure logical id(0x5000ccab0405db00), slot(6)
[ 24.732722] scsi 1:0:179:0: enclosure level(0x0000), connector name( C1 )
[ 24.739615] scsi 1:0:179:0: serial_number( 1DGMJNWZ)
[ 24.745188] scsi 1:0:179:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 24.931367] mpt3sas_cm0: detecting: handle(0x0054), sas_address(0x5000cca26a2ac96a), phy(47)
[ 24.939820] mpt3sas_cm0: REPORT_LUNS: handle(0x0054), retries(0)
[ 24.945945] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0054), lun(0)
[ 24.999733] scsi 1:0:180:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 25.008148] scsi 1:0:180:0: SSP: handle(0x0054), sas_addr(0x5000cca26a2ac96a), phy(47), device_name(0x5000cca26a2ac96b)
[ 25.018921] scsi 1:0:180:0: enclosure logical id(0x5000ccab0405db00), slot(7)
[ 25.026138] scsi 1:0:180:0: enclosure level(0x0000), connector name( C1 )
[ 25.033030] scsi 1:0:180:0: serial_number( 2TGSJGHD)
[ 25.038603] scsi 1:0:180:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 25.155379] mpt3sas_cm0: detecting: handle(0x0055), sas_address(0x5000cca25253e61a), phy(48)
[ 25.163819] mpt3sas_cm0: REPORT_LUNS: handle(0x0055), retries(0)
[ 25.169993] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0055), lun(0)
[ 25.176816] scsi 1:0:181:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 25.185205] scsi 1:0:181:0: SSP: handle(0x0055), sas_addr(0x5000cca25253e61a), phy(48), device_name(0x5000cca25253e61b)
[ 25.195976] scsi 1:0:181:0: enclosure logical id(0x5000ccab0405db00), slot(8)
[ 25.203195] scsi 1:0:181:0: enclosure level(0x0000), connector name( C1 )
[ 25.210094] scsi 1:0:181:0: serial_number( 7SHH4BWG)
[ 25.215670] scsi 1:0:181:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 25.238382] mpt3sas_cm0: detecting: handle(0x0056), sas_address(0x5000cca252542cfe), phy(49)
[ 25.246821] mpt3sas_cm0: REPORT_LUNS: handle(0x0056), retries(0)
[ 25.252993] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0056), lun(0)
[ 25.259851] scsi 1:0:182:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 25.268284] scsi 1:0:182:0: SSP: handle(0x0056), sas_addr(0x5000cca252542cfe), phy(49), device_name(0x5000cca252542cff)
[ 25.279056] scsi 1:0:182:0: enclosure logical id(0x5000ccab0405db00), slot(9)
[ 25.286276] scsi 1:0:182:0: enclosure level(0x0000), connector name( C1 )
[ 25.293168] scsi 1:0:182:0: serial_number( 7SHH937G)
[ 25.298744] scsi 1:0:182:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 25.321384] mpt3sas_cm0: detecting: handle(0x0057), sas_address(0x5000cca26a3181fe), phy(50)
[ 25.329825] mpt3sas_cm0: REPORT_LUNS: handle(0x0057), retries(0)
[ 25.335991] mpt3sas_cm0: TEST_UNIT_READY: handle(0x0057), lun(0)
[ 25.342817] scsi 1:0:183:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 25.351209] scsi 1:0:183:0: SSP: handle(0x0057), sas_addr(0x5000cca26a3181fe), phy(50), device_name(0x5000cca26a3181ff)
[ 25.361981] scsi 1:0:183:0: enclosure logical id(0x5000ccab0405db00), slot(10)
[ 25.369287] scsi 1:0:183:0: enclosure level(0x0000), connector name( C1 )
[ 25.376181] scsi 1:0:183:0: serial_number( 2TGW71ND)
[ 25.381753] scsi 1:0:183:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 25.406892] mpt3sas_cm0: expander_add: handle(0x0099), parent(0x000a), sas_addr(0x5000ccab0405db3d), phys(49)
[ 25.428327] mpt3sas_cm0: detecting: handle(0x009d), sas_address(0x5000ccab0405db3c), phy(48)
[ 25.436763] mpt3sas_cm0: REPORT_LUNS: handle(0x009d), retries(0)
[ 25.444026] mpt3sas_cm0: TEST_UNIT_READY: handle(0x009d), lun(0)
[ 25.450935] scsi 1:0:184:0: Enclosure HGST H4060-J 2033 PQ: 0 ANSI: 6
[ 25.459522] scsi 1:0:184:0: set ignore_delay_remove for handle(0x009d)
[ 25.466046] scsi 1:0:184:0: SES: handle(0x009d), sas_addr(0x5000ccab0405db3c), phy(48), device_name(0x0000000000000000)
[ 25.476817] scsi 1:0:184:0: enclosure logical id(0x5000ccab0405db00), slot(60)
[ 25.484122] scsi 1:0:184:0: enclosure level(0x0000), connector name( C0 )
[ 25.491014] scsi 1:0:184:0: serial_number(USWSJ03918EZ0069 )
[ 25.496936] scsi 1:0:184:0: qdepth(1), tagged(0), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 25.521756] mpt3sas_cm0: expander_add: handle(0x009b), parent(0x0099), sas_addr(0x5000ccab0405db3f), phys(68)
[ 25.543977] mpt3sas_cm0: detecting: handle(0x009e), sas_address(0x5000cca252550a75), phy(0)
[ 25.552334] mpt3sas_cm0: REPORT_LUNS: handle(0x009e), retries(0)
[ 25.558455] mpt3sas_cm0: TEST_UNIT_READY: handle(0x009e), lun(0)
[ 25.565084] scsi 1:0:185:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 25.573487] scsi 1:0:185:0: SSP: handle(0x009e), sas_addr(0x5000cca252550a75), phy(0), device_name(0x5000cca252550a77)
[ 25.584174] scsi 1:0:185:0: enclosure logical id(0x5000ccab0405db00), slot(0)
[ 25.591393] scsi 1:0:185:0: enclosure level(0x0000), connector name( C0 )
[ 25.598287] scsi 1:0:185:0: serial_number( 7SHHSVGG)
[ 25.603860] scsi 1:0:185:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 25.626393] mpt3sas_cm0: detecting: handle(0x009f), sas_address(0x5000cca25253eb31), phy(1)
[ 25.634742] mpt3sas_cm0: REPORT_LUNS: handle(0x009f), retries(0)
[ 25.640883] mpt3sas_cm0: TEST_UNIT_READY: handle(0x009f), lun(0)
[ 25.647533] scsi 1:0:186:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 25.655910] scsi 1:0:186:0: SSP: handle(0x009f), sas_addr(0x5000cca25253eb31), phy(1), device_name(0x5000cca25253eb33)
[ 25.666596] scsi 1:0:186:0: enclosure logical id(0x5000ccab0405db00), slot(2)
[ 25.673816] scsi 1:0:186:0: enclosure level(0x0000), connector name( C0 )
[ 25.680709] scsi 1:0:186:0: serial_number( 7SHH4RDG)
[ 25.686280] scsi 1:0:186:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 25.706392] mpt3sas_cm0: detecting: handle(0x00a0), sas_address(0x5000cca26b950bb5), phy(2)
[ 25.714746] mpt3sas_cm0: REPORT_LUNS: handle(0x00a0), retries(0)
[ 25.720892] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00a0), lun(0)
[ 25.764588] scsi 1:0:187:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 25.772971] scsi 1:0:187:0: SSP: handle(0x00a0), sas_addr(0x5000cca26b950bb5), phy(2), device_name(0x5000cca26b950bb7)
[ 25.783659] scsi 1:0:187:0: enclosure logical id(0x5000ccab0405db00), slot(11)
[ 25.790967] scsi 1:0:187:0: enclosure level(0x0000), connector name( C0 )
[ 25.797860] scsi 1:0:187:0: serial_number( 1SJMZ22Z)
[ 25.803431] scsi 1:0:187:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 25.832397] mpt3sas_cm0: detecting: handle(0x00a1), sas_address(0x5000cca25253f3bd), phy(3)
[ 25.840745] mpt3sas_cm0: REPORT_LUNS: handle(0x00a1), retries(0)
[ 25.846885] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00a1), lun(0)
[ 25.853530] scsi 1:0:188:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 25.861915] scsi 1:0:188:0: SSP: handle(0x00a1), sas_addr(0x5000cca25253f3bd), phy(3), device_name(0x5000cca25253f3bf)
[ 25.872598] scsi 1:0:188:0: enclosure logical id(0x5000ccab0405db00), slot(12)
[ 25.879905] scsi 1:0:188:0: enclosure level(0x0000), connector name( C0 )
[ 25.886798] scsi 1:0:188:0: serial_number( 7SHH591G)
[ 25.892372] scsi 1:0:188:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 25.912398] mpt3sas_cm0: detecting: handle(0x00a2), sas_address(0x5000cca26a2ac3d9), phy(4)
[ 25.920749] mpt3sas_cm0: REPORT_LUNS: handle(0x00a2), retries(0)
[ 25.926922] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00a2), lun(0)
[ 25.933712] scsi 1:0:189:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 25.942091] scsi 1:0:189:0: SSP: handle(0x00a2), sas_addr(0x5000cca26a2ac3d9), phy(4), device_name(0x5000cca26a2ac3db)
[ 25.952776] scsi 1:0:189:0: enclosure logical id(0x5000ccab0405db00), slot(13)
[ 25.960084] scsi 1:0:189:0: enclosure level(0x0000), connector name( C0 )
[ 25.966974] scsi 1:0:189:0: serial_number( 2TGSJ30D)
[ 25.972548] scsi 1:0:189:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 25.992400] mpt3sas_cm0: detecting: handle(0x00a3), sas_address(0x5000cca252541029), phy(5)
[ 26.000753] mpt3sas_cm0: REPORT_LUNS: handle(0x00a3), retries(0)
[ 26.006920] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00a3), lun(0)
[ 26.013633] scsi 1:0:190:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 26.022009] scsi 1:0:190:0: SSP: handle(0x00a3), sas_addr(0x5000cca252541029), phy(5), device_name(0x5000cca25254102b)
[ 26.032695] scsi 1:0:190:0: enclosure logical id(0x5000ccab0405db00), slot(14)
[ 26.040000] scsi 1:0:190:0: enclosure level(0x0000), connector name( C0 )
[ 26.046892] scsi 1:0:190:0: serial_number( 7SHH75RG)
[ 26.052465] scsi 1:0:190:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 26.072401] mpt3sas_cm0: detecting: handle(0x00a4), sas_address(0x5000cca252545349), phy(6)
[ 26.080749] mpt3sas_cm0: REPORT_LUNS: handle(0x00a4), retries(0)
[ 26.086889] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00a4), lun(0)
[ 26.093539] scsi 1:0:191:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 26.101914] scsi 1:0:191:0: SSP: handle(0x00a4), sas_addr(0x5000cca252545349), phy(6), device_name(0x5000cca25254534b)
[ 26.112602] scsi 1:0:191:0: enclosure logical id(0x5000ccab0405db00), slot(15)
[ 26.119909] scsi 1:0:191:0: enclosure level(0x0000), connector name( C0 )
[ 26.126801] scsi 1:0:191:0: serial_number( 7SHHBN9G)
[ 26.132376] scsi 1:0:191:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 26.152404] mpt3sas_cm0: detecting: handle(0x00a5), sas_address(0x5000cca2525430c5), phy(7)
[ 26.160752] mpt3sas_cm0: REPORT_LUNS: handle(0x00a5), retries(0)
[ 26.167075] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00a5), lun(0)
[ 26.178924] scsi 1:0:192:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 26.187308] scsi 1:0:192:0: SSP: handle(0x00a5), sas_addr(0x5000cca2525430c5), phy(7), device_name(0x5000cca2525430c7)
[ 26.197997] scsi 1:0:192:0: enclosure logical id(0x5000ccab0405db00), slot(16)
[ 26.205304] scsi 1:0:192:0: enclosure level(0x0000), connector name( C0 )
[ 26.212197] scsi 1:0:192:0: serial_number( 7SHH9B1G)
[ 26.217770] scsi 1:0:192:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 26.240414] mpt3sas_cm0: detecting: handle(0x00a6), sas_address(0x5000cca25254385d), phy(8)
[ 26.248764] mpt3sas_cm0: REPORT_LUNS: handle(0x00a6), retries(0)
[ 26.254897] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00a6), lun(0)
[ 26.266038] scsi 1:0:193:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 26.274419] scsi 1:0:193:0: SSP: handle(0x00a6), sas_addr(0x5000cca25254385d), phy(8), device_name(0x5000cca25254385f)
[ 26.285107] scsi 1:0:193:0: enclosure logical id(0x5000ccab0405db00), slot(17)
[ 26.292414] scsi 1:0:193:0: enclosure level(0x0000), connector name( C0 )
[ 26.299306] scsi 1:0:193:0: serial_number( 7SHH9VRG)
[ 26.304879] scsi 1:0:193:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 26.333425] mpt3sas_cm0: detecting: handle(0x00a7), sas_address(0x5000cca25253f30d), phy(9)
[ 26.341777] mpt3sas_cm0: REPORT_LUNS: handle(0x00a7), retries(0)
[ 26.347919] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00a7), lun(0)
[ 26.354730] scsi 1:0:194:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 26.363122] scsi 1:0:194:0: SSP: handle(0x00a7), sas_addr(0x5000cca25253f30d), phy(9), device_name(0x5000cca25253f30f)
[ 26.373805] scsi 1:0:194:0: enclosure logical id(0x5000ccab0405db00), slot(18)
[ 26.381110] scsi 1:0:194:0: enclosure level(0x0000), connector name( C0 )
[ 26.388004] scsi 1:0:194:0: serial_number( 7SHH57MG)
[ 26.393578] scsi 1:0:194:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 26.413444] mpt3sas_cm0: detecting: handle(0x00a8), sas_address(0x5000cca252545f65), phy(10)
[ 26.421884] mpt3sas_cm0: REPORT_LUNS: handle(0x00a8), retries(0)
[ 26.428044] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00a8), lun(0)
[ 26.434872] scsi 1:0:195:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 26.443275] scsi 1:0:195:0: SSP: handle(0x00a8), sas_addr(0x5000cca252545f65), phy(10), device_name(0x5000cca252545f67)
[ 26.454044] scsi 1:0:195:0: enclosure logical id(0x5000ccab0405db00), slot(19)
[ 26.461350] scsi 1:0:195:0: enclosure level(0x0000), connector name( C0 )
[ 26.468240] scsi 1:0:195:0: serial_number( 7SHHDG9G)
[ 26.473815] scsi 1:0:195:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 26.496412] mpt3sas_cm0: detecting: handle(0x00a9), sas_address(0x5000cca266daa4e5), phy(11)
[ 26.504852] mpt3sas_cm0: REPORT_LUNS: handle(0x00a9), retries(0)
[ 26.510986] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00a9), lun(0)
[ 26.525513] scsi 1:0:196:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 26.533900] scsi 1:0:196:0: SSP: handle(0x00a9), sas_addr(0x5000cca266daa4e5), phy(11), device_name(0x5000cca266daa4e7)
[ 26.544673] scsi 1:0:196:0: enclosure logical id(0x5000ccab0405db00), slot(20)
[ 26.551978] scsi 1:0:196:0: enclosure level(0x0000), connector name( C0 )
[ 26.558872] scsi 1:0:196:0: serial_number( 7JKW7MYK)
[ 26.564450] scsi 1:0:196:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 26.594415] mpt3sas_cm0: detecting: handle(0x00aa), sas_address(0x5000cca26a25167d), phy(12)
[ 26.602849] mpt3sas_cm0: REPORT_LUNS: handle(0x00aa), retries(0)
[ 26.609007] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00aa), lun(0)
[ 26.615817] scsi 1:0:197:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 26.624208] scsi 1:0:197:0: SSP: handle(0x00aa), sas_addr(0x5000cca26a25167d), phy(12), device_name(0x5000cca26a25167f)
[ 26.634981] scsi 1:0:197:0: enclosure logical id(0x5000ccab0405db00), slot(21)
[ 26.642288] scsi 1:0:197:0: enclosure level(0x0000), connector name( C0 )
[ 26.649182] scsi 1:0:197:0: serial_number( 2TGND9JD)
[ 26.654752] scsi 1:0:197:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 26.677415] mpt3sas_cm0: detecting: handle(0x00ab), sas_address(0x5000cca25253eda9), phy(13)
[ 26.685852] mpt3sas_cm0: REPORT_LUNS: handle(0x00ab), retries(0)
[ 26.691985] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00ab), lun(0)
[ 26.701296] scsi 1:0:198:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 26.709679] scsi 1:0:198:0: SSP: handle(0x00ab), sas_addr(0x5000cca25253eda9), phy(13), device_name(0x5000cca25253edab)
[ 26.720456] scsi 1:0:198:0: enclosure logical id(0x5000ccab0405db00), slot(22)
[ 26.727760] scsi 1:0:198:0: enclosure level(0x0000), connector name( C0 )
[ 26.734652] scsi 1:0:198:0: serial_number( 7SHH4WHG)
[ 26.740225] scsi 1:0:198:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 26.763416] mpt3sas_cm0: detecting: handle(0x00ac), sas_address(0x5000cca266d491a1), phy(14)
[ 26.771853] mpt3sas_cm0: REPORT_LUNS: handle(0x00ac), retries(0)
[ 26.777986] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00ac), lun(0)
[ 26.784642] scsi 1:0:199:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 26.793028] scsi 1:0:199:0: SSP: handle(0x00ac), sas_addr(0x5000cca266d491a1), phy(14), device_name(0x5000cca266d491a3)
[ 26.803805] scsi 1:0:199:0: enclosure logical id(0x5000ccab0405db00), slot(23)
[ 26.811109] scsi 1:0:199:0: enclosure level(0x0000), connector name( C0 )
[ 26.818001] scsi 1:0:199:0: serial_number( 7JKSX22K)
[ 26.823577] scsi 1:0:199:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 26.843419] mpt3sas_cm0: detecting: handle(0x00ad), sas_address(0x5000cca26b9a7099), phy(15)
[ 26.851857] mpt3sas_cm0: REPORT_LUNS: handle(0x00ad), retries(0)
[ 26.858031] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00ad), lun(0)
[ 26.874100] scsi 1:0:200:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 26.882478] scsi 1:0:200:0: SSP: handle(0x00ad), sas_addr(0x5000cca26b9a7099), phy(15), device_name(0x5000cca26b9a709b)
[ 26.893254] scsi 1:0:200:0: enclosure logical id(0x5000ccab0405db00), slot(24)
[ 26.900561] scsi 1:0:200:0: enclosure level(0x0000), connector name( C0 )
[ 26.907454] scsi 1:0:200:0: serial_number( 1SJRY0YZ)
[ 26.913026] scsi 1:0:200:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 26.935424] mpt3sas_cm0: detecting: handle(0x00ae), sas_address(0x5000cca25253f831), phy(16)
[ 26.943866] mpt3sas_cm0: REPORT_LUNS: handle(0x00ae), retries(0)
[ 26.957487] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00ae), lun(0)
[ 26.965975] scsi 1:0:201:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 26.974368] scsi 1:0:201:0: SSP: handle(0x00ae), sas_addr(0x5000cca25253f831), phy(16), device_name(0x5000cca25253f833)
[ 26.985141] scsi 1:0:201:0: enclosure logical id(0x5000ccab0405db00), slot(25)
[ 26.992448] scsi 1:0:201:0: enclosure level(0x0000), connector name( C0 )
[ 26.999341] scsi 1:0:201:0: serial_number( 7SHH5L7G)
[ 27.004912] scsi 1:0:201:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 27.035435] mpt3sas_cm0: detecting: handle(0x00af), sas_address(0x5000cca26a2ab23d), phy(17)
[ 27.043873] mpt3sas_cm0: REPORT_LUNS: handle(0x00af), retries(0)
[ 27.050033] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00af), lun(0)
[ 27.056681] scsi 1:0:202:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 27.065071] scsi 1:0:202:0: SSP: handle(0x00af), sas_addr(0x5000cca26a2ab23d), phy(17), device_name(0x5000cca26a2ab23f)
[ 27.075839] scsi 1:0:202:0: enclosure logical id(0x5000ccab0405db00), slot(26)
[ 27.083146] scsi 1:0:202:0: enclosure level(0x0000), connector name( C0 )
[ 27.090037] scsi 1:0:202:0: serial_number( 2TGSGXND)
[ 27.095615] scsi 1:0:202:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 27.116475] mpt3sas_cm0: detecting: handle(0x00b0), sas_address(0x5000cca26b9b9695), phy(18)
[ 27.124907] mpt3sas_cm0: REPORT_LUNS: handle(0x00b0), retries(0)
[ 27.131074] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00b0), lun(0)
[ 27.137769] scsi 1:0:203:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 27.146201] scsi 1:0:203:0: SSP: handle(0x00b0), sas_addr(0x5000cca26b9b9695), phy(18), device_name(0x5000cca26b9b9697)
[ 27.156970] scsi 1:0:203:0: enclosure logical id(0x5000ccab0405db00), slot(27)
[ 27.164278] scsi 1:0:203:0: enclosure level(0x0000), connector name( C0 )
[ 27.171169] scsi 1:0:203:0: serial_number( 1SJSKLWZ)
[ 27.176743] scsi 1:0:203:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 27.197430] mpt3sas_cm0: detecting: handle(0x00b1), sas_address(0x5000cca252559471), phy(19)
[ 27.205864] mpt3sas_cm0: REPORT_LUNS: handle(0x00b1), retries(0)
[ 27.212023] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00b1), lun(0)
[ 27.218682] scsi 1:0:204:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 27.227066] scsi 1:0:204:0: SSP: handle(0x00b1), sas_addr(0x5000cca252559471), phy(19), device_name(0x5000cca252559473)
[ 27.237843] scsi 1:0:204:0: enclosure logical id(0x5000ccab0405db00), slot(28)
[ 27.245146] scsi 1:0:204:0: enclosure level(0x0000), connector name( C0 )
[ 27.252038] scsi 1:0:204:0: serial_number( 7SHJ21AG)
[ 27.257614] scsi 1:0:204:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 27.312439] mpt3sas_cm0: detecting: handle(0x00b2), sas_address(0x5000cca25253f94d), phy(20)
[ 27.320874] mpt3sas_cm0: REPORT_LUNS: handle(0x00b2), retries(0)
[ 27.327010] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00b2), lun(0)
[ 27.345304] scsi 1:0:205:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 27.353687] scsi 1:0:205:0: SSP: handle(0x00b2), sas_addr(0x5000cca25253f94d), phy(20), device_name(0x5000cca25253f94f)
[ 27.364456] scsi 1:0:205:0: enclosure logical id(0x5000ccab0405db00), slot(29)
[ 27.371762] scsi 1:0:205:0: enclosure level(0x0000), connector name( C0 )
[ 27.378640] scsi 1:0:205:0: serial_number( 7SHH5NJG)
[ 27.384210] scsi 1:0:205:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 27.426450] mpt3sas_cm0: detecting: handle(0x00b3), sas_address(0x5000cca25253e699), phy(21)
[ 27.434887] mpt3sas_cm0: REPORT_LUNS: handle(0x00b3), retries(0)
[ 27.441017] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00b3), lun(0)
[ 27.464744] scsi 1:0:206:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 27.473116] scsi 1:0:206:0: SSP: handle(0x00b3), sas_addr(0x5000cca25253e699), phy(21), device_name(0x5000cca25253e69b)
[ 27.483887] scsi 1:0:206:0: enclosure logical id(0x5000ccab0405db00), slot(30)
[ 27.491191] scsi 1:0:206:0: enclosure level(0x0000), connector name( C0 )
[ 27.498068] scsi 1:0:206:0: serial_number( 7SHH4DXG)
[ 27.503641] scsi 1:0:206:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 27.525026] mpt3sas_cm0: detecting: handle(0x00b4), sas_address(0x5000cca252543cc1), phy(22)
[ 27.533466] mpt3sas_cm0: REPORT_LUNS: handle(0x00b4), retries(0)
[ 27.539607] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00b4), lun(0)
[ 27.546220] scsi 1:0:207:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 27.554616] scsi 1:0:207:0: SSP: handle(0x00b4), sas_addr(0x5000cca252543cc1), phy(22), device_name(0x5000cca252543cc3)
[ 27.565390] scsi 1:0:207:0: enclosure logical id(0x5000ccab0405db00), slot(31)
[ 27.572695] scsi 1:0:207:0: enclosure level(0x0000), connector name( C0 )
[ 27.579590] scsi 1:0:207:0: serial_number( 7SHHA4TG)
[ 27.585163] scsi 1:0:207:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 27.605437] mpt3sas_cm0: detecting: handle(0x00b5), sas_address(0x5000cca26a24fcdd), phy(23)
[ 27.613875] mpt3sas_cm0: REPORT_LUNS: handle(0x00b5), retries(0)
[ 27.620034] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00b5), lun(0)
[ 27.626760] scsi 1:0:208:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 27.635194] scsi 1:0:208:0: SSP: handle(0x00b5), sas_addr(0x5000cca26a24fcdd), phy(23), device_name(0x5000cca26a24fcdf)
[ 27.645965] scsi 1:0:208:0: enclosure logical id(0x5000ccab0405db00), slot(32)
[ 27.653272] scsi 1:0:208:0: enclosure level(0x0000), connector name( C0 )
[ 27.660161] scsi 1:0:208:0: serial_number( 2TGNALMD)
[ 27.665737] scsi 1:0:208:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 27.688439] mpt3sas_cm0: detecting: handle(0x00b6), sas_address(0x5000cca252543bcd), phy(24)
[ 27.696880] mpt3sas_cm0: REPORT_LUNS: handle(0x00b6), retries(0)
[ 27.703020] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00b6), lun(0)
[ 27.709721] scsi 1:0:209:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 27.718108] scsi 1:0:209:0: SSP: handle(0x00b6), sas_addr(0x5000cca252543bcd), phy(24), device_name(0x5000cca252543bcf)
[ 27.728882] scsi 1:0:209:0: enclosure logical id(0x5000ccab0405db00), slot(33)
[ 27.736186] scsi 1:0:209:0: enclosure level(0x0000), connector name( C0 )
[ 27.743079] scsi 1:0:209:0: serial_number( 7SHHA2UG)
[ 27.748652] scsi 1:0:209:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 27.777438] mpt3sas_cm0: detecting: handle(0x00b7), sas_address(0x5000cca252551265), phy(25)
[ 27.785879] mpt3sas_cm0: REPORT_LUNS: handle(0x00b7), retries(0)
[ 27.792011] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00b7), lun(0)
[ 27.798652] scsi 1:0:210:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 27.807035] scsi 1:0:210:0: SSP: handle(0x00b7), sas_addr(0x5000cca252551265), phy(25), device_name(0x5000cca252551267)
[ 27.817812] scsi 1:0:210:0: enclosure logical id(0x5000ccab0405db00), slot(34)
[ 27.825119] scsi 1:0:210:0: enclosure level(0x0000), connector name( C0 )
[ 27.832007] scsi 1:0:210:0: serial_number( 7SHHTBVG)
[ 27.837583] scsi 1:0:210:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 27.857442] mpt3sas_cm0: detecting: handle(0x00b8), sas_address(0x5000cca252555fc9), phy(26)
[ 27.865882] mpt3sas_cm0: REPORT_LUNS: handle(0x00b8), retries(0)
[ 27.872017] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00b8), lun(0)
[ 27.878631] scsi 1:0:211:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 27.887014] scsi 1:0:211:0: SSP: handle(0x00b8), sas_addr(0x5000cca252555fc9), phy(26), device_name(0x5000cca252555fcb)
[ 27.897789] scsi 1:0:211:0: enclosure logical id(0x5000ccab0405db00), slot(35)
[ 27.905096] scsi 1:0:211:0: enclosure level(0x0000), connector name( C0 )
[ 27.911988] scsi 1:0:211:0: serial_number( 7SHHYJMG)
[ 27.917561] scsi 1:0:211:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 27.937443] mpt3sas_cm0: detecting: handle(0x00b9), sas_address(0x5000cca252559f7d), phy(27)
[ 27.945878] mpt3sas_cm0: REPORT_LUNS: handle(0x00b9), retries(0)
[ 27.952037] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00b9), lun(0)
[ 27.966904] scsi 1:0:212:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 27.975275] scsi 1:0:212:0: SSP: handle(0x00b9), sas_addr(0x5000cca252559f7d), phy(27), device_name(0x5000cca252559f7f)
[ 27.986044] scsi 1:0:212:0: enclosure logical id(0x5000ccab0405db00), slot(36)
[ 27.993351] scsi 1:0:212:0: enclosure level(0x0000), connector name( C0 )
[ 28.000229] scsi 1:0:212:0: serial_number( 7SHJ2T4G)
[ 28.005810] scsi 1:0:212:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 28.028446] mpt3sas_cm0: detecting: handle(0x00ba), sas_address(0x5000cca26c244bcd), phy(28)
[ 28.036881] mpt3sas_cm0: REPORT_LUNS: handle(0x00ba), retries(0)
[ 28.043022] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00ba), lun(0)
[ 28.049655] scsi 1:0:213:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 28.058039] scsi 1:0:213:0: SSP: handle(0x00ba), sas_addr(0x5000cca26c244bcd), phy(28), device_name(0x5000cca26c244bcf)
[ 28.068812] scsi 1:0:213:0: enclosure logical id(0x5000ccab0405db00), slot(37)
[ 28.076120] scsi 1:0:213:0: enclosure level(0x0000), connector name( C0 )
[ 28.083010] scsi 1:0:213:0: serial_number( 1DGMYU2Z)
[ 28.088586] scsi 1:0:213:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 28.121449] mpt3sas_cm0: detecting: handle(0x00bb), sas_address(0x5000cca26a2aa10d), phy(29)
[ 28.129884] mpt3sas_cm0: REPORT_LUNS: handle(0x00bb), retries(0)
[ 28.136025] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00bb), lun(0)
[ 28.233702] scsi 1:0:214:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 28.242088] scsi 1:0:214:0: SSP: handle(0x00bb), sas_addr(0x5000cca26a2aa10d), phy(29), device_name(0x5000cca26a2aa10f)
[ 28.252863] scsi 1:0:214:0: enclosure logical id(0x5000ccab0405db00), slot(38)
[ 28.260170] scsi 1:0:214:0: enclosure level(0x0000), connector name( C0 )
[ 28.267060] scsi 1:0:214:0: serial_number( 2TGSET5D)
[ 28.272635] scsi 1:0:214:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 28.298454] mpt3sas_cm0: detecting: handle(0x00bc), sas_address(0x5000cca25254e235), phy(30)
[ 28.306889] mpt3sas_cm0: REPORT_LUNS: handle(0x00bc), retries(0)
[ 28.313051] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00bc), lun(0)
[ 28.319670] scsi 1:0:215:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 28.328055] scsi 1:0:215:0: SSP: handle(0x00bc), sas_addr(0x5000cca25254e235), phy(30), device_name(0x5000cca25254e237)
[ 28.338830] scsi 1:0:215:0: enclosure logical id(0x5000ccab0405db00), slot(39)
[ 28.346138] scsi 1:0:215:0: enclosure level(0x0000), connector name( C0 )
[ 28.353028] scsi 1:0:215:0: serial_number( 7SHHP5BG)
[ 28.358602] scsi 1:0:215:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 28.379459] mpt3sas_cm0: detecting: handle(0x00bd), sas_address(0x5000cca25254df95), phy(31)
[ 28.387899] mpt3sas_cm0: REPORT_LUNS: handle(0x00bd), retries(0)
[ 28.394037] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00bd), lun(0)
[ 28.439541] scsi 1:0:216:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 28.447922] scsi 1:0:216:0: SSP: handle(0x00bd), sas_addr(0x5000cca25254df95), phy(31), device_name(0x5000cca25254df97)
[ 28.458694] scsi 1:0:216:0: enclosure logical id(0x5000ccab0405db00), slot(40)
[ 28.466001] scsi 1:0:216:0: enclosure level(0x0000), connector name( C0 )
[ 28.472890] scsi 1:0:216:0: serial_number( 7SHHNZYG)
[ 28.478466] scsi 1:0:216:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 28.568112] mpt3sas_cm0: detecting: handle(0x00be), sas_address(0x5000cca25254e9d1), phy(32)
[ 28.576557] mpt3sas_cm0: REPORT_LUNS: handle(0x00be), retries(0)
[ 28.582697] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00be), lun(0)
[ 28.594947] scsi 1:0:217:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 28.603638] scsi 1:0:217:0: SSP: handle(0x00be), sas_addr(0x5000cca25254e9d1), phy(32), device_name(0x5000cca25254e9d3)
[ 28.614413] scsi 1:0:217:0: enclosure logical id(0x5000ccab0405db00), slot(41)
[ 28.621717] scsi 1:0:217:0: enclosure level(0x0000), connector name( C0 )
[ 28.628596] scsi 1:0:217:0: serial_number( 7SHHPP2G)
[ 28.634174] scsi 1:0:217:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 28.663469] mpt3sas_cm0: detecting: handle(0x00bf), sas_address(0x5000cca26a240089), phy(33)
[ 28.671904] mpt3sas_cm0: REPORT_LUNS: handle(0x00bf), retries(0)
[ 28.678034] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00bf), lun(0)
[ 28.722599] scsi 1:0:218:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 28.730985] scsi 1:0:218:0: SSP: handle(0x00bf), sas_addr(0x5000cca26a240089), phy(33), device_name(0x5000cca26a24008b)
[ 28.741754] scsi 1:0:218:0: enclosure logical id(0x5000ccab0405db00), slot(42)
[ 28.749059] scsi 1:0:218:0: enclosure level(0x0000), connector name( C0 )
[ 28.755937] scsi 1:0:218:0: serial_number( 2TGMTTPD)
[ 28.761509] scsi 1:0:218:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 28.787473] mpt3sas_cm0: detecting: handle(0x00c0), sas_address(0x5000cca26a24b9e9), phy(34)
[ 28.795909] mpt3sas_cm0: REPORT_LUNS: handle(0x00c0), retries(0)
[ 28.802042] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00c0), lun(0)
[ 28.813348] scsi 1:0:219:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 28.821732] scsi 1:0:219:0: SSP: handle(0x00c0), sas_addr(0x5000cca26a24b9e9), phy(34), device_name(0x5000cca26a24b9eb)
[ 28.832506] scsi 1:0:219:0: enclosure logical id(0x5000ccab0405db00), slot(43)
[ 28.839810] scsi 1:0:219:0: enclosure level(0x0000), connector name( C0 )
[ 28.846701] scsi 1:0:219:0: serial_number( 2TGN64DD)
[ 28.852277] scsi 1:0:219:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 28.872472] mpt3sas_cm0: detecting: handle(0x00c1), sas_address(0x5000cca26a25aed5), phy(35)
[ 28.880904] mpt3sas_cm0: REPORT_LUNS: handle(0x00c1), retries(0)
[ 28.887037] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00c1), lun(0)
[ 28.912813] scsi 1:0:220:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 28.921185] scsi 1:0:220:0: SSP: handle(0x00c1), sas_addr(0x5000cca26a25aed5), phy(35), device_name(0x5000cca26a25aed7)
[ 28.931956] scsi 1:0:220:0: enclosure logical id(0x5000ccab0405db00), slot(44)
[ 28.939263] scsi 1:0:220:0: enclosure level(0x0000), connector name( C0 )
[ 28.946142] scsi 1:0:220:0: serial_number( 2TGNRG1D)
[ 28.951720] scsi 1:0:220:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 28.972469] mpt3sas_cm0: detecting: handle(0x00c2), sas_address(0x5000cca266d32b69), phy(36)
[ 28.980905] mpt3sas_cm0: REPORT_LUNS: handle(0x00c2), retries(0)
[ 28.987076] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00c2), lun(0)
[ 28.993719] scsi 1:0:221:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 29.002108] scsi 1:0:221:0: SSP: handle(0x00c2), sas_addr(0x5000cca266d32b69), phy(36), device_name(0x5000cca266d32b6b)
[ 29.012880] scsi 1:0:221:0: enclosure logical id(0x5000ccab0405db00), slot(45)
[ 29.020187] scsi 1:0:221:0: enclosure level(0x0000), connector name( C0 )
[ 29.027080] scsi 1:0:221:0: serial_number( 7JKS46JK)
[ 29.032654] scsi 1:0:221:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 29.052474] mpt3sas_cm0: detecting: handle(0x00c3), sas_address(0x5000cca26b9bf885), phy(37)
[ 29.060915] mpt3sas_cm0: REPORT_LUNS: handle(0x00c3), retries(0)
[ 29.067055] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00c3), lun(0)
[ 29.073696] scsi 1:0:222:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 29.082084] scsi 1:0:222:0: SSP: handle(0x00c3), sas_addr(0x5000cca26b9bf885), phy(37), device_name(0x5000cca26b9bf887)
[ 29.092858] scsi 1:0:222:0: enclosure logical id(0x5000ccab0405db00), slot(46)
[ 29.100165] scsi 1:0:222:0: enclosure level(0x0000), connector name( C0 )
[ 29.107056] scsi 1:0:222:0: serial_number( 1SJST42Z)
[ 29.112630] scsi 1:0:222:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 29.188509] mpt3sas_cm0: detecting: handle(0x00c4), sas_address(0x5000cca26b9b24c9), phy(38)
[ 29.196945] mpt3sas_cm0: REPORT_LUNS: handle(0x00c4), retries(0)
[ 29.203080] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00c4), lun(0)
[ 29.212591] scsi 1:0:223:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 29.220974] scsi 1:0:223:0: SSP: handle(0x00c4), sas_addr(0x5000cca26b9b24c9), phy(38), device_name(0x5000cca26b9b24cb)
[ 29.231746] scsi 1:0:223:0: enclosure logical id(0x5000ccab0405db00), slot(47)
[ 29.239050] scsi 1:0:223:0: enclosure level(0x0000), connector name( C0 )
[ 29.245928] scsi 1:0:223:0: serial_number( 1SJSA0YZ)
[ 29.251499] scsi 1:0:223:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 29.271480] mpt3sas_cm0: detecting: handle(0x00c5), sas_address(0x5000cca26a21d741), phy(39)
[ 29.279923] mpt3sas_cm0: REPORT_LUNS: handle(0x00c5), retries(0)
[ 29.286086] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00c5), lun(0)
[ 29.292881] scsi 1:0:224:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 29.301275] scsi 1:0:224:0: SSP: handle(0x00c5), sas_addr(0x5000cca26a21d741), phy(39), device_name(0x5000cca26a21d743)
[ 29.312043] scsi 1:0:224:0: enclosure logical id(0x5000ccab0405db00), slot(48)
[ 29.319350] scsi 1:0:224:0: enclosure level(0x0000), connector name( C0 )
[ 29.326240] scsi 1:0:224:0: serial_number( 2TGLLYED)
[ 29.331818] scsi 1:0:224:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 29.352480] mpt3sas_cm0: detecting: handle(0x00c6), sas_address(0x5000cca26a27af5d), phy(40)
[ 29.360920] mpt3sas_cm0: REPORT_LUNS: handle(0x00c6), retries(0)
[ 29.367084] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00c6), lun(0)
[ 29.373724] scsi 1:0:225:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 29.382112] scsi 1:0:225:0: SSP: handle(0x00c6), sas_addr(0x5000cca26a27af5d), phy(40), device_name(0x5000cca26a27af5f)
[ 29.392889] scsi 1:0:225:0: enclosure logical id(0x5000ccab0405db00), slot(49)
[ 29.400196] scsi 1:0:225:0: enclosure level(0x0000), connector name( C0 )
[ 29.407087] scsi 1:0:225:0: serial_number( 2TGPUL5D)
[ 29.412660] scsi 1:0:225:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 29.432479] mpt3sas_cm0: detecting: handle(0x00c7), sas_address(0x5000cca2525552e5), phy(41)
[ 29.440917] mpt3sas_cm0: REPORT_LUNS: handle(0x00c7), retries(0)
[ 29.447061] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00c7), lun(0)
[ 29.453709] scsi 1:0:226:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 29.462105] scsi 1:0:226:0: SSP: handle(0x00c7), sas_addr(0x5000cca2525552e5), phy(41), device_name(0x5000cca2525552e7)
[ 29.472876] scsi 1:0:226:0: enclosure logical id(0x5000ccab0405db00), slot(50)
[ 29.480180] scsi 1:0:226:0: enclosure level(0x0000), connector name( C0 )
[ 29.487073] scsi 1:0:226:0: serial_number( 7SHHXP0G)
[ 29.492648] scsi 1:0:226:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 29.512486] mpt3sas_cm0: detecting: handle(0x00c8), sas_address(0x5000cca26a26dff1), phy(42)
[ 29.520923] mpt3sas_cm0: REPORT_LUNS: handle(0x00c8), retries(0)
[ 29.527070] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00c8), lun(0)
[ 29.533745] scsi 1:0:227:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 29.542134] scsi 1:0:227:0: SSP: handle(0x00c8), sas_addr(0x5000cca26a26dff1), phy(42), device_name(0x5000cca26a26dff3)
[ 29.552904] scsi 1:0:227:0: enclosure logical id(0x5000ccab0405db00), slot(51)
[ 29.560211] scsi 1:0:227:0: enclosure level(0x0000), connector name( C0 )
[ 29.567108] scsi 1:0:227:0: serial_number( 2TGPBSYD)
[ 29.572687] scsi 1:0:227:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 29.595491] mpt3sas_cm0: detecting: handle(0x00c9), sas_address(0x5000cca26b9c5d51), phy(43)
[ 29.603934] mpt3sas_cm0: REPORT_LUNS: handle(0x00c9), retries(0)
[ 29.610741] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00c9), lun(0)
[ 29.636259] scsi 1:0:228:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 29.644642] scsi 1:0:228:0: SSP: handle(0x00c9), sas_addr(0x5000cca26b9c5d51), phy(43), device_name(0x5000cca26b9c5d53)
[ 29.655417] scsi 1:0:228:0: enclosure logical id(0x5000ccab0405db00), slot(52)
[ 29.662724] scsi 1:0:228:0: enclosure level(0x0000), connector name( C0 )
[ 29.669599] scsi 1:0:228:0: serial_number( 1SJSZV5Z)
[ 29.675172] scsi 1:0:228:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 29.700086] mpt3sas_cm0: detecting: handle(0x00ca), sas_address(0x5000cca26b9602c5), phy(44)
[ 29.708538] mpt3sas_cm0: REPORT_LUNS: handle(0x00ca), retries(0)
[ 29.714655] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00ca), lun(0)
[ 29.726616] scsi 1:0:229:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 29.736415] scsi 1:0:229:0: SSP: handle(0x00ca), sas_addr(0x5000cca26b9602c5), phy(44), device_name(0x5000cca26b9602c7)
[ 29.747190] scsi 1:0:229:0: enclosure logical id(0x5000ccab0405db00), slot(53)
[ 29.754497] scsi 1:0:229:0: enclosure level(0x0000), connector name( C0 )
[ 29.761390] scsi 1:0:229:0: serial_number( 1SJNHJ4Z)
[ 29.766961] scsi 1:0:229:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 29.810615] mpt3sas_cm0: detecting: handle(0x00cb), sas_address(0x5000cca252544a01), phy(45)
[ 29.819053] mpt3sas_cm0: REPORT_LUNS: handle(0x00cb), retries(0)
[ 29.825193] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00cb), lun(0)
[ 29.831829] scsi 1:0:230:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 29.840220] scsi 1:0:230:0: SSP: handle(0x00cb), sas_addr(0x5000cca252544a01), phy(45), device_name(0x5000cca252544a03)
[ 29.850994] scsi 1:0:230:0: enclosure logical id(0x5000ccab0405db00), slot(54)
[ 29.858301] scsi 1:0:230:0: enclosure level(0x0000), connector name( C0 )
[ 29.865191] scsi 1:0:230:0: serial_number( 7SHHB14G)
[ 29.870764] scsi 1:0:230:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 29.893527] mpt3sas_cm0: detecting: handle(0x00cc), sas_address(0x5000cca252559f9d), phy(46)
[ 29.901968] mpt3sas_cm0: REPORT_LUNS: handle(0x00cc), retries(0)
[ 29.908099] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00cc), lun(0)
[ 29.980081] scsi 1:0:231:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 29.988467] scsi 1:0:231:0: SSP: handle(0x00cc), sas_addr(0x5000cca252559f9d), phy(46), device_name(0x5000cca252559f9f)
[ 29.999241] scsi 1:0:231:0: enclosure logical id(0x5000ccab0405db00), slot(55)
[ 30.006546] scsi 1:0:231:0: enclosure level(0x0000), connector name( C0 )
[ 30.013424] scsi 1:0:231:0: serial_number( 7SHJ2TDG)
[ 30.018995] scsi 1:0:231:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 30.039495] mpt3sas_cm0: detecting: handle(0x00cd), sas_address(0x5000cca25255571d), phy(47)
[ 30.047935] mpt3sas_cm0: REPORT_LUNS: handle(0x00cd), retries(0)
[ 30.054075] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00cd), lun(0)
[ 30.060757] scsi 1:0:232:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 30.069152] scsi 1:0:232:0: SSP: handle(0x00cd), sas_addr(0x5000cca25255571d), phy(47), device_name(0x5000cca25255571f)
[ 30.079921] scsi 1:0:232:0: enclosure logical id(0x5000ccab0405db00), slot(56)
[ 30.087228] scsi 1:0:232:0: enclosure level(0x0000), connector name( C0 )
[ 30.094118] scsi 1:0:232:0: serial_number( 7SHHXYRG)
[ 30.099694] scsi 1:0:232:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 30.122499] mpt3sas_cm0: detecting: handle(0x00ce), sas_address(0x5000cca26b9bf57d), phy(48)
[ 30.130939] mpt3sas_cm0: REPORT_LUNS: handle(0x00ce), retries(0)
[ 30.137072] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00ce), lun(0)
[ 30.154253] scsi 1:0:233:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 30.162643] scsi 1:0:233:0: SSP: handle(0x00ce), sas_addr(0x5000cca26b9bf57d), phy(48), device_name(0x5000cca26b9bf57f)
[ 30.173419] scsi 1:0:233:0: enclosure logical id(0x5000ccab0405db00), slot(57)
[ 30.180726] scsi 1:0:233:0: enclosure level(0x0000), connector name( C0 )
[ 30.187604] scsi 1:0:233:0: serial_number( 1SJSSXUZ)
[ 30.193184] scsi 1:0:233:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 30.213502] mpt3sas_cm0: detecting: handle(0x00cf), sas_address(0x5000cca252555371), phy(49)
[ 30.221944] mpt3sas_cm0: REPORT_LUNS: handle(0x00cf), retries(0)
[ 30.228150] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00cf), lun(0)
[ 30.235011] scsi 1:0:234:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 30.243408] scsi 1:0:234:0: SSP: handle(0x00cf), sas_addr(0x5000cca252555371), phy(49), device_name(0x5000cca252555373)
[ 30.254177] scsi 1:0:234:0: enclosure logical id(0x5000ccab0405db00), slot(58)
[ 30.261484] scsi 1:0:234:0: enclosure level(0x0000), connector name( C0 )
[ 30.268376] scsi 1:0:234:0: serial_number( 7SHHXR4G)
[ 30.273949] scsi 1:0:234:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 30.296498] mpt3sas_cm0: detecting: handle(0x00d0), sas_address(0x5000cca25253eefd), phy(50)
[ 30.304936] mpt3sas_cm0: REPORT_LUNS: handle(0x00d0), retries(0)
[ 30.311102] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00d0), lun(0)
[ 30.317742] scsi 1:0:235:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 30.326128] scsi 1:0:235:0: SSP: handle(0x00d0), sas_addr(0x5000cca25253eefd), phy(50), device_name(0x5000cca25253eeff)
[ 30.336902] scsi 1:0:235:0: enclosure logical id(0x5000ccab0405db00), slot(59)
[ 30.344210] scsi 1:0:235:0: enclosure level(0x0000), connector name( C0 )
[ 30.351101] scsi 1:0:235:0: serial_number( 7SHH4Z7G)
[ 30.356676] scsi 1:0:235:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 30.378985] mpt3sas_cm0: expander_add: handle(0x009c), parent(0x0099), sas_addr(0x5000ccab0405db7f), phys(68)
[ 30.400920] mpt3sas_cm0: detecting: handle(0x00d1), sas_address(0x5000cca26b9cbb05), phy(42)
[ 30.409354] mpt3sas_cm0: REPORT_LUNS: handle(0x00d1), retries(0)
[ 30.415472] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00d1), lun(0)
[ 30.422288] scsi 1:0:236:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 30.430687] scsi 1:0:236:0: SSP: handle(0x00d1), sas_addr(0x5000cca26b9cbb05), phy(42), device_name(0x5000cca26b9cbb07)
[ 30.441460] scsi 1:0:236:0: enclosure logical id(0x5000ccab0405db00), slot(1)
[ 30.448679] scsi 1:0:236:0: enclosure level(0x0000), connector name( C0 )
[ 30.455570] scsi 1:0:236:0: serial_number( 1SJT62MZ)
[ 30.461146] scsi 1:0:236:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 30.483507] mpt3sas_cm0: detecting: handle(0x00d2), sas_address(0x5000cca252544475), phy(43)
[ 30.491940] mpt3sas_cm0: REPORT_LUNS: handle(0x00d2), retries(0)
[ 30.498097] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00d2), lun(0)
[ 30.504891] scsi 1:0:237:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 30.518314] scsi 1:0:237:0: SSP: handle(0x00d2), sas_addr(0x5000cca252544475), phy(43), device_name(0x5000cca252544477)
[ 30.529082] scsi 1:0:237:0: enclosure logical id(0x5000ccab0405db00), slot(3)
[ 30.536302] scsi 1:0:237:0: enclosure level(0x0000), connector name( C0 )
[ 30.543195] scsi 1:0:237:0: serial_number( 7SHHANPG)
[ 30.548769] scsi 1:0:237:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 30.572101] mpt3sas_cm0: detecting: handle(0x00d3), sas_address(0x5000cca26a26173d), phy(44)
[ 30.580549] mpt3sas_cm0: REPORT_LUNS: handle(0x00d3), retries(0)
[ 30.586692] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00d3), lun(0)
[ 30.614858] scsi 1:0:238:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 30.623251] scsi 1:0:238:0: SSP: handle(0x00d3), sas_addr(0x5000cca26a26173d), phy(44), device_name(0x5000cca26a26173f)
[ 30.634022] scsi 1:0:238:0: enclosure logical id(0x5000ccab0405db00), slot(4)
[ 30.641240] scsi 1:0:238:0: enclosure level(0x0000), connector name( C0 )
[ 30.648117] scsi 1:0:238:0: serial_number( 2TGNYDLD)
[ 30.653688] scsi 1:0:238:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 30.691516] mpt3sas_cm0: detecting: handle(0x00d4), sas_address(0x5000cca252544cb5), phy(45)
[ 30.699956] mpt3sas_cm0: REPORT_LUNS: handle(0x00d4), retries(0)
[ 30.706109] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00d4), lun(0)
[ 30.752782] scsi 1:0:239:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 30.761165] scsi 1:0:239:0: SSP: handle(0x00d4), sas_addr(0x5000cca252544cb5), phy(45), device_name(0x5000cca252544cb7)
[ 30.771937] scsi 1:0:239:0: enclosure logical id(0x5000ccab0405db00), slot(5)
[ 30.779158] scsi 1:0:239:0: enclosure level(0x0000), connector name( C0 )
[ 30.786033] scsi 1:0:239:0: serial_number( 7SHHB6RG)
[ 30.791603] scsi 1:0:239:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 30.811514] mpt3sas_cm0: detecting: handle(0x00d5), sas_address(0x5000cca26c238691), phy(46)
[ 30.819947] mpt3sas_cm0: REPORT_LUNS: handle(0x00d5), retries(0)
[ 30.826094] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00d5), lun(0)
[ 30.832906] scsi 1:0:240:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 30.841307] scsi 1:0:240:0: SSP: handle(0x00d5), sas_addr(0x5000cca26c238691), phy(46), device_name(0x5000cca26c238693)
[ 30.852081] scsi 1:0:240:0: enclosure logical id(0x5000ccab0405db00), slot(6)
[ 30.859298] scsi 1:0:240:0: enclosure level(0x0000), connector name( C0 )
[ 30.866191] scsi 1:0:240:0: serial_number( 1DGMJNWZ)
[ 30.871765] scsi 1:0:240:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 30.894550] mpt3sas_cm0: detecting: handle(0x00d6), sas_address(0x5000cca26a2ac969), phy(47)
[ 30.902984] mpt3sas_cm0: REPORT_LUNS: handle(0x00d6), retries(0)
[ 30.909118] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00d6), lun(0)
[ 30.926099] scsi 1:0:241:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 30.934473] scsi 1:0:241:0: SSP: handle(0x00d6), sas_addr(0x5000cca26a2ac969), phy(47), device_name(0x5000cca26a2ac96b)
[ 30.945250] scsi 1:0:241:0: enclosure logical id(0x5000ccab0405db00), slot(7)
[ 30.952469] scsi 1:0:241:0: enclosure level(0x0000), connector name( C0 )
[ 30.959344] scsi 1:0:241:0: serial_number( 2TGSJGHD)
[ 30.964916] scsi 1:0:241:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 30.985518] mpt3sas_cm0: detecting: handle(0x00d7), sas_address(0x5000cca25253e619), phy(48)
[ 30.993952] mpt3sas_cm0: REPORT_LUNS: handle(0x00d7), retries(0)
[ 31.000112] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00d7), lun(0)
[ 31.006862] scsi 1:0:242:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 31.015252] scsi 1:0:242:0: SSP: handle(0x00d7), sas_addr(0x5000cca25253e619), phy(48), device_name(0x5000cca25253e61b)
[ 31.026023] scsi 1:0:242:0: enclosure logical id(0x5000ccab0405db00), slot(8)
[ 31.033243] scsi 1:0:242:0: enclosure level(0x0000), connector name( C0 )
[ 31.040136] scsi 1:0:242:0: serial_number( 7SHH4BWG)
[ 31.045709] scsi 1:0:242:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 31.066518] mpt3sas_cm0: detecting: handle(0x00d8), sas_address(0x5000cca252542cfd), phy(49)
[ 31.074953] mpt3sas_cm0: REPORT_LUNS: handle(0x00d8), retries(0)
[ 31.081085] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00d8), lun(0)
[ 31.087744] scsi 1:0:243:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 31.096134] scsi 1:0:243:0: SSP: handle(0x00d8), sas_addr(0x5000cca252542cfd), phy(49), device_name(0x5000cca252542cff)
[ 31.106903] scsi 1:0:243:0: enclosure logical id(0x5000ccab0405db00), slot(9)
[ 31.114123] scsi 1:0:243:0: enclosure level(0x0000), connector name( C0 )
[ 31.121015] scsi 1:0:243:0: serial_number( 7SHH937G)
[ 31.126588] scsi 1:0:243:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 31.146520] mpt3sas_cm0: detecting: handle(0x00d9), sas_address(0x5000cca26a3181fd), phy(50)
[ 31.154958] mpt3sas_cm0: REPORT_LUNS: handle(0x00d9), retries(0)
[ 31.161089] mpt3sas_cm0: TEST_UNIT_READY: handle(0x00d9), lun(0)
[ 31.167738] scsi 1:0:244:0: Direct-Access HGST HUH721008AL5200 A38F PQ: 0 ANSI: 6
[ 31.176125] scsi 1:0:244:0: SSP: handle(0x00d9), sas_addr(0x5000cca26a3181fd), phy(50), device_name(0x5000cca26a3181ff)
[ 31.186900] scsi 1:0:244:0: enclosure logical id(0x5000ccab0405db00), slot(10)
[ 31.194204] scsi 1:0:244:0: enclosure level(0x0000), connector name( C0 )
[ 31.201096] scsi 1:0:244:0: serial_number( 2TGW71ND)
[ 31.206672] scsi 1:0:244:0: qdepth(254), tagged(1), simple(0), ordered(0), scsi_level(7), cmd_que(1)
[ 31.234333] mpt3sas_cm0: port enable: SUCCESS
[ 31.239493] sd 1:0:2:0: [sdb] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.247353] sd 1:0:2:0: [sdb] 4096-byte physical blocks
[ 31.252608] sd 1:0:3:0: [sdc] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.252628] sd 1:0:4:0: [sdd] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.252629] sd 1:0:4:0: [sdd] 4096-byte physical blocks
[ 31.252640] sd 1:0:5:0: [sde] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.252642] sd 1:0:5:0: [sde] 4096-byte physical blocks
[ 31.252793] sd 1:0:6:0: [sdf] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.252795] sd 1:0:6:0: [sdf] 4096-byte physical blocks
[ 31.252973] sd 1:0:8:0: [sdh] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.252974] sd 1:0:8:0: [sdh] 4096-byte physical blocks
[ 31.253099] sd 1:0:10:0: [sdj] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.253101] sd 1:0:10:0: [sdj] 4096-byte physical blocks
[ 31.253304] sd 1:0:13:0: [sdm] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.253311] sd 1:0:13:0: [sdm] 4096-byte physical blocks
[ 31.253320] sd 1:0:11:0: [sdk] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.253321] sd 1:0:11:0: [sdk] 4096-byte physical blocks
[ 31.253557] sd 1:0:6:0: [sdf] Write Protect is off
[ 31.253559] sd 1:0:6:0: [sdf] Mode Sense: f7 00 10 08
[ 31.253573] sd 1:0:14:0: [sdn] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.253574] sd 1:0:14:0: [sdn] 4096-byte physical blocks
[ 31.253654] sd 1:0:19:0: [sds] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.253659] sd 1:0:19:0: [sds] 4096-byte physical blocks
[ 31.253686] sd 1:0:8:0: [sdh] Write Protect is off
[ 31.253688] sd 1:0:8:0: [sdh] Mode Sense: f7 00 10 08
[ 31.253722] sd 1:0:20:0: [sdt] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.253723] sd 1:0:20:0: [sdt] 4096-byte physical blocks
[ 31.253775] sd 1:0:18:0: [sdr] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.253777] sd 1:0:18:0: [sdr] 4096-byte physical blocks
[ 31.253793] sd 1:0:21:0: [sdu] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.253794] sd 1:0:21:0: [sdu] 4096-byte physical blocks
[ 31.253830] sd 1:0:10:0: [sdj] Write Protect is off
[ 31.253832] sd 1:0:10:0: [sdj] Mode Sense: f7 00 10 08
[ 31.253851] sd 1:0:22:0: [sdv] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.253856] sd 1:0:22:0: [sdv] 4096-byte physical blocks
[ 31.254096] sd 1:0:13:0: [sdm] Write Protect is off
[ 31.254098] sd 1:0:13:0: [sdm] Mode Sense: f7 00 10 08
[ 31.254136] sd 1:0:8:0: [sdh] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.254142] sd 1:0:23:0: [sdw] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.254143] sd 1:0:23:0: [sdw] 4096-byte physical blocks
[ 31.254170] sd 1:0:6:0: [sdf] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.254294] sd 1:0:10:0: [sdj] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.254351] sd 1:0:26:0: [sdz] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.254353] sd 1:0:26:0: [sdz] 4096-byte physical blocks
[ 31.254449] sd 1:0:20:0: [sdt] Write Protect is off
[ 31.254450] sd 1:0:20:0: [sdt] Mode Sense: f7 00 10 08
[ 31.254495] sd 1:0:18:0: [sdr] Write Protect is off
[ 31.254497] sd 1:0:18:0: [sdr] Mode Sense: f7 00 10 08
[ 31.254504] sd 1:0:31:0: [sdae] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.254505] sd 1:0:31:0: [sdae] 4096-byte physical blocks
[ 31.254518] sd 1:0:21:0: [sdu] Write Protect is off
[ 31.254520] sd 1:0:21:0: [sdu] Mode Sense: f7 00 10 08
[ 31.254563] sd 1:0:32:0: [sdaf] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.254564] sd 1:0:32:0: [sdaf] 4096-byte physical blocks
[ 31.254585] sd 1:0:13:0: [sdm] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.254605] sd 1:0:22:0: [sdv] Write Protect is off
[ 31.254606] sd 1:0:22:0: [sdv] Mode Sense: f7 00 10 08
[ 31.254632] sd 1:0:33:0: [sdag] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.254638] sd 1:0:33:0: [sdag] 4096-byte physical blocks
[ 31.254692] sd 1:0:19:0: [sds] Write Protect is off
[ 31.254694] sd 1:0:19:0: [sds] Mode Sense: f7 00 10 08
[ 31.254729] sd 1:0:34:0: [sdah] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.254731] sd 1:0:34:0: [sdah] 4096-byte physical blocks
[ 31.254763] sd 1:0:5:0: [sde] Write Protect is off
[ 31.254765] sd 1:0:5:0: [sde] Mode Sense: f7 00 10 08
[ 31.254898] sd 1:0:25:0: [sdy] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.254900] sd 1:0:25:0: [sdy] 4096-byte physical blocks
[ 31.254929] sd 1:0:20:0: [sdt] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.254978] sd 1:0:18:0: [sdr] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.254996] sd 1:0:21:0: [sdu] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.255086] sd 1:0:37:0: [sdak] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.255087] sd 1:0:37:0: [sdak] 4096-byte physical blocks
[ 31.255096] sd 1:0:22:0: [sdv] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.255105] sd 1:0:38:0: [sdal] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.255107] sd 1:0:38:0: [sdal] 4096-byte physical blocks
[ 31.255157] sd 1:0:19:0: [sds] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.255231] sd 1:0:31:0: [sdae] Write Protect is off
[ 31.255233] sd 1:0:31:0: [sdae] Mode Sense: f7 00 10 08
[ 31.255305] sd 1:0:32:0: [sdaf] Write Protect is off
[ 31.255306] sd 1:0:32:0: [sdaf] Mode Sense: f7 00 10 08
[ 31.255389] sd 1:0:33:0: [sdag] Write Protect is off
[ 31.255391] sd 1:0:33:0: [sdag] Mode Sense: f7 00 10 08
[ 31.255506] sd 1:0:34:0: [sdah] Write Protect is off
[ 31.255508] sd 1:0:34:0: [sdah] Mode Sense: f7 00 10 08
[ 31.255695] sd 1:0:31:0: [sdae] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.255775] sd 1:0:32:0: [sdaf] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.255806] sd 1:0:37:0: [sdak] Write Protect is off
[ 31.255807] sd 1:0:37:0: [sdak] Mode Sense: f7 00 10 08
[ 31.255828] sd 1:0:38:0: [sdal] Write Protect is off
[ 31.255832] sd 1:0:38:0: [sdal] Mode Sense: f7 00 10 08
[ 31.255864] sd 1:0:33:0: [sdag] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.255983] sd 1:0:34:0: [sdah] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.256060] sd 1:0:5:0: [sde] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.256285] sd 1:0:9:0: [sdi] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.256287] sd 1:0:9:0: [sdi] 4096-byte physical blocks
[ 31.256288] sd 1:0:37:0: [sdak] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.256309] sd 1:0:38:0: [sdal] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.256727] sd 1:0:17:0: [sdq] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.256729] sd 1:0:17:0: [sdq] 4096-byte physical blocks
[ 31.256756] sd 1:0:42:0: [sdap] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.256758] sd 1:0:42:0: [sdap] 4096-byte physical blocks
[ 31.257860] sd 1:0:9:0: [sdi] Write Protect is off
[ 31.257862] sd 1:0:9:0: [sdi] Mode Sense: f7 00 10 08
[ 31.257998] sd 1:0:29:0: [sdac] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.258001] sd 1:0:29:0: [sdac] 4096-byte physical blocks
[ 31.258316] sd 1:0:41:0: [sdao] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.258319] sd 1:0:41:0: [sdao] 4096-byte physical blocks
[ 31.258558] sd 1:0:42:0: [sdap] Write Protect is off
[ 31.258560] sd 1:0:42:0: [sdap] Mode Sense: f7 00 10 08
[ 31.258664] sd 1:0:17:0: [sdq] Write Protect is off
[ 31.258666] sd 1:0:17:0: [sdq] Mode Sense: f7 00 10 08
[ 31.258734] sd 1:0:29:0: [sdac] Write Protect is off
[ 31.258735] sd 1:0:29:0: [sdac] Mode Sense: f7 00 10 08
[ 31.259025] sd 1:0:42:0: [sdap] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.259162] sd 1:0:17:0: [sdq] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.259390] sd 1:0:2:0: [sdb] Write Protect is off
[ 31.259392] sd 1:0:2:0: [sdb] Mode Sense: f7 00 10 08
[ 31.259560] sd 1:0:29:0: [sdac] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.259860] sd 1:0:2:0: [sdb] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.260699] sd 1:0:47:0: [sdau] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.260700] sd 1:0:47:0: [sdau] 4096-byte physical blocks
[ 31.260943] sd 1:0:30:0: [sdad] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.260945] sd 1:0:30:0: [sdad] 4096-byte physical blocks
[ 31.261656] sd 1:0:30:0: [sdad] Write Protect is off
[ 31.261657] sd 1:0:30:0: [sdad] Mode Sense: f7 00 10 08
[ 31.263371] sd 1:0:7:0: [sdg] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.263373] sd 1:0:7:0: [sdg] 4096-byte physical blocks
[ 31.263415] sd 1:0:11:0: [sdk] Write Protect is off
[ 31.263416] sd 1:0:11:0: [sdk] Mode Sense: f7 00 10 08
[ 31.263569] sd 1:0:30:0: [sdad] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.264118] sd 1:0:7:0: [sdg] Write Protect is off
[ 31.264120] sd 1:0:7:0: [sdg] Mode Sense: f7 00 10 08
[ 31.264576] sd 1:0:7:0: [sdg] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.264751] sd 1:0:11:0: [sdk] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.264857] sd 1:0:26:0: [sdz] Write Protect is off
[ 31.264858] sd 1:0:26:0: [sdz] Mode Sense: f7 00 10 08
[ 31.265450] sd 1:0:23:0: [sdw] Write Protect is off
[ 31.265452] sd 1:0:23:0: [sdw] Mode Sense: f7 00 10 08
[ 31.265467] sd 1:0:51:0: [sday] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.265468] sd 1:0:51:0: [sday] 4096-byte physical blocks
[ 31.265662] sd 1:0:9:0: [sdi] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.265830] sd 1:0:25:0: [sdy] Write Protect is off
[ 31.265831] sd 1:0:25:0: [sdy] Mode Sense: f7 00 10 08
[ 31.266108] sd 1:0:14:0: [sdn] Write Protect is off
[ 31.266109] sd 1:0:14:0: [sdn] Mode Sense: f7 00 10 08
[ 31.267582] sd 1:0:12:0: [sdl] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.267583] sd 1:0:12:0: [sdl] 4096-byte physical blocks
[ 31.267847] sd 1:0:53:0: [sdba] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.267849] sd 1:0:53:0: [sdba] 4096-byte physical blocks
[ 31.268354] sd 1:0:54:0: [sdbb] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.268355] sd 1:0:54:0: [sdbb] 4096-byte physical blocks
[ 31.268859] sd 1:0:12:0: [sdl] Write Protect is off
[ 31.268861] sd 1:0:12:0: [sdl] Mode Sense: f7 00 10 08
[ 31.268868] sd 1:0:24:0: [sdx] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.268872] sd 1:0:24:0: [sdx] 4096-byte physical blocks
[ 31.269299] sd 1:0:43:0: [sdaq] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.269300] sd 1:0:43:0: [sdaq] 4096-byte physical blocks
[ 31.270089] sd 1:0:43:0: [sdaq] Write Protect is off
[ 31.270091] sd 1:0:43:0: [sdaq] Mode Sense: f7 00 10 08
[ 31.270510] sd 1:0:44:0: [sdar] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.270514] sd 1:0:44:0: [sdar] 4096-byte physical blocks
[ 31.270550] sd 1:0:43:0: [sdaq] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.270562] sd 1:0:53:0: [sdba] Write Protect is off
[ 31.270563] sd 1:0:53:0: [sdba] Mode Sense: f7 00 10 08
[ 31.270919] sd 1:0:41:0: [sdao] Write Protect is off
[ 31.270921] sd 1:0:41:0: [sdao] Mode Sense: f7 00 10 08
[ 31.271084] sd 1:0:35:0: [sdai] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.271086] sd 1:0:35:0: [sdai] 4096-byte physical blocks
[ 31.271181] sd 1:0:36:0: [sdaj] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.271183] sd 1:0:36:0: [sdaj] 4096-byte physical blocks
[ 31.271302] sd 1:0:53:0: [sdba] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.271385] sd 1:0:54:0: [sdbb] Write Protect is off
[ 31.271391] sd 1:0:54:0: [sdbb] Mode Sense: f7 00 10 08
[ 31.271422] sd 1:0:41:0: [sdao] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.271639] sd 1:0:44:0: [sdar] Write Protect is off
[ 31.271641] sd 1:0:44:0: [sdar] Mode Sense: f7 00 10 08
[ 31.271669] sd 1:0:31:0: [sdae] Attached SCSI disk
[ 31.271852] sd 1:0:54:0: [sdbb] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.271968] sd 1:0:57:0: [sdbe] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.271969] sd 1:0:57:0: [sdbe] 4096-byte physical blocks
[ 31.272112] sd 1:0:44:0: [sdar] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.272211] sd 1:0:35:0: [sdai] Write Protect is off
[ 31.272213] sd 1:0:35:0: [sdai] Mode Sense: f7 00 10 08
[ 31.272296] sd 1:0:36:0: [sdaj] Write Protect is off
[ 31.272297] sd 1:0:36:0: [sdaj] Mode Sense: f7 00 10 08
[ 31.272602] sd 1:0:55:0: [sdbc] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.272604] sd 1:0:55:0: [sdbc] 4096-byte physical blocks
[ 31.272774] sd 1:0:35:0: [sdai] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.272867] sd 1:0:36:0: [sdaj] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.272873] sd 1:0:46:0: [sdat] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.272874] sd 1:0:46:0: [sdat] 4096-byte physical blocks
[ 31.273073] sd 1:0:52:0: [sdaz] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.273074] sd 1:0:52:0: [sdaz] 4096-byte physical blocks
[ 31.273145] sd 1:0:45:0: [sdas] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.273146] sd 1:0:45:0: [sdas] 4096-byte physical blocks
[ 31.273373] sd 1:0:6:0: [sdf] Attached SCSI disk
[ 31.273596] sd 1:0:46:0: [sdat] Write Protect is off
[ 31.273598] sd 1:0:46:0: [sdat] Mode Sense: f7 00 10 08
[ 31.274566] sd 1:0:46:0: [sdat] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.275826] sd 1:0:49:0: [sdaw] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.275828] sd 1:0:49:0: [sdaw] 4096-byte physical blocks
[ 31.275829] sd 1:0:18:0: [sdr] Attached SCSI disk
[ 31.277272] sd 1:0:52:0: [sdaz] Write Protect is off
[ 31.277273] sd 1:0:52:0: [sdaz] Mode Sense: f7 00 10 08
[ 31.277305] sd 1:0:45:0: [sdas] Write Protect is off
[ 31.277307] sd 1:0:45:0: [sdas] Mode Sense: f7 00 10 08
[ 31.277314] sd 1:0:47:0: [sdau] Write Protect is off
[ 31.277316] sd 1:0:47:0: [sdau] Mode Sense: f7 00 10 08
[ 31.277549] sd 1:0:59:0: [sdbg] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.277551] sd 1:0:59:0: [sdbg] 4096-byte physical blocks
[ 31.277846] sd 1:0:60:0: [sdbh] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.277848] sd 1:0:60:0: [sdbh] 4096-byte physical blocks
[ 31.277892] sd 1:0:26:0: [sdz] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.278353] sd 1:0:47:0: [sdau] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.281905] sd 1:0:63:0: [sdbj] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.281906] sd 1:0:63:0: [sdbj] 4096-byte physical blocks
[ 31.319115] sd 1:0:99:0: [sdct] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.319117] sd 1:0:99:0: [sdct] 4096-byte physical blocks
[ 31.319925] sd 1:0:102:0: [sdcw] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.319927] sd 1:0:102:0: [sdcw] 4096-byte physical blocks
[ 31.328266] sd 1:0:48:0: [sdav] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.328268] sd 1:0:48:0: [sdav] 4096-byte physical blocks
[ 31.328283] sd 1:0:58:0: [sdbf] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.328285] sd 1:0:58:0: [sdbf] 4096-byte physical blocks
[ 31.328434] sd 1:0:59:0: [sdbg] Write Protect is off
[ 31.328436] sd 1:0:59:0: [sdbg] Mode Sense: f7 00 10 08
[ 31.328456] sd 1:0:104:0: [sdcy] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.328458] sd 1:0:104:0: [sdcy] 4096-byte physical blocks
[ 31.328537] sd 1:0:90:0: [sdck] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.328539] sd 1:0:90:0: [sdck] 4096-byte physical blocks
[ 31.328578] sd 1:0:103:0: [sdcx] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.328581] sd 1:0:103:0: [sdcx] 4096-byte physical blocks
[ 31.328650] sd 1:0:56:0: [sdbd] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.328653] sd 1:0:56:0: [sdbd] 4096-byte physical blocks
[ 31.328709] sd 1:0:27:0: [sdaa] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.328711] sd 1:0:27:0: [sdaa] 4096-byte physical blocks
[ 31.328716] sd 1:0:51:0: [sday] Write Protect is off
[ 31.328718] sd 1:0:51:0: [sday] Mode Sense: f7 00 10 08
[ 31.328755] sd 1:0:24:0: [sdx] Write Protect is off
[ 31.328757] sd 1:0:24:0: [sdx] Mode Sense: f7 00 10 08
[ 31.328769] sd 1:0:25:0: [sdy] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.328803] sd 1:0:28:0: [sdab] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.328804] sd 1:0:28:0: [sdab] 4096-byte physical blocks
[ 31.328819] sd 1:0:4:0: [sdd] Write Protect is off
[ 31.328820] sd 1:0:23:0: [sdw] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.328821] sd 1:0:4:0: [sdd] Mode Sense: f7 00 10 08
[ 31.328830] sd 1:0:55:0: [sdbc] Write Protect is off
[ 31.328831] sd 1:0:55:0: [sdbc] Mode Sense: f7 00 10 08
[ 31.328832] sd 1:0:57:0: [sdbe] Write Protect is off
[ 31.328834] sd 1:0:57:0: [sdbe] Mode Sense: f7 00 10 08
[ 31.328850] sd 1:0:49:0: [sdaw] Write Protect is off
[ 31.328852] sd 1:0:49:0: [sdaw] Mode Sense: f7 00 10 08
[ 31.328915] sd 1:0:94:0: [sdco] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.328917] sd 1:0:94:0: [sdco] 4096-byte physical blocks
[ 31.328936] sd 1:0:70:0: [sdbq] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.328938] sd 1:0:66:0: [sdbm] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.328940] sd 1:0:70:0: [sdbq] 4096-byte physical blocks
[ 31.328942] sd 1:0:66:0: [sdbm] 4096-byte physical blocks
[ 31.328951] sd 1:0:111:0: [sddf] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.328953] sd 1:0:111:0: [sddf] 4096-byte physical blocks
[ 31.328954] sd 1:0:95:0: [sdcp] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.328956] sd 1:0:95:0: [sdcp] 4096-byte physical blocks
[ 31.328963] sd 1:0:114:0: [sddi] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.328966] sd 1:0:114:0: [sddi] 4096-byte physical blocks
[ 31.328976] sd 1:0:98:0: [sdcs] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.328977] sd 1:0:98:0: [sdcs] 4096-byte physical blocks
[ 31.328987] sd 1:0:71:0: [sdbr] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.328989] sd 1:0:71:0: [sdbr] 4096-byte physical blocks
[ 31.329005] sd 1:0:50:0: [sdax] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.329008] sd 1:0:117:0: [sddl] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.329011] sd 1:0:50:0: [sdax] 4096-byte physical blocks
[ 31.329013] sd 1:0:117:0: [sddl] 4096-byte physical blocks
[ 31.329024] sd 1:0:82:0: [sdcc] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.329025] sd 1:0:82:0: [sdcc] 4096-byte physical blocks
[ 31.329047] sd 1:0:107:0: [sddb] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.329049] sd 1:0:107:0: [sddb] 4096-byte physical blocks
[ 31.329372] sd 1:0:91:0: [sdcl] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.329374] sd 1:0:91:0: [sdcl] 4096-byte physical blocks
[ 31.329450] sd 1:0:109:0: [sddd] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.329453] sd 1:0:109:0: [sddd] 4096-byte physical blocks
[ 31.329460] sd 1:0:60:0: [sdbh] Write Protect is off
[ 31.329462] sd 1:0:60:0: [sdbh] Mode Sense: f7 00 10 08
[ 31.329467] sd 1:0:78:0: [sdby] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.329468] sd 1:0:78:0: [sdby] 4096-byte physical blocks
[ 31.329578] sd 1:0:29:0: [sdac] Attached SCSI disk
[ 31.329812] sd 1:0:102:0: [sdcw] Write Protect is off
[ 31.329814] sd 1:0:102:0: [sdcw] Mode Sense: f7 00 10 08
[ 31.329919] sd 1:0:63:0: [sdbj] Write Protect is off
[ 31.329921] sd 1:0:63:0: [sdbj] Mode Sense: f7 00 10 08
[ 31.329964] sd 1:0:60:0: [sdbh] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.329991] sd 1:0:56:0: [sdbd] Write Protect is off
[ 31.329992] sd 1:0:56:0: [sdbd] Mode Sense: f7 00 10 08
[ 31.330112] sd 1:0:48:0: [sdav] Write Protect is off
[ 31.330113] sd 1:0:48:0: [sdav] Mode Sense: f7 00 10 08
[ 31.330184] sd 1:0:103:0: [sdcx] Write Protect is off
[ 31.330186] sd 1:0:103:0: [sdcx] Mode Sense: f7 00 10 08
[ 31.330217] sd 1:0:111:0: [sddf] Write Protect is off
[ 31.330220] sd 1:0:111:0: [sddf] Mode Sense: f7 00 10 08
[ 31.330242] sd 1:0:58:0: [sdbf] Write Protect is off
[ 31.330244] sd 1:0:58:0: [sdbf] Mode Sense: f7 00 10 08
[ 31.330324] sd 1:0:51:0: [sday] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.330354] sd 1:0:82:0: [sdcc] Write Protect is off
[ 31.330356] sd 1:0:82:0: [sdcc] Mode Sense: f7 00 10 08
[ 31.330379] sd 1:0:94:0: [sdco] Write Protect is off
[ 31.330381] sd 1:0:94:0: [sdco] Mode Sense: f7 00 10 08
[ 31.330390] sd 1:0:107:0: [sddb] Write Protect is off
[ 31.330392] sd 1:0:107:0: [sddb] Mode Sense: f7 00 10 08
[ 31.330491] sd 1:0:117:0: [sddl] Write Protect is off
[ 31.330492] sd 1:0:50:0: [sdax] Write Protect is off
[ 31.330494] sd 1:0:117:0: [sddl] Mode Sense: f7 00 10 08
[ 31.330496] sd 1:0:50:0: [sdax] Mode Sense: f7 00 10 08
[ 31.330504] sd 1:0:70:0: [sdbq] Write Protect is off
[ 31.330505] sd 1:0:70:0: [sdbq] Mode Sense: f7 00 10 08
[ 31.330530] sd 1:0:90:0: [sdck] Write Protect is off
[ 31.330531] sd 1:0:112:0: [sddg] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.330532] sd 1:0:90:0: [sdck] Mode Sense: f7 00 10 08
[ 31.330533] sd 1:0:112:0: [sddg] 4096-byte physical blocks
[ 31.330578] sd 1:0:119:0: [sddn] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.330579] sd 1:0:119:0: [sddn] 4096-byte physical blocks
[ 31.330603] sd 1:0:91:0: [sdcl] Write Protect is off
[ 31.330618] sd 1:0:91:0: [sdcl] Mode Sense: f7 00 10 08
[ 31.330686] sd 1:0:69:0: [sdbp] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.330689] sd 1:0:69:0: [sdbp] 4096-byte physical blocks
[ 31.330837] sd 1:0:102:0: [sdcw] Write cache: enabled, read cache: enabled, supports DPO and FUA
[ 31.330894] sd 1:0:79:0: [sdbz] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB)
[ 31.330895] sd 1:0:79:0: [sdbz] 4096-byte physical blocks
[ 31.330920] sd 1:0:71:0: [sdbr] Write Protect is off
[ 31.330921] sd 1:0:71:0: [sdbr] Mode Sense: f7 00 10 08
[ 31.331064] sd
1:0:63:0: [sdbj] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.331090] sd 1:0:109:0: [sddd] Write Protect is off [ 31.331097] sd 1:0:109:0: [sddd] Mode Sense: f7 00 10 08 [ 31.331167] sd 1:0:56:0: [sdbd] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.331174] sd 1:0:111:0: [sddf] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.331181] sd 1:0:66:0: [sdbm] Write Protect is off [ 31.331182] sd 1:0:66:0: [sdbm] Mode Sense: f7 00 10 08 [ 31.331196] sd 1:0:90:0: [sdck] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.331217] sd 1:0:94:0: [sdco] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.331281] sd 1:0:103:0: [sdcx] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.331346] sd 1:0:70:0: [sdbq] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.331355] sd 1:0:82:0: [sdcc] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.331401] sd 1:0:117:0: [sddl] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.331401] sd 1:0:50:0: [sdax] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.331452] sd 1:0:48:0: [sdav] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.331489] sd 1:0:119:0: [sddn] Write Protect is off [ 31.331490] sd 1:0:119:0: [sddn] Mode Sense: f7 00 10 08 [ 31.331505] sd 1:0:91:0: [sdcl] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.331576] sd 1:0:107:0: [sddb] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.331717] sd 1:0:58:0: [sdbf] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.331744] sd 1:0:69:0: [sdbp] Write Protect is off [ 31.331745] sd 1:0:69:0: [sdbp] Mode Sense: f7 00 10 08 [ 31.331760] sd 1:0:95:0: [sdcp] Write Protect is off [ 31.331761] sd 1:0:95:0: [sdcp] Mode Sense: f7 00 10 08 [ 31.332100] sd 1:0:79:0: [sdbz] Write Protect is off [ 31.332102] sd 1:0:79:0: [sdbz] Mode Sense: f7 00 10 08 [ 31.332264] sd 1:0:78:0: [sdby] Write Protect is off [ 31.332266] sd 1:0:78:0: [sdby] Mode Sense: f7 00 10 08 [ 31.332573] sd 1:0:79:0: [sdbz] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.332664] sd 1:0:34:0: [sdah] Attached SCSI disk [ 31.332817] sd 1:0:109:0: [sddd] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.332827] sd 1:0:118:0: [sddm] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.332828] sd 1:0:118:0: [sddm] 4096-byte physical blocks [ 31.332988] sd 1:0:121:0: [sddp] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.332990] sd 1:0:121:0: [sddp] 4096-byte physical blocks [ 31.333073] sd 1:0:119:0: [sddn] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.333113] sd 1:0:86:0: [sdcg] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.333115] sd 1:0:86:0: [sdcg] 4096-byte physical blocks [ 31.333157] sd 1:0:71:0: [sdbr] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.333165] sd 1:0:78:0: [sdby] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.333231] sd 1:0:5:0: [sde] Attached SCSI disk [ 31.333386] sd 1:0:66:0: [sdbm] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.333707] sd 1:0:121:0: [sddp] Write Protect is off [ 31.333709] sd 1:0:121:0: [sddp] Mode Sense: f7 00 10 08 [ 31.333869] sd 1:0:57:0: [sdbe] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.334008] sd 1:0:84:0: [sdce] 15628053168 512-byte logical blocks: (8.00 TB/7.27 
TiB) [ 31.334009] sd 1:0:84:0: [sdce] 4096-byte physical blocks [ 31.334041] sd 1:0:27:0: [sdaa] Write Protect is off [ 31.334042] sd 1:0:27:0: [sdaa] Mode Sense: f7 00 10 08 [ 31.334094] sd 1:0:110:0: [sdde] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.334096] sd 1:0:110:0: [sdde] 4096-byte physical blocks [ 31.334169] sd 1:0:118:0: [sddm] Write Protect is off [ 31.334171] sd 1:0:118:0: [sddm] Mode Sense: f7 00 10 08 [ 31.334210] sd 1:0:88:0: [sdci] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.334211] sd 1:0:88:0: [sdci] 4096-byte physical blocks [ 31.334436] sd 1:0:73:0: [sdbt] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.334437] sd 1:0:73:0: [sdbt] 4096-byte physical blocks [ 31.334528] sd 1:0:106:0: [sdda] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.334535] sd 1:0:106:0: [sdda] 4096-byte physical blocks [ 31.334582] sd 1:0:49:0: [sdaw] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.334677] sd 1:0:86:0: [sdcg] Write Protect is off [ 31.334678] sd 1:0:86:0: [sdcg] Mode Sense: f7 00 10 08 [ 31.334825] sd 1:0:121:0: [sddp] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.335113] sd 1:0:88:0: [sdci] Write Protect is off [ 31.335123] sd 1:0:12:0: [sdl] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.335125] sd 1:0:88:0: [sdci] Mode Sense: f7 00 10 08 [ 31.335126] sd 1:0:45:0: [sdas] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.335148] sd 1:0:72:0: [sdbs] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.335149] sd 1:0:72:0: [sdbs] 4096-byte physical blocks [ 31.335348] sd 1:0:27:0: [sdaa] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.335426] sd 1:0:59:0: [sdbg] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.335506] sd 1:0:108:0: [sddc] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.335507] sd 1:0:108:0: [sddc] 4096-byte physical blocks [ 31.335730] sd 1:0:116:0: [sddk] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.335732] sd 1:0:116:0: [sddk] 4096-byte physical blocks [ 31.335771] sd 1:0:42:0: [sdap] Attached SCSI disk [ 31.335927] sd 1:0:67:0: [sdbn] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.335929] sd 1:0:67:0: [sdbn] 4096-byte physical blocks [ 31.335984] sd 1:0:30:0: [sdad] Attached SCSI disk [ 31.335995] sd 1:0:84:0: [sdce] Write Protect is off [ 31.335997] sd 1:0:84:0: [sdce] Mode Sense: f7 00 10 08 [ 31.335998] sd 1:0:106:0: [sdda] Write Protect is off [ 31.336000] sd 1:0:106:0: [sdda] Mode Sense: f7 00 10 08 [ 31.336012] sd 1:0:110:0: [sdde] Write Protect is off [ 31.336013] sd 1:0:110:0: [sdde] Mode Sense: f7 00 10 08 [ 31.336039] sd 1:0:120:0: [sddo] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.336041] sd 1:0:120:0: [sddo] 4096-byte physical blocks [ 31.336053] sd 1:0:114:0: [sddi] Write Protect is off [ 31.336055] sd 1:0:114:0: [sddi] Mode Sense: f7 00 10 08 [ 31.336280] sd 1:0:83:0: [sdcd] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.336282] sd 1:0:83:0: [sdcd] 4096-byte physical blocks [ 31.336562] sd 1:0:55:0: [sdbc] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.336664] sd 1:0:88:0: [sdci] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.336742] sd 1:0:67:0: [sdbn] Write Protect is off [ 31.336744] sd 1:0:67:0: [sdbn] Mode Sense: f7 00 10 08 [ 31.336793] sd 1:0:96:0: [sdcq] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.336795] sd 
1:0:96:0: [sdcq] 4096-byte physical blocks [ 31.336870] sd 1:0:116:0: [sddk] Write Protect is off [ 31.336872] sd 1:0:116:0: [sddk] Mode Sense: f7 00 10 08 [ 31.336899] sd 1:0:86:0: [sdcg] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.337049] sd 1:0:85:0: [sdcf] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.337050] sd 1:0:85:0: [sdcf] 4096-byte physical blocks [ 31.337204] sd 1:0:67:0: [sdbn] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.337235] sd 1:0:21:0: [sdu] Attached SCSI disk [ 31.337283] sd 1:0:108:0: [sddc] Write Protect is off [ 31.337284] sd 1:0:108:0: [sddc] Mode Sense: f7 00 10 08 [ 31.337587] sd 1:0:114:0: [sddi] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.337641] sd 1:0:106:0: [sdda] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.338015] sd 1:0:2:0: [sdb] Attached SCSI disk [ 31.338094] sd 1:0:84:0: [sdce] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.338147] sd 1:0:83:0: [sdcd] Write Protect is off [ 31.338149] sd 1:0:83:0: [sdcd] Mode Sense: f7 00 10 08 [ 31.338262] sd 1:0:85:0: [sdcf] Write Protect is off [ 31.338263] sd 1:0:85:0: [sdcf] Mode Sense: f7 00 10 08 [ 31.338380] sd 1:0:80:0: [sdca] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.338382] sd 1:0:80:0: [sdca] 4096-byte physical blocks [ 31.338394] sd 1:0:24:0: [sdx] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.339023] sd 1:0:120:0: [sddo] Write Protect is off [ 31.339025] sd 1:0:120:0: [sddo] Mode Sense: f7 00 10 08 [ 31.339062] sd 1:0:116:0: [sddk] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.339316] sd 1:0:110:0: [sdde] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.339481] sd 1:0:83:0: [sdcd] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.339694] sd 1:0:104:0: [sdcy] Write Protect is off [ 31.339695] sd 1:0:104:0: [sdcy] Mode Sense: f7 00 10 08 [ 31.339727] sd 1:0:85:0: [sdcf] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.339813] sd 1:0:73:0: [sdbt] Write Protect is off [ 31.339814] sd 1:0:73:0: [sdbt] Mode Sense: f7 00 10 08 [ 31.339963] sd 1:0:115:0: [sddj] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.339964] sd 1:0:115:0: [sddj] 4096-byte physical blocks [ 31.340086] sd 1:0:95:0: [sdcp] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.340157] sd 1:0:97:0: [sdcr] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.340159] sd 1:0:97:0: [sdcr] 4096-byte physical blocks [ 31.340548] sd 1:0:10:0: [sdj] Attached SCSI disk [ 31.340678] sd 1:0:112:0: [sddg] Write Protect is off [ 31.340680] sd 1:0:112:0: [sddg] Mode Sense: f7 00 10 08 [ 31.340716] sd 1:0:96:0: [sdcq] Write Protect is off [ 31.340719] sd 1:0:96:0: [sdcq] Mode Sense: f7 00 10 08 [ 31.340977] sd 1:0:98:0: [sdcs] Write Protect is off [ 31.340979] sd 1:0:98:0: [sdcs] Mode Sense: f7 00 10 08 [ 31.341308] sd 1:0:22:0: [sdv] Attached SCSI disk [ 31.341475] sd 1:0:98:0: [sdcs] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.342032] sd 1:0:74:0: [sdbu] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.342034] sd 1:0:74:0: [sdbu] 4096-byte physical blocks [ 31.342111] sd 1:0:73:0: [sdbt] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.342541] sd 1:0:112:0: [sddg] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.342556] sd 1:0:99:0: [sdct] Write Protect is off [ 31.342558] sd 1:0:99:0: 
[sdct] Mode Sense: f7 00 10 08 [ 31.342592] sd 1:0:118:0: [sddm] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.343012] sd 1:0:124:0: [sddr] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.343014] sd 1:0:124:0: [sddr] 4096-byte physical blocks [ 31.343673] sd 1:0:120:0: [sddo] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.344546] sd 1:0:74:0: [sdbu] Write Protect is off [ 31.344548] sd 1:0:74:0: [sdbu] Mode Sense: f7 00 10 08 [ 31.344810] sd 1:0:53:0: [sdba] Attached SCSI disk [ 31.345294] sd 1:0:105:0: [sdcz] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.345296] sd 1:0:105:0: [sdcz] 4096-byte physical blocks [ 31.345466] sd 1:0:87:0: [sdch] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.345467] sd 1:0:87:0: [sdch] 4096-byte physical blocks [ 31.345694] sd 1:0:80:0: [sdca] Write Protect is off [ 31.345696] sd 1:0:80:0: [sdca] Mode Sense: f7 00 10 08 [ 31.345706] sd 1:0:69:0: [sdbp] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.345731] sd 1:0:108:0: [sddc] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.345821] sd 1:0:9:0: [sdi] Attached SCSI disk [ 31.345855] sd 1:0:17:0: [sdq] Attached SCSI disk [ 31.346350] sd 1:0:64:0: [sdbk] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.346352] sd 1:0:64:0: [sdbk] 4096-byte physical blocks [ 31.346360] sd 1:0:97:0: [sdcr] Write Protect is off [ 31.346362] sd 1:0:97:0: [sdcr] Mode Sense: f7 00 10 08 [ 31.346751] sd 1:0:80:0: [sdca] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.347549] sd 1:0:96:0: [sdcq] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.347697] sd 1:0:115:0: [sddj] Write Protect is off [ 31.347699] sd 1:0:115:0: [sddj] Mode Sense: f7 00 10 08 [ 31.347959] sd 1:0:105:0: [sdcz] Write Protect is off [ 31.347961] sd 1:0:105:0: [sdcz] Mode Sense: f7 00 10 08 [ 31.348027] sd 1:0:99:0: [sdct] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.348617] sd 1:0:75:0: [sdbv] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.348619] sd 1:0:75:0: [sdbv] 4096-byte physical blocks [ 31.349075] sd 1:0:37:0: [sdak] Attached SCSI disk [ 31.349144] sd 1:0:14:0: [sdn] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.349806] sd 1:0:46:0: [sdat] Attached SCSI disk [ 31.350115] sd 1:0:126:0: [sddt] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.350117] sd 1:0:126:0: [sddt] 4096-byte physical blocks [ 31.350432] sd 1:0:75:0: [sdbv] Write Protect is off [ 31.350433] sd 1:0:75:0: [sdbv] Mode Sense: f7 00 10 08 [ 31.350503] sd 1:0:72:0: [sdbs] Write Protect is off [ 31.350504] sd 1:0:72:0: [sdbs] Mode Sense: f7 00 10 08 [ 31.351771] sd 1:0:72:0: [sdbs] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.351909] sd 1:0:127:0: [sddu] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.351911] sd 1:0:127:0: [sddu] 4096-byte physical blocks [ 31.352074] sd 1:0:124:0: [sddr] Write Protect is off [ 31.352075] sd 1:0:124:0: [sddr] Mode Sense: f7 00 10 08 [ 31.352639] sd 1:0:75:0: [sdbv] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.352647] sd 1:0:71:0: [sdbr] Attached SCSI disk [ 31.353090] sd 1:0:104:0: [sdcy] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.353451] sd 1:0:128:0: [sddv] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.353453] sd 1:0:128:0: [sddv] 4096-byte physical blocks [ 31.353505] sd 1:0:82:0: [sdcc] Attached SCSI disk [ 
31.353969] sd 1:0:79:0: [sdbz] Attached SCSI disk [ 31.354962] sd 1:0:16:0: [sdp] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.354964] sd 1:0:16:0: [sdp] 4096-byte physical blocks [ 31.355037] sd 1:0:64:0: [sdbk] Write Protect is off [ 31.355038] sd 1:0:64:0: [sdbk] Mode Sense: f7 00 10 08 [ 31.355175] sd 1:0:38:0: [sdal] Attached SCSI disk [ 31.355278] sd 1:0:125:0: [sdds] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.355280] sd 1:0:125:0: [sdds] 4096-byte physical blocks [ 31.355427] sd 1:0:77:0: [sdbx] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.355435] sd 1:0:77:0: [sdbx] 4096-byte physical blocks [ 31.355679] sd 1:0:97:0: [sdcr] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.355827] sd 1:0:126:0: [sddt] Write Protect is off [ 31.355829] sd 1:0:126:0: [sddt] Mode Sense: f7 00 10 08 [ 31.356458] sd 1:0:130:0: [sddx] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.356461] sd 1:0:130:0: [sddx] 4096-byte physical blocks [ 31.356710] sd 1:0:40:0: [sdan] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.356712] sd 1:0:40:0: [sdan] 4096-byte physical blocks [ 31.357847] sd 1:0:65:0: [sdbl] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.357852] sd 1:0:65:0: [sdbl] 4096-byte physical blocks [ 31.357979] sd 1:0:101:0: [sdcv] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.357989] sd 1:0:101:0: [sdcv] 4096-byte physical blocks [ 31.358131] sd 1:0:74:0: [sdbu] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.358536] sd 1:0:64:0: [sdbk] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.358558] sd 1:0:40:0: [sdan] Write Protect is off [ 31.358559] sd 1:0:40:0: [sdan] Mode Sense: f7 00 10 08 [ 31.358726] sd 1:0:98:0: [sdcs] Attached SCSI disk [ 31.359194] sd 1:0:101:0: [sdcv] Write Protect is off [ 31.359195] sd 1:0:101:0: [sdcv] Mode Sense: f7 00 10 08 [ 31.359415] sd 1:0:40:0: [sdan] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.359441] sd 1:0:115:0: [sddj] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.360224] sd 1:0:125:0: [sdds] Write Protect is off [ 31.360225] sd 1:0:125:0: [sdds] Mode Sense: f7 00 10 08 [ 31.360701] sd 1:0:125:0: [sdds] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.360804] sd 1:0:101:0: [sdcv] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.361072] sd 1:0:111:0: [sddf] Attached SCSI disk [ 31.362743] sd 1:0:70:0: [sdbq] Attached SCSI disk [ 31.362757] sd 1:0:13:0: [sdm] Attached SCSI disk [ 31.363084] sd 1:0:126:0: [sddt] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.363202] sd 1:0:127:0: [sddu] Write Protect is off [ 31.363204] sd 1:0:127:0: [sddu] Mode Sense: f7 00 10 08 [ 31.363983] sd 1:0:16:0: [sdp] Write Protect is off [ 31.363985] sd 1:0:16:0: [sdp] Mode Sense: f7 00 10 08 [ 31.364096] sd 1:0:77:0: [sdbx] Write Protect is off [ 31.364098] sd 1:0:77:0: [sdbx] Mode Sense: f7 00 10 08 [ 31.364224] sd 1:0:50:0: [sdax] Attached SCSI disk [ 31.364563] sd 1:0:87:0: [sdch] Write Protect is off [ 31.364565] sd 1:0:87:0: [sdch] Mode Sense: f7 00 10 08 [ 31.364756] sd 1:0:135:0: [sdec] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.364758] sd 1:0:135:0: [sdec] 4096-byte physical blocks [ 31.364888] sd 1:0:131:0: [sddy] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.364890] sd 1:0:131:0: [sddy] 4096-byte physical blocks [ 31.364930] sd 1:0:130:0: [sddx] Write Protect is off [ 31.364931] 
sd 1:0:130:0: [sddx] Mode Sense: f7 00 10 08 [ 31.365049] sd 1:0:87:0: [sdch] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.365091] sd 1:0:77:0: [sdbx] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.365259] sd 1:0:114:0: [sddi] Attached SCSI disk [ 31.365321] sd 1:0:16:0: [sdp] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.365393] sd 1:0:105:0: [sdcz] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.365608] sd 1:0:131:0: [sddy] Write Protect is off [ 31.365610] sd 1:0:131:0: [sddy] Mode Sense: f7 00 10 08 [ 31.365762] sd 1:0:4:0: [sdd] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.366074] sd 1:0:65:0: [sdbl] Write Protect is off [ 31.366076] sd 1:0:65:0: [sdbl] Mode Sense: f7 00 10 08 [ 31.366234] sd 1:0:68:0: [sdbo] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.366237] sd 1:0:68:0: [sdbo] 4096-byte physical blocks [ 31.366625] sd 1:0:129:0: [sddw] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.366627] sd 1:0:129:0: [sddw] 4096-byte physical blocks [ 31.366637] sd 1:0:131:0: [sddy] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.368114] sd 1:0:127:0: [sddu] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.368291] sd 1:0:65:0: [sdbl] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.368849] sd 1:0:15:0: [sdo] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.368851] sd 1:0:15:0: [sdo] 4096-byte physical blocks [ 31.370291] sd 1:0:14:0: [sdn] Attached SCSI disk [ 31.370723] sd 1:0:130:0: [sddx] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.370787] sd 1:0:132:0: [sddz] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.370789] sd 1:0:132:0: [sddz] 4096-byte physical blocks [ 31.371544] sd 1:0:132:0: [sddz] Write Protect is off [ 31.371546] sd 1:0:132:0: [sddz] Mode Sense: f7 00 10 08 [ 31.371863] sd 1:0:92:0: [sdcm] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.371865] sd 1:0:92:0: [sdcm] 4096-byte physical blocks [ 31.372065] sd 1:0:138:0: [sdef] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.372066] sd 1:0:138:0: [sdef] 4096-byte physical blocks [ 31.372273] sd 1:0:124:0: [sddr] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.372600] sd 1:0:132:0: [sddz] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.372747] sd 1:0:135:0: [sdec] Write Protect is off [ 31.372749] sd 1:0:135:0: [sdec] Mode Sense: f7 00 10 08 [ 31.373529] sd 1:0:41:0: [sdao] Attached SCSI disk [ 31.373541] sd 1:0:11:0: [sdk] Attached SCSI disk [ 31.374688] sd 1:0:136:0: [sded] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.374690] sd 1:0:136:0: [sded] 4096-byte physical blocks [ 31.374821] sd 1:0:33:0: [sdag] Attached SCSI disk [ 31.374907] sd 1:0:139:0: [sdeg] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.374909] sd 1:0:139:0: [sdeg] 4096-byte physical blocks [ 31.375093] sd 1:0:75:0: [sdbv] Attached SCSI disk [ 31.375476] sd 1:0:39:0: [sdam] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.375477] sd 1:0:39:0: [sdam] 4096-byte physical blocks [ 31.375664] sd 1:0:94:0: [sdco] Attached SCSI disk [ 31.375960] sd 1:0:76:0: [sdbw] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.375975] sd 1:0:76:0: [sdbw] 4096-byte physical blocks [ 31.376306] sd 1:0:129:0: [sddw] Write Protect is off [ 31.376313] sd 1:0:129:0: [sddw] Mode Sense: f7 00 10 08 [ 31.376355] sd 1:0:15:0: 
[sdo] Write Protect is off [ 31.376357] sd 1:0:15:0: [sdo] Mode Sense: f7 00 10 08 [ 31.376464] sd 1:0:61:0: [sdbi] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.376466] sd 1:0:61:0: [sdbi] 4096-byte physical blocks [ 31.376602] sd 1:0:100:0: [sdcu] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.376604] sd 1:0:100:0: [sdcu] 4096-byte physical blocks [ 31.376780] sd 1:0:129:0: [sddw] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.377123] sd 1:0:15:0: [sdo] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.377306] sd 1:0:39:0: [sdam] Write Protect is off [ 31.377307] sd 1:0:39:0: [sdam] Mode Sense: f7 00 10 08 [ 31.377591] sd 1:0:128:0: [sddv] Write Protect is off [ 31.377593] sd 1:0:128:0: [sddv] Mode Sense: f7 00 10 08 [ 31.377709] sd 1:0:100:0: [sdcu] Write Protect is off [ 31.377711] sd 1:0:100:0: [sdcu] Mode Sense: f7 00 10 08 [ 31.377821] sd 1:0:102:0: [sdcw] Attached SCSI disk [ 31.378073] sd 1:0:128:0: [sddv] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.379159] sd 1:0:72:0: [sdbs] Attached SCSI disk [ 31.379900] sd 1:0:137:0: [sdee] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.379902] sd 1:0:137:0: [sdee] 4096-byte physical blocks [ 31.380548] sd 1:0:28:0: [sdab] Write Protect is off [ 31.380550] sd 1:0:28:0: [sdab] Mode Sense: f7 00 10 08 [ 31.381405] sd 1:0:140:0: [sdeh] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.381414] sd 1:0:140:0: [sdeh] 4096-byte physical blocks [ 31.381769] sd 1:0:135:0: [sdec] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.383578] sd 1:0:24:0: [sdx] Attached SCSI disk [ 31.384119] sd 1:0:99:0: [sdct] Attached SCSI disk [ 31.384375] sd 1:0:107:0: [sddb] Attached SCSI disk [ 31.384437] sd 1:0:85:0: [sdcf] Attached SCSI disk [ 31.384979] sd 1:0:138:0: [sdef] Write Protect is off [ 31.384981] sd 1:0:138:0: [sdef] Mode Sense: f7 00 10 08 [ 31.385077] sd 1:0:136:0: [sded] Write Protect is off [ 31.385078] sd 1:0:136:0: [sded] Mode Sense: f7 00 10 08 [ 31.385454] sd 1:0:138:0: [sdef] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.385682] sd 1:0:12:0: [sdl] Attached SCSI disk [ 31.386005] sd 1:0:83:0: [sdcd] Attached SCSI disk [ 31.387022] sd 1:0:54:0: [sdbb] Attached SCSI disk [ 31.387032] sd 1:0:44:0: [sdar] Attached SCSI disk [ 31.387611] sd 1:0:95:0: [sdcp] Attached SCSI disk [ 31.388033] sd 1:0:43:0: [sdaq] Attached SCSI disk [ 31.388229] sd 1:0:93:0: [sdcn] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.388231] sd 1:0:93:0: [sdcn] 4096-byte physical blocks [ 31.388600] sd 1:0:73:0: [sdbt] Attached SCSI disk [ 31.389173] sd 1:0:122:0: [sddq] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.389175] sd 1:0:122:0: [sddq] 4096-byte physical blocks [ 31.389581] sd 1:0:35:0: [sdai] Attached SCSI disk [ 31.390099] sd 1:0:52:0: [sdaz] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.390197] sd 1:0:76:0: [sdbw] Write Protect is off [ 31.390199] sd 1:0:76:0: [sdbw] Mode Sense: f7 00 10 08 [ 31.390241] sd 1:0:48:0: [sdav] Attached SCSI disk [ 31.390820] sd 1:0:137:0: [sdee] Write Protect is off [ 31.390822] sd 1:0:137:0: [sdee] Mode Sense: f7 00 10 08 [ 31.391298] sd 1:0:137:0: [sdee] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.392274] sd 1:0:106:0: [sdda] Attached SCSI disk [ 31.392435] sd 1:0:142:0: [sdej] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.392437] sd 1:0:142:0: [sdej] 4096-byte physical blocks [ 
31.392865] sd 1:0:141:0: [sdei] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.392866] sd 1:0:141:0: [sdei] 4096-byte physical blocks [ 31.392949] sd 1:0:139:0: [sdeg] Write Protect is off [ 31.392950] sd 1:0:139:0: [sdeg] Mode Sense: f7 00 10 08 [ 31.394364] sd 1:0:56:0: [sdbd] Attached SCSI disk [ 31.395287] sd 1:0:134:0: [sdeb] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.395289] sd 1:0:134:0: [sdeb] 4096-byte physical blocks [ 31.395693] sd 1:0:45:0: [sdas] Attached SCSI disk [ 31.395894] sd 1:0:47:0: [sdau] Attached SCSI disk [ 31.396270] sd 1:0:19:0: [sds] Attached SCSI disk [ 31.398141] sd 1:0:133:0: [sdea] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.398143] sd 1:0:133:0: [sdea] 4096-byte physical blocks [ 31.398151] sd 1:0:117:0: [sddl] Attached SCSI disk [ 31.398549] sd 1:0:140:0: [sdeh] Write Protect is off [ 31.398551] sd 1:0:140:0: [sdeh] Mode Sense: f7 00 10 08 [ 31.398601] sd 1:0:139:0: [sdeg] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.398737] sd 1:0:142:0: [sdej] Write Protect is off [ 31.398738] sd 1:0:142:0: [sdej] Mode Sense: f7 00 10 08 [ 31.399026] sd 1:0:140:0: [sdeh] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.399049] sd 1:0:104:0: [sdcy] Attached SCSI disk [ 31.399680] sd 1:0:26:0: [sdz] Attached SCSI disk [ 31.399700] sd 1:0:109:0: [sddd] Attached SCSI disk [ 31.400122] sd 1:0:134:0: [sdeb] Write Protect is off [ 31.400123] sd 1:0:134:0: [sdeb] Mode Sense: f7 00 10 08 [ 31.400781] sd 1:0:55:0: [sdbc] Attached SCSI disk [ 31.400785] sd 1:0:143:0: [sdek] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.400787] sd 1:0:143:0: [sdek] 4096-byte physical blocks [ 31.401579] sd 1:0:143:0: [sdek] Write Protect is off [ 31.401580] sd 1:0:143:0: [sdek] Mode Sense: f7 00 10 08 [ 31.402565] sd 1:0:68:0: [sdbo] Write Protect is off [ 31.402567] sd 1:0:68:0: [sdbo] Mode Sense: f7 00 10 08 [ 31.403091] sd 1:0:87:0: [sdch] Attached SCSI disk [ 31.403228] sd 1:0:136:0: [sded] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.403422] sd 1:0:119:0: [sddn] Attached SCSI disk [ 31.403737] sd 1:0:96:0: [sdcq] Attached SCSI disk [ 31.404281] sd 1:0:58:0: [sdbf] Attached SCSI disk [ 31.404343] sd 1:0:59:0: [sdbg] Attached SCSI disk [ 31.404345] sd 1:0:68:0: [sdbo] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.404716] sd 1:0:80:0: [sdca] Attached SCSI disk [ 31.405477] sd 1:0:141:0: [sdei] Write Protect is off [ 31.405479] sd 1:0:141:0: [sdei] Mode Sense: f7 00 10 08 [ 31.405801] sd 1:0:134:0: [sdeb] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.405854] sd 1:0:74:0: [sdbu] Attached SCSI disk [ 31.405895] sd 1:0:89:0: [sdcj] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.405896] sd 1:0:89:0: [sdcj] 4096-byte physical blocks [ 31.405932] sd 1:0:141:0: [sdei] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.406792] sd 1:0:89:0: [sdcj] Write Protect is off [ 31.406794] sd 1:0:89:0: [sdcj] Mode Sense: f7 00 10 08 [ 31.407018] sd 1:0:28:0: [sdab] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.408360] sd 1:0:89:0: [sdcj] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.408978] sd 1:0:126:0: [sddt] Attached SCSI disk [ 31.409127] sd 1:0:125:0: [sdds] Attached SCSI disk [ 31.409454] sd 1:0:49:0: [sdaw] Attached SCSI disk [ 31.409930] sd 1:0:142:0: [sdej] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.410423] sd 1:0:115:0: [sddj] 
Attached SCSI disk [ 31.411610] sd 1:0:25:0: [sdy] Attached SCSI disk [ 31.412304] sd 1:0:116:0: [sddk] Attached SCSI disk [ 31.412471] sd 1:0:110:0: [sdde] Attached SCSI disk [ 31.413860] sd 1:0:113:0: [sddh] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.413862] sd 1:0:113:0: [sddh] 4096-byte physical blocks [ 31.413921] sd 1:0:7:0: [sdg] Attached SCSI disk [ 31.415027] sd 1:0:86:0: [sdcg] Attached SCSI disk [ 31.415927] sd 1:0:60:0: [sdbh] Attached SCSI disk [ 31.418926] sd 1:0:124:0: [sddr] Attached SCSI disk [ 31.418940] sd 1:0:93:0: [sdcn] Write Protect is off [ 31.418942] sd 1:0:93:0: [sdcn] Mode Sense: f7 00 10 08 [ 31.419767] sd 1:0:130:0: [sddx] Attached SCSI disk [ 31.419809] sd 1:0:108:0: [sddc] Attached SCSI disk [ 31.420474] sd 1:0:93:0: [sdcn] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.422160] sd 1:0:127:0: [sddu] Attached SCSI disk [ 31.422775] sd 1:0:129:0: [sddw] Attached SCSI disk [ 31.422788] sd 1:0:36:0: [sdaj] Attached SCSI disk [ 31.422905] sd 1:0:145:0: [sdem] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.422906] sd 1:0:145:0: [sdem] 4096-byte physical blocks [ 31.423164] sd 1:0:120:0: [sddo] Attached SCSI disk [ 31.423643] sd 1:0:23:0: [sdw] Attached SCSI disk [ 31.425946] sd 1:0:32:0: [sdaf] Attached SCSI disk [ 31.425973] sd 1:0:90:0: [sdck] Attached SCSI disk [ 31.426331] sd 1:0:136:0: [sded] Attached SCSI disk [ 31.426811] sd 1:0:105:0: [sdcz] Attached SCSI disk [ 31.426843] sd 1:0:97:0: [sdcr] Attached SCSI disk [ 31.427286] sd 1:0:61:0: [sdbi] Write Protect is off [ 31.427292] sd 1:0:61:0: [sdbi] Mode Sense: f7 00 10 08 [ 31.427356] sd 1:0:143:0: [sdek] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.427722] sd 1:0:39:0: [sdam] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.428516] sd 1:0:100:0: [sdcu] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.430554] sd 1:0:138:0: [sdef] Attached SCSI disk [ 31.430788] sd 1:0:145:0: [sdem] Write Protect is off [ 31.430790] sd 1:0:145:0: [sdem] Mode Sense: f7 00 10 08 [ 31.431092] sd 1:0:121:0: [sddp] Attached SCSI disk [ 31.434480] sd 1:0:84:0: [sdce] Attached SCSI disk [ 31.434925] sd 1:0:57:0: [sdbe] Attached SCSI disk [ 31.435025] sd 1:0:113:0: [sddh] Write Protect is off [ 31.435028] sd 1:0:113:0: [sddh] Mode Sense: f7 00 10 08 [ 31.435048] sd 1:0:64:0: [sdbk] Attached SCSI disk [ 31.435629] sd 1:0:128:0: [sddv] Attached SCSI disk [ 31.435692] sd 1:0:135:0: [sdec] Attached SCSI disk [ 31.436055] sd 1:0:144:0: [sdel] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.436056] sd 1:0:144:0: [sdel] 4096-byte physical blocks [ 31.436439] sd 1:0:133:0: [sdea] Write Protect is off [ 31.436441] sd 1:0:133:0: [sdea] Mode Sense: f7 00 10 08 [ 31.436681] sd 1:0:118:0: [sddm] Attached SCSI disk [ 31.437585] sd 1:0:122:0: [sddq] Write Protect is off [ 31.437587] sd 1:0:122:0: [sddq] Mode Sense: f7 00 10 08 [ 31.438531] sd 1:0:146:0: [sden] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.438533] sd 1:0:146:0: [sden] 4096-byte physical blocks [ 31.438872] sd 1:0:92:0: [sdcm] Write Protect is off [ 31.438874] sd 1:0:92:0: [sdcm] Mode Sense: f7 00 10 08 [ 31.440131] sd 1:0:137:0: [sdee] Attached SCSI disk [ 31.443487] sd 1:0:4:0: [sdd] Attached SCSI disk [ 31.444658] sd 1:0:145:0: [sdem] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.445854] sd 1:0:81:0: [sdcb] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.445855] sd 1:0:81:0: [sdcb] 4096-byte physical 
blocks [ 31.446167] sd 1:0:61:0: [sdbi] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.446452] sd 1:0:139:0: [sdeg] Attached SCSI disk [ 31.446465] sd 1:0:122:0: [sddq] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.447110] sd 1:0:133:0: [sdea] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.447213] sd 1:0:146:0: [sden] Write Protect is off [ 31.447215] sd 1:0:146:0: [sden] Mode Sense: f7 00 10 08 [ 31.447632] sd 1:0:20:0: [sdt] Attached SCSI disk [ 31.452168] sd 1:0:147:0: [sdeo] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.452170] sd 1:0:147:0: [sdeo] 4096-byte physical blocks [ 31.452899] sd 1:0:8:0: [sdh] Attached SCSI disk [ 31.452901] sd 1:0:147:0: [sdeo] Write Protect is off [ 31.452903] sd 1:0:147:0: [sdeo] Mode Sense: f7 00 10 08 [ 31.453369] sd 1:0:147:0: [sdeo] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.454472] sd 1:0:141:0: [sdei] Attached SCSI disk [ 31.455955] sd 1:0:76:0: [sdbw] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.457261] sd 1:0:113:0: [sddh] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.459607] sd 1:0:146:0: [sden] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.465876] sd 1:0:144:0: [sdel] Write Protect is off [ 31.465878] sd 1:0:144:0: [sdel] Mode Sense: f7 00 10 08 [ 31.467035] sd 1:0:148:0: [sdep] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.467036] sd 1:0:148:0: [sdep] 4096-byte physical blocks [ 31.467749] sd 1:0:148:0: [sdep] Write Protect is off [ 31.467751] sd 1:0:148:0: [sdep] Mode Sense: f7 00 10 08 [ 31.468208] sd 1:0:148:0: [sdep] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.474997] sd 1:0:140:0: [sdeh] Attached SCSI disk [ 31.477250] sd 1:0:149:0: [sdeq] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.477252] sd 1:0:149:0: [sdeq] 4096-byte physical blocks [ 31.477263] sd 1:0:93:0: [sdcn] Attached SCSI disk [ 31.477303] sd 1:0:134:0: [sdeb] Attached SCSI disk [ 31.480094] sd 1:0:103:0: [sdcx] Attached SCSI disk [ 31.482543] sd 1:0:91:0: [sdcl] Attached SCSI disk [ 31.484367] sd 1:0:92:0: [sdcm] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.485181] sd 1:0:150:0: [sder] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.485183] sd 1:0:150:0: [sder] 4096-byte physical blocks [ 31.489257] sd 1:0:81:0: [sdcb] Write Protect is off [ 31.489259] sd 1:0:81:0: [sdcb] Mode Sense: f7 00 10 08 [ 31.490095] sd 1:0:149:0: [sdeq] Write Protect is off [ 31.490096] sd 1:0:149:0: [sdeq] Mode Sense: f7 00 10 08 [ 31.490858] sd 1:0:67:0: [sdbn] Attached SCSI disk [ 31.491291] sd 1:0:27:0: [sdaa] Attached SCSI disk [ 31.492160] sd 1:0:88:0: [sdci] Attached SCSI disk [ 31.493430] sd 1:0:28:0: [sdab] Attached SCSI disk [ 31.495996] sd 1:0:147:0: [sdeo] Attached SCSI disk [ 31.497534] sd 1:0:69:0: [sdbp] Attached SCSI disk [ 31.497651] sd 1:0:151:0: [sdes] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.497652] sd 1:0:151:0: [sdes] 4096-byte physical blocks [ 31.498366] sd 1:0:151:0: [sdes] Write Protect is off [ 31.498369] sd 1:0:151:0: [sdes] Mode Sense: f7 00 10 08 [ 31.498609] sd 1:0:78:0: [sdby] Attached SCSI disk [ 31.498663] sd 1:0:51:0: [sday] Attached SCSI disk [ 31.498687] sd 1:0:150:0: [sder] Write Protect is off [ 31.498688] sd 1:0:150:0: [sder] Mode Sense: f7 00 10 08 [ 31.498840] sd 1:0:151:0: [sdes] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.499167] sd 1:0:150:0: 
[sder] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.500318] sd 1:0:149:0: [sdeq] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.503253] sd 1:0:89:0: [sdcj] Attached SCSI disk [ 31.505600] sd 1:0:144:0: [sdel] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.507270] sd 1:0:63:0: [sdbj] Attached SCSI disk [ 31.508035] sd 1:0:133:0: [sdea] Attached SCSI disk [ 31.509931] sd 1:0:65:0: [sdbl] Attached SCSI disk [ 31.511466] sd 1:0:112:0: [sddg] Attached SCSI disk [ 31.511937] sd 1:0:146:0: [sden] Attached SCSI disk [ 31.515360] sd 1:0:39:0: [sdam] Attached SCSI disk [ 31.516709] sd 1:0:152:0: [sdet] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.516711] sd 1:0:152:0: [sdet] 4096-byte physical blocks [ 31.516905] sd 1:0:92:0: [sdcm] Attached SCSI disk [ 31.517416] sd 1:0:152:0: [sdet] Write Protect is off [ 31.517418] sd 1:0:152:0: [sdet] Mode Sense: f7 00 10 08 [ 31.517883] sd 1:0:152:0: [sdet] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.523403] sd 1:0:81:0: [sdcb] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.523598] sd 1:0:16:0: [sdp] Attached SCSI disk [ 31.524561] sd 1:0:77:0: [sdbx] Attached SCSI disk [ 31.524895] sd 1:0:66:0: [sdbm] Attached SCSI disk [ 31.526000] sd 1:0:100:0: [sdcu] Attached SCSI disk [ 31.532011] sd 1:0:148:0: [sdep] Attached SCSI disk [ 31.533355] sd 1:0:122:0: [sddq] Attached SCSI disk [ 31.533396] sd 1:0:145:0: [sdem] Attached SCSI disk [ 31.533652] sd 1:0:142:0: [sdej] Attached SCSI disk [ 31.536070] sd 1:0:153:0: [sdeu] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.536072] sd 1:0:153:0: [sdeu] 4096-byte physical blocks [ 31.536792] sd 1:0:153:0: [sdeu] Write Protect is off [ 31.536794] sd 1:0:153:0: [sdeu] Mode Sense: f7 00 10 08 [ 31.537264] sd 1:0:153:0: [sdeu] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.538383] sd 1:0:52:0: [sdaz] Attached SCSI disk [ 31.540923] sd 1:0:149:0: [sdeq] Attached SCSI disk [ 31.541726] sd 1:0:154:0: [sdev] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.541727] sd 1:0:154:0: [sdev] 4096-byte physical blocks [ 31.541736] sd 1:0:113:0: [sddh] Attached SCSI disk [ 31.541833] sd 1:0:61:0: [sdbi] Attached SCSI disk [ 31.546924] sd 1:0:154:0: [sdev] Write Protect is off [ 31.546927] sd 1:0:154:0: [sdev] Mode Sense: f7 00 10 08 [ 31.547802] sd 1:0:155:0: [sdew] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.547804] sd 1:0:155:0: [sdew] 4096-byte physical blocks [ 31.548779] sd 1:0:131:0: [sddy] Attached SCSI disk [ 31.550506] sd 1:0:132:0: [sddz] Attached SCSI disk [ 31.550631] sd 1:0:154:0: [sdev] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.553812] sd 1:0:81:0: [sdcb] Attached SCSI disk [ 31.556737] sd 1:0:155:0: [sdew] Write Protect is off [ 31.556739] sd 1:0:155:0: [sdew] Mode Sense: f7 00 10 08 [ 31.556772] sd 1:0:68:0: [sdbo] Attached SCSI disk [ 31.563645] sd 1:0:152:0: [sdet] Attached SCSI disk [ 31.569059] sd 1:0:156:0: [sdex] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.569061] sd 1:0:156:0: [sdex] 4096-byte physical blocks [ 31.574729] sd 1:0:158:0: [sdez] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.574732] sd 1:0:158:0: [sdez] 4096-byte physical blocks [ 31.575458] sd 1:0:158:0: [sdez] Write Protect is off [ 31.575460] sd 1:0:158:0: [sdez] Mode Sense: f7 00 10 08 [ 31.575942] sd 1:0:158:0: [sdez] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.577140] sd 
1:0:151:0: [sdes] Attached SCSI disk [ 31.577349] sd 1:0:150:0: [sder] Attached SCSI disk [ 31.579122] sd 1:0:156:0: [sdex] Write Protect is off [ 31.579124] sd 1:0:156:0: [sdex] Mode Sense: f7 00 10 08 [ 31.581150] sd 1:0:153:0: [sdeu] Attached SCSI disk [ 31.582323] sd 1:0:159:0: [sdfa] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.582325] sd 1:0:159:0: [sdfa] 4096-byte physical blocks [ 31.583024] sd 1:0:159:0: [sdfa] Write Protect is off [ 31.583026] sd 1:0:159:0: [sdfa] Mode Sense: f7 00 10 08 [ 31.583469] sd 1:0:159:0: [sdfa] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.585980] sd 1:0:160:0: [sdfb] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.585982] sd 1:0:160:0: [sdfb] 4096-byte physical blocks [ 31.586702] sd 1:0:160:0: [sdfb] Write Protect is off [ 31.586705] sd 1:0:160:0: [sdfb] Mode Sense: f7 00 10 08 [ 31.586801] sd 1:0:157:0: [sdey] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.586803] sd 1:0:157:0: [sdey] 4096-byte physical blocks [ 31.587184] sd 1:0:160:0: [sdfb] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.587379] sd 1:0:154:0: [sdev] Attached SCSI disk [ 31.593130] sd 1:0:157:0: [sdey] Write Protect is off [ 31.593132] sd 1:0:157:0: [sdey] Mode Sense: f7 00 10 08 [ 31.593539] sd 1:0:161:0: [sdfc] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.593541] sd 1:0:161:0: [sdfc] 4096-byte physical blocks [ 31.594287] sd 1:0:161:0: [sdfc] Write Protect is off [ 31.594288] sd 1:0:161:0: [sdfc] Mode Sense: f7 00 10 08 [ 31.594762] sd 1:0:161:0: [sdfc] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.598132] sd 1:0:162:0: [sdfd] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.598133] sd 1:0:162:0: [sdfd] 4096-byte physical blocks [ 31.600843] sd 1:0:163:0: [sdfe] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.600844] sd 1:0:163:0: [sdfe] 4096-byte physical blocks [ 31.605308] sd 1:0:164:0: [sdff] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.605309] sd 1:0:164:0: [sdff] 4096-byte physical blocks [ 31.605831] sd 1:0:162:0: [sdfd] Write Protect is off [ 31.605832] sd 1:0:162:0: [sdfd] Mode Sense: f7 00 10 08 [ 31.606040] sd 1:0:164:0: [sdff] Write Protect is off [ 31.606042] sd 1:0:164:0: [sdff] Mode Sense: f7 00 10 08 [ 31.606519] sd 1:0:164:0: [sdff] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.608864] sd 1:0:155:0: [sdew] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.609682] sd 1:0:156:0: [sdex] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.611626] sd 1:0:163:0: [sdfe] Write Protect is off [ 31.611628] sd 1:0:163:0: [sdfe] Mode Sense: f7 00 10 08 [ 31.611907] sd 1:0:166:0: [sdfh] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.611908] sd 1:0:166:0: [sdfh] 4096-byte physical blocks [ 31.612458] sd 1:0:157:0: [sdey] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.612622] sd 1:0:166:0: [sdfh] Write Protect is off [ 31.612623] sd 1:0:166:0: [sdfh] Mode Sense: f7 00 10 08 [ 31.613089] sd 1:0:166:0: [sdfh] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.613531] sd 1:0:101:0: [sdcv] Attached SCSI disk [ 31.614391] sd 1:0:40:0: [sdan] Attached SCSI disk [ 31.614450] sd 1:0:167:0: [sdfi] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.614451] sd 1:0:167:0: [sdfi] 4096-byte physical blocks [ 31.614575] sd 1:0:163:0: [sdfe] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 
31.614953] sd 1:0:159:0: [sdfa] Attached SCSI disk [ 31.616728] sd 1:0:165:0: [sdfg] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.616730] sd 1:0:165:0: [sdfg] 4096-byte physical blocks [ 31.617641] sd 1:0:169:0: [sdfk] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.617643] sd 1:0:169:0: [sdfk] 4096-byte physical blocks [ 31.617721] sd 1:0:162:0: [sdfd] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.620471] sd 1:0:173:0: [sdfo] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.620472] sd 1:0:173:0: [sdfo] 4096-byte physical blocks [ 31.620986] sd 1:0:175:0: [sdfq] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.620988] sd 1:0:175:0: [sdfq] 4096-byte physical blocks [ 31.621717] sd 1:0:175:0: [sdfq] Write Protect is off [ 31.621719] sd 1:0:175:0: [sdfq] Mode Sense: f7 00 10 08 [ 31.622060] sd 1:0:165:0: [sdfg] Write Protect is off [ 31.622062] sd 1:0:165:0: [sdfg] Mode Sense: f7 00 10 08 [ 31.622178] sd 1:0:175:0: [sdfq] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.622535] sd 1:0:165:0: [sdfg] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.622589] sd 1:0:160:0: [sdfb] Attached SCSI disk [ 31.623098] sd 1:0:177:0: [sdfs] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.623099] sd 1:0:177:0: [sdfs] 4096-byte physical blocks [ 31.624165] sd 1:0:170:0: [sdfl] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.624167] sd 1:0:170:0: [sdfl] 4096-byte physical blocks [ 31.624195] sd 1:0:178:0: [sdft] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.624196] sd 1:0:178:0: [sdft] 4096-byte physical blocks [ 31.624240] sd 1:0:167:0: [sdfi] Write Protect is off [ 31.624241] sd 1:0:167:0: [sdfi] Mode Sense: f7 00 10 08 [ 31.624636] sd 1:0:174:0: [sdfp] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.624637] sd 1:0:174:0: [sdfp] 4096-byte physical blocks [ 31.624704] sd 1:0:167:0: [sdfi] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.625390] sd 1:0:179:0: [sdfu] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.625392] sd 1:0:179:0: [sdfu] 4096-byte physical blocks [ 31.626642] sd 1:0:180:0: [sdfv] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.626644] sd 1:0:180:0: [sdfv] 4096-byte physical blocks [ 31.626743] sd 1:0:169:0: [sdfk] Write Protect is off [ 31.626745] sd 1:0:169:0: [sdfk] Mode Sense: f7 00 10 08 [ 31.627373] sd 1:0:180:0: [sdfv] Write Protect is off [ 31.627376] sd 1:0:180:0: [sdfv] Mode Sense: f7 00 10 08 [ 31.628271] sd 1:0:180:0: [sdfv] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.628669] sd 1:0:176:0: [sdfr] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.628671] sd 1:0:176:0: [sdfr] 4096-byte physical blocks [ 31.628763] sd 1:0:181:0: [sdfw] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.628764] sd 1:0:181:0: [sdfw] 4096-byte physical blocks [ 31.629083] sd 1:0:172:0: [sdfn] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.629085] sd 1:0:172:0: [sdfn] 4096-byte physical blocks [ 31.630233] sd 1:0:15:0: [sdo] Attached SCSI disk [ 31.631173] sd 1:0:173:0: [sdfo] Write Protect is off [ 31.631175] sd 1:0:173:0: [sdfo] Mode Sense: f7 00 10 08 [ 31.631787] sd 1:0:168:0: [sdfj] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.631789] sd 1:0:168:0: [sdfj] 4096-byte physical blocks [ 31.631827] sd 1:0:171:0: [sdfm] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.631828] sd 1:0:171:0: [sdfm] 
4096-byte physical blocks [ 31.632986] sd 1:0:174:0: [sdfp] Write Protect is off [ 31.632987] sd 1:0:174:0: [sdfp] Mode Sense: f7 00 10 08 [ 31.632999] sd 1:0:183:0: [sdfy] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.633001] sd 1:0:183:0: [sdfy] 4096-byte physical blocks [ 31.633464] sd 1:0:174:0: [sdfp] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.633714] sd 1:0:183:0: [sdfy] Write Protect is off [ 31.633716] sd 1:0:183:0: [sdfy] Mode Sense: f7 00 10 08 [ 31.634184] sd 1:0:183:0: [sdfy] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.634476] sd 1:0:179:0: [sdfu] Write Protect is off [ 31.634480] sd 1:0:179:0: [sdfu] Mode Sense: f7 00 10 08 [ 31.635235] sd 1:0:168:0: [sdfj] Write Protect is off [ 31.635237] sd 1:0:168:0: [sdfj] Mode Sense: f7 00 10 08 [ 31.636051] sd 1:0:185:0: [sdfz] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.636053] sd 1:0:185:0: [sdfz] 4096-byte physical blocks [ 31.636443] sd 1:0:177:0: [sdfs] Write Protect is off [ 31.636445] sd 1:0:177:0: [sdfs] Mode Sense: f7 00 10 08 [ 31.636611] sd 1:0:181:0: [sdfw] Write Protect is off [ 31.636614] sd 1:0:181:0: [sdfw] Mode Sense: f7 00 10 08 [ 31.636759] sd 1:0:185:0: [sdfz] Write Protect is off [ 31.636761] sd 1:0:185:0: [sdfz] Mode Sense: f7 00 10 08 [ 31.637091] sd 1:0:177:0: [sdfs] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.637194] sd 1:0:170:0: [sdfl] Write Protect is off [ 31.637196] sd 1:0:170:0: [sdfl] Mode Sense: f7 00 10 08 [ 31.637211] sd 1:0:185:0: [sdfz] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.637304] sd 1:0:186:0: [sdga] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.637306] sd 1:0:186:0: [sdga] 4096-byte physical blocks [ 31.637654] sd 1:0:170:0: [sdfl] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.637934] sd 1:0:178:0: [sdft] Write Protect is off [ 31.637935] sd 1:0:178:0: [sdft] Mode Sense: f7 00 10 08 [ 31.638317] sd 1:0:182:0: [sdfx] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.638319] sd 1:0:182:0: [sdfx] 4096-byte physical blocks [ 31.638354] sd 1:0:169:0: [sdfk] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.638658] sd 1:0:187:0: [sdgb] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.638660] sd 1:0:187:0: [sdgb] 4096-byte physical blocks [ 31.639367] sd 1:0:187:0: [sdgb] Write Protect is off [ 31.639369] sd 1:0:187:0: [sdgb] Mode Sense: f7 00 10 08 [ 31.639602] sd 1:0:188:0: [sdgc] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.639603] sd 1:0:188:0: [sdgc] 4096-byte physical blocks [ 31.639839] sd 1:0:187:0: [sdgb] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.640288] sd 1:0:172:0: [sdfn] Write Protect is off [ 31.640289] sd 1:0:172:0: [sdfn] Mode Sense: f7 00 10 08 [ 31.640568] sd 1:0:189:0: [sdgd] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.640570] sd 1:0:189:0: [sdgd] 4096-byte physical blocks [ 31.640762] sd 1:0:172:0: [sdfn] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.641279] sd 1:0:189:0: [sdgd] Write Protect is off [ 31.641280] sd 1:0:189:0: [sdgd] Mode Sense: f7 00 10 08 [ 31.641316] sd 1:0:190:0: [sdge] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.641318] sd 1:0:190:0: [sdge] 4096-byte physical blocks [ 31.641570] sd 1:0:182:0: [sdfx] Write Protect is off [ 31.641572] sd 1:0:182:0: [sdfx] Mode Sense: f7 00 10 08 [ 31.641758] sd 1:0:189:0: [sdgd] Write cache: enabled, read cache: 
enabled, supports DPO and FUA [ 31.642041] sd 1:0:182:0: [sdfx] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.642297] sd 1:0:171:0: [sdfm] Write Protect is off [ 31.642299] sd 1:0:171:0: [sdfm] Mode Sense: f7 00 10 08 [ 31.642436] sd 1:0:164:0: [sdff] Attached SCSI disk [ 31.642527] sd 1:0:176:0: [sdfr] Write Protect is off [ 31.642529] sd 1:0:176:0: [sdfr] Mode Sense: f7 00 10 08 [ 31.642775] sd 1:0:171:0: [sdfm] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.642799] sd 1:0:191:0: [sdgf] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.642800] sd 1:0:191:0: [sdgf] 4096-byte physical blocks [ 31.643172] sd 1:0:179:0: [sdfu] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.643615] sd 1:0:166:0: [sdfh] Attached SCSI disk [ 31.644213] sd 1:0:173:0: [sdfo] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.644364] sd 1:0:178:0: [sdft] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.645335] sd 1:0:181:0: [sdfw] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.648762] sd 1:0:194:0: [sdgi] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.648764] sd 1:0:194:0: [sdgi] 4096-byte physical blocks [ 31.649970] sd 1:0:186:0: [sdga] Write Protect is off [ 31.649972] sd 1:0:186:0: [sdga] Mode Sense: f7 00 10 08 [ 31.650445] sd 1:0:186:0: [sdga] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.650771] sd 1:0:191:0: [sdgf] Write Protect is off [ 31.650772] sd 1:0:191:0: [sdgf] Mode Sense: f7 00 10 08 [ 31.651249] sd 1:0:191:0: [sdgf] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.651364] sd 1:0:196:0: [sdgk] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.651367] sd 1:0:196:0: [sdgk] 4096-byte physical blocks [ 31.651616] sd 1:0:176:0: [sdfr] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.652092] sd 1:0:196:0: [sdgk] Write Protect is off [ 31.652094] sd 1:0:196:0: [sdgk] Mode Sense: f7 00 10 08 [ 31.652368] sd 1:0:197:0: [sdgl] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.652369] sd 1:0:197:0: [sdgl] 4096-byte physical blocks [ 31.652563] sd 1:0:196:0: [sdgk] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.653100] sd 1:0:197:0: [sdgl] Write Protect is off [ 31.653103] sd 1:0:197:0: [sdgl] Mode Sense: f7 00 10 08 [ 31.653568] sd 1:0:197:0: [sdgl] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.654264] sd 1:0:198:0: [sdgm] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.654265] sd 1:0:175:0: [sdfq] Attached SCSI disk [ 31.654266] sd 1:0:198:0: [sdgm] 4096-byte physical blocks [ 31.654780] sd 1:0:190:0: [sdge] Write Protect is off [ 31.654782] sd 1:0:190:0: [sdge] Mode Sense: f7 00 10 08 [ 31.654977] sd 1:0:198:0: [sdgm] Write Protect is off [ 31.654978] sd 1:0:198:0: [sdgm] Mode Sense: f7 00 10 08 [ 31.655356] sd 1:0:190:0: [sdge] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.655444] sd 1:0:198:0: [sdgm] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.655928] sd 1:0:199:0: [sdgn] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.655930] sd 1:0:199:0: [sdgn] 4096-byte physical blocks [ 31.656656] sd 1:0:199:0: [sdgn] Write Protect is off [ 31.656657] sd 1:0:199:0: [sdgn] Mode Sense: f7 00 10 08 [ 31.657135] sd 1:0:199:0: [sdgn] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.657620] sd 1:0:194:0: [sdgi] Write Protect is off [ 31.657621] sd 
1:0:194:0: [sdgi] Mode Sense: f7 00 10 08 [ 31.658685] sd 1:0:188:0: [sdgc] Write Protect is off [ 31.658686] sd 1:0:188:0: [sdgc] Mode Sense: f7 00 10 08 [ 31.659506] sd 1:0:194:0: [sdgi] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.659904] sd 1:0:201:0: [sdgp] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.659906] sd 1:0:201:0: [sdgp] 4096-byte physical blocks [ 31.661091] sd 1:0:180:0: [sdfv] Attached SCSI disk [ 31.661873] sd 1:0:202:0: [sdgq] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.661875] sd 1:0:202:0: [sdgq] 4096-byte physical blocks [ 31.662599] sd 1:0:202:0: [sdgq] Write Protect is off [ 31.662601] sd 1:0:202:0: [sdgq] Mode Sense: f7 00 10 08 [ 31.663063] sd 1:0:202:0: [sdgq] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.663449] sd 1:0:182:0: [sdfx] Attached SCSI disk [ 31.663610] sd 1:0:203:0: [sdgr] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.663612] sd 1:0:203:0: [sdgr] 4096-byte physical blocks [ 31.664319] sd 1:0:203:0: [sdgr] Write Protect is off [ 31.664320] sd 1:0:203:0: [sdgr] Mode Sense: f7 00 10 08 [ 31.664390] sd 1:0:191:0: [sdgf] Attached SCSI disk [ 31.664788] sd 1:0:203:0: [sdgr] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.665250] sd 1:0:196:0: [sdgk] Attached SCSI disk [ 31.666033] sd 1:0:172:0: [sdfn] Attached SCSI disk [ 31.666333] sd 1:0:197:0: [sdgl] Attached SCSI disk [ 31.667641] sd 1:0:200:0: [sdgo] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.667643] sd 1:0:200:0: [sdgo] 4096-byte physical blocks [ 31.668320] sd 1:0:179:0: [sdfu] Attached SCSI disk [ 31.668361] sd 1:0:200:0: [sdgo] Write Protect is off [ 31.668363] sd 1:0:200:0: [sdgo] Mode Sense: f7 00 10 08 [ 31.668591] sd 1:0:157:0: [sdey] Attached SCSI disk [ 31.668828] sd 1:0:200:0: [sdgo] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.669704] sd 1:0:206:0: [sdgu] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.669706] sd 1:0:206:0: [sdgu] 4096-byte physical blocks [ 31.670125] sd 1:0:170:0: [sdfl] Attached SCSI disk [ 31.670471] sd 1:0:168:0: [sdfj] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.671632] sd 1:0:201:0: [sdgp] Write Protect is off [ 31.671634] sd 1:0:201:0: [sdgp] Mode Sense: f7 00 10 08 [ 31.672092] sd 1:0:201:0: [sdgp] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.672351] sd 1:0:188:0: [sdgc] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.674242] sd 1:0:165:0: [sdfg] Attached SCSI disk [ 31.674254] sd 1:0:163:0: [sdfe] Attached SCSI disk [ 31.676884] sd 1:0:161:0: [sdfc] Attached SCSI disk [ 31.676922] sd 1:0:208:0: [sdgw] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.676924] sd 1:0:208:0: [sdgw] 4096-byte physical blocks [ 31.677021] sd 1:0:186:0: [sdga] Attached SCSI disk [ 31.677621] sd 1:0:208:0: [sdgw] Write Protect is off [ 31.677622] sd 1:0:208:0: [sdgw] Mode Sense: f7 00 10 08 [ 31.677744] sd 1:0:202:0: [sdgq] Attached SCSI disk [ 31.678065] sd 1:0:208:0: [sdgw] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.678292] sd 1:0:185:0: [sdfz] Attached SCSI disk [ 31.678640] sd 1:0:183:0: [sdfy] Attached SCSI disk [ 31.678758] sd 1:0:203:0: [sdgr] Attached SCSI disk [ 31.679754] sd 1:0:209:0: [sdgx] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.679756] sd 1:0:209:0: [sdgx] 4096-byte physical blocks [ 31.679913] sd 1:0:173:0: [sdfo] Attached SCSI disk [ 31.680445] sd 1:0:189:0: [sdgd] Attached 
SCSI disk [ 31.680464] sd 1:0:209:0: [sdgx] Write Protect is off [ 31.680465] sd 1:0:209:0: [sdgx] Mode Sense: f7 00 10 08 [ 31.681840] sd 1:0:181:0: [sdfw] Attached SCSI disk [ 31.682232] sd 1:0:200:0: [sdgo] Attached SCSI disk [ 31.682533] sd 1:0:210:0: [sdgy] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.682534] sd 1:0:210:0: [sdgy] 4096-byte physical blocks [ 31.682697] sd 1:0:207:0: [sdgv] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.682698] sd 1:0:207:0: [sdgv] 4096-byte physical blocks [ 31.683278] sd 1:0:210:0: [sdgy] Write Protect is off [ 31.683280] sd 1:0:210:0: [sdgy] Mode Sense: f7 00 10 08 [ 31.683389] sd 1:0:169:0: [sdfk] Attached SCSI disk [ 31.683758] sd 1:0:210:0: [sdgy] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.684023] sd 1:0:174:0: [sdfp] Attached SCSI disk [ 31.684805] sd 1:0:211:0: [sdgz] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.684808] sd 1:0:211:0: [sdgz] 4096-byte physical blocks [ 31.685019] sd 1:0:171:0: [sdfm] Attached SCSI disk [ 31.685414] sd 1:0:190:0: [sdge] Attached SCSI disk [ 31.685519] sd 1:0:211:0: [sdgz] Write Protect is off [ 31.685521] sd 1:0:211:0: [sdgz] Mode Sense: f7 00 10 08 [ 31.685972] sd 1:0:211:0: [sdgz] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.687622] sd 1:0:212:0: [sdha] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.687624] sd 1:0:212:0: [sdha] 4096-byte physical blocks [ 31.687625] sd 1:0:193:0: [sdgh] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.687627] sd 1:0:193:0: [sdgh] 4096-byte physical blocks [ 31.687666] sd 1:0:195:0: [sdgj] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.687668] sd 1:0:195:0: [sdgj] 4096-byte physical blocks [ 31.688337] sd 1:0:212:0: [sdha] Write Protect is off [ 31.688340] sd 1:0:212:0: [sdha] Mode Sense: f7 00 10 08 [ 31.688392] sd 1:0:195:0: [sdgj] Write Protect is off [ 31.688393] sd 1:0:195:0: [sdgj] Mode Sense: f7 00 10 08 [ 31.688809] sd 1:0:212:0: [sdha] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.688872] sd 1:0:195:0: [sdgj] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.688892] sd 1:0:206:0: [sdgu] Write Protect is off [ 31.688894] sd 1:0:206:0: [sdgu] Mode Sense: f7 00 10 08 [ 31.688962] sd 1:0:207:0: [sdgv] Write Protect is off [ 31.688963] sd 1:0:207:0: [sdgv] Mode Sense: f7 00 10 08 [ 31.689347] sd 1:0:176:0: [sdfr] Attached SCSI disk [ 31.689715] sd 1:0:213:0: [sdhb] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.689716] sd 1:0:213:0: [sdhb] 4096-byte physical blocks [ 31.690447] sd 1:0:213:0: [sdhb] Write Protect is off [ 31.690449] sd 1:0:213:0: [sdhb] Mode Sense: f7 00 10 08 [ 31.690509] sd 1:0:192:0: [sdgg] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.690511] sd 1:0:192:0: [sdgg] 4096-byte physical blocks [ 31.690920] sd 1:0:213:0: [sdhb] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.691189] sd 1:0:162:0: [sdfd] Attached SCSI disk [ 31.691722] sd 1:0:208:0: [sdgw] Attached SCSI disk [ 31.691898] sd 1:0:214:0: [sdhc] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.691899] sd 1:0:214:0: [sdhc] 4096-byte physical blocks [ 31.692591] sd 1:0:214:0: [sdhc] Write Protect is off [ 31.692593] sd 1:0:214:0: [sdhc] Mode Sense: f7 00 10 08 [ 31.693060] sd 1:0:214:0: [sdhc] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.693666] sd 1:0:206:0: [sdgu] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.695225] 
sd 1:0:215:0: [sdhd] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.695227] sd 1:0:215:0: [sdhd] 4096-byte physical blocks [ 31.695917] sd 1:0:215:0: [sdhd] Write Protect is off [ 31.695919] sd 1:0:215:0: [sdhd] Mode Sense: f7 00 10 08 [ 31.696213] sd 1:0:158:0: [sdez] Attached SCSI disk [ 31.696360] sd 1:0:215:0: [sdhd] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.696600] sd 1:0:198:0: [sdgm] Attached SCSI disk [ 31.702201] sd 1:0:188:0: [sdgc] Attached SCSI disk [ 31.702303] sd 1:0:207:0: [sdgv] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.702375] sd 1:0:209:0: [sdgx] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.703051] sd 1:0:187:0: [sdgb] Attached SCSI disk [ 31.706269] sd 1:0:201:0: [sdgp] Attached SCSI disk [ 31.707388] sd 1:0:218:0: [sdhg] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.707390] sd 1:0:218:0: [sdhg] 4096-byte physical blocks [ 31.708096] sd 1:0:218:0: [sdhg] Write Protect is off [ 31.708097] sd 1:0:218:0: [sdhg] Mode Sense: f7 00 10 08 [ 31.708199] sd 1:0:205:0: [sdgt] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.708201] sd 1:0:205:0: [sdgt] 4096-byte physical blocks [ 31.708286] sd 1:0:195:0: [sdgj] Attached SCSI disk [ 31.708567] sd 1:0:218:0: [sdhg] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.708715] sd 1:0:214:0: [sdhc] Attached SCSI disk [ 31.710282] sd 1:0:215:0: [sdhd] Attached SCSI disk [ 31.711578] sd 1:0:194:0: [sdgi] Attached SCSI disk [ 31.711657] sd 1:0:219:0: [sdhh] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.711658] sd 1:0:219:0: [sdhh] 4096-byte physical blocks [ 31.712364] sd 1:0:219:0: [sdhh] Write Protect is off [ 31.712366] sd 1:0:219:0: [sdhh] Mode Sense: f7 00 10 08 [ 31.712828] sd 1:0:219:0: [sdhh] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.714037] sd 1:0:220:0: [sdhi] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.714038] sd 1:0:220:0: [sdhi] 4096-byte physical blocks [ 31.714755] sd 1:0:220:0: [sdhi] Write Protect is off [ 31.714757] sd 1:0:220:0: [sdhi] Mode Sense: f7 00 10 08 [ 31.715199] sd 1:0:220:0: [sdhi] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.716309] sd 1:0:209:0: [sdgx] Attached SCSI disk [ 31.716949] sd 1:0:221:0: [sdhj] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.716951] sd 1:0:221:0: [sdhj] 4096-byte physical blocks [ 31.717668] sd 1:0:221:0: [sdhj] Write Protect is off [ 31.717669] sd 1:0:221:0: [sdhj] Mode Sense: f7 00 10 08 [ 31.718141] sd 1:0:221:0: [sdhj] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.719137] sd 1:0:204:0: [sdgs] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.719139] sd 1:0:204:0: [sdgs] 4096-byte physical blocks [ 31.720644] sd 1:0:222:0: [sdhk] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.720646] sd 1:0:222:0: [sdhk] 4096-byte physical blocks [ 31.721349] sd 1:0:222:0: [sdhk] Write Protect is off [ 31.721351] sd 1:0:222:0: [sdhk] Mode Sense: f7 00 10 08 [ 31.721821] sd 1:0:222:0: [sdhk] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.721928] sd 1:0:211:0: [sdgz] Attached SCSI disk [ 31.723019] sd 1:0:193:0: [sdgh] Write Protect is off [ 31.723021] sd 1:0:193:0: [sdgh] Mode Sense: f7 00 10 08 [ 31.723198] sd 1:0:192:0: [sdgg] Write Protect is off [ 31.723199] sd 1:0:192:0: [sdgg] Mode Sense: f7 00 10 08 [ 31.723441] sd 1:0:207:0: [sdgv] Attached SCSI disk [ 31.725122] sd 1:0:223:0: [sdhl] 
15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.725124] sd 1:0:223:0: [sdhl] 4096-byte physical blocks [ 31.725320] sd 1:0:199:0: [sdgn] Attached SCSI disk [ 31.725816] sd 1:0:205:0: [sdgt] Write Protect is off [ 31.725818] sd 1:0:205:0: [sdgt] Mode Sense: f7 00 10 08 [ 31.725862] sd 1:0:223:0: [sdhl] Write Protect is off [ 31.725864] sd 1:0:223:0: [sdhl] Mode Sense: f7 00 10 08 [ 31.726354] sd 1:0:223:0: [sdhl] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.728607] sd 1:0:224:0: [sdhm] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.728610] sd 1:0:224:0: [sdhm] 4096-byte physical blocks [ 31.729316] sd 1:0:224:0: [sdhm] Write Protect is off [ 31.729318] sd 1:0:224:0: [sdhm] Mode Sense: f7 00 10 08 [ 31.729785] sd 1:0:224:0: [sdhm] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.730816] sd 1:0:220:0: [sdhi] Attached SCSI disk [ 31.733715] sd 1:0:225:0: [sdhn] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.733716] sd 1:0:225:0: [sdhn] 4096-byte physical blocks [ 31.734096] sd 1:0:205:0: [sdgt] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.734458] sd 1:0:225:0: [sdhn] Write Protect is off [ 31.734460] sd 1:0:225:0: [sdhn] Mode Sense: f7 00 10 08 [ 31.734923] sd 1:0:225:0: [sdhn] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.735305] sd 1:0:210:0: [sdgy] Attached SCSI disk [ 31.735749] sd 1:0:206:0: [sdgu] Attached SCSI disk [ 31.736066] sd 1:0:226:0: [sdho] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.736069] sd 1:0:226:0: [sdho] 4096-byte physical blocks [ 31.736826] sd 1:0:226:0: [sdho] Write Protect is off [ 31.736828] sd 1:0:226:0: [sdho] Mode Sense: f7 00 10 08 [ 31.737288] sd 1:0:226:0: [sdho] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.737746] sd 1:0:222:0: [sdhk] Attached SCSI disk [ 31.739470] sd 1:0:218:0: [sdhg] Attached SCSI disk [ 31.739810] sd 1:0:227:0: [sdhp] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.739811] sd 1:0:227:0: [sdhp] 4096-byte physical blocks [ 31.739909] sd 1:0:204:0: [sdgs] Write Protect is off [ 31.739911] sd 1:0:204:0: [sdgs] Mode Sense: f7 00 10 08 [ 31.740366] sd 1:0:223:0: [sdhl] Attached SCSI disk [ 31.740532] sd 1:0:227:0: [sdhp] Write Protect is off [ 31.740533] sd 1:0:227:0: [sdhp] Mode Sense: f7 00 10 08 [ 31.740978] sd 1:0:227:0: [sdhp] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.742090] sd 1:0:212:0: [sdha] Attached SCSI disk [ 31.742092] sd 1:0:221:0: [sdhj] Attached SCSI disk [ 31.743788] sd 1:0:204:0: [sdgs] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.743932] sd 1:0:224:0: [sdhm] Attached SCSI disk [ 31.749052] sd 1:0:225:0: [sdhn] Attached SCSI disk [ 31.749252] sd 1:0:213:0: [sdhb] Attached SCSI disk [ 31.750510] sd 1:0:192:0: [sdgg] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.750683] sd 1:0:143:0: [sdek] Attached SCSI disk [ 31.751003] sd 1:0:216:0: [sdhe] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.751005] sd 1:0:216:0: [sdhe] 4096-byte physical blocks [ 31.751826] sd 1:0:226:0: [sdho] Attached SCSI disk [ 31.754532] sd 1:0:219:0: [sdhh] Attached SCSI disk [ 31.754892] sd 1:0:227:0: [sdhp] Attached SCSI disk [ 31.755256] sd 1:0:76:0: [sdbw] Attached SCSI disk [ 31.756126] sd 1:0:231:0: [sdht] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.756127] sd 1:0:231:0: [sdht] 4096-byte physical blocks [ 31.756637] sd 1:0:193:0: [sdgh] Write cache: enabled, read 
cache: enabled, supports DPO and FUA [ 31.756837] sd 1:0:231:0: [sdht] Write Protect is off [ 31.756839] sd 1:0:231:0: [sdht] Mode Sense: f7 00 10 08 [ 31.757308] sd 1:0:231:0: [sdht] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.760958] sd 1:0:232:0: [sdhu] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.760960] sd 1:0:232:0: [sdhu] 4096-byte physical blocks [ 31.761687] sd 1:0:232:0: [sdhu] Write Protect is off [ 31.761688] sd 1:0:232:0: [sdhu] Mode Sense: f7 00 10 08 [ 31.762173] sd 1:0:232:0: [sdhu] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.763937] sd 1:0:233:0: [sdhv] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.763939] sd 1:0:233:0: [sdhv] 4096-byte physical blocks [ 31.764160] sd 1:0:230:0: [sdhs] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.764162] sd 1:0:230:0: [sdhs] 4096-byte physical blocks [ 31.764656] sd 1:0:233:0: [sdhv] Write Protect is off [ 31.764658] sd 1:0:233:0: [sdhv] Mode Sense: f7 00 10 08 [ 31.764764] sd 1:0:204:0: [sdgs] Attached SCSI disk [ 31.764894] sd 1:0:230:0: [sdhs] Write Protect is off [ 31.764896] sd 1:0:230:0: [sdhs] Mode Sense: f7 00 10 08 [ 31.765111] sd 1:0:233:0: [sdhv] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.765421] sd 1:0:230:0: [sdhs] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.767089] sd 1:0:234:0: [sdhw] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.767091] sd 1:0:234:0: [sdhw] 4096-byte physical blocks [ 31.767359] sd 1:0:228:0: [sdhq] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.767361] sd 1:0:228:0: [sdhq] 4096-byte physical blocks [ 31.767812] sd 1:0:234:0: [sdhw] Write Protect is off [ 31.767814] sd 1:0:234:0: [sdhw] Mode Sense: f7 00 10 08 [ 31.768847] sd 1:0:234:0: [sdhw] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.768909] sd 1:0:217:0: [sdhf] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.768914] sd 1:0:217:0: [sdhf] 4096-byte physical blocks [ 31.769023] sd 1:0:228:0: [sdhq] Write Protect is off [ 31.769025] sd 1:0:228:0: [sdhq] Mode Sense: f7 00 10 08 [ 31.770648] sd 1:0:193:0: [sdgh] Attached SCSI disk [ 31.770690] sd 1:0:231:0: [sdht] Attached SCSI disk [ 31.774144] sd 1:0:236:0: [sdhy] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.774146] sd 1:0:236:0: [sdhy] 4096-byte physical blocks [ 31.776016] sd 1:0:237:0: [sdhz] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.776018] sd 1:0:237:0: [sdhz] 4096-byte physical blocks [ 31.776762] sd 1:0:237:0: [sdhz] Write Protect is off [ 31.776764] sd 1:0:237:0: [sdhz] Mode Sense: f7 00 10 08 [ 31.777227] sd 1:0:237:0: [sdhz] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.777838] sd 1:0:144:0: [sdel] Attached SCSI disk [ 31.777963] sd 1:0:228:0: [sdhq] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.778636] sd 1:0:230:0: [sdhs] Attached SCSI disk [ 31.782302] sd 1:0:229:0: [sdhr] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.782304] sd 1:0:229:0: [sdhr] 4096-byte physical blocks [ 31.783920] sd 1:0:240:0: [sdic] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.783922] sd 1:0:240:0: [sdic] 4096-byte physical blocks [ 31.784449] sd 1:0:236:0: [sdhy] Write Protect is off [ 31.784450] sd 1:0:236:0: [sdhy] Mode Sense: f7 00 10 08 [ 31.784695] sd 1:0:240:0: [sdic] Write Protect is off [ 31.784696] sd 1:0:240:0: [sdic] Mode Sense: f7 00 10 08 [ 31.784699] sd 1:0:216:0: [sdhe] Write Protect 
is off [ 31.784700] sd 1:0:216:0: [sdhe] Mode Sense: f7 00 10 08 [ 31.784908] sd 1:0:236:0: [sdhy] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.785185] sd 1:0:240:0: [sdic] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.786479] sd 1:0:241:0: [sdid] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.786481] sd 1:0:241:0: [sdid] 4096-byte physical blocks [ 31.787211] sd 1:0:241:0: [sdid] Write Protect is off [ 31.787214] sd 1:0:241:0: [sdid] Mode Sense: f7 00 10 08 [ 31.787691] sd 1:0:241:0: [sdid] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.788313] sd 1:0:235:0: [sdhx] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.788315] sd 1:0:235:0: [sdhx] 4096-byte physical blocks [ 31.788373] sd 1:0:242:0: [sdie] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.788375] sd 1:0:242:0: [sdie] 4096-byte physical blocks [ 31.789143] sd 1:0:242:0: [sdie] Write Protect is off [ 31.789144] sd 1:0:242:0: [sdie] Mode Sense: f7 00 10 08 [ 31.789166] sd 1:0:217:0: [sdhf] Write Protect is off [ 31.789168] sd 1:0:217:0: [sdhf] Mode Sense: f7 00 10 08 [ 31.789611] sd 1:0:242:0: [sdie] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.789860] sd 1:0:243:0: [sdif] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.789862] sd 1:0:243:0: [sdif] 4096-byte physical blocks [ 31.790582] sd 1:0:243:0: [sdif] Write Protect is off [ 31.790584] sd 1:0:243:0: [sdif] Mode Sense: f7 00 10 08 [ 31.790793] sd 1:0:237:0: [sdhz] Attached SCSI disk [ 31.791055] sd 1:0:243:0: [sdif] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.800363] sd 1:0:244:0: [sdig] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.800364] sd 1:0:244:0: [sdig] 4096-byte physical blocks [ 31.801019] sd 1:0:192:0: [sdgg] Attached SCSI disk [ 31.801076] sd 1:0:244:0: [sdig] Write Protect is off [ 31.801078] sd 1:0:244:0: [sdig] Mode Sense: f7 00 10 08 [ 31.801534] sd 1:0:244:0: [sdig] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.802027] sd 1:0:229:0: [sdhr] Write Protect is off [ 31.802029] sd 1:0:229:0: [sdhr] Mode Sense: f7 00 10 08 [ 31.802915] sd 1:0:235:0: [sdhx] Write Protect is off [ 31.802917] sd 1:0:235:0: [sdhx] Mode Sense: f7 00 10 08 [ 31.806075] sd 1:0:235:0: [sdhx] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.812896] sd 1:0:217:0: [sdhf] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.813506] sd 1:0:241:0: [sdid] Attached SCSI disk [ 31.813541] sd 1:0:244:0: [sdig] Attached SCSI disk [ 31.813603] sd 1:0:232:0: [sdhu] Attached SCSI disk [ 31.814205] sd 1:0:242:0: [sdie] Attached SCSI disk [ 31.814940] sd 1:0:240:0: [sdic] Attached SCSI disk [ 31.817604] sd 1:0:229:0: [sdhr] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.817730] sd 1:0:233:0: [sdhv] Attached SCSI disk [ 31.819102] sd 1:0:234:0: [sdhw] Attached SCSI disk [ 31.820073] sd 1:0:216:0: [sdhe] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.822026] sd 1:0:243:0: [sdif] Attached SCSI disk [ 31.827862] sd 1:0:236:0: [sdhy] Attached SCSI disk [ 31.831404] sd 1:0:235:0: [sdhx] Attached SCSI disk [ 31.869136] sd 1:0:238:0: [sdia] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.869138] sd 1:0:238:0: [sdia] 4096-byte physical blocks [ 31.873268] sd 1:0:167:0: [sdfi] Attached SCSI disk [ 31.881364] sd 1:0:239:0: [sdib] 15628053168 512-byte logical blocks: (8.00 TB/7.27 TiB) [ 31.881365] sd 1:0:239:0: [sdib] 
4096-byte physical blocks [ 31.886900] sd 1:0:239:0: [sdib] Write Protect is off [ 31.886901] sd 1:0:239:0: [sdib] Mode Sense: f7 00 10 08 [ 31.891023] sd 1:0:228:0: [sdhq] Attached SCSI disk [ 31.894258] sd 1:0:205:0: [sdgt] Attached SCSI disk [ 31.899945] sd 1:0:168:0: [sdfj] Attached SCSI disk [ 31.909223] sd 1:0:239:0: [sdib] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 31.937424] sd 1:0:238:0: [sdia] Write Protect is off [ 31.937425] sd 1:0:238:0: [sdia] Mode Sense: f7 00 10 08 [ 31.938245] sd 1:0:155:0: [sdew] Attached SCSI disk [ 31.983172] sd 1:0:216:0: [sdhe] Attached SCSI disk [ 32.045746] sd 1:0:238:0: [sdia] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 32.061830] sd 1:0:229:0: [sdhr] Attached SCSI disk [ 32.120234] sd 1:0:156:0: [sdex] Attached SCSI disk [ 32.170339] sd 1:0:178:0: [sdft] Attached SCSI disk [ 32.459480] sd 1:0:217:0: [sdhf] Attached SCSI disk [ 32.538296] sd 1:0:239:0: [sdib] Attached SCSI disk [ 32.625166] sd 1:0:177:0: [sdfs] Attached SCSI disk [ 32.879708] sd 1:0:238:0: [sdia] Attached SCSI disk [ 38.910007] sd 1:0:3:0: [sdc] 4096-byte physical blocks [ 38.915954] sd 1:0:3:0: [sdc] Write Protect is off [ 38.920753] sd 1:0:3:0: [sdc] Mode Sense: f7 00 10 08 [ 38.921199] sd 1:0:3:0: [sdc] Write cache: enabled, read cache: enabled, supports DPO and FUA [ 39.042872] sd 1:0:3:0: [sdc] Attached SCSI disk [ 39.131528] EXT4-fs (sda2): mounted filesystem with ordered data mode. Opts: (null) [ 39.438534] systemd-journald[365]: Received SIGTERM from PID 1 (systemd). [ 39.475859] SELinux: Disabled at runtime. [ 39.480574] SELinux: Unregistering netfilter hooks [ 39.521533] type=1404 audit(1575901064.012:2): selinux=0 auid=4294967295 ses=4294967295 [ 39.552759] ip_tables: (C) 2000-2006 Netfilter Core Team [ 39.558776] systemd[1]: Inserted module 'ip_tables' [ 39.666045] EXT4-fs (sda2): re-mounted. 
Opts: (null) [ 39.684129] systemd-journald[5778]: Received request to flush runtime journal from PID 1 [ 39.774789] piix4_smbus 0000:00:14.0: SMBus Host Controller at 0xb00, revision 0 [ 39.782438] piix4_smbus 0000:00:14.0: Using register 0x2e for SMBus port selection [ 39.807059] ACPI Error: No handler for Region [SYSI] (ffff8913a9e7da68) [IPMI] (20130517/evregion-162) [ 39.820149] ACPI Error: Region IPMI (ID=7) has no handler (20130517/exfldio-305) [ 39.829034] ACPI Error: Method parse/execution failed [\_SB_.PMI0._GHL] (Node ffff8913a9e7a5a0), AE_NOT_EXIST (20130517/psparse-536) [ 39.847923] ACPI Error: Method parse/execution failed [\_SB_.PMI0._PMC] (Node ffff8913a9e7a500), AE_NOT_EXIST (20130517/psparse-536) [ 39.866864] ACPI Exception: AE_NOT_EXIST, Evaluating _PMC (20130517/power_meter-753) [ 39.880616] ipmi message handler version 39.2 [ 39.880745] ccp 0000:02:00.2: 3 command queues available [ 39.880778] ccp 0000:02:00.2: irq 235 for MSI/MSI-X [ 39.880788] ccp 0000:02:00.2: irq 236 for MSI/MSI-X [ 39.880831] ccp 0000:02:00.2: Queue 2 can access 4 LSB regions [ 39.880833] ccp 0000:02:00.2: Queue 3 can access 4 LSB regions [ 39.880835] ccp 0000:02:00.2: Queue 4 can access 4 LSB regions [ 39.880836] ccp 0000:02:00.2: Queue 0 gets LSB 4 [ 39.880837] ccp 0000:02:00.2: Queue 1 gets LSB 5 [ 39.880838] ccp 0000:02:00.2: Queue 2 gets LSB 6 [ 39.881173] ccp 0000:02:00.2: enabled [ 39.881289] ccp 0000:03:00.1: 5 command queues available [ 39.881325] ccp 0000:03:00.1: irq 238 for MSI/MSI-X [ 39.881348] ccp 0000:03:00.1: Queue 0 can access 7 LSB regions [ 39.881349] ccp 0000:03:00.1: Queue 1 can access 7 LSB regions [ 39.881351] ccp 0000:03:00.1: Queue 2 can access 7 LSB regions [ 39.881353] ccp 0000:03:00.1: Queue 3 can access 7 LSB regions [ 39.881354] ccp 0000:03:00.1: Queue 4 can access 7 LSB regions [ 39.881356] ccp 0000:03:00.1: Queue 0 gets LSB 1 [ 39.881357] ccp 0000:03:00.1: Queue 1 gets LSB 2 [ 39.881358] ccp 0000:03:00.1: Queue 2 gets LSB 3 [ 39.881359] ccp 0000:03:00.1: Queue 3 gets LSB 4 [ 39.881360] ccp 0000:03:00.1: Queue 4 gets LSB 5 [ 39.881791] ccp 0000:03:00.1: enabled [ 39.881985] ccp 0000:41:00.2: 3 command queues available [ 39.882025] ccp 0000:41:00.2: irq 240 for MSI/MSI-X [ 39.882041] ccp 0000:41:00.2: irq 241 for MSI/MSI-X [ 39.882090] ccp 0000:41:00.2: Queue 2 can access 4 LSB regions [ 39.882092] ccp 0000:41:00.2: Queue 3 can access 4 LSB regions [ 39.882094] ccp 0000:41:00.2: Queue 4 can access 4 LSB regions [ 39.882096] ccp 0000:41:00.2: Queue 0 gets LSB 4 [ 39.882097] ccp 0000:41:00.2: Queue 1 gets LSB 5 [ 39.882099] ccp 0000:41:00.2: Queue 2 gets LSB 6 [ 39.882415] ccp 0000:41:00.2: enabled [ 39.882557] ccp 0000:42:00.1: 5 command queues available [ 39.882596] ccp 0000:42:00.1: irq 243 for MSI/MSI-X [ 39.882622] ccp 0000:42:00.1: Queue 0 can access 7 LSB regions [ 39.882624] ccp 0000:42:00.1: Queue 1 can access 7 LSB regions [ 39.882627] ccp 0000:42:00.1: Queue 2 can access 7 LSB regions [ 39.882629] ccp 0000:42:00.1: Queue 3 can access 7 LSB regions [ 39.882632] ccp 0000:42:00.1: Queue 4 can access 7 LSB regions [ 39.882633] ccp 0000:42:00.1: Queue 0 gets LSB 1 [ 39.882634] ccp 0000:42:00.1: Queue 1 gets LSB 2 [ 39.882636] ccp 0000:42:00.1: Queue 2 gets LSB 3 [ 39.882637] ccp 0000:42:00.1: Queue 3 gets LSB 4 [ 39.882638] ccp 0000:42:00.1: Queue 4 gets LSB 5 [ 39.883043] ccp 0000:42:00.1: enabled [ 39.883203] ccp 0000:85:00.2: 3 command queues available [ 39.883240] ccp 0000:85:00.2: irq 245 for MSI/MSI-X [ 39.883250] ccp 0000:85:00.2: irq 246 for MSI/MSI-X [ 
39.883291] ccp 0000:85:00.2: Queue 2 can access 4 LSB regions [ 39.883293] ccp 0000:85:00.2: Queue 3 can access 4 LSB regions [ 39.883295] ccp 0000:85:00.2: Queue 4 can access 4 LSB regions [ 39.883297] ccp 0000:85:00.2: Queue 0 gets LSB 4 [ 39.883298] ccp 0000:85:00.2: Queue 1 gets LSB 5 [ 39.883299] ccp 0000:85:00.2: Queue 2 gets LSB 6 [ 39.883691] ccp 0000:85:00.2: enabled [ 39.883796] ccp 0000:86:00.1: 5 command queues available [ 39.883835] ccp 0000:86:00.1: irq 248 for MSI/MSI-X [ 39.883860] ccp 0000:86:00.1: Queue 0 can access 7 LSB regions [ 39.883862] ccp 0000:86:00.1: Queue 1 can access 7 LSB regions [ 39.883865] ccp 0000:86:00.1: Queue 2 can access 7 LSB regions [ 39.883867] ccp 0000:86:00.1: Queue 3 can access 7 LSB regions [ 39.883869] ccp 0000:86:00.1: Queue 4 can access 7 LSB regions [ 39.883871] ccp 0000:86:00.1: Queue 0 gets LSB 1 [ 39.883872] ccp 0000:86:00.1: Queue 1 gets LSB 2 [ 39.883874] ccp 0000:86:00.1: Queue 2 gets LSB 3 [ 39.883875] ccp 0000:86:00.1: Queue 3 gets LSB 4 [ 39.883876] ccp 0000:86:00.1: Queue 4 gets LSB 5 [ 39.884336] ccp 0000:86:00.1: enabled [ 39.884514] ccp 0000:c2:00.2: 3 command queues available [ 39.884555] ccp 0000:c2:00.2: irq 250 for MSI/MSI-X [ 39.884565] ccp 0000:c2:00.2: irq 251 for MSI/MSI-X [ 39.884609] ccp 0000:c2:00.2: Queue 2 can access 4 LSB regions [ 39.884611] ccp 0000:c2:00.2: Queue 3 can access 4 LSB regions [ 39.884613] ccp 0000:c2:00.2: Queue 4 can access 4 LSB regions [ 39.884614] ccp 0000:c2:00.2: Queue 0 gets LSB 4 [ 39.884616] ccp 0000:c2:00.2: Queue 1 gets LSB 5 [ 39.884617] ccp 0000:c2:00.2: Queue 2 gets LSB 6 [ 39.884927] ccp 0000:c2:00.2: enabled [ 39.885035] ccp 0000:c3:00.1: 5 command queues available [ 39.885067] ccp 0000:c3:00.1: irq 253 for MSI/MSI-X [ 39.885087] ccp 0000:c3:00.1: Queue 0 can access 7 LSB regions [ 39.885089] ccp 0000:c3:00.1: Queue 1 can access 7 LSB regions [ 39.885091] ccp 0000:c3:00.1: Queue 2 can access 7 LSB regions [ 39.885092] ccp 0000:c3:00.1: Queue 3 can access 7 LSB regions [ 39.885094] ccp 0000:c3:00.1: Queue 4 can access 7 LSB regions [ 39.885095] ccp 0000:c3:00.1: Queue 0 gets LSB 1 [ 39.885096] ccp 0000:c3:00.1: Queue 1 gets LSB 2 [ 39.885097] ccp 0000:c3:00.1: Queue 2 gets LSB 3 [ 39.885098] ccp 0000:c3:00.1: Queue 3 gets LSB 4 [ 39.885099] ccp 0000:c3:00.1: Queue 4 gets LSB 5 [ 39.885462] ccp 0000:c3:00.1: enabled [ 39.978579] sd 0:2:0:0: Attached scsi generic sg0 type 0 [ 39.978782] scsi 1:0:0:0: Attached scsi generic sg1 type 13 [ 39.979237] scsi 1:0:1:0: Attached scsi generic sg2 type 13 [ 39.979772] sd 1:0:2:0: Attached scsi generic sg3 type 0 [ 39.980475] sd 1:0:3:0: Attached scsi generic sg4 type 0 [ 39.981364] sd 1:0:4:0: Attached scsi generic sg5 type 0 [ 39.981781] sd 1:0:5:0: Attached scsi generic sg6 type 0 [ 39.982219] sd 1:0:6:0: Attached scsi generic sg7 type 0 [ 39.982748] sd 1:0:7:0: Attached scsi generic sg8 type 0 [ 39.983085] sd 1:0:8:0: Attached scsi generic sg9 type 0 [ 39.983451] sd 1:0:9:0: Attached scsi generic sg10 type 0 [ 39.983767] sd 1:0:10:0: Attached scsi generic sg11 type 0 [ 39.984304] sd 1:0:11:0: Attached scsi generic sg12 type 0 [ 39.984869] sd 1:0:12:0: Attached scsi generic sg13 type 0 [ 39.985272] sd 1:0:13:0: Attached scsi generic sg14 type 0 [ 39.985828] sd 1:0:14:0: Attached scsi generic sg15 type 0 [ 39.986197] sd 1:0:15:0: Attached scsi generic sg16 type 0 [ 39.986459] sd 1:0:16:0: Attached scsi generic sg17 type 0 [ 39.986698] sd 1:0:17:0: Attached scsi generic sg18 type 0 [ 39.987545] sd 1:0:18:0: Attached scsi generic sg19 type 0 [ 
39.988001] sd 1:0:19:0: Attached scsi generic sg20 type 0 [ 39.988360] sd 1:0:20:0: Attached scsi generic sg21 type 0 [ 39.988620] sd 1:0:21:0: Attached scsi generic sg22 type 0 [ 39.988875] sd 1:0:22:0: Attached scsi generic sg23 type 0 [ 39.989676] sd 1:0:23:0: Attached scsi generic sg24 type 0 [ 39.990382] sd 1:0:24:0: Attached scsi generic sg25 type 0 [ 39.991164] sd 1:0:25:0: Attached scsi generic sg26 type 0 [ 39.991680] sd 1:0:26:0: Attached scsi generic sg27 type 0 [ 39.992131] sd 1:0:27:0: Attached scsi generic sg28 type 0 [ 39.992596] sd 1:0:28:0: Attached scsi generic sg29 type 0 [ 39.992887] sd 1:0:29:0: Attached scsi generic sg30 type 0 [ 39.993941] sd 1:0:30:0: Attached scsi generic sg31 type 0 [ 39.994451] sd 1:0:31:0: Attached scsi generic sg32 type 0 [ 39.995036] sd 1:0:32:0: Attached scsi generic sg33 type 0 [ 39.995597] sd 1:0:33:0: Attached scsi generic sg34 type 0 [ 39.995859] sd 1:0:34:0: Attached scsi generic sg35 type 0 [ 39.997725] sd 1:0:35:0: Attached scsi generic sg36 type 0 [ 39.999311] sd 1:0:36:0: Attached scsi generic sg37 type 0 [ 40.000600] sd 1:0:37:0: Attached scsi generic sg38 type 0 [ 40.000952] sd 1:0:38:0: Attached scsi generic sg39 type 0 [ 40.001234] sd 1:0:39:0: Attached scsi generic sg40 type 0 [ 40.001409] sd 1:0:40:0: Attached scsi generic sg41 type 0 [ 40.003122] sd 1:0:41:0: Attached scsi generic sg42 type 0 [ 40.003664] sd 1:0:42:0: Attached scsi generic sg43 type 0 [ 40.004289] sd 1:0:43:0: Attached scsi generic sg44 type 0 [ 40.008943] sd 1:0:44:0: Attached scsi generic sg45 type 0 [ 40.009505] sd 1:0:45:0: Attached scsi generic sg46 type 0 [ 40.009952] sd 1:0:46:0: Attached scsi generic sg47 type 0 [ 40.012087] sd 1:0:47:0: Attached scsi generic sg48 type 0 [ 40.013766] sd 1:0:48:0: Attached scsi generic sg49 type 0 [ 40.014150] sd 1:0:49:0: Attached scsi generic sg50 type 0 [ 40.016746] sd 1:0:50:0: Attached scsi generic sg51 type 0 [ 40.017210] sd 1:0:51:0: Attached scsi generic sg52 type 0 [ 40.017674] sd 1:0:52:0: Attached scsi generic sg53 type 0 [ 40.018220] sd 1:0:53:0: Attached scsi generic sg54 type 0 [ 40.018968] sd 1:0:54:0: Attached scsi generic sg55 type 0 [ 40.019521] sd 1:0:55:0: Attached scsi generic sg56 type 0 [ 40.021127] sd 1:0:56:0: Attached scsi generic sg57 type 0 [ 40.021541] sd 1:0:57:0: Attached scsi generic sg58 type 0 [ 40.023123] sd 1:0:58:0: Attached scsi generic sg59 type 0 [ 40.023550] sd 1:0:59:0: Attached scsi generic sg60 type 0 [ 40.025883] sd 1:0:60:0: Attached scsi generic sg61 type 0 [ 40.026256] sd 1:0:61:0: Attached scsi generic sg62 type 0 [ 40.026485] scsi 1:0:62:0: Attached scsi generic sg63 type 13 [ 40.026678] sd 1:0:63:0: Attached scsi generic sg64 type 0 [ 40.026809] sd 1:0:64:0: Attached scsi generic sg65 type 0 [ 40.026878] sd 1:0:65:0: Attached scsi generic sg66 type 0 [ 40.026919] sd 1:0:66:0: Attached scsi generic sg67 type 0 [ 40.026961] sd 1:0:67:0: Attached scsi generic sg68 type 0 [ 40.027083] sd 1:0:68:0: Attached scsi generic sg69 type 0 [ 40.027433] sd 1:0:69:0: Attached scsi generic sg70 type 0 [ 40.029594] sd 1:0:70:0: Attached scsi generic sg71 type 0 [ 40.029931] sd 1:0:71:0: Attached scsi generic sg72 type 0 [ 40.030284] sd 1:0:72:0: Attached scsi generic sg73 type 0 [ 40.031785] sd 1:0:73:0: Attached scsi generic sg74 type 0 [ 40.033119] sd 1:0:74:0: Attached scsi generic sg75 type 0 [ 40.035467] sd 1:0:75:0: Attached scsi generic sg76 type 0 [ 40.036846] sd 1:0:76:0: Attached scsi generic sg77 type 0 [ 40.037137] sd 1:0:77:0: Attached scsi generic sg78 type 0 [ 40.037345] 
sd 1:0:78:0: Attached scsi generic sg79 type 0 [ 40.037592] sd 1:0:79:0: Attached scsi generic sg80 type 0 [ 40.038648] sd 1:0:80:0: Attached scsi generic sg81 type 0 [ 40.039831] sd 1:0:81:0: Attached scsi generic sg82 type 0 [ 40.040675] sd 1:0:82:0: Attached scsi generic sg83 type 0 [ 40.041209] sd 1:0:83:0: Attached scsi generic sg84 type 0 [ 40.041610] sd 1:0:84:0: Attached scsi generic sg85 type 0 [ 40.044140] sd 1:0:85:0: Attached scsi generic sg86 type 0 [ 40.044524] sd 1:0:86:0: Attached scsi generic sg87 type 0 [ 40.046308] sd 1:0:87:0: Attached scsi generic sg88 type 0 [ 40.046660] sd 1:0:88:0: Attached scsi generic sg89 type 0 [ 40.047483] sd 1:0:89:0: Attached scsi generic sg90 type 0 [ 40.048100] sd 1:0:90:0: Attached scsi generic sg91 type 0 [ 40.048653] sd 1:0:91:0: Attached scsi generic sg92 type 0 [ 40.049326] sd 1:0:92:0: Attached scsi generic sg93 type 0 [ 40.050057] sd 1:0:93:0: Attached scsi generic sg94 type 0 [ 40.050610] sd 1:0:94:0: Attached scsi generic sg95 type 0 [ 40.051299] sd 1:0:95:0: Attached scsi generic sg96 type 0 [ 40.052011] sd 1:0:96:0: Attached scsi generic sg97 type 0 [ 40.052751] sd 1:0:97:0: Attached scsi generic sg98 type 0 [ 40.053355] sd 1:0:98:0: Attached scsi generic sg99 type 0 [ 40.054131] sd 1:0:99:0: Attached scsi generic sg100 type 0 [ 40.056040] sd 1:0:100:0: Attached scsi generic sg101 type 0 [ 40.058756] sd 1:0:101:0: Attached scsi generic sg102 type 0 [ 40.063950] sd 1:0:102:0: Attached scsi generic sg103 type 0 [ 40.064363] sd 1:0:103:0: Attached scsi generic sg104 type 0 [ 40.064663] sd 1:0:104:0: Attached scsi generic sg105 type 0 [ 40.066695] sd 1:0:105:0: Attached scsi generic sg106 type 0 [ 40.067289] sd 1:0:106:0: Attached scsi generic sg107 type 0 [ 40.070381] sd 1:0:107:0: Attached scsi generic sg108 type 0 [ 40.071065] sd 1:0:108:0: Attached scsi generic sg109 type 0 [ 40.071658] sd 1:0:109:0: Attached scsi generic sg110 type 0 [ 40.072264] sd 1:0:110:0: Attached scsi generic sg111 type 0 [ 41.016318] sd 1:0:111:0: Attached scsi generic sg112 type 0 [ 41.018549] ipmi device interface [ 41.030678] sd 1:0:112:0: Attached scsi generic sg113 type 0 [ 41.038979] sd 1:0:113:0: Attached scsi generic sg114 type 0 [ 41.041079] IPMI System Interface driver [ 41.041125] ipmi_si dmi-ipmi-si.0: ipmi_platform: probing via SMBIOS [ 41.041128] ipmi_si: SMBIOS: io 0xca8 regsize 1 spacing 4 irq 10 [ 41.041129] ipmi_si: Adding SMBIOS-specified kcs state machine [ 41.041177] ipmi_si IPI0001:00: ipmi_platform: probing via ACPI [ 41.041202] ipmi_si IPI0001:00: [io 0x0ca8] regsize 1 spacing 4 irq 10 [ 41.041204] ipmi_si dmi-ipmi-si.0: Removing SMBIOS-specified kcs state machine in favor of ACPI [ 41.041204] ipmi_si: Adding ACPI-specified kcs state machine [ 41.041564] ipmi_si: Trying ACPI-specified kcs state machine at i/o address 0xca8, slave address 0x20, irq 10 [ 41.066626] ipmi_si IPI0001:00: The BMC does not support setting the recv irq bit, compensating, but the BMC needs to be fixed. 
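[annotation] The sd probe lines above report each HGST drive as 15628053168 512-byte logical blocks on 4096-byte physical blocks, i.e. 512e advanced-format disks. A minimal Python check of the "(8.00 TB/7.27 TiB)" figure the kernel prints (variable names here are illustrative, not kernel code):

    # Reproduce the sd capacity line: 15628053168 logical blocks of 512 bytes.
    LOGICAL_BLOCK = 512
    blocks = 15628053168
    size_bytes = blocks * LOGICAL_BLOCK
    print(size_bytes)                    # 8001563222016
    print(f"{size_bytes / 10**12:.2f}")  # 8.00   -> decimal "TB"
    print(f"{size_bytes / 2**40:.4f}")   # 7.2774 -> kernel truncates to "7.27 TiB"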
[ 41.074679] ipmi_si IPI0001:00: Using irq 10 [ 41.100010] ipmi_si IPI0001:00: Found new BMC (man_id: 0x0002a2, prod_id: 0x0100, dev_id: 0x20) [ 41.130277] sd 1:0:114:0: Attached scsi generic sg115 type 0 [ 41.134196] device-mapper: uevent: version 1.0.3 [ 41.138049] device-mapper: ioctl: 4.37.1-ioctl (2018-04-03) initialised: dm-devel@redhat.com [ 41.153114] sd 1:0:115:0: Attached scsi generic sg116 type 0 [ 41.159138] sd 1:0:116:0: Attached scsi generic sg117 type 0 [ 41.166824] sd 1:0:117:0: Attached scsi generic sg118 type 0 [ 41.173200] sd 1:0:118:0: Attached scsi generic sg119 type 0 [ 41.179262] sd 1:0:119:0: Attached scsi generic sg120 type 0 [ 41.184534] ipmi_si IPI0001:00: IPMI kcs interface initialized [ 41.197001] sd 1:0:120:0: Attached scsi generic sg121 type 0 [ 41.203602] sd 1:0:121:0: Attached scsi generic sg122 type 0 [ 41.210854] sd 1:0:122:0: Attached scsi generic sg123 type 0 [ 41.218846] scsi 1:0:123:0: Attached scsi generic sg124 type 13 [ 41.226222] sd 1:0:124:0: Attached scsi generic sg125 type 0 [ 41.232499] sd 1:0:125:0: Attached scsi generic sg126 type 0 [ 41.238880] sd 1:0:126:0: Attached scsi generic sg127 type 0 [ 41.247689] sd 1:0:127:0: Attached scsi generic sg128 type 0 [ 41.254079] sd 1:0:128:0: Attached scsi generic sg129 type 0 [ 41.260439] sd 1:0:129:0: Attached scsi generic sg130 type 0 [ 41.267986] sd 1:0:130:0: Attached scsi generic sg131 type 0 [ 41.274038] sd 1:0:131:0: Attached scsi generic sg132 type 0 [ 41.280543] sd 1:0:132:0: Attached scsi generic sg133 type 0 [ 41.286763] sd 1:0:133:0: Attached scsi generic sg134 type 0 [ 41.293040] sd 1:0:134:0: Attached scsi generic sg135 type 0 [ 41.301028] sd 1:0:135:0: Attached scsi generic sg136 type 0 [ 41.307132] sd 1:0:136:0: Attached scsi generic sg137 type 0 [ 41.314248] sd 1:0:137:0: Attached scsi generic sg138 type 0 [ 41.320309] sd 1:0:138:0: Attached scsi generic sg139 type 0 [ 41.326443] sd 1:0:139:0: Attached scsi generic sg140 type 0 [ 41.332765] sd 1:0:140:0: Attached scsi generic sg141 type 0 [ 41.338768] sd 1:0:141:0: Attached scsi generic sg142 type 0 [ 41.344830] sd 1:0:142:0: Attached scsi generic sg143 type 0 [ 41.351071] sd 1:0:143:0: Attached scsi generic sg144 type 0 [ 41.357222] sd 1:0:144:0: Attached scsi generic sg145 type 0 [ 41.363205] sd 1:0:145:0: Attached scsi generic sg146 type 0 [ 41.369107] sd 1:0:146:0: Attached scsi generic sg147 type 0 [ 41.375532] sd 1:0:147:0: Attached scsi generic sg148 type 0 [ 41.381258] sd 1:0:148:0: Attached scsi generic sg149 type 0 [ 41.387214] sd 1:0:149:0: Attached scsi generic sg150 type 0 [ 41.393498] sd 1:0:150:0: Attached scsi generic sg151 type 0 [ 41.399730] sd 1:0:151:0: Attached scsi generic sg152 type 0 [ 41.405958] sd 1:0:152:0: Attached scsi generic sg153 type 0 [ 41.412226] sd 1:0:153:0: Attached scsi generic sg154 type 0 [ 41.420016] sd 1:0:154:0: Attached scsi generic sg155 type 0 [ 41.426076] sd 1:0:155:0: Attached scsi generic sg156 type 0 [ 41.432318] sd 1:0:156:0: Attached scsi generic sg157 type 0 [ 41.439160] sd 1:0:157:0: Attached scsi generic sg158 type 0 [ 41.445400] sd 1:0:158:0: Attached scsi generic sg159 type 0 [ 41.451484] sd 1:0:159:0: Attached scsi generic sg160 type 0 [ 41.458614] sd 1:0:160:0: Attached scsi generic sg161 type 0 [ 41.466218] sd 1:0:161:0: Attached scsi generic sg162 type 0 [ 41.473285] sd 1:0:162:0: Attached scsi generic sg163 type 0 [ 41.479548] sd 1:0:163:0: Attached scsi generic sg164 type 0 [ 41.485777] sd 1:0:164:0: Attached scsi generic sg165 type 0 [ 41.492726] sd 1:0:165:0: Attached 
scsi generic sg166 type 0 [ 41.499066] sd 1:0:166:0: Attached scsi generic sg167 type 0 [ 41.505354] sd 1:0:167:0: Attached scsi generic sg168 type 0 [ 41.511522] sd 1:0:168:0: Attached scsi generic sg169 type 0 [ 41.517710] sd 1:0:169:0: Attached scsi generic sg170 type 0 [ 41.523860] sd 1:0:170:0: Attached scsi generic sg171 type 0 [ 41.530058] sd 1:0:171:0: Attached scsi generic sg172 type 0 [ 41.536124] sd 1:0:172:0: Attached scsi generic sg173 type 0 [ 41.542479] sd 1:0:173:0: Attached scsi generic sg174 type 0 [ 41.549159] sd 1:0:174:0: Attached scsi generic sg175 type 0 [ 41.555247] sd 1:0:175:0: Attached scsi generic sg176 type 0 [ 41.561114] sd 1:0:176:0: Attached scsi generic sg177 type 0 [ 41.567475] sd 1:0:177:0: Attached scsi generic sg178 type 0 [ 41.573641] sd 1:0:178:0: Attached scsi generic sg179 type 0 [ 41.580125] sd 1:0:179:0: Attached scsi generic sg180 type 0 [ 41.586444] sd 1:0:180:0: Attached scsi generic sg181 type 0 [ 41.592620] sd 1:0:181:0: Attached scsi generic sg182 type 0 [ 41.599135] sd 1:0:182:0: Attached scsi generic sg183 type 0 [ 41.605200] sd 1:0:183:0: Attached scsi generic sg184 type 0 [ 41.611259] scsi 1:0:184:0: Attached scsi generic sg185 type 13 [ 41.617786] sd 1:0:185:0: Attached scsi generic sg186 type 0 [ 41.625144] sd 1:0:186:0: Attached scsi generic sg187 type 0 [ 41.631113] sd 1:0:187:0: Attached scsi generic sg188 type 0 [ 41.637306] sd 1:0:188:0: Attached scsi generic sg189 type 0 [ 41.643163] sd 1:0:189:0: Attached scsi generic sg190 type 0 [ 41.649153] sd 1:0:190:0: Attached scsi generic sg191 type 0 [ 41.655794] sd 1:0:191:0: Attached scsi generic sg192 type 0 [ 41.661669] sd 1:0:192:0: Attached scsi generic sg193 type 0 [ 41.667527] sd 1:0:193:0: Attached scsi generic sg194 type 0 [ 41.673356] sd 1:0:194:0: Attached scsi generic sg195 type 0 [ 41.679092] sd 1:0:195:0: Attached scsi generic sg196 type 0 [ 41.684847] sd 1:0:196:0: Attached scsi generic sg197 type 0 [ 41.690598] sd 1:0:197:0: Attached scsi generic sg198 type 0 [ 41.696312] sd 1:0:198:0: Attached scsi generic sg199 type 0 [ 41.702041] sd 1:0:199:0: Attached scsi generic sg200 type 0 [ 41.707748] sd 1:0:200:0: Attached scsi generic sg201 type 0 [ 41.713454] sd 1:0:201:0: Attached scsi generic sg202 type 0 [ 41.719176] sd 1:0:202:0: Attached scsi generic sg203 type 0 [ 41.724884] sd 1:0:203:0: Attached scsi generic sg204 type 0 [ 41.730599] sd 1:0:204:0: Attached scsi generic sg205 type 0 [ 41.736299] sd 1:0:205:0: Attached scsi generic sg206 type 0 [ 41.742022] sd 1:0:206:0: Attached scsi generic sg207 type 0 [ 41.747725] sd 1:0:207:0: Attached scsi generic sg208 type 0 [ 41.753444] sd 1:0:208:0: Attached scsi generic sg209 type 0 [ 41.759144] sd 1:0:209:0: Attached scsi generic sg210 type 0 [ 41.764853] sd 1:0:210:0: Attached scsi generic sg211 type 0 [ 41.770549] sd 1:0:211:0: Attached scsi generic sg212 type 0 [ 41.776269] sd 1:0:212:0: Attached scsi generic sg213 type 0 [ 41.781975] sd 1:0:213:0: Attached scsi generic sg214 type 0 [ 41.787692] sd 1:0:214:0: Attached scsi generic sg215 type 0 [ 41.793400] sd 1:0:215:0: Attached scsi generic sg216 type 0 [ 41.799104] sd 1:0:216:0: Attached scsi generic sg217 type 0 [ 41.804818] sd 1:0:217:0: Attached scsi generic sg218 type 0 [ 41.810527] sd 1:0:218:0: Attached scsi generic sg219 type 0 [ 41.816225] sd 1:0:219:0: Attached scsi generic sg220 type 0 [ 41.821930] sd 1:0:220:0: Attached scsi generic sg221 type 0 [ 41.827631] sd 1:0:221:0: Attached scsi generic sg222 type 0 [ 41.833351] sd 1:0:222:0: Attached scsi generic 
sg223 type 0 [ 41.839059] sd 1:0:223:0: Attached scsi generic sg224 type 0 [ 41.844775] sd 1:0:224:0: Attached scsi generic sg225 type 0 [ 41.850480] sd 1:0:225:0: Attached scsi generic sg226 type 0 [ 41.856185] sd 1:0:226:0: Attached scsi generic sg227 type 0 [ 41.861897] sd 1:0:227:0: Attached scsi generic sg228 type 0 [ 41.867613] sd 1:0:228:0: Attached scsi generic sg229 type 0 [ 41.873314] sd 1:0:229:0: Attached scsi generic sg230 type 0 [ 41.879034] sd 1:0:230:0: Attached scsi generic sg231 type 0 [ 41.884735] sd 1:0:231:0: Attached scsi generic sg232 type 0 [ 41.890443] sd 1:0:232:0: Attached scsi generic sg233 type 0 [ 41.896143] sd 1:0:233:0: Attached scsi generic sg234 type 0 [ 41.901849] sd 1:0:234:0: Attached scsi generic sg235 type 0 [ 41.907551] sd 1:0:235:0: Attached scsi generic sg236 type 0 [ 41.913257] sd 1:0:236:0: Attached scsi generic sg237 type 0 [ 41.918971] sd 1:0:237:0: Attached scsi generic sg238 type 0 [ 41.924675] sd 1:0:238:0: Attached scsi generic sg239 type 0 [ 41.930382] sd 1:0:239:0: Attached scsi generic sg240 type 0 [ 41.936084] sd 1:0:240:0: Attached scsi generic sg241 type 0 [ 41.941798] sd 1:0:241:0: Attached scsi generic sg242 type 0 [ 41.947507] sd 1:0:242:0: Attached scsi generic sg243 type 0 [ 41.953204] sd 1:0:243:0: Attached scsi generic sg244 type 0 [ 41.958911] sd 1:0:244:0: Attached scsi generic sg245 type 0 [ 44.574939] device-mapper: multipath service-time: version 0.3.0 loaded [ 49.035616] ses 1:0:0:0: Attached Enclosure device [ 49.047420] ses 1:0:1:0: Attached Enclosure device [ 49.052325] ses 1:0:62:0: Attached Enclosure device [ 49.057324] ses 1:0:123:0: Attached Enclosure device [ 49.062417] ses 1:0:184:0: Attached Enclosure device [ 49.555777] input: PC Speaker as /devices/platform/pcspkr/input/input2 [ 49.668905] cryptd: max_cpu_qlen set to 1000 [ 49.707892] AVX2 version of gcm_enc/dec engaged. [ 49.712536] AES CTR mode by8 optimization enabled [ 49.720922] alg: No test for __gcm-aes-aesni (__driver-gcm-aes-aesni) [ 49.727517] alg: No test for __generic-gcm-aes-aesni (__driver-generic-gcm-aes-aesni) [ 49.819024] kvm: Nested Paging enabled [ 49.827753] MCE: In-kernel MCE decoding enabled. [ 49.836456] AMD64 EDAC driver v3.4.0 [ 49.840068] EDAC amd64: DRAM ECC enabled. [ 49.844102] EDAC amd64: F17h detected (node 0). [ 49.848703] EDAC MC: UMC0 chip selects: [ 49.848707] EDAC amd64: MC: 0: 0MB 1: 0MB [ 49.853424] EDAC amd64: MC: 2: 16383MB 3: 16383MB [ 49.858146] EDAC amd64: MC: 4: 0MB 5: 0MB [ 49.862864] EDAC amd64: MC: 6: 0MB 7: 0MB [ 49.867583] EDAC MC: UMC1 chip selects: [ 49.867586] EDAC amd64: MC: 0: 0MB 1: 0MB [ 49.872292] EDAC amd64: MC: 2: 16383MB 3: 16383MB [ 49.877011] EDAC amd64: MC: 4: 0MB 5: 0MB [ 49.881722] EDAC amd64: MC: 6: 0MB 7: 0MB [ 49.886438] EDAC amd64: using x8 syndromes. [ 49.890635] EDAC amd64: MCT channel count: 2 [ 49.901276] EDAC MC0: Giving out device to 'amd64_edac' 'F17h': DEV 0000:00:18.3 [ 49.908683] EDAC amd64: DRAM ECC enabled. [ 49.912700] EDAC amd64: F17h detected (node 1). [ 49.917284] EDAC MC: UMC0 chip selects: [ 49.917287] EDAC amd64: MC: 0: 0MB 1: 0MB [ 49.921999] EDAC amd64: MC: 2: 16383MB 3: 16383MB [ 49.926714] EDAC amd64: MC: 4: 0MB 5: 0MB [ 49.931431] EDAC amd64: MC: 6: 0MB 7: 0MB [ 49.936147] EDAC MC: UMC1 chip selects: [ 49.936151] EDAC amd64: MC: 0: 0MB 1: 0MB [ 49.940867] EDAC amd64: MC: 2: 16383MB 3: 16383MB [ 49.945581] EDAC amd64: MC: 4: 0MB 5: 0MB [ 49.950294] EDAC amd64: MC: 6: 0MB 7: 0MB [ 49.955007] EDAC amd64: using x8 syndromes. 
[ 49.959199] EDAC amd64: MCT channel count: 2 [ 49.970360] EDAC MC1: Giving out device to 'amd64_edac' 'F17h': DEV 0000:00:19.3 [ 49.978033] EDAC amd64: DRAM ECC enabled. [ 49.982056] EDAC amd64: F17h detected (node 2). [ 49.986649] EDAC MC: UMC0 chip selects: [ 49.986652] EDAC amd64: MC: 0: 0MB 1: 0MB [ 49.991446] EDAC amd64: MC: 2: 16383MB 3: 16383MB [ 49.996186] EDAC amd64: MC: 4: 0MB 5: 0MB [ 50.000908] EDAC amd64: MC: 6: 0MB 7: 0MB [ 50.005638] EDAC MC: UMC1 chip selects: [ 50.005640] EDAC amd64: MC: 0: 0MB 1: 0MB [ 50.010370] EDAC amd64: MC: 2: 16383MB 3: 16383MB [ 50.015080] EDAC amd64: MC: 4: 0MB 5: 0MB [ 50.019795] EDAC amd64: MC: 6: 0MB 7: 0MB [ 50.024508] EDAC amd64: using x8 syndromes. [ 50.028706] EDAC amd64: MCT channel count: 2 [ 50.036602] EDAC MC2: Giving out device to 'amd64_edac' 'F17h': DEV 0000:00:1a.3 [ 50.044009] EDAC amd64: DRAM ECC enabled. [ 50.048034] EDAC amd64: F17h detected (node 3). [ 50.052630] EDAC MC: UMC0 chip selects: [ 50.052633] EDAC amd64: MC: 0: 0MB 1: 0MB [ 50.057352] EDAC amd64: MC: 2: 16383MB 3: 16383MB [ 50.062065] EDAC amd64: MC: 4: 0MB 5: 0MB [ 50.066778] EDAC amd64: MC: 6: 0MB 7: 0MB [ 50.071495] EDAC MC: UMC1 chip selects: [ 50.071498] EDAC amd64: MC: 0: 0MB 1: 0MB [ 50.076207] EDAC amd64: MC: 2: 16383MB 3: 16383MB [ 50.080926] EDAC amd64: MC: 4: 0MB 5: 0MB [ 50.085638] EDAC amd64: MC: 6: 0MB 7: 0MB [ 50.090351] EDAC amd64: using x8 syndromes. [ 50.094550] EDAC amd64: MCT channel count: 2 [ 50.105189] EDAC MC3: Giving out device to 'amd64_edac' 'F17h': DEV 0000:00:1b.3 [ 50.112934] EDAC PCI0: Giving out device to module 'amd64_edac' controller 'EDAC PCI controller': DEV '0000:00:18.0' (POLLED) [ 50.843845] dcdbas dcdbas: Dell Systems Management Base Driver (version 5.6.0-3.3) [ 75.482153] Adding 4194300k swap on /dev/sda3. Priority:-2 extents:1 across:4194300k FS [ 75.523442] type=1305 audit(1575901100.013:3): audit_pid=49307 old=0 auid=4294967295 ses=4294967295 res=1 [ 75.545883] RPC: Registered named UNIX socket transport module. [ 75.552220] RPC: Registered udp transport module. [ 75.558315] RPC: Registered tcp transport module. [ 75.564406] RPC: Registered tcp NFSv4.1 backchannel transport module. 
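[annotation] In the EDAC amd64 report above, each of the four F17h (Zen/EPYC) nodes exposes two UMCs, and on each UMC only chip selects 2 and 3 are populated at 16383 MB. A sketch of the tally, assuming 16383 MB is the driver's rounded-down figure for a 16 GiB rank:

    # Tally installed DRAM from the EDAC chip-select report.
    nodes, umcs, populated_cs, cs_mb = 4, 2, 2, 16383
    total_mb = nodes * umcs * populated_cs * cs_mb
    print(total_mb)          # 262128 MB
    print(total_mb / 1024)   # 255.98... -> ~256 GiB installed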
[ 76.205183] mlx5_core 0000:01:00.0: slow_pci_heuristic:5575:(pid 49586): Max link speed = 100000, PCI BW = 126016 [ 76.215508] mlx5_core 0000:01:00.0: MLX5E: StrdRq(0) RqSz(1024) StrdSz(256) RxCqeCmprss(0) [ 76.223801] mlx5_core 0000:01:00.0: MLX5E: StrdRq(0) RqSz(1024) StrdSz(256) RxCqeCmprss(0) [ 76.684522] tg3 0000:81:00.0: irq 254 for MSI/MSI-X [ 76.684540] tg3 0000:81:00.0: irq 255 for MSI/MSI-X [ 76.684552] tg3 0000:81:00.0: irq 256 for MSI/MSI-X [ 76.684563] tg3 0000:81:00.0: irq 257 for MSI/MSI-X [ 76.684573] tg3 0000:81:00.0: irq 258 for MSI/MSI-X [ 76.810728] IPv6: ADDRCONF(NETDEV_UP): em1: link is not ready [ 80.343797] tg3 0000:81:00.0 em1: Link is up at 1000 Mbps, full duplex [ 80.350338] tg3 0000:81:00.0 em1: Flow control is on for TX and on for RX [ 80.357183] tg3 0000:81:00.0 em1: EEE is enabled [ 80.361816] IPv6: ADDRCONF(NETDEV_CHANGE): em1: link becomes ready [ 81.170910] IPv6: ADDRCONF(NETDEV_UP): ib0: link is not ready [ 81.444140] IPv6: ADDRCONF(NETDEV_CHANGE): ib0: link becomes ready [ 85.577696] FS-Cache: Loaded [ 85.608522] FS-Cache: Netfs 'nfs' registered for caching [ 85.618255] Key type dns_resolver registered [ 85.646616] NFS: Registering the id_resolver key type [ 85.651939] Key type id_resolver registered [ 85.657486] Key type id_legacy registered [ 191.261132] mpt3sas_cm0: log_info(0x31200205): originator(PL), code(0x20), sub_code(0x0205) [ 303.048704] LNet: HW NUMA nodes: 4, HW CPU cores: 48, npartitions: 4 [ 303.056201] alg: No test for adler32 (adler32-zlib) [ 303.856304] Lustre: Lustre: Build Version: 2.12.3_4_g142b4d4 [ 303.961403] LNet: 63766:0:(config.c:1627:lnet_inet_enumerate()) lnet: Ignoring interface em2: it's down [ 303.971183] LNet: Using FastReg for registration [ 303.988081] LNet: Added LNI 10.0.10.115@o2ib7 [8/256/0/180] [ 2112.595584] md: md6 stopped. [ 2112.606902] async_tx: api initialized (async) [ 2112.613260] xor: automatically using best checksumming function: [ 2112.628302] avx : 9596.000 MB/sec [ 2112.661307] raid6: sse2x1 gen() 6082 MB/s [ 2112.682304] raid6: sse2x2 gen() 11304 MB/s [ 2112.703304] raid6: sse2x4 gen() 12933 MB/s [ 2112.724303] raid6: avx2x1 gen() 14250 MB/s [ 2112.745309] raid6: avx2x2 gen() 18863 MB/s [ 2112.766304] raid6: avx2x4 gen() 18812 MB/s [ 2112.770579] raid6: using algorithm avx2x2 gen() (18863 MB/s) [ 2112.776241] raid6: using avx2x2 recovery algorithm [ 2112.797874] md/raid:md6: device dm-1 operational as raid disk 0 [ 2112.803814] md/raid:md6: device dm-34 operational as raid disk 9 [ 2112.809838] md/raid:md6: device dm-54 operational as raid disk 8 [ 2112.815854] md/raid:md6: device dm-16 operational as raid disk 7 [ 2112.821866] md/raid:md6: device dm-6 operational as raid disk 6 [ 2112.827795] md/raid:md6: device dm-42 operational as raid disk 5 [ 2112.833812] md/raid:md6: device dm-26 operational as raid disk 4 [ 2112.839823] md/raid:md6: device dm-17 operational as raid disk 3 [ 2112.845844] md/raid:md6: device dm-5 operational as raid disk 2 [ 2112.851766] md/raid:md6: device dm-43 operational as raid disk 1 [ 2112.858918] md/raid:md6: raid level 6 active with 10 out of 10 devices, algorithm 2 [ 2112.898947] md6: detected capacity change from 0 to 64011422924800 [ 2112.916672] md: md8 stopped. 
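[annotation] The raid6 lines above are the kernel benchmarking its parity implementations on first RAID6 use and picking the fastest (avx2x2 at 18863 MB/s); each md array then assembles as RAID6 over 10 dm members, leaving 8 data members. A quick check against the md6 capacity reported above:

    # RAID6 stores two syndromes, so 10 members -> 8 data members.
    members, parity = 10, 2
    md6_bytes = 64011422924800          # "md6: detected capacity change from 0 to ..."
    per_member = md6_bytes // (members - parity)
    print(per_member)                   # 8001427865600 bytes per member
    # A bit under the raw 8001563222016-byte drive size -- presumably md
    # superblock/alignment reserve on each member (they sit behind dm here).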
[ 2112.928560] md/raid:md8: device dm-44 operational as raid disk 0 [ 2112.934581] md/raid:md8: device dm-46 operational as raid disk 9 [ 2112.940592] md/raid:md8: device dm-23 operational as raid disk 8 [ 2112.946613] md/raid:md8: device dm-33 operational as raid disk 7 [ 2112.952647] md/raid:md8: device dm-45 operational as raid disk 6 [ 2112.958684] md/raid:md8: device dm-4 operational as raid disk 5 [ 2112.964624] md/raid:md8: device dm-8 operational as raid disk 4 [ 2112.970570] md/raid:md8: device dm-25 operational as raid disk 3 [ 2112.976615] md/raid:md8: device dm-21 operational as raid disk 2 [ 2112.982625] md/raid:md8: device dm-53 operational as raid disk 1 [ 2112.989407] md/raid:md8: raid level 6 active with 10 out of 10 devices, algorithm 2 [ 2113.025969] md8: detected capacity change from 0 to 64011422924800 [ 2113.055680] md: md4 stopped. [ 2113.067145] md/raid:md4: device dm-116 operational as raid disk 0 [ 2113.073272] md/raid:md4: device dm-100 operational as raid disk 9 [ 2113.079372] md/raid:md4: device dm-107 operational as raid disk 8 [ 2113.085471] md/raid:md4: device dm-94 operational as raid disk 7 [ 2113.091486] md/raid:md4: device dm-84 operational as raid disk 6 [ 2113.097499] md/raid:md4: device dm-76 operational as raid disk 5 [ 2113.103513] md/raid:md4: device dm-83 operational as raid disk 4 [ 2113.109531] md/raid:md4: device dm-66 operational as raid disk 3 [ 2113.115547] md/raid:md4: device dm-69 operational as raid disk 2 [ 2113.121564] md/raid:md4: device dm-117 operational as raid disk 1 [ 2113.128560] md/raid:md4: raid level 6 active with 10 out of 10 devices, algorithm 2 [ 2113.169475] md4: detected capacity change from 0 to 64011422924800 [ 2113.196823] md: md0 stopped. [ 2113.210826] md/raid:md0: device dm-60 operational as raid disk 0 [ 2113.216844] md/raid:md0: device dm-95 operational as raid disk 9 [ 2113.222857] md/raid:md0: device dm-91 operational as raid disk 8 [ 2113.228876] md/raid:md0: device dm-80 operational as raid disk 7 [ 2113.234891] md/raid:md0: device dm-88 operational as raid disk 6 [ 2113.240903] md/raid:md0: device dm-65 operational as raid disk 5 [ 2113.246915] md/raid:md0: device dm-64 operational as raid disk 4 [ 2113.252934] md/raid:md0: device dm-89 operational as raid disk 3 [ 2113.258949] md/raid:md0: device dm-74 operational as raid disk 2 [ 2113.264965] md/raid:md0: device dm-104 operational as raid disk 1 [ 2113.271903] md/raid:md0: raid level 6 active with 10 out of 10 devices, algorithm 2 [ 2113.308972] md0: detected capacity change from 0 to 64011422924800 [ 2113.341960] md: md10 stopped. [ 2113.354658] md/raid:md10: device dm-58 operational as raid disk 0 [ 2113.360759] md/raid:md10: device dm-18 operational as raid disk 9 [ 2113.366857] md/raid:md10: device dm-57 operational as raid disk 8 [ 2113.372959] md/raid:md10: device dm-15 operational as raid disk 7 [ 2113.379058] md/raid:md10: device dm-7 operational as raid disk 6 [ 2113.385073] md/raid:md10: device dm-27 operational as raid disk 5 [ 2113.391173] md/raid:md10: device dm-40 operational as raid disk 4 [ 2113.397273] md/raid:md10: device dm-28 operational as raid disk 3 [ 2113.403369] md/raid:md10: device dm-3 operational as raid disk 2 [ 2113.409384] md/raid:md10: device dm-56 operational as raid disk 1 [ 2113.416142] md/raid:md10: raid level 6 active with 10 out of 10 devices, algorithm 2 [ 2113.466385] md10: detected capacity change from 0 to 64011422924800 [ 2113.495745] md: md2 stopped. 
[ 2113.508936] md/raid:md2: device dm-119 operational as raid disk 0 [ 2113.515039] md/raid:md2: device dm-99 operational as raid disk 9 [ 2113.521056] md/raid:md2: device dm-114 operational as raid disk 8 [ 2113.527163] md/raid:md2: device dm-79 operational as raid disk 7 [ 2113.533173] md/raid:md2: device dm-86 operational as raid disk 6 [ 2113.539190] md/raid:md2: device dm-77 operational as raid disk 5 [ 2113.545206] md/raid:md2: device dm-73 operational as raid disk 4 [ 2113.551224] md/raid:md2: device dm-101 operational as raid disk 3 [ 2113.557329] md/raid:md2: device dm-105 operational as raid disk 2 [ 2113.563436] md/raid:md2: device dm-106 operational as raid disk 1 [ 2113.570345] md/raid:md2: raid level 6 active with 10 out of 10 devices, algorithm 2 [ 2113.592487] md2: detected capacity change from 0 to 64011422924800 [ 2113.817580] LDISKFS-fs (md6): file extents enabled, maximum tree depth=5 [ 2113.946595] LDISKFS-fs (md8): file extents enabled, maximum tree depth=5 [ 2114.147645] LDISKFS-fs (md6): mounted filesystem with ordered data mode. Opts: errors=remount-ro,no_mbcache,nodelalloc [ 2114.202582] LDISKFS-fs (md4): file extents enabled, maximum tree depth=5 [ 2114.328693] LDISKFS-fs (md0): file extents enabled, maximum tree depth=5 [ 2114.342963] LDISKFS-fs (md8): mounted filesystem with ordered data mode. Opts: errors=remount-ro,no_mbcache,nodelalloc [ 2114.534044] LDISKFS-fs (md4): mounted filesystem with ordered data mode. Opts: errors=remount-ro,no_mbcache,nodelalloc [ 2114.659742] LDISKFS-fs (md0): mounted filesystem with ordered data mode. Opts: errors=remount-ro,no_mbcache,nodelalloc [ 2114.683575] LDISKFS-fs (md10): file extents enabled, maximum tree depth=5 [ 2114.694623] LDISKFS-fs (md2): file extents enabled, maximum tree depth=5 [ 2114.774385] LustreError: 137-5: fir-OST0054_UUID: not available for connect from 10.8.27.17@o2ib6 (no target). If you are running an HA pair check that the target is mounted on the other server. [ 2114.791669] LustreError: Skipped 17 previous similar messages [ 2114.921315] Lustre: fir-OST005a: Not available for connect from 10.9.117.8@o2ib4 (not set up) [ 2114.929849] Lustre: Skipped 1 previous similar message [ 2115.042213] LDISKFS-fs (md10): mounted filesystem with ordered data mode. Opts: errors=remount-ro,no_mbcache,nodelalloc [ 2115.282969] LustreError: 137-5: fir-OST005c_UUID: not available for connect from 10.9.117.27@o2ib4 (no target). If you are running an HA pair check that the target is mounted on the other server. [ 2115.300335] LustreError: Skipped 220 previous similar messages [ 2115.453675] Lustre: fir-OST005a: Not available for connect from 10.9.102.25@o2ib4 (not set up) [ 2115.462300] Lustre: Skipped 39 previous similar messages [ 2116.011575] LDISKFS-fs (md2): mounted filesystem with ordered data mode. Opts: errors=remount-ro,no_mbcache,nodelalloc [ 2116.086764] Lustre: fir-OST005a: Imperative Recovery enabled, recovery window shrunk from 300-900 down to 150-900 [ 2116.097595] Lustre: fir-OST005a: in recovery but waiting for the first client to connect [ 2116.097864] Lustre: fir-OST005a: Will be in recovery for at least 2:30, or until 1291 clients reconnect [ 2116.098139] Lustre: fir-OST005a: Connection restored to (at 10.8.24.17@o2ib6) [ 2116.301499] LustreError: 137-5: fir-OST0054_UUID: not available for connect from 10.9.105.45@o2ib4 (no target). If you are running an HA pair check that the target is mounted on the other server. 
[ 2116.318868] LustreError: Skipped 415 previous similar messages [ 2116.607374] Lustre: fir-OST005c: Connection restored to 7f6916f2-c589-3558-df52-0f5294f8fa05 (at 10.9.102.19@o2ib4) [ 2116.617640] Lustre: fir-OST0058: Not available for connect from 10.9.102.60@o2ib4 (not set up) [ 2116.617642] Lustre: Skipped 52 previous similar messages [ 2116.631749] Lustre: Skipped 46 previous similar messages [ 2116.753977] Lustre: fir-OST0058: Imperative Recovery enabled, recovery window shrunk from 300-900 down to 150-900 [ 2116.764243] Lustre: Skipped 1 previous similar message [ 2116.769973] Lustre: fir-OST0058: in recovery but waiting for the first client to connect [ 2116.772765] Lustre: fir-OST0058: Will be in recovery for at least 2:30, or until 1291 clients reconnect [ 2116.772766] Lustre: Skipped 1 previous similar message [ 2116.792617] Lustre: Skipped 1 previous similar message [ 2117.610633] Lustre: fir-OST005e: Connection restored to (at 10.8.24.1@o2ib6) [ 2117.617780] Lustre: Skipped 209 previous similar messages [ 2117.757906] Lustre: fir-OST0056: Imperative Recovery enabled, recovery window shrunk from 300-900 down to 150-900 [ 2117.768170] Lustre: Skipped 2 previous similar messages [ 2117.773910] Lustre: fir-OST0056: in recovery but waiting for the first client to connect [ 2117.782003] Lustre: Skipped 2 previous similar messages [ 2117.788887] Lustre: fir-OST0056: Will be in recovery for at least 2:30, or until 1291 clients reconnect [ 2117.798284] Lustre: Skipped 2 previous similar messages [ 2119.614365] Lustre: fir-OST005e: Connection restored to f5313c6f-3647-048f-259c-ceddb6cbc1d1 (at 10.9.103.43@o2ib4) [ 2119.614367] Lustre: fir-OST005c: Connection restored to f5313c6f-3647-048f-259c-ceddb6cbc1d1 (at 10.9.103.43@o2ib4) [ 2119.614369] Lustre: fir-OST0056: Connection restored to f5313c6f-3647-048f-259c-ceddb6cbc1d1 (at 10.9.103.43@o2ib4) [ 2119.614370] Lustre: fir-OST005a: Connection restored to f5313c6f-3647-048f-259c-ceddb6cbc1d1 (at 10.9.103.43@o2ib4) [ 2119.614371] Lustre: fir-OST0058: Connection restored to f5313c6f-3647-048f-259c-ceddb6cbc1d1 (at 10.9.103.43@o2ib4) [ 2119.614373] Lustre: Skipped 784 previous similar messages [ 2119.614374] Lustre: Skipped 784 previous similar messages [ 2119.614375] Lustre: Skipped 784 previous similar messages [ 2119.614379] Lustre: Skipped 784 previous similar messages [ 2119.688119] Lustre: Skipped 17 previous similar messages [ 2122.534723] Lustre: fir-OST005e: Denying connection for new client 5f11dd29-1211-44a2-2612-f8309cf085b3 (at 10.8.21.18@o2ib6), waiting for 1290 known clients (528 recovered, 17 in progress, and 0 evicted) to recover in 2:24 [ 2122.554537] Lustre: Skipped 2 previous similar messages [ 2127.578715] Lustre: fir-OST0058: Recovery over after 0:11, of 1291 clients 1291 recovered and 0 were evicted. 
[ 2127.604937] Lustre: fir-OST0054: deleting orphan objects from 0x1800000401:11587727 to 0x1800000401:11587777 [ 2127.636081] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3010908 to 0x1a80000402:3010945 [ 2127.646210] Lustre: fir-OST005a: deleting orphan objects from 0x1980000401:11643012 to 0x1980000401:11643041 [ 2127.651249] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3015076 to 0x1900000402:3015105 [ 2127.666810] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000400:11601326 to 0x1a80000400:11601409 [ 2127.676743] Lustre: fir-OST005e: deleting orphan objects from 0x0:27453436 to 0x0:27453473 [ 2127.677004] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000400:11457723 to 0x1a00000400:11457761 [ 2127.700276] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:2978880 to 0x1a00000401:2978945 [ 2127.702764] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:792278 to 0x1980000400:792321 [ 2127.703020] Lustre: fir-OST005a: deleting orphan objects from 0x0:27548344 to 0x0:27548385 [ 2127.704825] Lustre: fir-OST0056: deleting orphan objects from 0x1880000402:3009048 to 0x1880000402:3009121 [ 2127.707244] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000401:788482 to 0x1a80000401:788513 [ 2127.712646] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3018544 to 0x1980000402:3018625 [ 2127.737264] Lustre: fir-OST0056: deleting orphan objects from 0x1880000400:11579442 to 0x1880000400:11579521 [ 2127.757860] Lustre: fir-OST0058: deleting orphan objects from 0x1900000400:789629 to 0x1900000400:789665 [ 2127.758850] Lustre: fir-OST0058: deleting orphan objects from 0x1900000401:11597256 to 0x1900000401:11597281 [ 2127.782821] Lustre: fir-OST0058: deleting orphan objects from 0x0:27492955 to 0x0:27492993 [ 2127.784564] Lustre: fir-OST0056: deleting orphan objects from 0x1880000401:789050 to 0x1880000401:789089 [ 2127.801547] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3000247 to 0x1800000400:3000289 [ 2127.814050] Lustre: fir-OST005c: deleting orphan objects from 0x0:27178781 to 0x0:27178817 [ 2127.831910] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:777103 to 0x1a00000402:777121 [ 2127.849043] Lustre: fir-OST0054: deleting orphan objects from 0x0:27444185 to 0x0:27444225 [ 2127.852518] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:786910 to 0x1800000402:786945 [ 2127.914689] Lustre: fir-OST0056: deleting orphan objects from 0x0:27466201 to 0x0:27466241 [ 2147.537429] Lustre: fir-OST005e: Connection restored to 5f11dd29-1211-44a2-2612-f8309cf085b3 (at 10.8.21.18@o2ib6) [ 2147.547785] Lustre: Skipped 6485 previous similar messages [ 9376.001426] Lustre: fir-OST0054: haven't heard from client 798dc93c-11ba-328e-acec-b07846966ea5 (at 10.8.0.67@o2ib6) in 227 seconds. I think it's dead, and I am evicting it. exp ffff892252ab7800, cur 1575910401 expire 1575910251 last 1575910174 [12736.930259] Lustre: fir-OST0054: Connection restored to 798dc93c-11ba-328e-acec-b07846966ea5 (at 10.8.0.67@o2ib6) [12736.940519] Lustre: Skipped 5 previous similar messages [12890.468581] Lustre: fir-OST0054: Connection restored to 9a70df35-6de0-4 (at 10.8.19.7@o2ib6) [12890.477027] Lustre: Skipped 5 previous similar messages [12898.060656] Lustre: fir-OST0058: haven't heard from client 1d08460b-716a-03a1-30aa-d26bf61d87fe (at 10.8.0.65@o2ib6) in 227 seconds. I think it's dead, and I am evicting it. 
exp ffff8922804ed000, cur 1575913923 expire 1575913773 last 1575913696 [12898.082275] Lustre: Skipped 5 previous similar messages [13479.118042] Lustre: fir-OST0054: Connection restored to 8171b0fd-9423-4 (at 10.9.109.27@o2ib4) [13479.126662] Lustre: Skipped 4 previous similar messages [15086.261148] Lustre: fir-OST0054: Connection restored to 1d08460b-716a-03a1-30aa-d26bf61d87fe (at 10.8.0.65@o2ib6) [15086.271416] Lustre: Skipped 5 previous similar messages [19374.041469] LustreError: 67930:0:(tgt_grant.c:758:tgt_grant_check()) fir-OST005e: cli f9f503f0-6ff6-698f-9a8d-14bd128a6d42 claims 16801792 GRANT, real grant 16752640 [22695.260464] Lustre: fir-OST0054: haven't heard from client 83281a6e-8cdd-af0c-d930-afb3d26c7eba (at 10.8.23.14@o2ib6) in 227 seconds. I think it's dead, and I am evicting it. exp ffff892252b8a000, cur 1575923720 expire 1575923570 last 1575923493 [22695.282202] Lustre: Skipped 5 previous similar messages [22710.880614] Lustre: fir-OST0054: Connection restored to 83281a6e-8cdd-af0c-d930-afb3d26c7eba (at 10.8.23.14@o2ib6) [22710.890963] Lustre: Skipped 5 previous similar messages [23164.274873] Lustre: fir-OST0056: haven't heard from client 50bb3322-2186-2682-e22f-d2e40908bd0d (at 10.8.23.14@o2ib6) in 227 seconds. I think it's dead, and I am evicting it. exp ffff890d9fd03800, cur 1575924189 expire 1575924039 last 1575923962 [23164.296576] Lustre: Skipped 5 previous similar messages [23187.511794] Lustre: fir-OST0054: Connection restored to 83281a6e-8cdd-af0c-d930-afb3d26c7eba (at 10.8.23.14@o2ib6) [23187.522142] Lustre: Skipped 5 previous similar messages [23791.966216] Lustre: fir-OST0054: Connection restored to 83281a6e-8cdd-af0c-d930-afb3d26c7eba (at 10.8.23.14@o2ib6) [23791.976564] Lustre: Skipped 5 previous similar messages [23816.286759] Lustre: fir-OST0054: haven't heard from client 75aebac8-89c1-69e4-9dfa-1727b2d47fae (at 10.8.23.14@o2ib6) in 227 seconds. I think it's dead, and I am evicting it. exp ffff890bba3a7c00, cur 1575924841 expire 1575924691 last 1575924614 [23816.308467] Lustre: Skipped 5 previous similar messages [24720.298181] Lustre: fir-OST0058: haven't heard from client 2867fefc-6124-ed47-3fcc-acf48d637860 (at 10.8.18.35@o2ib6) in 227 seconds. I think it's dead, and I am evicting it. 
exp ffff892252ce6000, cur 1575925745 expire 1575925595 last 1575925518 [24720.319902] Lustre: Skipped 5 previous similar messages [37894.763171] perf: interrupt took too long (2503 > 2500), lowering kernel.perf_event_max_sample_rate to 79000 [55071.320836] Lustre: 67791:0:(client.c:2133:ptlrpc_expire_one_request()) @@@ Request sent has timed out for slow reply: [sent 1575956084/real 1575956084] req@ffff88eded431680 x1652452367426464/t0(0) o106->fir-OST0054@10.9.106.54@o2ib4:15/16 lens 296/280 e 0 to 1 dl 1575956095 ref 1 fl Rpc:X/0/ffffffff rc 0/-1 [55071.320838] Lustre: 67848:0:(client.c:2133:ptlrpc_expire_one_request()) @@@ Request sent has timed out for slow reply: [sent 1575956084/real 1575956084] req@ffff88e821f9c800 x1652452367426448/t0(0) o106->fir-OST0054@10.9.106.54@o2ib4:15/16 lens 296/280 e 0 to 1 dl 1575956095 ref 1 fl Rpc:X/0/ffffffff rc 0/-1 [55082.321056] Lustre: 67848:0:(client.c:2133:ptlrpc_expire_one_request()) @@@ Request sent has timed out for slow reply: [sent 1575956095/real 1575956095] req@ffff88e821f9c800 x1652452367426448/t0(0) o106->fir-OST0054@10.9.106.54@o2ib4:15/16 lens 296/280 e 0 to 1 dl 1575956106 ref 1 fl Rpc:X/2/ffffffff rc 0/-1 [55093.348286] Lustre: 67848:0:(client.c:2133:ptlrpc_expire_one_request()) @@@ Request sent has timed out for slow reply: [sent 1575956106/real 1575956106] req@ffff88e821f9c800 x1652452367426448/t0(0) o106->fir-OST0054@10.9.106.54@o2ib4:15/16 lens 296/280 e 0 to 1 dl 1575956117 ref 1 fl Rpc:X/2/ffffffff rc 0/-1 [55093.375649] Lustre: 67848:0:(client.c:2133:ptlrpc_expire_one_request()) Skipped 2 previous similar messages [55104.375502] Lustre: 67791:0:(client.c:2133:ptlrpc_expire_one_request()) @@@ Request sent has timed out for slow reply: [sent 1575956117/real 1575956117] req@ffff88eded431680 x1652452367426464/t0(0) o106->fir-OST0054@10.9.106.54@o2ib4:15/16 lens 296/280 e 0 to 1 dl 1575956128 ref 1 fl Rpc:X/2/ffffffff rc 0/-1 [55104.402874] Lustre: 67791:0:(client.c:2133:ptlrpc_expire_one_request()) Skipped 1 previous similar message [55115.385726] Lustre: 67848:0:(client.c:2133:ptlrpc_expire_one_request()) @@@ Request sent has timed out for slow reply: [sent 1575956128/real 1575956128] req@ffff88e821f9c800 x1652452367426448/t0(0) o106->fir-OST0054@10.9.106.54@o2ib4:15/16 lens 296/280 e 0 to 1 dl 1575956139 ref 1 fl Rpc:X/2/ffffffff rc 0/-1 [55115.413060] Lustre: 67848:0:(client.c:2133:ptlrpc_expire_one_request()) Skipped 1 previous similar message [55126.412953] Lustre: 67791:0:(client.c:2133:ptlrpc_expire_one_request()) @@@ Request sent has timed out for slow reply: [sent 1575956139/real 1575956139] req@ffff88eded431680 x1652452367426464/t0(0) o106->fir-OST0054@10.9.106.54@o2ib4:15/16 lens 296/280 e 0 to 1 dl 1575956150 ref 1 fl Rpc:X/2/ffffffff rc 0/-1 [55126.440294] Lustre: 67791:0:(client.c:2133:ptlrpc_expire_one_request()) Skipped 1 previous similar message [55148.424402] Lustre: 67848:0:(client.c:2133:ptlrpc_expire_one_request()) @@@ Request sent has timed out for slow reply: [sent 1575956161/real 1575956161] req@ffff88e821f9c800 x1652452367426448/t0(0) o106->fir-OST0054@10.9.106.54@o2ib4:15/16 lens 296/280 e 0 to 1 dl 1575956172 ref 1 fl Rpc:X/2/ffffffff rc 0/-1 [55148.451748] Lustre: 67848:0:(client.c:2133:ptlrpc_expire_one_request()) Skipped 2 previous similar messages [55181.452062] Lustre: 67791:0:(client.c:2133:ptlrpc_expire_one_request()) @@@ Request sent has timed out for slow reply: [sent 1575956194/real 1575956194] req@ffff88eded431680 x1652452367426464/t0(0) o106->fir-OST0054@10.9.106.54@o2ib4:15/16 lens 296/280 
e 0 to 1 dl 1575956205 ref 1 fl Rpc:X/2/ffffffff rc 0/-1 [55181.479405] Lustre: 67791:0:(client.c:2133:ptlrpc_expire_one_request()) Skipped 5 previous similar messages [55201.974017] Lustre: fir-OST0056: haven't heard from client 1316ac10-17f9-20d9-6734-8b32fc11fac2 (at 10.9.106.54@o2ib4) in 227 seconds. I think it's dead, and I am evicting it. exp ffff892251566800, cur 1575956226 expire 1575956076 last 1575955999 [55201.995833] Lustre: Skipped 5 previous similar messages [55202.001208] LustreError: 67848:0:(ldlm_lockd.c:681:ldlm_handle_ast_error()) ### client (nid 10.9.106.54@o2ib4) failed to reply to glimpse AST (req@ffff88e821f9c800 x1652452367426448 status 0 rc -5), evict it ns: filter-fir-OST0054_UUID lock: ffff88fef56bc380/0x7066c9c1891f377c lrc: 3/0,0 mode: PW/PW res: [0x1800000401:0xa16b7b:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 67108864->68719476735) flags: 0x40000080000000 nid: 10.9.106.54@o2ib4 remote: 0xa907bc36138dc384 expref: 7 pid: 67663 timeout: 0 lvb_type: 0 [55202.001216] LustreError: 138-a: fir-OST0054: A client on nid 10.9.106.54@o2ib4 was evicted due to a lock glimpse callback time out: rc -5 [55202.001246] LustreError: 66071:0:(ldlm_lockd.c:256:expired_lock_main()) ### lock callback timer expired after 1575956226s: evicting client at 10.9.106.54@o2ib4 ns: filter-fir-OST0054_UUID lock: ffff88f5e8f93600/0x7066c9c1891f78a5 lrc: 3/0,0 mode: PW/PW res: [0x1800000401:0xb38953:0x0].0x0 rrc: 2 type: EXT [0->18446744073709551615] (req 34359738368->18446744073709551615) flags: 0x40000000000000 nid: 10.9.106.54@o2ib4 remote: 0xa907bc36138dc513 expref: 5 pid: 67900 timeout: 0 lvb_type: 0 [55202.102654] LustreError: 67848:0:(ldlm_lockd.c:681:ldlm_handle_ast_error()) Skipped 1 previous similar message [55211.915449] Lustre: fir-OST005e: haven't heard from client 1316ac10-17f9-20d9-6734-8b32fc11fac2 (at 10.9.106.54@o2ib4) in 227 seconds. I think it's dead, and I am evicting it. exp ffff892250a28800, cur 1575956236 expire 1575956086 last 1575956009 [55211.937252] Lustre: Skipped 3 previous similar messages [55216.926307] Lustre: fir-OST005a: haven't heard from client 1316ac10-17f9-20d9-6734-8b32fc11fac2 (at 10.9.106.54@o2ib4) in 227 seconds. I think it's dead, and I am evicting it. exp ffff88f2aecda800, cur 1575956241 expire 1575956091 last 1575956014 [55357.690391] Lustre: fir-OST0054: Connection restored to 1316ac10-17f9-20d9-6734-8b32fc11fac2 (at 10.9.106.54@o2ib4) [55357.700832] Lustre: Skipped 5 previous similar messages [73866.289223] Lustre: fir-OST005a: haven't heard from client 0af2ee10-72ea-97a8-65e7-44544fdbc0b9 (at 10.9.108.39@o2ib4) in 227 seconds. I think it's dead, and I am evicting it. 
exp ffff88f2aec88800, cur 1575974890 expire 1575974740 last 1575974663 [82860.494528] Lustre: fir-OST0058: Client 67942120-f44f-42ea-60c3-96f62fccea78 (at 10.9.109.39@o2ib4) reconnecting [82860.504721] Lustre: fir-OST0058: Connection restored to 67942120-f44f-42ea-60c3-96f62fccea78 (at 10.9.109.39@o2ib4) [82868.720930] Lustre: fir-OST0056: Client c9911b4c-e55e-f4aa-416a-b652019239f7 (at 10.9.117.40@o2ib4) reconnecting [82868.731127] Lustre: fir-OST0056: Connection restored to c9911b4c-e55e-f4aa-416a-b652019239f7 (at 10.9.117.40@o2ib4) [82872.652414] Lustre: fir-OST0058: Client e72387a4-2bab-d686-07ea-8e45160d2e1d (at 10.9.117.23@o2ib4) reconnecting [82872.662616] Lustre: fir-OST0058: Connection restored to e72387a4-2bab-d686-07ea-8e45160d2e1d (at 10.9.117.23@o2ib4) [82874.824840] Lustre: fir-OST005a: Client d873db05-7c48-65ad-d97d-599447705616 (at 10.9.106.5@o2ib4) reconnecting [82874.834931] Lustre: Skipped 5 previous similar messages [82874.840194] Lustre: fir-OST005a: Connection restored to d873db05-7c48-65ad-d97d-599447705616 (at 10.9.106.5@o2ib4) [82874.850558] Lustre: Skipped 5 previous similar messages [82878.869944] Lustre: fir-OST005a: Client acd3ae51-2d23-93df-d1b3-33ff6a3945ef (at 10.9.114.5@o2ib4) reconnecting [82878.880035] Lustre: Skipped 64 previous similar messages [82878.885392] Lustre: fir-OST005a: Connection restored to acd3ae51-2d23-93df-d1b3-33ff6a3945ef (at 10.9.114.5@o2ib4) [82878.895765] Lustre: Skipped 64 previous similar messages [82881.458358] LNetError: 63837:0:(peer.c:3451:lnet_peer_ni_add_to_recoveryq_locked()) lpni 10.0.10.209@o2ib7 added to recovery queue. Health = 900 [82881.471341] LustreError: 67718:0:(ldlm_lib.c:3256:target_bulk_io()) @@@ Reconnect on bulk WRITE req@ffff8922ca374050 x1649309827682656/t0(0) o4->c93954af-761b-f1eb-f651-9881322a7a72@10.9.108.51@o2ib4:698/0 lens 488/448 e 1 to 0 dl 1575983923 ref 1 fl Interpret:/0/0 rc 0/0 [82881.496115] Lustre: fir-OST0058: Bulk IO write error with c93954af-761b-f1eb-f651-9881322a7a72 (at 10.9.108.51@o2ib4), client will retry: rc = -110 [82885.471409] LNetError: 63837:0:(peer.c:3451:lnet_peer_ni_add_to_recoveryq_locked()) lpni 10.0.10.210@o2ib7 added to recovery queue. Health = 900 [82885.484361] LNetError: 63837:0:(peer.c:3451:lnet_peer_ni_add_to_recoveryq_locked()) Skipped 1 previous similar message [82885.495092] LustreError: 67822:0:(sec.c:2485:sptlrpc_svc_unwrap_bulk()) @@@ truncated bulk GET 14680064(16777216) req@ffff88f2fbc1e050 x1652122491273216/t0(0) o4->eb7e3af2-d117-4@10.9.101.1@o2ib4:702/0 lens 488/448 e 1 to 0 dl 1575983927 ref 1 fl Interpret:/0/0 rc 0/0 [82885.519616] Lustre: fir-OST0054: Bulk IO write error with eb7e3af2-d117-4 (at 10.9.101.1@o2ib4), client will retry: rc = -110 [82886.919156] Lustre: fir-OST0054: Client 32524583-8f43-9f54-827c-15b3a46fedcc (at 10.8.30.10@o2ib6) reconnecting [82886.919272] Lustre: fir-OST005c: Connection restored to d69fcdf7-730b-8cda-70aa-8ec0410da18f (at 10.8.29.1@o2ib6) [82886.919275] Lustre: Skipped 104 previous similar messages [82886.944900] Lustre: Skipped 105 previous similar messages [82895.494630] LNetError: 63837:0:(peer.c:3451:lnet_peer_ni_add_to_recoveryq_locked()) lpni 10.0.10.210@o2ib7 added to recovery queue. 
Health = 900 [82895.507604] LustreError: 67989:0:(sec.c:2485:sptlrpc_svc_unwrap_bulk()) @@@ truncated bulk GET 9437184(12582912) req@ffff89224e0e5050 x1649309827682656/t0(0) o4->c93954af-761b-f1eb-f651-9881322a7a72@10.9.108.51@o2ib4:710/0 lens 488/448 e 0 to 0 dl 1575983935 ref 1 fl Interpret:/2/0 rc 0/0 [82895.533682] Lustre: fir-OST0058: Bulk IO write error with c93954af-761b-f1eb-f651-9881322a7a72 (at 10.9.108.51@o2ib4), client will retry: rc = -110 [82900.507721] LNetError: 63837:0:(peer.c:3451:lnet_peer_ni_add_to_recoveryq_locked()) lpni 10.0.10.211@o2ib7 added to recovery queue. Health = 900 [82900.520690] LNetError: 63837:0:(peer.c:3451:lnet_peer_ni_add_to_recoveryq_locked()) Skipped 2 previous similar messages [82900.531503] LustreError: 67722:0:(sec.c:2485:sptlrpc_svc_unwrap_bulk()) @@@ truncated bulk GET 13631488(16777216) req@ffff8902f6709050 x1652122491471360/t0(0) o4->eb7e3af2-d117-4@10.9.101.1@o2ib4:718/0 lens 488/448 e 1 to 0 dl 1575983943 ref 1 fl Interpret:/0/0 rc 0/0 [82900.556080] Lustre: fir-OST0054: Bulk IO write error with eb7e3af2-d117-4 (at 10.9.101.1@o2ib4), client will retry: rc = -110 [82901.259219] LustreError: 137-5: fir-OST0059_UUID: not available for connect from 10.9.116.11@o2ib4 (no target). If you are running an HA pair check that the target is mounted on the other server. [82901.276627] LustreError: Skipped 204 previous similar messages [82901.347731] Lustre: 67873:0:(client.c:2133:ptlrpc_expire_one_request()) @@@ Request sent has timed out for slow reply: [sent 1575983917/real 1575983917] req@ffff8912f703f080 x1652452421351632/t0(0) o105->fir-OST0054@10.9.101.29@o2ib4:15/16 lens 360/224 e 0 to 1 dl 1575983924 ref 1 fl Rpc:X/0/ffffffff rc 0/-1 [82901.375070] Lustre: 67873:0:(client.c:2133:ptlrpc_expire_one_request()) Skipped 2 previous similar messages [82902.982452] Lustre: fir-OST005e: Client 91d6d3f9-54bc-fd90-b16e-873e3af76326 (at 10.9.106.44@o2ib4) reconnecting [82902.983396] Lustre: fir-OST0054: Connection restored to 91d6d3f9-54bc-fd90-b16e-873e3af76326 (at 10.9.106.44@o2ib4) [82902.983398] Lustre: Skipped 187 previous similar messages [82903.008483] Lustre: Skipped 187 previous similar messages [82906.576837] LustreError: 67929:0:(ldlm_lib.c:3256:target_bulk_io()) @@@ Reconnect on bulk WRITE req@ffff8907c0ee1850 x1649049588956192/t0(0) o4->20463417-fb32-2f92-5aae-59bfa8e287e3@10.9.101.29@o2ib4:725/0 lens 488/448 e 1 to 0 dl 1575983950 ref 1 fl Interpret:/0/0 rc 0/0 [82906.601645] Lustre: fir-OST0054: Bulk IO write error with 20463417-fb32-2f92-5aae-59bfa8e287e3 (at 10.9.101.29@o2ib4), client will retry: rc = -110 [82908.822798] LustreError: 137-5: fir-OST0059_UUID: not available for connect from 10.8.31.2@o2ib6 (no target). If you are running an HA pair check that the target is mounted on the other server. [82908.839992] LustreError: Skipped 1 previous similar message [82910.607090] LustreError: 137-5: fir-OST0059_UUID: not available for connect from 10.9.101.28@o2ib4 (no target). If you are running an HA pair check that the target is mounted on the other server. [82910.624484] LustreError: Skipped 2 previous similar messages [82915.531020] LNetError: 63837:0:(peer.c:3451:lnet_peer_ni_add_to_recoveryq_locked()) lpni 10.0.10.211@o2ib7 added to recovery queue. 
Health = 900 [82915.531026] LustreError: 67718:0:(sec.c:2485:sptlrpc_svc_unwrap_bulk()) @@@ truncated bulk GET 0(205619) req@ffff89224e0e3850 x1652123335105408/t0(0) o4->4cec062a-e1ff-4@10.9.101.3@o2ib4:732/0 lens 488/448 e 1 to 0 dl 1575983957 ref 1 fl Interpret:/0/0 rc 0/0 [82915.531049] Lustre: fir-OST0054: Bulk IO write error with 4cec062a-e1ff-4 (at 10.9.101.3@o2ib4), client will retry: rc = -110 [82915.578309] LNetError: 63837:0:(peer.c:3451:lnet_peer_ni_add_to_recoveryq_locked()) Skipped 1 previous similar message [82916.566038] LustreError: 67989:0:(ldlm_lib.c:3256:target_bulk_io()) @@@ Reconnect on bulk WRITE req@ffff892251cea050 x1650930158014448/t0(0) o4->fe46e801-2d86-9439-0b24-b78514ed5486@10.9.109.8@o2ib4:739/0 lens 488/448 e 1 to 0 dl 1575983964 ref 1 fl Interpret:/0/0 rc 0/0 [82921.314277] LustreError: 137-5: fir-OST005f_UUID: not available for connect from 10.8.17.19@o2ib6 (no target). If you are running an HA pair check that the target is mounted on the other server. [82921.331565] LustreError: Skipped 2 previous similar messages [82926.121728] LustreError: 137-5: fir-OST0059_UUID: not available for connect from 10.9.105.11@o2ib4 (no target). If you are running an HA pair check that the target is mounted on the other server. [82926.139115] LustreError: Skipped 11 previous similar messages [82933.838387] LustreError: 67722:0:(ldlm_lib.c:3256:target_bulk_io()) @@@ Reconnect on bulk WRITE req@ffff88f3a9515050 x1649292592204048/t0(0) o4->d269b7b3-c7ee-1895-0bbf-8293c505cff2@10.9.110.44@o2ib4:1/0 lens 488/448 e 1 to 0 dl 1575983981 ref 1 fl Interpret:/0/0 rc 0/0 [82933.862916] Lustre: fir-OST0058: Bulk IO write error with d269b7b3-c7ee-1895-0bbf-8293c505cff2 (at 10.9.110.44@o2ib4), client will retry: rc = -110 [82933.876140] Lustre: Skipped 1 previous similar message [82934.748407] LustreError: 137-5: fir-OST0055_UUID: not available for connect from 10.9.107.48@o2ib4 (no target). If you are running an HA pair check that the target is mounted on the other server. [82934.765772] LustreError: Skipped 14 previous similar messages [82934.985866] Lustre: fir-OST005c: Client f5acbb80-2671-675b-21f5-81352b190567 (at 10.9.110.49@o2ib4) reconnecting [82934.996050] Lustre: Skipped 1086 previous similar messages [82935.001544] Lustre: fir-OST005c: Connection restored to (at 10.9.110.49@o2ib4) [82935.008881] Lustre: Skipped 1086 previous similar messages [82950.477728] Lustre: 89234:0:(client.c:2133:ptlrpc_expire_one_request()) @@@ Request sent has timed out for slow reply: [sent 1575983967/real 1575983967] req@ffff88fddaa5d580 x1652452421353168/t0(0) o105->fir-OST005a@10.9.110.16@o2ib4:15/16 lens 360/224 e 0 to 1 dl 1575983974 ref 1 fl Rpc:X/0/ffffffff rc 0/-1 [82952.145857] LustreError: 137-5: fir-OST0057_UUID: not available for connect from 10.8.23.19@o2ib6 (no target). If you are running an HA pair check that the target is mounted on the other server. [82952.163151] LustreError: Skipped 40 previous similar messages [82955.589834] LNetError: 63837:0:(peer.c:3451:lnet_peer_ni_add_to_recoveryq_locked()) lpni 10.0.10.211@o2ib7 added to recovery queue. Health = 900 [82955.602809] LustreError: 67980:0:(sec.c:2485:sptlrpc_svc_unwrap_bulk()) @@@ truncated bulk GET 7864320(16252928) req@ffff88f2b024d850 x1649292592204048/t0(0) o4->d269b7b3-c7ee-1895-0bbf-8293c505cff2@10.9.110.44@o2ib4:15/0 lens 488/448 e 0 to 0 dl 1575983995 ref 1 fl Interpret:/2/0 rc 0/0 [82984.912818] LustreError: 137-5: fir-OST0055_UUID: not available for connect from 10.8.27.15@o2ib6 (no target). 
If you are running an HA pair check that the target is mounted on the other server. [82984.930100] LustreError: Skipped 77 previous similar messages [82999.030286] Lustre: fir-OST005c: Client 9622ebd9-08dd-84f5-187b-b07758b1dd55 (at 10.9.103.48@o2ib4) reconnecting [82999.030363] Lustre: fir-OST005a: Connection restored to 7fc3ef05-0495-25a3-7cdb-c6f981dcc2b9 (at 10.9.102.68@o2ib4) [82999.030365] Lustre: Skipped 1252 previous similar messages [82999.056380] Lustre: Skipped 1255 previous similar messages [83014.761030] Lustre: 67693:0:(client.c:2133:ptlrpc_expire_one_request()) @@@ Request sent has timed out for slow reply: [sent 1575984031/real 1575984031] req@ffff8922f461d100 x1652452421355920/t0(0) o104->fir-OST0054@10.9.108.37@o2ib4:15/16 lens 296/224 e 0 to 1 dl 1575984038 ref 1 fl Rpc:X/0/ffffffff rc 0/-1 [83017.628089] LustreError: 68013:0:(ldlm_lib.c:3256:target_bulk_io()) @@@ Reconnect on bulk WRITE req@ffff88f2ab53a850 x1649049588956192/t0(0) o4->20463417-fb32-2f92-5aae-59bfa8e287e3@10.9.101.29@o2ib4:91/0 lens 488/448 e 0 to 0 dl 1575984071 ref 1 fl Interpret:/2/0 rc 0/0 [83017.652133] LustreError: 68013:0:(ldlm_lib.c:3256:target_bulk_io()) Skipped 1 previous similar message [83017.661969] Lustre: fir-OST0054: Bulk IO write error with 20463417-fb32-2f92-5aae-59bfa8e287e3 (at 10.9.101.29@o2ib4), client will retry: rc = -110 [83017.675188] Lustre: Skipped 2 previous similar messages [83028.770318] LustreError: 67718:0:(ldlm_lib.c:3256:target_bulk_io()) @@@ Reconnect on bulk WRITE req@ffff89224f960050 x1649292592204048/t0(0) o4->d269b7b3-c7ee-1895-0bbf-8293c505cff2@10.9.110.44@o2ib4:102/0 lens 488/448 e 0 to 0 dl 1575984082 ref 1 fl Interpret:/2/0 rc 0/0 [83048.937549] LustreError: 137-5: fir-OST005d_UUID: not available for connect from 10.9.104.8@o2ib4 (no target). If you are running an HA pair check that the target is mounted on the other server. [83048.954829] LustreError: Skipped 6990 previous similar messages [83067.484865] Lustre: fir-OST005a: haven't heard from client 2bacacc9-821b-1013-eb06-dd3bdbe6bf12 (at 10.9.104.8@o2ib4) in 162 seconds. I think it's dead, and I am evicting it. exp ffff8902cd64e400, cur 1575984091 expire 1575983941 last 1575983929 [83067.506600] Lustre: Skipped 5 previous similar messages [83068.476206] Lustre: fir-OST005e: haven't heard from client 2bacacc9-821b-1013-eb06-dd3bdbe6bf12 (at 10.9.104.8@o2ib4) in 163 seconds. I think it's dead, and I am evicting it. exp ffff8922509fb000, cur 1575984092 expire 1575983942 last 1575983929 [83296.692149] LustreError: 137-5: fir-OST0057_UUID: not available for connect from 10.9.109.11@o2ib4 (no target). If you are running an HA pair check that the target is mounted on the other server. [83296.709516] LustreError: Skipped 111 previous similar messages [83311.284314] Lustre: fir-OST0058: Client d758ce23-488a-e6d5-8c6f-41cbf6d78ec4 (at 10.9.105.21@o2ib4) reconnecting [83311.293300] Lustre: fir-OST005e: Connection restored to d758ce23-488a-e6d5-8c6f-41cbf6d78ec4 (at 10.9.105.21@o2ib4) [83311.293302] Lustre: Skipped 11935 previous similar messages [83311.310497] Lustre: Skipped 11936 previous similar messages [83325.611334] LNetError: 63837:0:(peer.c:3451:lnet_peer_ni_add_to_recoveryq_locked()) lpni 10.0.10.211@o2ib7 added to recovery queue. 
Health = 900 [83325.624301] LNetError: 63837:0:(peer.c:3451:lnet_peer_ni_add_to_recoveryq_locked()) Skipped 2 previous similar messages [83325.635126] LustreError: 68003:0:(sec.c:2485:sptlrpc_svc_unwrap_bulk()) @@@ truncated bulk GET 12582912(16777216) req@ffff89025a28b850 x1652122504801216/t0(0) o4->eb7e3af2-d117-4@10.9.101.1@o2ib4:398/0 lens 488/448 e 0 to 0 dl 1575984378 ref 1 fl Interpret:/0/0 rc 0/0 [83325.659776] Lustre: fir-OST0054: Bulk IO write error with eb7e3af2-d117-4 (at 10.9.101.1@o2ib4), client will retry: rc = -110 [83325.671083] Lustre: Skipped 3 previous similar messages [83358.938007] LustreError: 67808:0:(ldlm_lib.c:3256:target_bulk_io()) @@@ Reconnect on bulk WRITE req@ffff8922ce6f2850 x1648661001299536/t0(0) o4->226a4739-0dcd-665f-3f64-f361283e71b8@10.9.105.16@o2ib4:433/0 lens 488/448 e 0 to 0 dl 1575984413 ref 1 fl Interpret:/0/0 rc 0/0 [83358.962179] LustreError: 67808:0:(ldlm_lib.c:3256:target_bulk_io()) Skipped 2 previous similar messages [83359.223675] md: md3 stopped. [83359.236553] md/raid:md3: device dm-108 operational as raid disk 0 [83359.242672] md/raid:md3: device dm-85 operational as raid disk 9 [83359.248686] md/raid:md3: device dm-97 operational as raid disk 8 [83359.254715] md/raid:md3: device dm-82 operational as raid disk 7 [83359.260729] md/raid:md3: device dm-98 operational as raid disk 6 [83359.266737] md/raid:md3: device dm-72 operational as raid disk 5 [83359.272751] md/raid:md3: device dm-81 operational as raid disk 4 [83359.278766] md/raid:md3: device dm-61 operational as raid disk 3 [83359.284780] md/raid:md3: device dm-103 operational as raid disk 2 [83359.290873] md/raid:md3: device dm-109 operational as raid disk 1 [83359.298897] md/raid:md3: raid level 6 active with 10 out of 10 devices, algorithm 2 [83359.337097] md3: detected capacity change from 0 to 64011422924800 [83359.367126] md: md1 stopped. [83359.377897] md/raid:md1: device dm-63 operational as raid disk 0 [83359.383913] md/raid:md1: device dm-102 operational as raid disk 9 [83359.390016] md/raid:md1: device dm-113 operational as raid disk 8 [83359.396120] md/raid:md1: device dm-96 operational as raid disk 7 [83359.402134] md/raid:md1: device dm-92 operational as raid disk 6 [83359.408144] md/raid:md1: device dm-67 operational as raid disk 5 [83359.414153] md/raid:md1: device dm-71 operational as raid disk 4 [83359.420166] md/raid:md1: device dm-112 operational as raid disk 3 [83359.426268] md/raid:md1: device dm-115 operational as raid disk 2 [83359.432370] md/raid:md1: device dm-118 operational as raid disk 1 [83359.439214] md/raid:md1: raid level 6 active with 10 out of 10 devices, algorithm 2 [83359.460496] md1: detected capacity change from 0 to 64011422924800 [83359.491278] md: md11 stopped. 
[83359.504426] md/raid:md11: device dm-51 operational as raid disk 0 [83359.510538] md/raid:md11: device dm-47 operational as raid disk 9 [83359.516646] md/raid:md11: device dm-50 operational as raid disk 8 [83359.522757] md/raid:md11: device dm-49 operational as raid disk 7 [83359.528866] md/raid:md11: device dm-20 operational as raid disk 6 [83359.534979] md/raid:md11: device dm-31 operational as raid disk 5 [83359.541088] md/raid:md11: device dm-32 operational as raid disk 4 [83359.547195] md/raid:md11: device dm-22 operational as raid disk 3 [83359.553301] md/raid:md11: device dm-11 operational as raid disk 2 [83359.559404] md/raid:md11: device dm-9 operational as raid disk 1 [83359.569083] md/raid:md11: raid level 6 active with 10 out of 10 devices, algorithm 2 [83359.591851] md11: detected capacity change from 0 to 64011422924800 [83359.619668] md: md7 stopped. [83359.638795] md/raid:md7: device dm-0 operational as raid disk 0 [83359.644738] md/raid:md7: device dm-55 operational as raid disk 9 [83359.650792] md/raid:md7: device dm-14 operational as raid disk 8 [83359.656817] md/raid:md7: device dm-13 operational as raid disk 7 [83359.662855] md/raid:md7: device dm-41 operational as raid disk 6 [83359.668899] md/raid:md7: device dm-29 operational as raid disk 5 [83359.674964] md/raid:md7: device dm-24 operational as raid disk 4 [83359.681029] md/raid:md7: device dm-35 operational as raid disk 3 [83359.687052] md/raid:md7: device dm-52 operational as raid disk 2 [83359.693078] md/raid:md7: device dm-30 operational as raid disk 1 [83359.700039] md/raid:md7: raid level 6 active with 10 out of 10 devices, algorithm 2 [83359.721696] md7: detected capacity change from 0 to 64011422924800 [83359.747849] md: md5 stopped. [83359.767611] md/raid:md5: device dm-110 operational as raid disk 0 [83359.773734] md/raid:md5: device dm-93 operational as raid disk 9 [83359.779766] md/raid:md5: device dm-111 operational as raid disk 8 [83359.785881] md/raid:md5: device dm-87 operational as raid disk 7 [83359.791911] md/raid:md5: device dm-90 operational as raid disk 6 [83359.797933] md/raid:md5: device dm-75 operational as raid disk 5 [83359.803959] md/raid:md5: device dm-78 operational as raid disk 4 [83359.809981] md/raid:md5: device dm-70 operational as raid disk 3 [83359.816010] md/raid:md5: device dm-62 operational as raid disk 2 [83359.822035] md/raid:md5: device dm-68 operational as raid disk 1 [83359.829120] md/raid:md5: raid level 6 active with 10 out of 10 devices, algorithm 2 [83359.863208] md5: detected capacity change from 0 to 64011422924800 [83359.900980] md: md9 stopped. 
[83359.924250] md/raid:md9: device dm-59 operational as raid disk 0 [83359.930270] md/raid:md9: device dm-19 operational as raid disk 9 [83359.936303] md/raid:md9: device dm-48 operational as raid disk 8 [83359.942419] md/raid:md9: device dm-39 operational as raid disk 7 [83359.948471] md/raid:md9: device dm-37 operational as raid disk 6 [83359.954508] md/raid:md9: device dm-10 operational as raid disk 5 [83359.960562] md/raid:md9: device dm-38 operational as raid disk 4 [83359.966609] md/raid:md9: device dm-2 operational as raid disk 3 [83359.972651] md/raid:md9: device dm-12 operational as raid disk 2 [83359.978740] md/raid:md9: device dm-36 operational as raid disk 1 [83359.988671] md/raid:md9: raid level 6 active with 10 out of 10 devices, algorithm 2 [83360.017458] md9: detected capacity change from 0 to 64011422924800 [83360.466294] LDISKFS-fs (md3): file extents enabled, maximum tree depth=5 [83360.515278] LDISKFS-fs (md1): file extents enabled, maximum tree depth=5 [83360.800303] LDISKFS-fs (md11): file extents enabled, maximum tree depth=5 [83360.805067] LDISKFS-fs (md3): mounted filesystem with ordered data mode. Opts: errors=remount-ro,no_mbcache,nodelalloc [83360.861175] LDISKFS-fs (md1): mounted filesystem with ordered data mode. Opts: errors=remount-ro,no_mbcache,nodelalloc [83361.154663] LDISKFS-fs (md11): mounted filesystem with ordered data mode. Opts: errors=remount-ro,no_mbcache,nodelalloc [83361.224318] LDISKFS-fs (md7): file extents enabled, maximum tree depth=5 [83361.456315] LDISKFS-fs (md5): file extents enabled, maximum tree depth=5 [83361.481350] Lustre: fir-OST0057: Not available for connect from 10.8.30.26@o2ib6 (not set up) [83361.489945] Lustre: Skipped 29 previous similar messages [83361.525126] LDISKFS-fs (md9): file extents enabled, maximum tree depth=5 [83361.602428] Lustre: fir-OST0057: Imperative Recovery enabled, recovery window shrunk from 300-900 down to 150-900 [83361.613606] Lustre: fir-OST0057: in recovery but waiting for the first client to connect [83361.637346] LDISKFS-fs (md7): mounted filesystem with ordered data mode. Opts: errors=remount-ro,no_mbcache,nodelalloc [83361.653835] Lustre: fir-OST0057: Will be in recovery for at least 2:30, or until 1291 clients reconnect [83361.815987] LDISKFS-fs (md5): mounted filesystem with ordered data mode. Opts: errors=remount-ro,no_mbcache,nodelalloc [83362.152906] LDISKFS-fs (md9): mounted filesystem with ordered data mode. 
Opts: errors=remount-ro,no_mbcache,nodelalloc [83362.162098] Lustre: fir-OST005f: Not available for connect from 10.9.108.14@o2ib4 (not set up) [83362.162101] Lustre: Skipped 1 previous similar message [83362.282047] Lustre: fir-OST005f: Imperative Recovery enabled, recovery window shrunk from 300-900 down to 150-900 [83362.292310] Lustre: Skipped 1 previous similar message [83362.298236] Lustre: fir-OST005f: in recovery but waiting for the first client to connect [83362.306355] Lustre: Skipped 1 previous similar message [83362.321596] Lustre: fir-OST005f: Will be in recovery for at least 2:30, or until 1291 clients reconnect [83362.330995] Lustre: Skipped 1 previous similar message [83363.239466] Lustre: fir-OST005d: Not available for connect from 10.8.20.17@o2ib6 (not set up) [83363.247993] Lustre: Skipped 1 previous similar message [83363.306303] Lustre: fir-OST005d: Imperative Recovery enabled, recovery window shrunk from 300-900 down to 150-900 [83363.316565] Lustre: Skipped 2 previous similar messages [83363.322667] Lustre: fir-OST005d: in recovery but waiting for the first client to connect [83363.330787] Lustre: Skipped 2 previous similar messages [83363.395627] Lustre: fir-OST005d: Will be in recovery for at least 2:30, or until 1290 clients reconnect [83363.405029] Lustre: Skipped 2 previous similar messages [83369.610785] Lustre: fir-OST0055: Client fc841094-f1fd-2756-1968-f74105b220e6 (at 10.8.8.30@o2ib6) reconnected, waiting for 1291 clients in recovery for 2:22 [83370.635246] LNetError: 63837:0:(peer.c:3451:lnet_peer_ni_add_to_recoveryq_locked()) lpni 10.0.10.211@o2ib7 added to recovery queue. Health = 900 [83370.635254] LustreError: 68000:0:(sec.c:2485:sptlrpc_svc_unwrap_bulk()) @@@ truncated bulk GET 2097152(2445312) req@ffff88f2fdf04050 x1648661001299536/t0(0) o4->226a4739-0dcd-665f-3f64-f361283e71b8@10.9.105.16@o2ib4:444/0 lens 488/448 e 0 to 0 dl 1575984424 ref 1 fl Interpret:/2/0 rc 0/0 [83370.673701] LNetError: 63837:0:(peer.c:3451:lnet_peer_ni_add_to_recoveryq_locked()) Skipped 2 previous similar messages [83370.813831] Lustre: fir-OST005d: Client df232092-858e-a632-396d-0cfff0b9daea (at 10.9.110.47@o2ib4) reconnected, waiting for 1290 clients in recovery for 2:22 [83370.828004] Lustre: Skipped 1 previous similar message [83372.045287] Lustre: fir-OST005d: Client 3c222422-1505-df45-a734-88e013dbd97d (at 10.9.102.41@o2ib4) reconnected, waiting for 1290 clients in recovery for 2:21 [83372.059447] Lustre: Skipped 3 previous similar messages [83375.472761] Lustre: fir-OST005d: Client 0cf25d46-002a-85b1-4e67-848f0710e2b1 (at 10.9.109.64@o2ib4) reconnected, waiting for 1290 clients in recovery for 2:17 [83375.486940] Lustre: Skipped 1 previous similar message [83379.572113] Lustre: fir-OST005d: Client 1c30f97b-7d47-8c9d-c1e8-e8bf522ea702 (at 10.8.24.11@o2ib6) reconnected, waiting for 1290 clients in recovery for 2:13 [83379.586191] Lustre: Skipped 13 previous similar messages [83380.684456] LustreError: 67723:0:(ldlm_lib.c:3271:target_bulk_io()) @@@ truncated bulk READ 3145728(4194304) req@ffff88f2fbc53050 x1650576591366400/t0(0) o3->5499f23a-1ea6-ba5d-b45d-cc3f43f05d7e@10.9.109.20@o2ib4:443/0 lens 488/440 e 1 to 0 dl 1575984423 ref 1 fl Interpret:/0/0 rc 0/0 [83380.684688] Lustre: fir-OST0056: Bulk IO read error with 5499f23a-1ea6-ba5d-b45d-cc3f43f05d7e (at 10.9.109.20@o2ib4), client will retry: rc -110 [83380.722686] LustreError: 67723:0:(ldlm_lib.c:3271:target_bulk_io()) Skipped 1 previous similar message [83387.587269] Lustre: fir-OST005b: Client 
7077f577-10fa-a102-c9d8-a4ca3b92f52f (at 10.9.110.25@o2ib4) reconnected, waiting for 1291 clients in recovery for 2:05 [83387.601436] Lustre: Skipped 56 previous similar messages [83403.620422] Lustre: fir-OST0057: Client 9c41e276-bb54-ccfd-4d34-4092e6989764 (at 10.9.103.70@o2ib4) reconnected, waiting for 1291 clients in recovery for 1:48 [83403.634588] Lustre: Skipped 143 previous similar messages [83434.930767] Lustre: fir-OST0055: Client 5f11dd29-1211-44a2-2612-f8309cf085b3 (at 10.8.21.18@o2ib6) refused connection, still busy with 6 references [83436.163818] Lustre: fir-OST005d: Client c104d961-ddd0-a5eb-3382-4ecbd88b591c (at 10.8.18.16@o2ib6) reconnected, waiting for 1290 clients in recovery for 1:17 [83436.177946] Lustre: Skipped 143 previous similar messages [83436.930456] Lustre: fir-OST005b: Client a507eb44-8ff1-13e2-fab8-30d1823663f8 (at 10.8.22.24@o2ib6) refused connection, still busy with 6 references [83440.685672] LNetError: 63837:0:(peer.c:3451:lnet_peer_ni_add_to_recoveryq_locked()) lpni 10.0.10.211@o2ib7 added to recovery queue. Health = 900 [83440.685678] LustreError: 67978:0:(sec.c:2485:sptlrpc_svc_unwrap_bulk()) @@@ truncated bulk GET 0(871544) req@ffff89224fd74050 x1652166659120640/t0(0) o4->da9f6e55-12b4-4@10.9.112.5@o2ib4:517/0 lens 488/448 e 0 to 0 dl 1575984497 ref 1 fl Interpret:/0/0 rc 0/0 [83440.685681] LustreError: 67978:0:(sec.c:2485:sptlrpc_svc_unwrap_bulk()) Skipped 5 previous similar messages [83440.731356] LNetError: 63837:0:(peer.c:3451:lnet_peer_ni_add_to_recoveryq_locked()) Skipped 13 previous similar messages [83444.232488] Lustre: fir-OST0055: Client 3db7ac8a-faba-9fd6-d84d-1b8e92435cfb (at 10.8.26.18@o2ib6) refused connection, still busy with 6 references [83444.245695] Lustre: Skipped 2 previous similar messages [83446.930688] Lustre: fir-OST0057: Client d30d2da1-5a39-6a53-6def-eb7c150e8cb6 (at 10.8.31.1@o2ib6) refused connection, still busy with 6 references [83446.943815] Lustre: Skipped 1 previous similar message [83454.163673] Lustre: fir-OST005d: Client 72b66a84-eb6d-8862-b24a-97d6ffec93b7 (at 10.8.24.22@o2ib6) refused connection, still busy with 6 references [83454.176886] Lustre: Skipped 1 previous similar message [83464.155643] Lustre: fir-OST005d: Client ca09bd61-a4b3-111c-b997-9c7823236764 (at 10.8.22.17@o2ib6) refused connection, still busy with 6 references [83464.168857] Lustre: Skipped 158 previous similar messages [83480.159355] Lustre: fir-OST0059: Client 5028c448-3432-783a-f116-6a44a16b46a7 (at 10.8.29.8@o2ib6) refused connection, still busy with 6 references [83480.172483] Lustre: Skipped 35 previous similar messages [83500.442869] Lustre: fir-OST005d: Client b34be8aa-32d9-4 (at 10.9.113.13@o2ib4) reconnected, waiting for 1290 clients in recovery for 0:12 [83500.455221] Lustre: Skipped 1934 previous similar messages [83511.176440] Lustre: fir-OST0057: Recovery already passed deadline 0:00. If you do not want to wait more, you may force target eviction via 'lctl --device fir-OST0057 abort_recovery'. [83511.656879] Lustre: fir-OST0057: recovery is timed out, evict stale exports [83511.664127] Lustre: fir-OST0057: disconnecting 10 stale clients [83511.806156] Lustre: fir-OST0057: Recovery over after 2:30, of 1291 clients 1281 recovered and 10 were evicted.
[83511.816161] Lustre: Skipped 5 previous similar messages [83511.860836] Lustre: fir-OST0057: deleting orphan objects from 0x18c0000401:11791589 to 0x18c0000401:11792961 [83511.896502] Lustre: fir-OST0057: deleting orphan objects from 0x18c0000400:3036937 to 0x18c0000400:3037633 [83511.932613] Lustre: fir-OST0057: deleting orphan objects from 0x0:27483952 to 0x0:27483969 [83512.043223] Lustre: fir-OST0057: deleting orphan objects from 0x18c0000402:922876 to 0x18c0000402:922913 [83512.155024] Lustre: fir-OST005b: Recovery already passed deadline 0:00. If you do not want to wait more, you may force target eviction via 'lctl --device fir-OST005b abort_recovery'. [83512.171098] Lustre: Skipped 1 previous similar message [83512.324646] Lustre: fir-OST005f: recovery is timed out, evict stale exports [83512.331617] Lustre: Skipped 1 previous similar message [83512.336947] Lustre: fir-OST005f: disconnecting 2 stale clients [83512.342815] Lustre: Skipped 1 previous similar message [83513.398676] Lustre: fir-OST005d: recovery is timed out, evict stale exports [83513.405644] Lustre: Skipped 2 previous similar messages [83513.411046] Lustre: fir-OST005d: disconnecting 1 stale clients [83513.416915] Lustre: Skipped 2 previous similar messages [83516.311131] Lustre: fir-OST005b: Denying connection for new client 3a18a690-f6fb-7d4d-c179-697da5c59619 (at 10.9.116.10@o2ib4), waiting for 1291 known clients (1188 recovered, 100 in progress, and 3 evicted) to recover in 0:56 [83518.150125] Lustre: fir-OST005f: Denying connection for new client 3c020cd0-089d-acb1-e879-86429192cebf (at 10.8.27.2@o2ib6), waiting for 1291 known clients (1193 recovered, 96 in progress, and 2 evicted) to recover in 0:53 [83524.142848] Lustre: fir-OST0059: Denying connection for new client 5a3d40f3-7440-8bab-3ed3-c953b35f5db5 (at 10.9.104.11@o2ib4), waiting for 1291 known clients (1199 recovered, 91 in progress, and 1 evicted) to recover in 0:48 [83526.157270] Lustre: fir-OST005f: Recovery over after 2:44, of 1291 clients 1289 recovered and 2 were evicted. [83526.227237] Lustre: fir-OST005f: deleting orphan objects from 0x1ac0000400:3045562 to 0x1ac0000400:3045857 [83526.463633] Lustre: fir-OST005f: deleting orphan objects from 0x0:27483501 to 0x0:27483521 [83526.484622] Lustre: fir-OST005f: deleting orphan objects from 0x1ac0000402:920820 to 0x1ac0000402:920897 [83526.494815] Lustre: fir-OST005f: deleting orphan objects from 0x1ac0000401:11753307 to 0x1ac0000401:11753953 [83529.508304] Lustre: fir-OST0055: Denying connection for new client 3dc3e4b3-1daf-f260-3956-f8f68e141bca (at 10.9.117.42@o2ib4), waiting for 1291 known clients (1183 recovered, 107 in progress, and 1 evicted) to recover in 0:41 [83529.528358] Lustre: Skipped 1 previous similar message [83530.457223] Lustre: fir-OST0059: Recovery over after 2:47, of 1291 clients 1290 recovered and 1 was evicted.
[83530.548753] Lustre: fir-OST0059: deleting orphan objects from 0x1940000401:2999880 to 0x1940000401:3000289 [83530.672559] Lustre: fir-OST0059: deleting orphan objects from 0x1940000400:906085 to 0x1940000400:906145 [83530.685548] Lustre: fir-OST0059: deleting orphan objects from 0x0:27234514 to 0x0:27234529 [83530.709523] Lustre: fir-OST0059: deleting orphan objects from 0x1940000402:11643021 to 0x1940000402:11644417 [83537.618300] Lustre: 111966:0:(ldlm_lib.c:1765:extend_recovery_timer()) fir-OST005b: extended recovery timer reaching hard limit: 900, extend: 1 [83537.760812] Lustre: fir-OST005b: Recovery over after 2:55, of 1291 clients 1288 recovered and 3 were evicted. [83537.842315] Lustre: fir-OST005b: deleting orphan objects from 0x19c0000401:3019027 to 0x19c0000401:3020481 [83538.024106] Lustre: fir-OST005b: deleting orphan objects from 0x19c0000400:916141 to 0x19c0000400:916193 [83538.084457] Lustre: fir-OST005b: deleting orphan objects from 0x19c0000402:11725588 to 0x19c0000402:11726529 [83538.084461] Lustre: fir-OST005b: deleting orphan objects from 0x0:27420356 to 0x0:27420385 [83554.597085] Lustre: fir-OST0055: Denying connection for new client 3dc3e4b3-1daf-f260-3956-f8f68e141bca (at 10.9.117.42@o2ib4), waiting for 1291 known clients (1184 recovered, 106 in progress, and 1 evicted) to recover in 0:16 [83554.617160] Lustre: Skipped 1 previous similar message [83579.685583] Lustre: fir-OST0055: Denying connection for new client 3dc3e4b3-1daf-f260-3956-f8f68e141bca (at 10.9.117.42@o2ib4), waiting for 1291 known clients (1184 recovered, 106 in progress, and 1 evicted) already passed deadline 0:08 [83580.462432] Lustre: fir-OST0058: Client 964f90b2-201f-0e40-0c9b-d52b03dcf753 (at 10.9.105.61@o2ib4) reconnecting [83580.472632] Lustre: Skipped 7273 previous similar messages [83580.478156] Lustre: fir-OST0058: Connection restored to 964f90b2-201f-0e40-0c9b-d52b03dcf753 (at 10.9.105.61@o2ib4) [83580.488588] Lustre: Skipped 17265 previous similar messages [83593.534006] Lustre: fir-OST0055: Recovery already passed deadline 0:22. If you do not want to wait more, you may force target eviction via 'lctl --device fir-OST0055 abort_recovery'. [83593.535058] Lustre: 112020:0:(ldlm_lib.c:1765:extend_recovery_timer()) fir-OST005d: extended recovery timer reaching hard limit: 900, extend: 1 [83593.535061] Lustre: 112020:0:(ldlm_lib.c:1765:extend_recovery_timer()) Skipped 154 previous similar messages [83593.572770] Lustre: Skipped 3 previous similar messages [83593.735590] Lustre: fir-OST005d: Recovery over after 3:51, of 1290 clients 1289 recovered and 1 was evicted. [83593.787970] Lustre: fir-OST005d: deleting orphan objects from 0x1a40000402:3041004 to 0x1a40000402:3041729 [83593.958249] Lustre: fir-OST005d: deleting orphan objects from 0x0:27502209 to 0x0:27502241 [83593.958251] Lustre: fir-OST005d: deleting orphan objects from 0x1a40000400:11793335 to 0x1a40000400:11794593 [83594.116751] Lustre: fir-OST005d: deleting orphan objects from 0x1a40000401:922080 to 0x1a40000401:922113 [83600.533414] Lustre: fir-OST0055: Recovery already passed deadline 0:29. If you do not want to wait more, you may force target eviction via 'lctl --device fir-OST0055 abort_recovery'. [83604.774454] Lustre: fir-OST0055: Denying connection for new client 3dc3e4b3-1daf-f260-3956-f8f68e141bca (at 10.9.117.42@o2ib4), waiting for 1291 known clients (1184 recovered, 106 in progress, and 1 evicted) already passed deadline 0:33 [83611.533963] Lustre: fir-OST0055: Recovery already passed deadline 0:40. If you do not want to wait more, you may force target eviction via 'lctl --device fir-OST0055 abort_recovery'.
[83611.550895] Lustre: 111654:0:(ldlm_lib.c:1765:extend_recovery_timer()) fir-OST0055: extended recovery timer reaching hard limit: 900, extend: 1 [83611.563857] Lustre: 111654:0:(ldlm_lib.c:1765:extend_recovery_timer()) Skipped 101 previous similar messages [83611.717356] Lustre: fir-OST0055: Recovery over after 4:10, of 1291 clients 1290 recovered and 1 was evicted. [83611.786226] Lustre: fir-OST0055: deleting orphan objects from 0x1840000400:11790357 to 0x1840000400:11790881 [83611.925706] Lustre: fir-OST0055: deleting orphan objects from 0x1840000402:3041849 to 0x1840000402:3042849 [83612.026730] Lustre: fir-OST0055: deleting orphan objects from 0x0:27493454 to 0x0:27493473 [83612.056601] Lustre: fir-OST0055: deleting orphan objects from 0x1840000401:923366 to 0x1840000401:923457 [83796.700915] Lustre: 63864:0:(client.c:2133:ptlrpc_expire_one_request()) @@@ Request sent has timed out for slow reply: [sent 1575984813/real 1575984813] req@ffff89077f17b600 x1652452421759024/t0(0) o400->MGC10.0.10.51@o2ib7@10.0.10.51@o2ib7:26/25 lens 224/224 e 0 to 1 dl 1575984820 ref 1 fl Rpc:XN/0/ffffffff rc 0/-1 [83796.728970] Lustre: 63864:0:(client.c:2133:ptlrpc_expire_one_request()) Skipped 1 previous similar message [83796.738637] LustreError: 166-1: MGC10.0.10.51@o2ib7: Connection to MGS (at 10.0.10.51@o2ib7) was lost; in progress operations using this service will fail [83852.646060] LNetError: 63820:0:(o2iblnd_cb.c:3350:kiblnd_check_txs_locked()) Timed out tx: active_txs, 0 seconds [83852.656233] LNetError: 63820:0:(o2iblnd_cb.c:3425:kiblnd_check_conns()) Timed out RDMA with 10.0.10.51@o2ib7 (56): c: 5, oc: 0, rc: 8 [83852.668738] LNetError: 63832:0:(peer.c:3451:lnet_peer_ni_add_to_recoveryq_locked()) lpni 10.0.10.51@o2ib7 added to recovery queue. Health = 900 [83852.681607] LNetError: 63832:0:(peer.c:3451:lnet_peer_ni_add_to_recoveryq_locked()) Skipped 4 previous similar messages [83852.751315] LNetError: 108379:0:(lib-msg.c:485:lnet_handle_local_failure()) ni 10.0.10.115@o2ib7 added to recovery queue. Health = 900 [83853.751219] LNetError: 108379:0:(lib-msg.c:485:lnet_handle_local_failure()) ni 10.0.10.115@o2ib7 added to recovery queue. Health = 900 [83854.751398] LNetError: 108379:0:(lib-msg.c:485:lnet_handle_local_failure()) ni 10.0.10.115@o2ib7 added to recovery queue. Health = 900 [83865.751890] LNetError: 108379:0:(lib-msg.c:485:lnet_handle_local_failure()) ni 10.0.10.115@o2ib7 added to recovery queue. Health = 900 [83875.751981] LNetError: 108379:0:(lib-msg.c:485:lnet_handle_local_failure()) ni 10.0.10.115@o2ib7 added to recovery queue. Health = 900 [83890.752457] LNetError: 108379:0:(lib-msg.c:485:lnet_handle_local_failure()) ni 10.0.10.115@o2ib7 added to recovery queue. Health = 900 [83920.753034] LNetError: 108379:0:(lib-msg.c:485:lnet_handle_local_failure()) ni 10.0.10.115@o2ib7 added to recovery queue.
Health = 900 [83920.765121] LNetError: 108379:0:(lib-msg.c:485:lnet_handle_local_failure()) Skipped 5 previous similar messages [83922.183649] Lustre: Evicted from MGS (at MGC10.0.10.51@o2ib7_1) after server handle changed from 0xdff031726fbff0e1 to 0xbba64b52f329a2a4 [83941.647915] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Timed out tx for 10.0.10.51@o2ib7: 0 seconds [83956.648211] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Timed out tx for 10.0.10.51@o2ib7: 0 seconds [83956.658316] LNetError: 63820:0:(lib-msg.c:485:lnet_handle_local_failure()) ni 10.0.10.115@o2ib7 added to recovery queue. Health = 900 [83956.670341] LNetError: 63820:0:(lib-msg.c:485:lnet_handle_local_failure()) Skipped 1 previous similar message [83971.648522] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Timed out tx for 10.0.10.51@o2ib7: 0 seconds [83986.648829] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Timed out tx for 10.0.10.51@o2ib7: 0 seconds [84001.649136] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Timed out tx for 10.0.10.51@o2ib7: 0 seconds [84016.649449] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Timed out tx for 10.0.10.51@o2ib7: 0 seconds [84031.649779] LNetError: 63820:0:(lib-msg.c:485:lnet_handle_local_failure()) ni 10.0.10.115@o2ib7 added to recovery queue. Health = 900 [84031.661773] LNetError: 63820:0:(lib-msg.c:485:lnet_handle_local_failure()) Skipped 4 previous similar messages [84047.650090] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Timed out tx for 10.0.10.51@o2ib7: 1 seconds [84047.660177] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Skipped 1 previous similar message [84072.714709] Lustre: fir-MDT0002-lwp-OST005a: Connection to fir-MDT0002 (at 10.0.10.53@o2ib7) was lost; in progress operations using this service will wait for recovery to complete [84072.730692] Lustre: Skipped 10 previous similar messages [84091.650987] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Timed out tx for 10.0.10.51@o2ib7: 0 seconds [84091.661067] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Skipped 2 previous similar messages [84094.908236] Lustre: fir-OST005f: Connection restored to 19d091c7-bad9-3fc5-d8c7-1acb2d646997 (at 10.9.114.9@o2ib4) [84094.918592] Lustre: Skipped 11 previous similar messages [84109.460494] LustreError: 68040:0:(tgt_grant.c:758:tgt_grant_check()) fir-OST0059: cli 52008b8a-1aae-c71d-80d5-aeea34862c6c claims 16801792 GRANT, real grant 16752640 [84109.475266] LustreError: 68040:0:(tgt_grant.c:758:tgt_grant_check()) Skipped 3 previous similar messages [84117.681837] LustreError: 67972:0:(tgt_grant.c:758:tgt_grant_check()) fir-OST0059: cli 52008b8a-1aae-c71d-80d5-aeea34862c6c claims 16801792 GRANT, real grant 0 [84120.010758] LustreError: 67824:0:(tgt_grant.c:758:tgt_grant_check()) fir-OST0059: cli 52008b8a-1aae-c71d-80d5-aeea34862c6c claims 4218880 GRANT, real grant 0 [84133.064019] LustreError: 67828:0:(tgt_grant.c:758:tgt_grant_check()) fir-OST0059: cli 52008b8a-1aae-c71d-80d5-aeea34862c6c claims 16801792 GRANT, real grant 0 [84141.024594] LustreError: 68046:0:(tgt_grant.c:758:tgt_grant_check()) fir-OST0059: cli 52008b8a-1aae-c71d-80d5-aeea34862c6c claims 16801792 GRANT, real grant 0 [84161.652430] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Timed out tx for 10.0.10.51@o2ib7: 0 seconds [84161.662513] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Skipped 52 previous similar messages [84161.671917] LNetError: 63820:0:(lib-msg.c:485:lnet_handle_local_failure()) ni 
10.0.10.115@o2ib7 added to recovery queue. Health = 900 [84161.683960] LNetError: 63820:0:(lib-msg.c:485:lnet_handle_local_failure()) Skipped 12 previous similar messages [84243.112384] LustreError: 68023:0:(tgt_grant.c:758:tgt_grant_check()) fir-OST0059: cli 52008b8a-1aae-c71d-80d5-aeea34862c6c claims 16801792 GRANT, real grant 13017088 [84243.127158] LustreError: 68023:0:(tgt_grant.c:758:tgt_grant_check()) Skipped 1 previous similar message [84295.432583] Lustre: fir-OST0055: deleting orphan objects from 0x1840000402:3042880 to 0x1840000402:3042913 [84295.432586] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3030047 to 0x1800000400:3030113 [84295.432610] Lustre: fir-OST0059: deleting orphan objects from 0x1940000401:3000337 to 0x1940000401:3000353 [84295.432624] Lustre: fir-OST005b: deleting orphan objects from 0x19c0000401:3020523 to 0x19c0000401:3020545 [84295.432627] Lustre: fir-OST0057: deleting orphan objects from 0x18c0000400:3037677 to 0x18c0000400:3037697 [84295.432628] Lustre: fir-OST0056: deleting orphan objects from 0x1880000402:3038989 to 0x1880000402:3039073 [84295.432630] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3045330 to 0x1900000402:3045345 [84295.432631] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3041125 to 0x1a80000402:3041217 [84295.434960] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3048955 to 0x1980000402:3049025 [84295.434965] Lustre: fir-OST005d: deleting orphan objects from 0x1a40000402:3041777 to 0x1a40000402:3041793 [84295.434975] Lustre: fir-OST005f: deleting orphan objects from 0x1ac0000400:3045911 to 0x1ac0000400:3045953 [84295.434977] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3008777 to 0x1a00000401:3008865 [84297.655221] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Timed out tx for 10.0.10.51@o2ib7: 1 seconds [84297.665308] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Skipped 56 previous similar messages [84298.511476] LustreError: 167-0: fir-MDT0002-lwp-OST0054: This client was evicted by fir-MDT0002; in progress operations using this service will fail. [84304.867327] LustreError: 67723:0:(tgt_grant.c:758:tgt_grant_check()) fir-OST0059: cli 52008b8a-1aae-c71d-80d5-aeea34862c6c claims 16801792 GRANT, real grant 7770112 [84420.763267] LNetError: 113073:0:(lib-msg.c:485:lnet_handle_local_failure()) ni 10.0.10.115@o2ib7 added to recovery queue. Health = 900 [84420.775355] LNetError: 113073:0:(lib-msg.c:485:lnet_handle_local_failure()) Skipped 22 previous similar messages [84562.660654] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Timed out tx for 10.0.10.51@o2ib7: 1 seconds [84562.670737] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Skipped 9 previous similar messages [84947.668501] LNetError: 63820:0:(lib-msg.c:485:lnet_handle_local_failure()) ni 10.0.10.115@o2ib7 added to recovery queue. Health = 900 [84947.680497] LNetError: 63820:0:(lib-msg.c:485:lnet_handle_local_failure()) Skipped 34 previous similar messages [85081.671234] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Timed out tx for 10.0.10.51@o2ib7: 0 seconds [85081.681315] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Skipped 34 previous similar messages [85557.680771] LNetError: 63820:0:(lib-msg.c:485:lnet_handle_local_failure()) ni 10.0.10.115@o2ib7 added to recovery queue. 
Health = 900 [85557.692760] LNetError: 63820:0:(lib-msg.c:485:lnet_handle_local_failure()) Skipped 40 previous similar messages [85687.683334] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Timed out tx for 10.0.10.51@o2ib7: 1 seconds [85687.693418] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Skipped 40 previous similar messages [86166.692984] LNetError: 63820:0:(lib-msg.c:485:lnet_handle_local_failure()) ni 10.0.10.115@o2ib7 added to recovery queue. Health = 900 [86166.704978] LNetError: 63820:0:(lib-msg.c:485:lnet_handle_local_failure()) Skipped 42 previous similar messages [86180.149321] Lustre: fir-MDT0002-lwp-OST005a: Connection to fir-MDT0002 (at 10.0.10.54@o2ib7) was lost; in progress operations using this service will wait for recovery to complete [86180.165349] Lustre: Skipped 11 previous similar messages [86257.533861] Lustre: fir-OST0054: Connection restored to fir-MDT0002-mdtlov_UUID (at 10.0.10.53@o2ib7) [86257.543088] Lustre: Skipped 28 previous similar messages [86280.503488] LustreError: 167-0: fir-MDT0002-lwp-OST005e: This client was evicted by fir-MDT0002; in progress operations using this service will fail. [86280.516908] LustreError: Skipped 11 previous similar messages [86295.454034] Lustre: fir-OST005d: deleting orphan objects from 0x1a40000402:3041953 to 0x1a40000402:3041985 [86295.454036] Lustre: fir-OST0055: deleting orphan objects from 0x1840000402:3043079 to 0x1840000402:3043137 [86295.454037] Lustre: fir-OST005f: deleting orphan objects from 0x1ac0000400:3046117 to 0x1ac0000400:3046177 [86295.454040] Lustre: fir-OST0059: deleting orphan objects from 0x1940000401:3000495 to 0x1940000401:3000577 [86295.454056] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3045508 to 0x1900000402:3045537 [86295.454063] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3030279 to 0x1800000400:3030305 [86295.454081] Lustre: fir-OST0056: deleting orphan objects from 0x1880000402:3039249 to 0x1880000402:3039265 [86295.454108] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3049184 to 0x1980000402:3049217 [86295.454148] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3041372 to 0x1a80000402:3041409 [86295.454217] Lustre: fir-OST0057: deleting orphan objects from 0x18c0000400:3037845 to 0x18c0000400:3037889 [86295.454219] Lustre: fir-OST005b: deleting orphan objects from 0x19c0000401:3020727 to 0x19c0000401:3020769 [86295.454224] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3009028 to 0x1a00000401:3009057 [86301.695719] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Timed out tx for 10.0.10.51@o2ib7: 0 seconds [86301.705801] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Skipped 42 previous similar messages [86387.857445] Lustre: 63875:0:(client.c:2133:ptlrpc_expire_one_request()) @@@ Request sent has timed out for slow reply: [sent 1575987404/real 1575987404] req@ffff8922f905ad00 x1652452422920864/t0(0) o400->fir-MDT0003-lwp-OST005f@10.0.10.54@o2ib7:12/10 lens 224/224 e 0 to 1 dl 1575987411 ref 1 fl Rpc:XN/0/ffffffff rc 0/-1 [86387.857447] Lustre: 63866:0:(client.c:2133:ptlrpc_expire_one_request()) @@@ Request sent has timed out for slow reply: [sent 1575987404/real 1575987404] req@ffff8922f9059680 x1652452422920912/t0(0) o400->fir-MDT0003-lwp-OST005b@10.0.10.54@o2ib7:12/10 lens 224/224 e 0 to 1 dl 1575987411 ref 1 fl Rpc:XN/0/ffffffff rc 0/-1 [86387.857451] Lustre: 63866:0:(client.c:2133:ptlrpc_expire_one_request()) Skipped 2 previous similar messages [86387.857453] Lustre: 
fir-MDT0003-lwp-OST0059: Connection to fir-MDT0003 (at 10.0.10.54@o2ib7) was lost; in progress operations using this service will wait for recovery to complete [86387.857455] Lustre: Skipped 2 previous similar messages [86387.945142] Lustre: 63875:0:(client.c:2133:ptlrpc_expire_one_request()) Skipped 6 previous similar messages [86476.977328] Lustre: fir-OST0054: Connection restored to fir-MDT0002-mdtlov_UUID (at 10.0.10.53@o2ib7) [86476.986563] Lustre: Skipped 20 previous similar messages [86513.340197] LustreError: 167-0: fir-MDT0003-lwp-OST0054: This client was evicted by fir-MDT0003; in progress operations using this service will fail. [86513.353585] LustreError: Skipped 11 previous similar messages [86520.840041] Lustre: fir-OST0056: deleting orphan objects from 0x1880000400:11762168 to 0x1880000400:11762241 [86520.840046] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000400:11785079 to 0x1a80000400:11785153 [86520.840059] Lustre: fir-OST0058: deleting orphan objects from 0x1900000401:11781867 to 0x1900000401:11782049 [86520.840069] Lustre: fir-OST0055: deleting orphan objects from 0x1840000400:11791176 to 0x1840000400:11791265 [86520.840073] Lustre: fir-OST0059: deleting orphan objects from 0x1940000402:11644728 to 0x1940000402:11644769 [86520.840127] Lustre: fir-OST0054: deleting orphan objects from 0x1800000401:11769931 to 0x1800000401:11769985 [86520.840129] Lustre: fir-OST005b: deleting orphan objects from 0x19c0000402:11726859 to 0x19c0000402:11726881 [86520.840131] Lustre: fir-OST005a: deleting orphan objects from 0x1980000401:11826483 to 0x1980000401:11826657 [86520.840132] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000400:11638479 to 0x1a00000400:11638529 [86520.840179] Lustre: fir-OST0057: deleting orphan objects from 0x18c0000401:11793269 to 0x18c0000401:11793313 [86520.840181] Lustre: fir-OST005d: deleting orphan objects from 0x1a40000400:11794873 to 0x1a40000400:11794913 [86520.840190] Lustre: fir-OST005f: deleting orphan objects from 0x1ac0000401:11754239 to 0x1ac0000401:11754273 [86588.605617] Lustre: fir-MDT0001-lwp-OST005a: Connection to fir-MDT0001 (at 10.0.10.52@o2ib7) was lost; in progress operations using this service will wait for recovery to complete [86588.621603] Lustre: Skipped 19 previous similar messages [86708.846358] Lustre: fir-OST0054: Connection restored to fir-MDT0001-mdtlov_UUID (at 10.0.10.51@o2ib7) [86708.855584] Lustre: Skipped 21 previous similar messages [86734.084538] Lustre: fir-OST0055: deleting orphan objects from 0x1840000401:923481 to 0x1840000401:923521 [86734.084540] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:918591 to 0x1800000402:918625 [86734.084543] Lustre: fir-OST0058: deleting orphan objects from 0x1900000400:922723 to 0x1900000400:922753 [86734.084545] Lustre: fir-OST0057: deleting orphan objects from 0x18c0000402:922950 to 0x18c0000402:922977 [86734.084549] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:907512 to 0x1a00000402:907553 [86734.084551] Lustre: fir-OST0059: deleting orphan objects from 0x1940000400:906186 to 0x1940000400:906209 [86734.084553] Lustre: fir-OST0056: deleting orphan objects from 0x1880000401:920974 to 0x1880000401:920993 [86734.084571] Lustre: fir-OST005f: deleting orphan objects from 0x1ac0000402:920947 to 0x1ac0000402:920993 [86734.084576] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000401:920757 to 0x1a80000401:920801 [86734.084583] Lustre: fir-OST005b: deleting orphan objects from 0x19c0000400:916221 to 0x19c0000400:916257 
[86734.084584] Lustre: fir-OST005d: deleting orphan objects from 0x1a40000401:922148 to 0x1a40000401:922177 [86734.085560] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:924075 to 0x1980000400:924097 [86739.136744] LustreError: 167-0: fir-MDT0001-lwp-OST005e: This client was evicted by fir-MDT0001; in progress operations using this service will fail. [86739.150146] LustreError: Skipped 11 previous similar messages [87846.814365] Lustre: Failing over fir-OST005d [87846.861997] Lustre: fir-OST0059: Not available for connect from 10.8.30.2@o2ib6 (stopping) [87846.870267] Lustre: Skipped 1 previous similar message [87846.932238] LustreError: 114752:0:(ldlm_resource.c:1147:ldlm_resource_complain()) filter-fir-OST0059_UUID: namespace resource [0x1940000401:0x2dc03d:0x0].0x0 (ffff88ed360c12c0) refcount nonzero (2) after lock cleanup; forcing cleanup. [87847.376418] Lustre: fir-OST0055: Not available for connect from 10.9.113.11@o2ib4 (stopping) [87847.384864] Lustre: Skipped 343 previous similar messages [87848.380594] Lustre: fir-OST0057: Not available for connect from 10.9.105.65@o2ib4 (stopping) [87848.389032] Lustre: Skipped 628 previous similar messages [87849.162769] LustreError: 137-5: fir-OST005d_UUID: not available for connect from 10.8.23.24@o2ib6 (no target). If you are running an HA pair check that the target is mounted on the other server. [87849.180054] LustreError: Skipped 8474 previous similar messages [87849.962903] Lustre: server umount fir-OST0059 complete [87849.968053] Lustre: Skipped 2 previous similar messages [87850.574398] LustreError: 87509:0:(ldlm_lockd.c:2324:ldlm_cancel_handler()) ldlm_cancel from 10.9.108.71@o2ib4 arrived at 1575988874 with bad export cookie 8099382812963126271 [87850.589954] LustreError: 87509:0:(ldlm_lockd.c:2324:ldlm_cancel_handler()) Skipped 1 previous similar message [87851.284712] LustreError: 66057:0:(ldlm_lockd.c:2324:ldlm_cancel_handler()) ldlm_cancel from 10.9.116.1@o2ib4 arrived at 1575988874 with bad export cookie 8099382812963119894 [87853.979725] md7: detected capacity change from 64011422924800 to 0 [87853.985921] md: md7 stopped. [87854.043783] md1: detected capacity change from 64011422924800 to 0 [87854.049978] md: md1 stopped. [87854.050954] md3: detected capacity change from 64011422924800 to 0 [87854.050958] md: md3 stopped. [87854.053291] md11: detected capacity change from 64011422924800 to 0 [87854.053302] md: md11 stopped. [87854.140475] md9: detected capacity change from 64011422924800 to 0 [87854.146665] md: md9 stopped. [87855.711959] LustreError: 87509:0:(ldlm_lockd.c:2324:ldlm_cancel_handler()) ldlm_cancel from 10.9.101.49@o2ib4 arrived at 1575988879 with bad export cookie 8099382812963135658 [87855.727511] LustreError: 87509:0:(ldlm_lockd.c:2324:ldlm_cancel_handler()) Skipped 3 previous similar messages [87856.355456] md5: detected capacity change from 64011422924800 to 0 [87856.361657] md: md5 stopped. [87857.071054] md: md7 stopped. [87857.072014] md: md3 stopped. [87858.074149] md: md3 stopped. 
[87861.751375] LustreError: 68056:0:(ldlm_lockd.c:2324:ldlm_cancel_handler()) ldlm_cancel from 10.9.101.13@o2ib4 arrived at 1575988885 with bad export cookie 8099382812963145024 [87869.272333] LustreError: 87509:0:(ldlm_lockd.c:2324:ldlm_cancel_handler()) ldlm_cancel from 10.9.101.47@o2ib4 arrived at 1575988892 with bad export cookie 8099382812963143897 [87869.287880] LustreError: 87509:0:(ldlm_lockd.c:2324:ldlm_cancel_handler()) Skipped 3 previous similar messages [87883.295171] LustreError: 137-5: fir-OST005b_UUID: not available for connect from 10.9.117.22@o2ib4 (no target). If you are running an HA pair check that the target is mounted on the other server. [87883.312550] LustreError: Skipped 6249 previous similar messages [87947.296381] LustreError: 137-5: fir-OST0055_UUID: not available for connect from 10.8.27.2@o2ib6 (no target). If you are running an HA pair check that the target is mounted on the other server. [87947.313575] LustreError: Skipped 7797 previous similar messages [88075.492651] LustreError: 137-5: fir-OST005f_UUID: not available for connect from 10.9.101.18@o2ib4 (no target). If you are running an HA pair check that the target is mounted on the other server. [88075.510050] LustreError: Skipped 13956 previous similar messages [89042.543427] LustreError: 11-0: fir-MDT0000-lwp-OST005e: operation ldlm_enqueue to node 10.0.10.52@o2ib7 failed: rc = -107 [89042.543431] Lustre: fir-MDT0000-lwp-OST0056: Connection to fir-MDT0000 (at 10.0.10.52@o2ib7) was lost; in progress operations using this service will wait for recovery to complete [89042.543433] Lustre: Skipped 2 previous similar messages [89042.575605] LustreError: Skipped 30 previous similar messages [89123.752807] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Timed out tx for 10.0.10.54@o2ib7: 0 seconds [89123.762893] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Skipped 112 previous similar messages [89123.772391] LNetError: 63820:0:(lib-msg.c:485:lnet_handle_local_failure()) ni 10.0.10.115@o2ib7 added to recovery queue. Health = 900 [89123.784389] LNetError: 63820:0:(lib-msg.c:485:lnet_handle_local_failure()) Skipped 34 previous similar messages [89191.907564] Lustre: fir-OST0054: Connection restored to fir-MDT0001-mdtlov_UUID (at 10.0.10.51@o2ib7) [89191.916813] Lustre: Skipped 23 previous similar messages [89222.899028] LustreError: 167-0: fir-MDT0000-lwp-OST005c: This client was evicted by fir-MDT0000; in progress operations using this service will fail. 
[89222.912416] LustreError: Skipped 11 previous similar messages [89222.919007] LustreError: 63873:0:(client.c:1197:ptlrpc_import_delay_req()) @@@ invalidate in flight req@ffff890de48d3f00 x1652452423870288/t0(0) o103->fir-MDT0000-lwp-OST005e@10.0.10.51@o2ib7:17/18 lens 328/224 e 0 to 0 dl 0 ref 1 fl Rpc:W/0/ffffffff rc 0/-1 [89223.159901] LustreError: 11-0: fir-MDT0000-lwp-OST005c: operation quota_acquire to node 10.0.10.51@o2ib7 failed: rc = -11 [89223.170875] LustreError: Skipped 3 previous similar messages [89224.874759] LustreError: 11-0: fir-MDT0000-lwp-OST005a: operation quota_acquire to node 10.0.10.51@o2ib7 failed: rc = -11 [89224.885718] LustreError: Skipped 16 previous similar messages [89230.696477] Lustre: fir-OST0056: deleting orphan objects from 0x0:27467647 to 0x0:27467681 [89230.696513] Lustre: fir-OST005c: deleting orphan objects from 0x0:27180265 to 0x0:27180289 [89230.696575] Lustre: fir-OST005a: deleting orphan objects from 0x0:27549741 to 0x0:27549761 [89230.696608] Lustre: fir-OST0054: deleting orphan objects from 0x0:27445580 to 0x0:27445601 [89230.696647] Lustre: fir-OST005e: deleting orphan objects from 0x0:27454900 to 0x0:27454945 [89230.697022] Lustre: fir-OST0058: deleting orphan objects from 0x0:27494442 to 0x0:27494465 [89273.755955] LNetError: 114061:0:(lib-msg.c:485:lnet_handle_local_failure()) ni 10.0.10.115@o2ib7 added to recovery queue. Health = 900 [89273.768058] LNetError: 114061:0:(lib-msg.c:485:lnet_handle_local_failure()) Skipped 6 previous similar messages [89275.873243] Lustre: 63870:0:(client.c:2133:ptlrpc_expire_one_request()) @@@ Request sent has failed due to network error: [sent 1575990296/real 1575990299] req@ffff89129e81da00 x1652452423996192/t0(0) o400->MGC10.0.10.51@o2ib7@10.0.10.52@o2ib7:26/25 lens 224/224 e 0 to 1 dl 1575990303 ref 1 fl Rpc:eXN/0/ffffffff rc 0/-1 [89275.901644] Lustre: 63870:0:(client.c:2133:ptlrpc_expire_one_request()) Skipped 2 previous similar messages [89275.911396] LustreError: 166-1: MGC10.0.10.51@o2ib7: Connection to MGS (at 10.0.10.52@o2ib7) was lost; in progress operations using this service will fail [89431.759049] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Timed out tx for 10.0.10.52@o2ib7: 0 seconds [89431.769136] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Skipped 23 previous similar messages [89431.778562] LNetError: 63820:0:(lib-msg.c:485:lnet_handle_local_failure()) ni 10.0.10.115@o2ib7 added to recovery queue. Health = 900 [89431.790592] LNetError: 63820:0:(lib-msg.c:485:lnet_handle_local_failure()) Skipped 13 previous similar messages [89583.762119] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Timed out tx for 10.0.10.52@o2ib7: 1 seconds [89583.772201] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Skipped 7 previous similar messages [89757.765638] LNetError: 63820:0:(lib-msg.c:485:lnet_handle_local_failure()) ni 10.0.10.115@o2ib7 added to recovery queue. Health = 900 [89757.777660] LNetError: 63820:0:(lib-msg.c:485:lnet_handle_local_failure()) Skipped 15 previous similar messages [90008.770606] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Timed out tx for 10.0.10.52@o2ib7: 0 seconds [90008.780691] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Skipped 11 previous similar messages [90410.778568] LNetError: 63820:0:(lib-msg.c:485:lnet_handle_local_failure()) ni 10.0.10.115@o2ib7 added to recovery queue. 
Health = 900 [90410.790559] LNetError: 63820:0:(lib-msg.c:485:lnet_handle_local_failure()) Skipped 26 previous similar messages [90836.787083] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Timed out tx for 10.0.10.52@o2ib7: 0 seconds [90836.797165] LNet: 63820:0:(o2iblnd_cb.c:3396:kiblnd_check_conns()) Skipped 23 previous similar messages [90956.821732] Lustre: fir-MDT0003-lwp-OST005c: Connection to fir-MDT0003 (at 10.0.10.53@o2ib7) was lost; in progress operations using this service will wait for recovery to complete [90956.837717] Lustre: Skipped 9 previous similar messages [91042.163769] Lustre: fir-OST0054: Connection restored to fir-MDT0003-mdtlov_UUID (at 10.0.10.54@o2ib7) [91042.173003] Lustre: Skipped 8 previous similar messages [91082.264270] LustreError: 167-0: fir-MDT0003-lwp-OST005c: This client was evicted by fir-MDT0003; in progress operations using this service will fail. [91082.277659] LustreError: Skipped 5 previous similar messages [91082.285228] Lustre: fir-MDT0003-lwp-OST005c: Connection restored to 10.0.10.54@o2ib7 (at 10.0.10.54@o2ib7) [91082.294896] Lustre: Skipped 6 previous similar messages [91110.101659] Lustre: fir-OST0056: deleting orphan objects from 0x1880000400:11762647 to 0x1880000400:11762689 [91110.101679] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000400:11638916 to 0x1a00000400:11638945 [91110.101680] Lustre: fir-OST0058: deleting orphan objects from 0x1900000401:11782453 to 0x1900000401:11782497 [91110.101803] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000400:11785582 to 0x1a80000400:11785601 [91110.101804] Lustre: fir-OST0054: deleting orphan objects from 0x1800000401:11770384 to 0x1800000401:11770401 [91110.101805] Lustre: fir-OST005a: deleting orphan objects from 0x1980000401:11827087 to 0x1980000401:11827105 [91257.883712] Lustre: fir-MDT0001-lwp-OST005a: Connection to fir-MDT0001 (at 10.0.10.51@o2ib7) was lost; in progress operations using this service will wait for recovery to complete [91257.899712] Lustre: Skipped 5 previous similar messages [91318.892140] Lustre: fir-OST0054: Connection restored to fir-MDT0001-mdtlov_UUID (at 10.0.10.52@o2ib7) [91318.901362] Lustre: Skipped 3 previous similar messages [91383.326212] LustreError: 167-0: fir-MDT0001-lwp-OST005a: This client was evicted by fir-MDT0001; in progress operations using this service will fail. 
[91383.339597] LustreError: Skipped 5 previous similar messages [91383.347176] Lustre: fir-MDT0001-lwp-OST005a: Connection restored to 10.0.10.52@o2ib7 (at 10.0.10.52@o2ib7) [91383.356830] Lustre: Skipped 6 previous similar messages [91396.114129] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:907594 to 0x1a00000402:907617 [91396.114130] Lustre: fir-OST0056: deleting orphan objects from 0x1880000401:921038 to 0x1880000401:921057 [91396.114134] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:918672 to 0x1800000402:918689 [91396.114203] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:924139 to 0x1980000400:924161 [91396.114266] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000401:920847 to 0x1a80000401:920865 [91396.114268] Lustre: fir-OST0058: deleting orphan objects from 0x1900000400:922805 to 0x1900000400:922849 [91458.591755] Lustre: Evicted from MGS (at 10.0.10.51@o2ib7) after server handle changed from 0xbba64b52f329a2a4 to 0xc3c20c0652556a2a [91458.603833] Lustre: MGC10.0.10.51@o2ib7: Connection restored to 10.0.10.51@o2ib7 (at 10.0.10.51@o2ib7) [92189.693251] Lustre: fir-OST0058: haven't heard from client 82b9ac9e-bd42-fb9c-cb3e-f327857b510c (at 10.9.0.62@o2ib4) in 227 seconds. I think it's dead, and I am evicting it. exp ffff892272a71400, cur 1575993213 expire 1575993063 last 1575992986 [92189.714880] Lustre: Skipped 5 previous similar messages [94647.453315] Lustre: fir-OST0054: Connection restored to 82b9ac9e-bd42-fb9c-cb3e-f327857b510c (at 10.9.0.62@o2ib4) [94647.463585] Lustre: Skipped 5 previous similar messages [94647.725707] Lustre: fir-OST0058: haven't heard from client cec884d3-ca4b-8127-2f6b-7762665aa5f8 (at 10.9.0.64@o2ib4) in 227 seconds. I think it's dead, and I am evicting it. exp ffff892266f68800, cur 1575995671 expire 1575995521 last 1575995444 [94647.747347] Lustre: Skipped 5 previous similar messages [96836.831164] Lustre: fir-OST0054: Connection restored to cec884d3-ca4b-8127-2f6b-7762665aa5f8 (at 10.9.0.64@o2ib4) [96836.841425] Lustre: Skipped 5 previous similar messages [97268.763743] Lustre: fir-OST005a: haven't heard from client fb9a2d5e-e9b3-4fb9-b988-9954fcfb0920 (at 10.8.0.66@o2ib6) in 227 seconds. I think it's dead, and I am evicting it. exp ffff8912f8ae5000, cur 1575998292 expire 1575998142 last 1575998065 [97268.785369] Lustre: Skipped 5 previous similar messages [99322.533302] Lustre: fir-OST0054: Connection restored to fb9a2d5e-e9b3-4fb9-b988-9954fcfb0920 (at 10.8.0.66@o2ib6) [99322.543567] Lustre: Skipped 5 previous similar messages [100810.839701] Lustre: fir-OST0058: haven't heard from client 40a204f8-61bd-7bf5-8e8b-66a640362528 (at 10.8.21.28@o2ib6) in 227 seconds. I think it's dead, and I am evicting it. 
exp ffff8922569b5400, cur 1576001834 expire 1576001684 last 1576001607 [100810.861510] Lustre: Skipped 5 previous similar messages [102548.503191] Lustre: fir-OST0054: Connection restored to 0af2ee10-72ea-97a8-65e7-44544fdbc0b9 (at 10.9.108.39@o2ib4) [102548.513721] Lustre: Skipped 5 previous similar messages [102887.676821] Lustre: fir-OST0054: Connection restored to 6943a6ac-ba36-d287-3012-a3d9ab556566 (at 10.8.21.14@o2ib6) [102887.687255] Lustre: Skipped 5 previous similar messages [102902.279630] Lustre: fir-OST0054: Connection restored to 98c710cf-a183-35fe-d60d-8494e153f1c3 (at 10.8.21.13@o2ib6) [102902.290069] Lustre: Skipped 5 previous similar messages [102904.746400] Lustre: fir-OST0054: Connection restored to (at 10.8.21.8@o2ib6) [102904.746401] Lustre: fir-OST0056: Connection restored to (at 10.8.21.8@o2ib6) [102904.746404] Lustre: Skipped 6 previous similar messages [102904.766189] Lustre: Skipped 4 previous similar messages [102914.701723] Lustre: fir-OST0054: Connection restored to 40a204f8-61bd-7bf5-8e8b-66a640362528 (at 10.8.21.28@o2ib6) [102914.712164] Lustre: Skipped 11 previous similar messages [102933.490929] Lustre: fir-OST0054: Connection restored to 07312e22-36ea-cbe1-f5a7-b2f2d00651b0 (at 10.8.20.22@o2ib6) [102933.501368] Lustre: Skipped 23 previous similar messages [102967.732545] Lustre: fir-OST0054: Connection restored to b5be2f5f-0f09-196f-7061-da3a3aa7cecb (at 10.8.20.31@o2ib6) [102967.742984] Lustre: Skipped 83 previous similar messages [103036.782165] Lustre: fir-OST0054: Connection restored to a77d579b-bc84-7eca-11a1-85e2fd56cb4e (at 10.8.20.23@o2ib6) [103036.792604] Lustre: Skipped 106 previous similar messages [103113.335081] LustreError: 66125:0:(tgt_grant.c:758:tgt_grant_check()) fir-OST005e: cli 20463417-fb32-2f92-5aae-59bfa8e287e3 claims 14893056 GRANT, real grant 0 [103139.403106] LustreError: 67963:0:(tgt_grant.c:758:tgt_grant_check()) fir-OST0054: cli aa2a15c8-736e-708a-65eb-dfabc669a063 claims 28672 GRANT, real grant 0 [103916.737798] Lustre: fir-OST0054: Connection restored to 8003aaab-bcab-ef28-dd2b-704b0d862745 (at 10.8.22.12@o2ib6) [103916.748231] Lustre: Skipped 11 previous similar messages [103939.599954] Lustre: fir-OST0058: Connection restored to 92c08489-d99f-9692-0d8e-5d862ef77698 (at 10.8.22.5@o2ib6) [103939.610312] Lustre: Skipped 4 previous similar messages [104013.821679] Lustre: fir-OST0054: Connection restored to 37454ba9-0898-97b6-5a68-4a0682e739f8 (at 10.8.20.8@o2ib6) [104013.832031] Lustre: Skipped 18 previous similar messages [105981.538288] Lustre: fir-OST0054: Connection restored to b11f5302-9207-4a63-91bc-6141fa0b09e3 (at 10.8.22.4@o2ib6) [105981.548641] Lustre: Skipped 5 previous similar messages [106064.964622] LustreError: 68035:0:(tgt_grant.c:758:tgt_grant_check()) fir-OST0058: cli 5d5036fa-60c3-4 claims 16752640 GRANT, real grant 0 [107096.674363] LustreError: 67973:0:(tgt_grant.c:758:tgt_grant_check()) fir-OST0054: cli 295209bb-0224-d868-bd7c-cd75c3b19a1c claims 200704 GRANT, real grant 0 [107295.378991] LustreError: 68000:0:(tgt_grant.c:758:tgt_grant_check()) fir-OST005c: cli 31722a42-53ae-b678-363c-dc0a8c0b6d11 claims 28672 GRANT, real grant 0 [107864.528049] LustreError: 67965:0:(tgt_grant.c:758:tgt_grant_check()) fir-OST005c: cli 5d5036fa-60c3-4 claims 2076672 GRANT, real grant 0 [109001.246063] perf: interrupt took too long (3130 > 3128), lowering kernel.perf_event_max_sample_rate to 63000 [111057.203872] Lustre: fir-OST0054: Connection restored to a5709ca0-bfe0-cc30-835f-99ba0583ca05 (at 10.8.20.27@o2ib6) 
[111057.214309] Lustre: Skipped 5 previous similar messages [112225.415351] LustreError: 67989:0:(tgt_grant.c:758:tgt_grant_check()) fir-OST005a: cli a83208a9-361d-4 claims 1597440 GRANT, real grant 0 [113265.085246] Lustre: fir-OST005e: haven't heard from client 8fbd1a16-d09d-1ef7-e10d-4e68dc0a9f97 (at 10.8.23.32@o2ib6) in 227 seconds. I think it's dead, and I am evicting it. exp ffff892250f16c00, cur 1576014288 expire 1576014138 last 1576014061 [113265.107060] Lustre: Skipped 61 previous similar messages [113267.080987] Lustre: fir-OST005a: haven't heard from client 8fbd1a16-d09d-1ef7-e10d-4e68dc0a9f97 (at 10.8.23.32@o2ib6) in 227 seconds. I think it's dead, and I am evicting it. exp ffff88e322a4a800, cur 1576014290 expire 1576014140 last 1576014063 [113267.102809] Lustre: Skipped 1 previous similar message [114926.010694] LustreError: 67711:0:(tgt_grant.c:758:tgt_grant_check()) fir-OST0054: cli 57b26761-b79f-628f-0ec2-0a10fd7ac3bd claims 28672 GRANT, real grant 0 [115441.676815] Lustre: fir-OST0054: Connection restored to (at 10.8.23.32@o2ib6) [115441.684128] Lustre: Skipped 5 previous similar messages [117054.836037] Lustre: fir-OST0054: Connection restored to 0bbd53e2-6989-83e6-f126-86a473496205 (at 10.8.21.36@o2ib6) [117054.846482] Lustre: Skipped 5 previous similar messages [119510.220122] Lustre: fir-OST0058: haven't heard from client ee4590b6-1057-e690-5db0-89b0af3963cd (at 10.8.22.30@o2ib6) in 227 seconds. I think it's dead, and I am evicting it. exp ffff8922bfba1000, cur 1576020533 expire 1576020383 last 1576020306 [119510.241935] Lustre: Skipped 3 previous similar messages [119519.215315] Lustre: fir-OST0056: haven't heard from client ee4590b6-1057-e690-5db0-89b0af3963cd (at 10.8.22.30@o2ib6) in 227 seconds. I think it's dead, and I am evicting it. exp ffff892251931400, cur 1576020542 expire 1576020392 last 1576020315 [119519.237123] Lustre: Skipped 3 previous similar messages [119887.411759] Lustre: fir-OST0054: Connection restored to c20915b7-72a8-8f0f-a961-7c81095a2283 (at 10.8.23.29@o2ib6) [119887.422195] Lustre: Skipped 5 previous similar messages [121596.087729] Lustre: fir-OST0054: Connection restored to ee4590b6-1057-e690-5db0-89b0af3963cd (at 10.8.22.30@o2ib6) [121596.098177] Lustre: Skipped 5 previous similar messages [125060.118327] Lustre: fir-OST0054: Connection restored to 7898fb8f-92c0-6a6b-8c01-20f1dcd2c072 (at 10.8.23.20@o2ib6) [125060.128767] Lustre: Skipped 5 previous similar messages [125522.869119] LustreError: 67911:0:(tgt_grant.c:758:tgt_grant_check()) fir-OST0054: cli da9f6e55-12b4-4 claims 1597440 GRANT, real grant 0 [128448.096826] LustreError: 67954:0:(tgt_grant.c:758:tgt_grant_check()) fir-OST005a: cli da9f6e55-12b4-4 claims 36864 GRANT, real grant 0 [129713.385351] Lustre: fir-OST0054: Connection restored to 43d748a2-b8c5-e7f9-8b00-d16d4390ff4d (at 10.8.22.6@o2ib6) [129713.395703] Lustre: Skipped 3 previous similar messages [130416.428824] Lustre: fir-OST005c: haven't heard from client b6bab463-5f5c-8f5c-f09a-8f0ce0f6e1cd (at 10.8.21.31@o2ib6) in 227 seconds. I think it's dead, and I am evicting it. exp ffff8922f2259800, cur 1576031439 expire 1576031289 last 1576031212 [130416.450651] Lustre: Skipped 1 previous similar message [130492.437526] Lustre: fir-OST005e: haven't heard from client 7515dbe4-f1c8-844a-9186-76f9c6288c34 (at 10.9.104.2@o2ib4) in 222 seconds. I think it's dead, and I am evicting it. 
exp ffff892250bf5400, cur 1576031515 expire 1576031365 last 1576031293 [130492.459325] Lustre: Skipped 29 previous similar messages [131645.782803] Lustre: fir-OST0054: Connection restored to (at 10.9.114.14@o2ib4) [131645.790234] Lustre: Skipped 5 previous similar messages [131693.053219] Lustre: fir-OST0056: Connection restored to d66c3860-7975-f3f1-3866-4386eb6742ed (at 10.8.19.6@o2ib6) [131693.063577] Lustre: Skipped 5 previous similar messages [131881.923365] Lustre: fir-OST0054: Connection restored to 2295c161-47a8-c199-f8c4-4e53eff1b957 (at 10.9.110.71@o2ib4) [131881.923366] Lustre: fir-OST0058: Connection restored to 2295c161-47a8-c199-f8c4-4e53eff1b957 (at 10.9.110.71@o2ib4) [131881.944410] Lustre: Skipped 4 previous similar messages [131921.259280] Lustre: fir-OST0054: Connection restored to 67fb2ec4-7a5a-f103-4386-bc08c967f193 (at 10.9.107.9@o2ib4) [131921.269718] Lustre: Skipped 5 previous similar messages [132005.721784] Lustre: fir-OST0054: Connection restored to e8872901-9e69-2d9a-e57a-55077a64186b (at 10.9.109.25@o2ib4) [132005.721785] Lustre: fir-OST0058: Connection restored to e8872901-9e69-2d9a-e57a-55077a64186b (at 10.9.109.25@o2ib4) [132005.742828] Lustre: Skipped 4 previous similar messages [132136.020350] Lustre: fir-OST0054: Connection restored to 2ad8ff13-d978-9373-7245-882c6479cc4c (at 10.9.110.63@o2ib4) [132136.030875] Lustre: Skipped 5 previous similar messages [132377.338818] Lustre: fir-OST0054: Connection restored to b5acf087-1850-f5e1-236a-4cc1bab1a9f0 (at 10.9.104.34@o2ib4) [132377.349350] Lustre: Skipped 34 previous similar messages [132484.938830] Lustre: fir-OST0054: Connection restored to b6bab463-5f5c-8f5c-f09a-8f0ce0f6e1cd (at 10.8.21.31@o2ib6) [132484.938830] Lustre: fir-OST0056: Connection restored to b6bab463-5f5c-8f5c-f09a-8f0ce0f6e1cd (at 10.8.21.31@o2ib6) [132484.938833] Lustre: Skipped 18 previous similar messages [132484.965098] Lustre: Skipped 4 previous similar messages [132702.203103] Lustre: fir-OST0054: Connection restored to (at 10.8.28.9@o2ib6) [132702.210335] Lustre: Skipped 29 previous similar messages [134728.568717] Lustre: fir-OST0056: haven't heard from client aadbd140-afe6-3cc5-5efa-1bf64465f6e7 (at 10.8.20.34@o2ib6) in 227 seconds. I think it's dead, and I am evicting it. exp ffff8922515e2800, cur 1576035751 expire 1576035601 last 1576035524 [134728.590521] Lustre: Skipped 77 previous similar messages [134734.512090] Lustre: fir-OST0054: haven't heard from client aadbd140-afe6-3cc5-5efa-1bf64465f6e7 (at 10.8.20.34@o2ib6) in 227 seconds. I think it's dead, and I am evicting it. exp ffff892252a14c00, cur 1576035757 expire 1576035607 last 1576035530 [134738.545470] Lustre: fir-OST005c: haven't heard from client aadbd140-afe6-3cc5-5efa-1bf64465f6e7 (at 10.8.20.34@o2ib6) in 227 seconds. I think it's dead, and I am evicting it. 
exp ffff8922f23b4000, cur 1576035761 expire 1576035611 last 1576035534 [134738.567278] Lustre: Skipped 3 previous similar messages [136846.637044] Lustre: fir-OST0054: Connection restored to (at 10.8.20.34@o2ib6) [136846.644360] Lustre: Skipped 53 previous similar messages [141589.169921] Lustre: fir-OST0054: Connection restored to 55ff50e7-08a4-be07-5499-ccc18f03f2c9 (at 10.8.23.17@o2ib6) [141589.180357] Lustre: Skipped 5 previous similar messages [145583.045993] LustreError: 68031:0:(tgt_grant.c:758:tgt_grant_check()) fir-OST0058: cli c9911b4c-e55e-f4aa-416a-b652019239f7 claims 28672 GRANT, real grant 0 [148556.094351] Lustre: fir-OST0054: Connection restored to 77f07ca8-e3bd-72f6-4ac1-3da8889522b3 (at 10.8.22.19@o2ib6) [148556.094352] Lustre: fir-OST0056: Connection restored to 77f07ca8-e3bd-72f6-4ac1-3da8889522b3 (at 10.8.22.19@o2ib6) [148556.115220] Lustre: Skipped 4 previous similar messages [149058.222678] Lustre: fir-OST0054: Connection restored to 10918197-1d43-5fa6-1aea-d8f3cfbab80a (at 10.8.20.5@o2ib6) [149058.233028] Lustre: Skipped 5 previous similar messages [154493.133417] Lustre: 67842:0:(client.c:2133:ptlrpc_expire_one_request()) @@@ Request sent has timed out for slow reply: [sent 1576055508/real 1576055508] req@ffff88ec68d40480 x1652452551974832/t0(0) o106->fir-OST0056@10.8.22.1@o2ib6:15/16 lens 296/280 e 0 to 1 dl 1576055515 ref 1 fl Rpc:X/0/ffffffff rc 0/-1 [154500.160563] Lustre: 67842:0:(client.c:2133:ptlrpc_expire_one_request()) @@@ Request sent has timed out for slow reply: [sent 1576055515/real 1576055515] req@ffff88ec68d40480 x1652452551974832/t0(0) o106->fir-OST0056@10.8.22.1@o2ib6:15/16 lens 296/280 e 0 to 1 dl 1576055522 ref 1 fl Rpc:X/2/ffffffff rc 0/-1 [154507.187710] Lustre: 67842:0:(client.c:2133:ptlrpc_expire_one_request()) @@@ Request sent has timed out for slow reply: [sent 1576055522/real 1576055522] req@ffff88ec68d40480 x1652452551974832/t0(0) o106->fir-OST0056@10.8.22.1@o2ib6:15/16 lens 296/280 e 0 to 1 dl 1576055529 ref 1 fl Rpc:X/2/ffffffff rc 0/-1 [154514.214855] Lustre: 67842:0:(client.c:2133:ptlrpc_expire_one_request()) @@@ Request sent has timed out for slow reply: [sent 1576055529/real 1576055529] req@ffff88ec68d40480 x1652452551974832/t0(0) o106->fir-OST0056@10.8.22.1@o2ib6:15/16 lens 296/280 e 0 to 1 dl 1576055536 ref 1 fl Rpc:X/2/ffffffff rc 0/-1 [154521.241994] Lustre: 67842:0:(client.c:2133:ptlrpc_expire_one_request()) @@@ Request sent has timed out for slow reply: [sent 1576055536/real 1576055536] req@ffff88ec68d40480 x1652452551974832/t0(0) o106->fir-OST0056@10.8.22.1@o2ib6:15/16 lens 296/280 e 0 to 1 dl 1576055543 ref 1 fl Rpc:X/2/ffffffff rc 0/-1 [154535.269278] Lustre: 67842:0:(client.c:2133:ptlrpc_expire_one_request()) @@@ Request sent has timed out for slow reply: [sent 1576055550/real 1576055550] req@ffff88ec68d40480 x1652452551974832/t0(0) o106->fir-OST0056@10.8.22.1@o2ib6:15/16 lens 296/280 e 0 to 1 dl 1576055557 ref 1 fl Rpc:X/2/ffffffff rc 0/-1 [154535.296553] Lustre: 67842:0:(client.c:2133:ptlrpc_expire_one_request()) Skipped 1 previous similar message [154556.307704] Lustre: 67842:0:(client.c:2133:ptlrpc_expire_one_request()) @@@ Request sent has timed out for slow reply: [sent 1576055571/real 1576055571] req@ffff88ec68d40480 x1652452551974832/t0(0) o106->fir-OST0056@10.8.22.1@o2ib6:15/16 lens 296/280 e 0 to 1 dl 1576055578 ref 1 fl Rpc:X/2/ffffffff rc 0/-1 [154556.334976] Lustre: 67842:0:(client.c:2133:ptlrpc_expire_one_request()) Skipped 2 previous similar messages [154591.345414] Lustre: 
67842:0:(client.c:2133:ptlrpc_expire_one_request()) @@@ Request sent has timed out for slow reply: [sent 1576055606/real 1576055606] req@ffff88ec68d40480 x1652452551974832/t0(0) o106->fir-OST0056@10.8.22.1@o2ib6:15/16 lens 296/280 e 0 to 1 dl 1576055613 ref 1 fl Rpc:X/2/ffffffff rc 0/-1
[154591.372668] Lustre: 67842:0:(client.c:2133:ptlrpc_expire_one_request()) Skipped 4 previous similar messages
[154661.384828] Lustre: 67842:0:(client.c:2133:ptlrpc_expire_one_request()) @@@ Request sent has timed out for slow reply: [sent 1576055676/real 1576055676] req@ffff88ec68d40480 x1652452551974832/t0(0) o106->fir-OST0056@10.8.22.1@o2ib6:15/16 lens 296/280 e 0 to 1 dl 1576055683 ref 1 fl Rpc:X/2/ffffffff rc 0/-1
[154661.412114] Lustre: 67842:0:(client.c:2133:ptlrpc_expire_one_request()) Skipped 9 previous similar messages
[154686.606341] LNet: Service thread pid 67842 was inactive for 200.46s. The thread might be hung, or it might only be slow and will resume later. Dumping the stack trace for debugging purposes:
[154686.623374] Pid: 67842, comm: ll_ost00_056 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[154686.633949] Call Trace:
[154686.636506] [] ptlrpc_set_wait+0x480/0x790 [ptlrpc]
[154686.643193] [] ldlm_run_ast_work+0xd5/0x3a0 [ptlrpc]
[154686.649970] [] ldlm_glimpse_locks+0x3b/0x100 [ptlrpc]
[154686.656875] [] ofd_intent_policy+0x69b/0x920 [ofd]
[154686.663456] [] ldlm_lock_enqueue+0x356/0xa20 [ptlrpc]
[154686.670309] [] ldlm_handle_enqueue0+0xa56/0x15f0 [ptlrpc]
[154686.677505] [] tgt_enqueue+0x62/0x210 [ptlrpc]
[154686.683824] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[154686.690877] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[154686.698716] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[154686.705172] [] kthread+0xd1/0xe0
[154686.710175] [] ret_from_fork_nospec_begin+0xe/0x21
[154686.716742] [] 0xffffffffffffffff
[154686.721919] LustreError: dumping log to /tmp/lustre-log.1576055708.67842
[154692.916159] Lustre: fir-OST005c: haven't heard from client 09a03217-f2a1-2632-097f-38339f6cbc7c (at 10.8.22.1@o2ib6) in 227 seconds. I think it's dead, and I am evicting it. exp ffff892260120000, cur 1576055715 expire 1576055565 last 1576055488
[154692.937983] LustreError: 67842:0:(ldlm_lockd.c:681:ldlm_handle_ast_error()) ### client (nid 10.8.22.1@o2ib6) failed to reply to glimpse AST (req@ffff88ec68d40480 x1652452551974832 status 0 rc -5), evict it ns: filter-fir-OST0056_UUID lock: ffff88fdf7a76c00/0x7066c9c18d907795 lrc: 3/0,0 mode: PW/PW res: [0x1880000402:0x2e6270:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 67108864->68719476735) flags: 0x40000000000000 nid: 10.8.22.1@o2ib6 remote: 0x891f0e0311d6012b expref: 6 pid: 66438 timeout: 0 lvb_type: 0
[154692.983786] LustreError: 138-a: fir-OST0056: A client on nid 10.8.22.1@o2ib6 was evicted due to a lock glimpse callback time out: rc -5
[154692.996079] LustreError: Skipped 1 previous similar message
[154693.001792] LNet: Service thread pid 67842 completed after 206.86s. This indicates the system was overloaded (too many service threads, or there were not enough hardware resources).
[154695.053939] Lustre: fir-OST005e: haven't heard from client 09a03217-f2a1-2632-097f-38339f6cbc7c (at 10.8.22.1@o2ib6) in 227 seconds. I think it's dead, and I am evicting it.
exp ffff892250855400, cur 1576055717 expire 1576055567 last 1576055490 [154695.075659] Lustre: Skipped 4 previous similar messages [154744.471386] Lustre: fir-OST0054: Connection restored to 37c7e464-6686-fdc0-1c81-eae75026a910 (at 10.8.22.2@o2ib6) [154744.481736] Lustre: Skipped 5 previous similar messages [154776.988984] LustreError: 67983:0:(tgt_grant.c:758:tgt_grant_check()) fir-OST0058: cli 75ca7fbe-4dbb-5345-e1bf-3a337b10784c claims 28672 GRANT, real grant 0 [154805.181795] LustreError: 67824:0:(tgt_grant.c:758:tgt_grant_check()) fir-OST0058: cli 1ca33a17-2a16-9d12-d021-e37db0ce1d5c claims 28672 GRANT, real grant 0 [156639.530128] Lustre: fir-OST0054: Connection restored to 1b1ace85-4b01-f903-bb83-ddb9142a20b0 (at 10.8.23.25@o2ib6) [156639.540596] Lustre: Skipped 5 previous similar messages [156814.118108] Lustre: fir-OST0054: Connection restored to (at 10.8.22.1@o2ib6) [156814.125342] Lustre: Skipped 5 previous similar messages [157998.975312] Lustre: fir-OST0056: haven't heard from client d48dfcab-ce8f-b93c-3409-a3e76df7c945 (at 10.8.23.22@o2ib6) in 227 seconds. I think it's dead, and I am evicting it. exp ffff892251bfa000, cur 1576059021 expire 1576058871 last 1576058794 [158003.001566] Lustre: fir-OST005c: haven't heard from client d48dfcab-ce8f-b93c-3409-a3e76df7c945 (at 10.8.23.22@o2ib6) in 227 seconds. I think it's dead, and I am evicting it. exp ffff892276b4d000, cur 1576059025 expire 1576058875 last 1576058798 [158003.023385] Lustre: Skipped 1 previous similar message [160185.543119] Lustre: fir-OST0054: Connection restored to d48dfcab-ce8f-b93c-3409-a3e76df7c945 (at 10.8.23.22@o2ib6) [160185.553555] Lustre: Skipped 5 previous similar messages [175952.204662] Lustre: fir-OST0054: Connection restored to 54375174-855e-4eb5-233f-bff7110a15a5 (at 10.8.22.7@o2ib6) [175952.215018] Lustre: Skipped 5 previous similar messages [180747.446024] Lustre: fir-OST0054: haven't heard from client 5a6b489d-8a0c-1dc7-c222-8c5330c92213 (at 10.8.8.20@o2ib6) in 227 seconds. I think it's dead, and I am evicting it. exp ffff892252f4d800, cur 1576081769 expire 1576081619 last 1576081542 [180747.467764] Lustre: Skipped 3 previous similar messages [180928.431481] Lustre: fir-OST0054: haven't heard from client dcb788f4-67f3-4 (at 10.9.109.25@o2ib4) in 227 seconds. I think it's dead, and I am evicting it. 
exp ffff88f00a8e3c00, cur 1576081950 expire 1576081800 last 1576081723 [180928.451542] Lustre: Skipped 53 previous similar messages [180936.847782] Lustre: fir-OST0054: Connection restored to (at 10.9.107.20@o2ib4) [180936.855189] Lustre: Skipped 5 previous similar messages [181184.073424] Lustre: fir-OST0054: Connection restored to 2295c161-47a8-c199-f8c4-4e53eff1b957 (at 10.9.110.71@o2ib4) [181184.083951] Lustre: Skipped 5 previous similar messages [181199.055734] Lustre: fir-OST0056: Connection restored to e8872901-9e69-2d9a-e57a-55077a64186b (at 10.9.109.25@o2ib4) [181199.066249] Lustre: Skipped 5 previous similar messages [181684.308691] LustreError: 67945:0:(tgt_grant.c:758:tgt_grant_check()) fir-OST005a: cli da9f6e55-12b4-4 claims 1605632 GRANT, real grant 36864 [182181.803276] Lustre: fir-OST0054: Connection restored to 907ff646-c0ba-4 (at 10.9.117.46@o2ib4) [182181.811976] Lustre: Skipped 5 previous similar messages [182214.739773] Lustre: fir-OST0054: Connection restored to (at 10.8.9.1@o2ib6) [182214.746916] Lustre: Skipped 5 previous similar messages [182428.920092] Lustre: fir-OST0054: Connection restored to 8a77a7b3-28b8-5200-390a-7fe51bf1be0a (at 10.8.7.5@o2ib6) [182428.930351] Lustre: Skipped 5 previous similar messages [182522.314512] Lustre: fir-OST0054: Connection restored to 0df17536-86d5-4 (at 10.9.101.60@o2ib4) [182522.323216] Lustre: Skipped 5 previous similar messages [182538.970365] Lustre: fir-OST0054: Connection restored to 54fd6f2e-cb6c-4 (at 10.9.101.57@o2ib4) [182538.979070] Lustre: Skipped 5 previous similar messages [182549.155961] Lustre: fir-OST0054: Connection restored to (at 10.9.101.59@o2ib4) [182549.163362] Lustre: Skipped 5 previous similar messages [182638.397737] Lustre: fir-OST0054: Connection restored to 5a6b489d-8a0c-1dc7-c222-8c5330c92213 (at 10.8.8.20@o2ib6) [182638.408090] Lustre: Skipped 5 previous similar messages [182842.722377] Lustre: fir-OST0054: Connection restored to fc841094-f1fd-2756-1968-f74105b220e6 (at 10.8.8.30@o2ib6) [182842.732727] Lustre: Skipped 5 previous similar messages [183093.965890] Lustre: fir-OST0054: Connection restored to 8393b8d6-d8ea-1574-4a69-552de6648def (at 10.9.102.48@o2ib4) [183093.976416] Lustre: Skipped 16 previous similar messages [183552.110674] Lustre: fir-OST0054: Connection restored to 6676e5f3-c59e-c628-05b4-c9153b23c3f7 (at 10.8.21.16@o2ib6) [183552.121113] Lustre: Skipped 11 previous similar messages [187603.803944] LustreError: 67812:0:(tgt_grant.c:758:tgt_grant_check()) fir-OST0054: cli 57b26761-b79f-628f-0ec2-0a10fd7ac3bd claims 212992 GRANT, real grant 28672 [187984.504138] LustreError: 67963:0:(tgt_grant.c:758:tgt_grant_check()) fir-OST0054: cli da9f6e55-12b4-4 claims 1605632 GRANT, real grant 1597440 [188497.658275] LustreError: 67893:0:(tgt_grant.c:758:tgt_grant_check()) fir-OST005e: cli 837641b0-d89a-c20b-3139-4eb8fe8d733b claims 110592 GRANT, real grant 0 [191519.399673] Lustre: fir-OST0054: Connection restored to 5ce2e68e-76b2-bbc3-75c5-66a5c2b02651 (at 10.8.23.15@o2ib6) [191519.410118] Lustre: Skipped 10 previous similar messages [192928.681711] Lustre: fir-OST005e: haven't heard from client 45ffa07c-203c-dad9-8f0d-e714fc6465b8 (at 10.8.22.11@o2ib6) in 227 seconds. I think it's dead, and I am evicting it. exp ffff892250928400, cur 1576093950 expire 1576093800 last 1576093723 [192928.703538] Lustre: Skipped 11 previous similar messages [194605.727054] Lustre: fir-OST005e: haven't heard from client 704e8622-7442-8eb3-b4e3-c86a69ef45af (at 10.8.20.21@o2ib6) in 227 seconds. 
I think it's dead, and I am evicting it. exp ffff892250aacc00, cur 1576095627 expire 1576095477 last 1576095400 [194605.748848] Lustre: Skipped 5 previous similar messages [194624.706806] Lustre: fir-OST0054: haven't heard from client 704e8622-7442-8eb3-b4e3-c86a69ef45af (at 10.8.20.21@o2ib6) in 227 seconds. I think it's dead, and I am evicting it. exp ffff892253eacc00, cur 1576095646 expire 1576095496 last 1576095419 [194624.728603] Lustre: Skipped 4 previous similar messages [194995.860637] Lustre: fir-OST0054: Connection restored to (at 10.8.22.11@o2ib6) [194995.867949] Lustre: Skipped 5 previous similar messages [195007.252896] Lustre: fir-OST0054: Connection restored to 4f86dcb5-8d8c-1599-bd44-005eb718eb65 (at 10.8.22.10@o2ib6) [195007.263331] Lustre: Skipped 5 previous similar messages [195063.004671] Lustre: fir-OST0054: Connection restored to a8841932-bc4a-ab11-1ace-8e1fdda46930 (at 10.8.23.23@o2ib6) [195063.015129] Lustre: Skipped 5 previous similar messages [195644.721852] Lustre: fir-OST0058: haven't heard from client c3415e6e-dda3-8602-28df-a932f656881d (at 10.9.112.17@o2ib4) in 227 seconds. I think it's dead, and I am evicting it. exp ffff89225390b000, cur 1576096666 expire 1576096516 last 1576096439 [196724.523607] Lustre: fir-OST0054: Connection restored to 704e8622-7442-8eb3-b4e3-c86a69ef45af (at 10.8.20.21@o2ib6) [196724.534047] Lustre: Skipped 5 previous similar messages [196756.062990] Lustre: fir-OST0054: Connection restored to (at 10.9.112.17@o2ib4) [196756.070397] Lustre: Skipped 5 previous similar messages [197116.468514] Lustre: fir-OST0054: Connection restored to (at 10.8.9.1@o2ib6) [197116.468515] Lustre: fir-OST0056: Connection restored to (at 10.8.9.1@o2ib6) [197116.482806] Lustre: Skipped 3 previous similar messages [197131.014869] Lustre: fir-OST0054: Connection restored to bdb2a993-354c-ddce-bf9d-5960b01c7975 (at 10.8.23.13@o2ib6) [197131.025313] Lustre: Skipped 5 previous similar messages [197225.346656] Lustre: fir-OST0054: Connection restored to 37c7e464-6686-fdc0-1c81-eae75026a910 (at 10.8.22.2@o2ib6) [197225.357004] Lustre: Skipped 5 previous similar messages [197323.297435] Lustre: fir-OST0054: Connection restored to (at 10.9.113.13@o2ib4) [197323.304838] Lustre: Skipped 5 previous similar messages [197404.416039] Lustre: fir-OST0054: Connection restored to 0df17536-86d5-4 (at 10.9.101.60@o2ib4) [197404.424752] Lustre: Skipped 5 previous similar messages [197736.574442] Lustre: fir-OST0054: Connection restored to 48f67746-6174-d4eb-bf6b-7295eeca30af (at 10.8.24.7@o2ib6) [197736.574443] Lustre: fir-OST0056: Connection restored to 48f67746-6174-d4eb-bf6b-7295eeca30af (at 10.8.24.7@o2ib6) [197736.574446] Lustre: Skipped 6 previous similar messages [197736.600487] Lustre: Skipped 4 previous similar messages [199238.819613] Lustre: fir-OST0058: haven't heard from client 000d6715-906a-fe00-99d9-1ba39760e7f7 (at 10.8.22.16@o2ib6) in 227 seconds. I think it's dead, and I am evicting it. exp ffff892253cbbc00, cur 1576100260 expire 1576100110 last 1576100033 [199238.841428] Lustre: Skipped 5 previous similar messages [199731.802323] Lustre: fir-OST0058: haven't heard from client 85fbdf3d-35db-072c-03b7-e9977baaa2bf (at 10.8.23.12@o2ib6) in 227 seconds. I think it's dead, and I am evicting it. 
exp ffff892261a48c00, cur 1576100753 expire 1576100603 last 1576100526 [199731.824114] Lustre: Skipped 11 previous similar messages [199942.960635] Lustre: fir-OST0054: Connection restored to (at 10.8.23.12@o2ib6) [199942.967956] Lustre: Skipped 5 previous similar messages [201318.492614] Lustre: fir-OST0054: Connection restored to cea3a46a-6e64-ecd2-2636-1b7611592cd3 (at 10.8.23.8@o2ib6) [201318.502968] Lustre: Skipped 5 previous similar messages [201326.401557] Lustre: fir-OST0054: Connection restored to 60e7dd38-7049-6086-949c-b7f68f3f00ca (at 10.8.23.18@o2ib6) [201326.411991] Lustre: Skipped 5 previous similar messages [201337.051897] Lustre: fir-OST0054: Connection restored to 5bbbecd7-709a-9f29-693e-a19d73c8cefb (at 10.8.22.18@o2ib6) [201337.062358] Lustre: Skipped 5 previous similar messages [201354.235845] Lustre: fir-OST0054: Connection restored to 94396c8b-eccd-7da2-de85-f79420b2e641 (at 10.8.23.33@o2ib6) [201354.246291] Lustre: Skipped 11 previous similar messages [202422.854023] Lustre: fir-OST0054: haven't heard from client 8c2fd243-a078-4 (at 10.9.117.46@o2ib4) in 227 seconds. I think it's dead, and I am evicting it. exp ffff8902f96e1000, cur 1576103444 expire 1576103294 last 1576103217 [202422.874103] Lustre: Skipped 5 previous similar messages [202578.475456] Lustre: fir-OST0054: Connection restored to 907ff646-c0ba-4 (at 10.9.117.46@o2ib4) [202578.484188] Lustre: Skipped 17 previous similar messages [202817.834063] Lustre: fir-OST0054: Connection restored to fb63a42c-93f0-576d-f57c-a83fc4375277 (at 10.8.21.2@o2ib6) [202817.844415] Lustre: Skipped 2 previous similar messages [202829.180467] Lustre: fir-OST0056: Connection restored to (at 10.8.22.32@o2ib6) [202829.187793] Lustre: Skipped 5 previous similar messages [204494.660619] Lustre: fir-OST0054: Connection restored to 98b70d1a-7357-ff1b-1e1d-8bd68b6592c2 (at 10.8.23.27@o2ib6) [204494.671065] Lustre: Skipped 5 previous similar messages [204744.214693] Lustre: fir-OST0054: Connection restored to 84c69ebc-7dc0-678f-942c-60a0d29de5a5 (at 10.8.22.27@o2ib6) [204744.225127] Lustre: Skipped 5 previous similar messages [205309.175062] LustreError: 67988:0:(tgt_grant.c:758:tgt_grant_check()) fir-OST005c: cli 8442b5f1-7da8-4 claims 28672 GRANT, real grant 0 [218476.265970] Lustre: fir-OST0054: Connection restored to 0aa269ad-def9-3be3-d596-fd7c0af955fb (at 10.8.20.26@o2ib6) [218476.276413] Lustre: Skipped 5 previous similar messages [220654.286010] Lustre: fir-OST0054: Connection restored to 207217ac-1163-df36-3120-8bf6c3ecbb93 (at 10.8.23.21@o2ib6) [220654.296456] Lustre: Skipped 5 previous similar messages [221415.472120] LustreError: 67718:0:(tgt_grant.c:758:tgt_grant_check()) fir-OST005c: cli 35ba350a-bccc-3fd9-39f0-a94eca80785d claims 16752640 GRANT, real grant 0 [228194.663383] Lustre: fir-OST0054: Connection restored to e15078c5-8209-4 (at 10.8.25.17@o2ib6) [228194.671998] Lustre: Skipped 5 previous similar messages [228247.375046] Lustre: fir-OST0054: haven't heard from client e15078c5-8209-4 (at 10.8.25.17@o2ib6) in 227 seconds. I think it's dead, and I am evicting it. exp ffff892253ace800, cur 1576129268 expire 1576129118 last 1576129041 [228247.395022] Lustre: Skipped 5 previous similar messages [228622.376907] Lustre: fir-OST0054: haven't heard from client 208ccf09-d6ca-4 (at 10.8.25.17@o2ib6) in 227 seconds. I think it's dead, and I am evicting it. 
exp ffff88f4f5725c00, cur 1576129643 expire 1576129493 last 1576129416
[228622.396920] Lustre: Skipped 5 previous similar messages
[229763.991868] Lustre: fir-OST0054: Connection restored to e15078c5-8209-4 (at 10.8.25.17@o2ib6)
[229764.000503] Lustre: Skipped 5 previous similar messages
[230242.409048] Lustre: fir-OST0056: haven't heard from client 0cfc0c49-f407-4 (at 10.8.25.17@o2ib6) in 227 seconds. I think it's dead, and I am evicting it. exp ffff890858777400, cur 1576131263 expire 1576131113 last 1576131036
[230242.429025] Lustre: Skipped 5 previous similar messages
[232660.307500] Lustre: fir-OST0054: Connection restored to f8d5264b-1de7-5abd-fef8-60297df9f169 (at 10.8.22.20@o2ib6)
[232660.317943] Lustre: Skipped 5 previous similar messages
[232666.429524] Lustre: fir-OST0054: Connection restored to bd358c1a-07c6-3f9f-7c84-efdb04e29ef9 (at 10.8.21.1@o2ib6)
[232666.439875] Lustre: Skipped 5 previous similar messages
[232730.711982] Lustre: fir-OST0054: Connection restored to 26627d4d-9b72-83d5-02a3-73c7f9501a91 (at 10.8.22.26@o2ib6)
[232730.722419] Lustre: Skipped 5 previous similar messages
[236433.088011] LustreError: 67718:0:(tgt_grant.c:758:tgt_grant_check()) fir-OST005c: cli 8442b5f1-7da8-4 claims 3637248 GRANT, real grant 28672
[236623.856599] LustreError: 67971:0:(tgt_grant.c:758:tgt_grant_check()) fir-OST0056: cli cc645112-3584-d084-5d6b-c64af0bf19ce claims 299008 GRANT, real grant 0
[237227.255864] LNet: Service thread pid 67997 was inactive for 200.48s. The thread might be hung, or it might only be slow and will resume later. Dumping the stack trace for debugging purposes:
[237227.272892] Pid: 67997, comm: ll_ost_io00_031 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[237227.283678] Call Trace:
[237227.286234] [] bitmap_startwrite+0x1f5/0x210
[237227.292286] [] add_stripe_bio+0x451/0x7f0 [raid456]
[237227.298953] [] raid5_make_request+0x1e4/0xca0 [raid456]
[237227.305972] [] md_handle_request+0xd0/0x150
[237227.311936] [] md_make_request+0x79/0x190
[237227.317759] [] generic_make_request+0x147/0x380
[237227.324088] [] submit_bio+0x70/0x150
[237227.329487] [] osd_submit_bio+0x1c/0x60 [osd_ldiskfs]
[237227.336326] [] osd_do_bio.isra.35+0x9b5/0xab0 [osd_ldiskfs]
[237227.343723] [] osd_write_commit+0x3ec/0x8c0 [osd_ldiskfs]
[237227.350903] [] ofd_commitrw_write+0xfbe/0x1d40 [ofd]
[237227.357653] [] ofd_commitrw+0x48c/0x9e0 [ofd]
[237227.363835] [] tgt_brw_write+0x10cb/0x1cf0 [ptlrpc]
[237227.370543] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[237227.377579] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[237227.385406] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[237227.391836] [] kthread+0xd1/0xe0
[237227.396855] [] ret_from_fork_nospec_begin+0xe/0x21
[237227.403431] [] 0xffffffffffffffff
[237227.408592] LustreError: dumping log to /tmp/lustre-log.1576138247.67997
[237227.417351] Pid: 67875, comm: ll_ost_io01_019 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[237227.428137] Call Trace:
[237227.430693] [] osd_trans_stop+0x265/0x8e0 [osd_ldiskfs]
[237227.437702] [] ofd_trans_stop+0x25/0x60 [ofd]
[237227.443864] [] ofd_commitrw_write+0x9d4/0x1d40 [ofd]
[237227.450606] [] ofd_commitrw+0x48c/0x9e0 [ofd]
[237227.456750] [] tgt_brw_write+0x10cb/0x1cf0 [ptlrpc]
[237227.463458] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[237227.470523] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[237227.478351] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[237227.484804] [] kthread+0xd1/0xe0
[237227.489810] [] ret_from_fork_nospec_begin+0xe/0x21
[237227.496411] [] 0xffffffffffffffff
[237227.501547] Pid: 66127, comm: ll_ost_io01_000 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[237227.512325] Call Trace:
[237227.514878] [] bitmap_startwrite+0x1f5/0x210
[237227.520928] [] add_stripe_bio+0x451/0x7f0 [raid456]
[237227.527599] [] raid5_make_request+0x1e4/0xca0 [raid456]
[237227.534635] [] md_handle_request+0xd0/0x150
[237227.540635] [] md_make_request+0x79/0x190
[237227.546452] [] generic_make_request+0x147/0x380
[237227.552776] [] submit_bio+0x70/0x150
[237227.558158] [] osd_submit_bio+0x1c/0x60 [osd_ldiskfs]
[237227.565041] [] osd_do_bio.isra.35+0x489/0xab0 [osd_ldiskfs]
[237227.572395] [] osd_write_commit+0x3ec/0x8c0 [osd_ldiskfs]
[237227.579591] [] ofd_commitrw_write+0xfbe/0x1d40 [ofd]
[237227.586343] [] ofd_commitrw+0x48c/0x9e0 [ofd]
[237227.592521] [] tgt_brw_write+0x10cb/0x1cf0 [ptlrpc]
[237227.599219] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[237227.606309] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[237227.614163] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[237227.620594] [] kthread+0xd1/0xe0
[237227.625610] [] ret_from_fork_nospec_begin+0xe/0x21
[237227.632189] [] 0xffffffffffffffff
[237227.637348] Pid: 67903, comm: ll_ost_io02_025 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[237227.648139] Call Trace:
[237227.650703] [] bitmap_startwrite+0x1f5/0x210
[237227.656754] [] add_stripe_bio+0x451/0x7f0 [raid456]
[237227.663473] [] raid5_make_request+0x1e4/0xca0 [raid456]
[237227.670548] [] md_handle_request+0xd0/0x150
[237227.676521] [] md_make_request+0x79/0x190
[237227.682317] [] generic_make_request+0x147/0x380
[237227.688659] [] submit_bio+0x70/0x150
[237227.694018] [] osd_submit_bio+0x1c/0x60 [osd_ldiskfs]
[237227.700923] [] osd_do_bio.isra.35+0x9b5/0xab0 [osd_ldiskfs]
[237227.708290] [] osd_write_commit+0x3ec/0x8c0 [osd_ldiskfs]
[237227.715539] [] ofd_commitrw_write+0xfbe/0x1d40 [ofd]
[237227.722288] [] ofd_commitrw+0x48c/0x9e0 [ofd]
[237227.728448] [] tgt_brw_write+0x10cb/0x1cf0 [ptlrpc]
[237227.735151] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[237227.742187] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[237227.750006] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[237227.756489] [] kthread+0xd1/0xe0
[237227.761507] [] ret_from_fork_nospec_begin+0xe/0x21
[237227.768122] [] 0xffffffffffffffff
[237227.773261] LNet: Service thread pid 67874 was inactive for 201.01s. The thread might be hung, or it might only be slow and will resume later. Dumping the stack trace for debugging purposes:
[237227.790313] LNet: Skipped 3 previous similar messages
[237227.795467] Pid: 67874, comm: ll_ost_io02_019 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[237227.806299] Call Trace:
[237227.808854] [] bitmap_startwrite+0x1f5/0x210
[237227.814913] [] add_stripe_bio+0x451/0x7f0 [raid456]
[237227.821567] [] raid5_make_request+0x1e4/0xca0 [raid456]
[237227.828571] [] md_handle_request+0xd0/0x150
[237227.834545] [] md_make_request+0x79/0x190
[237227.840363] [] generic_make_request+0x147/0x380
[237227.846684] [] submit_bio+0x70/0x150
[237227.852096] [] osd_submit_bio+0x1c/0x60 [osd_ldiskfs]
[237227.858932] [] osd_do_bio.isra.35+0x9b5/0xab0 [osd_ldiskfs]
[237227.866313] [] osd_write_commit+0x3ec/0x8c0 [osd_ldiskfs]
[237227.873502] [] ofd_commitrw_write+0xfbe/0x1d40 [ofd]
[237227.880275] [] ofd_commitrw+0x48c/0x9e0 [ofd]
[237227.886422] [] tgt_brw_write+0x10cb/0x1cf0 [ptlrpc]
[237227.893190] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[237227.900239] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[237227.908063] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[237227.914493] [] kthread+0xd1/0xe0
[237227.919539] [] ret_from_fork_nospec_begin+0xe/0x21
[237227.926108] [] 0xffffffffffffffff
[237227.931236] LNet: Service thread pid 66126 was inactive for 201.17s. Watchdog stack traces are limited to 3 per 300 seconds, skipping this one.
[237229.815918] LNet: Service thread pid 68032 was inactive for 200.37s. Watchdog stack traces are limited to 3 per 300 seconds, skipping this one.
[237229.828881] LustreError: dumping log to /tmp/lustre-log.1576138250.68032
[237230.839941] LNet: Service thread pid 67718 was inactive for 200.50s. Watchdog stack traces are limited to 3 per 300 seconds, skipping this one.
[237230.852901] LNet: Skipped 3 previous similar messages
[237230.858054] LustreError: dumping log to /tmp/lustre-log.1576138251.67718
[237231.863959] LustreError: dumping log to /tmp/lustre-log.1576138252.67781
[237232.887976] LNet: Service thread pid 68043 was inactive for 200.48s. Watchdog stack traces are limited to 3 per 300 seconds, skipping this one.
[237232.900923] LNet: Skipped 2 previous similar messages
[237232.906072] LustreError: dumping log to /tmp/lustre-log.1576138253.68043
[237234.424012] LustreError: dumping log to /tmp/lustre-log.1576138254.67827
[237235.960051] LustreError: dumping log to /tmp/lustre-log.1576138256.67824
[237238.008081] LNet: Service thread pid 67769 was inactive for 200.14s. Watchdog stack traces are limited to 3 per 300 seconds, skipping this one.
[237238.021049] LNet: Skipped 4 previous similar messages
[237238.026194] LustreError: dumping log to /tmp/lustre-log.1576138258.67769
[237240.056127] LustreError: dumping log to /tmp/lustre-log.1576138260.67905
[237240.568137] LustreError: dumping log to /tmp/lustre-log.1576138261.67944
[237242.104166] LustreError: dumping log to /tmp/lustre-log.1576138262.67688
[237244.152206] LustreError: dumping log to /tmp/lustre-log.1576138264.67885
[237244.664220] LustreError: dumping log to /tmp/lustre-log.1576138265.67597
[237245.930444] INFO: task ll_ost_io00_000:66124 blocked for more than 120 seconds.
[237245.937848] "echo 0 > /proc/sys/kernel/hung_task_timeout_secs" disables this message.
[237245.945773] ll_ost_io00_000 D ffff8912e629a080 0 66124 2 0x00000080
[237245.952969] Call Trace:
[237245.955566] [] schedule+0x29/0x70
[237245.960650] [] wait_transaction_locked+0x85/0xd0 [jbd2]
[237245.967669] [] ? wake_up_atomic_t+0x30/0x30
[237245.973595] [] add_transaction_credits+0x268/0x2f0 [jbd2]
[237245.980735] [] start_this_handle+0x1a1/0x430 [jbd2]
[237245.987362] [] ? osd_declare_xattr_set+0xf1/0x3a0 [osd_ldiskfs]
[237245.995045] [] ? kmem_cache_alloc+0x1c2/0x1f0
[237246.001159] [] jbd2__journal_start+0xf3/0x1f0 [jbd2]
[237246.007874] [] ? osd_trans_start+0x20e/0x4e0 [osd_ldiskfs]
[237246.015146] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs]
[237246.022649] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs]
[237246.029726] [] ofd_trans_start+0x75/0xf0 [ofd]
[237246.035932] [] ofd_object_punch+0x73d/0xd30 [ofd]
[237246.042401] [] ofd_punch_hdl+0x493/0xa30 [ofd]
[237246.048642] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[237246.055670] [] ? ptlrpc_nrs_req_get_nolock0+0xd1/0x170 [ptlrpc]
[237246.063354] [] ? ktime_get_real_seconds+0xe/0x10 [libcfs]
[237246.070549] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[237246.078351] [] ? ptlrpc_wait_event+0xa5/0x360 [ptlrpc]
[237246.085236] [] ? __wake_up+0x44/0x50
[237246.090603] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[237246.097021] [] ? ptlrpc_register_service+0xf80/0xf80 [ptlrpc]
[237246.104504] [] kthread+0xd1/0xe0
[237246.109503] [] ? insert_kthread_work+0x40/0x40
[237246.115704] [] ret_from_fork_nospec_begin+0xe/0x21
[237246.122256] [] ? insert_kthread_work+0x40/0x40
[237246.128463] INFO: task ll_ost_io00_002:66126 blocked for more than 120 seconds.
[237246.135860] "echo 0 > /proc/sys/kernel/hung_task_timeout_secs" disables this message.
[237246.143801] ll_ost_io00_002 D ffff8903a9ada080 0 66126 2 0x00000080
[237246.150990] Call Trace:
[237246.153538] [] schedule+0x29/0x70
[237246.158632] [] bitmap_startwrite+0x1f5/0x210
[237246.164662] [] ? wake_up_atomic_t+0x30/0x30
[237246.170594] [] add_stripe_bio+0x451/0x7f0 [raid456]
[237246.177235] [] raid5_make_request+0x1e4/0xca0 [raid456]
[237246.184241] [] ? find_get_pages+0x180/0x1d0
[237246.190182] [] ? wake_up_atomic_t+0x30/0x30
[237246.196135] [] ? mempool_alloc_slab+0x15/0x20
[237246.202251] [] md_handle_request+0xd0/0x150
[237246.208205] [] ? generic_make_request_checks+0x2a7/0x440
[237246.215287] [] md_make_request+0x79/0x190
[237246.221066] [] generic_make_request+0x147/0x380
[237246.227341] [] ? md_mergeable_bvec+0x46/0x50
[237246.233373] [] submit_bio+0x70/0x150
[237246.238725] [] ? lprocfs_oh_tally+0x17/0x40 [obdclass]
[237246.245612] [] osd_submit_bio+0x1c/0x60 [osd_ldiskfs]
[237246.252426] [] osd_do_bio.isra.35+0x489/0xab0 [osd_ldiskfs]
[237246.259757] [] ? __find_get_page+0x1e/0xa0
[237246.265603] [] osd_write_commit+0x3ec/0x8c0 [osd_ldiskfs]
[237246.272769] [] ? osd_trans_start+0x235/0x4e0 [osd_ldiskfs]
[237246.280027] [] ofd_commitrw_write+0xfbe/0x1d40 [ofd]
[237246.286754] [] ofd_commitrw+0x48c/0x9e0 [ofd]
[237246.292923] [] tgt_brw_write+0x10cb/0x1cf0 [ptlrpc]
[237246.299576] [] ? lustre_msg_buf+0x17/0x60 [ptlrpc]
[237246.306182] [] ? __req_capsule_get+0x163/0x740 [ptlrpc]
[237246.313180] [] ? class_handle2object+0xb9/0x1c0 [obdclass]
[237246.320427] [] ? update_curr+0x14c/0x1e0
[237246.326092] [] ? account_entity_dequeue+0xae/0xd0
[237246.332569] [] ? target_send_reply_msg+0x170/0x170 [ptlrpc]
[237246.339919] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[237246.346937] [] ? ptlrpc_nrs_req_get_nolock0+0xd1/0x170 [ptlrpc]
[237246.354605] [] ? ktime_get_real_seconds+0xe/0x10 [libcfs]
[237246.361824] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[237246.369605] [] ? ptlrpc_wait_event+0xa5/0x360 [ptlrpc]
[237246.376489] [] ? wake_up_state+0x20/0x20
[237246.382187] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[237246.388599] [] ? ptlrpc_register_service+0xf80/0xf80 [ptlrpc]
[237246.396088] [] kthread+0xd1/0xe0
[237246.401092] [] ? insert_kthread_work+0x40/0x40
[237246.407284] [] ret_from_fork_nospec_begin+0xe/0x21
[237246.413835] [] ? insert_kthread_work+0x40/0x40
[237246.420038] INFO: task ll_ost_io01_000:66127 blocked for more than 120 seconds.
[237246.427467] "echo 0 > /proc/sys/kernel/hung_task_timeout_secs" disables this message.
[237246.435405] ll_ost_io01_000 D ffff8912f2f12080 0 66127 2 0x00000080
[237246.442596] Call Trace:
[237246.445142] [] schedule+0x29/0x70
[237246.450243] [] bitmap_startwrite+0x1f5/0x210
[237246.456264] [] ? wake_up_atomic_t+0x30/0x30
[237246.462193] [] add_stripe_bio+0x451/0x7f0 [raid456]
[237246.468826] [] raid5_make_request+0x1e4/0xca0 [raid456]
[237246.475794] [] ? wake_up_atomic_t+0x30/0x30
[237246.481740] [] ? mempool_alloc_slab+0x15/0x20
[237246.487842] [] md_handle_request+0xd0/0x150
[237246.493819] [] ? generic_make_request_checks+0x2a7/0x440
[237246.500868] [] md_make_request+0x79/0x190
[237246.506640] [] generic_make_request+0x147/0x380
[237246.512913] [] ? md_mergeable_bvec+0x46/0x50
[237246.518927] [] submit_bio+0x70/0x150
[237246.524274] [] ? lprocfs_oh_tally+0x17/0x40 [obdclass]
[237246.531183] [] osd_submit_bio+0x1c/0x60 [osd_ldiskfs]
[237246.537985] [] osd_do_bio.isra.35+0x489/0xab0 [osd_ldiskfs]
[237246.545318] [] ? __find_get_page+0x1e/0xa0
[237246.551159] [] osd_write_commit+0x3ec/0x8c0 [osd_ldiskfs]
[237246.558307] [] ? osd_trans_start+0x235/0x4e0 [osd_ldiskfs]
[237246.565536] [] ofd_commitrw_write+0xfbe/0x1d40 [ofd]
[237246.572276] [] ofd_commitrw+0x48c/0x9e0 [ofd]
[237246.578433] [] tgt_brw_write+0x10cb/0x1cf0 [ptlrpc]
[237246.585121] [] ? lustre_msg_buf+0x17/0x60 [ptlrpc]
[237246.591685] [] ? __req_capsule_get+0x163/0x740 [ptlrpc]
[237246.598707] [] ? class_handle2object+0xb9/0x1c0 [obdclass]
[237246.605937] [] ? update_curr+0x14c/0x1e0
[237246.611602] [] ? account_entity_dequeue+0xae/0xd0
[237246.618095] [] ? target_send_reply_msg+0x170/0x170 [ptlrpc]
[237246.625443] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[237246.632471] [] ? ptlrpc_nrs_req_get_nolock0+0xd1/0x170 [ptlrpc]
[237246.640132] [] ? ktime_get_real_seconds+0xe/0x10 [libcfs]
[237246.647314] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[237246.655089] [] ? ptlrpc_wait_event+0xa5/0x360 [ptlrpc]
[237246.661982] [] ? __wake_up+0x44/0x50
[237246.667343] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[237246.673762] [] ? ptlrpc_register_service+0xf80/0xf80 [ptlrpc]
[237246.681285] [] kthread+0xd1/0xe0
[237246.686274] [] ? insert_kthread_work+0x40/0x40
[237246.692473] [] ret_from_fork_nospec_begin+0xe/0x21
[237246.699022] [] ? insert_kthread_work+0x40/0x40
[237246.705209] INFO: task ll_ost_io03_002:66137 blocked for more than 120 seconds.
[237246.712622] "echo 0 > /proc/sys/kernel/hung_task_timeout_secs" disables this message.
[237246.720553] ll_ost_io03_002 D ffff89125d170000 0 66137 2 0x00000080
[237246.727754] Call Trace:
[237246.730299] [] schedule+0x29/0x70
[237246.735378] [] wait_transaction_locked+0x85/0xd0 [jbd2]
[237246.742396] [] ? wake_up_atomic_t+0x30/0x30
[237246.748326] [] add_transaction_credits+0x268/0x2f0 [jbd2]
[237246.755505] [] start_this_handle+0x1a1/0x430 [jbd2]
[237246.762128] [] ? osd_declare_xattr_set+0xf1/0x3a0 [osd_ldiskfs]
[237246.769803] [] ? kmem_cache_alloc+0x1c2/0x1f0
[237246.775906] [] jbd2__journal_start+0xf3/0x1f0 [jbd2]
[237246.782647] [] ?
osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [237246.789882] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [237246.797411] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [237246.804477] [] ofd_trans_start+0x75/0xf0 [ofd] [237246.810721] [] ofd_commitrw_write+0xa31/0x1d40 [ofd] [237246.817431] [] ofd_commitrw+0x48c/0x9e0 [ofd] [237246.823617] [] tgt_brw_write+0x10cb/0x1cf0 [ptlrpc] [237246.830265] [] ? lustre_msg_buf+0x17/0x60 [ptlrpc] [237246.836853] [] ? __req_capsule_get+0x163/0x740 [ptlrpc] [237246.843861] [] ? class_handle2object+0xb9/0x1c0 [obdclass] [237246.851092] [] ? update_curr+0x14c/0x1e0 [237246.856771] [] ? mutex_lock+0x12/0x2f [237246.862216] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [237246.869251] [] ? ptlrpc_nrs_req_get_nolock0+0xd1/0x170 [ptlrpc] [237246.876919] [] ? ktime_get_real_seconds+0xe/0x10 [libcfs] [237246.884102] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [237246.891882] [] ? ptlrpc_wait_event+0xa5/0x360 [ptlrpc] [237246.898797] [] ? __wake_up+0x44/0x50 [237246.904158] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [237246.910586] [] ? ptlrpc_register_service+0xf80/0xf80 [ptlrpc] [237246.918075] [] kthread+0xd1/0xe0 [237246.923079] [] ? insert_kthread_work+0x40/0x40 [237246.929271] [] ret_from_fork_nospec_begin+0xe/0x21 [237246.935851] [] ? insert_kthread_work+0x40/0x40 [237246.942039] INFO: task jbd2/md2-8:66168 blocked for more than 120 seconds. [237246.949004] "echo 0 > /proc/sys/kernel/hung_task_timeout_secs" disables this message. [237246.956928] jbd2/md2-8 D ffff8912f5c68000 0 66168 2 0x00000080 [237246.964142] Call Trace: [237246.966690] [] schedule+0x29/0x70 [237246.971790] [] jbd2_journal_commit_transaction+0x23c/0x19b0 [jbd2] [237246.979729] [] ? dequeue_task_fair+0x41e/0x660 [237246.985943] [] ? __switch_to+0xce/0x580 [237246.991525] [] ? wake_up_atomic_t+0x30/0x30 [237246.997454] [] ? __schedule+0x42a/0x860 [237247.003050] [] ? try_to_del_timer_sync+0x5e/0x90 [237247.009427] [] kjournald2+0xc9/0x260 [jbd2] [237247.015366] [] ? wake_up_atomic_t+0x30/0x30 [237247.021307] [] ? commit_timeout+0x10/0x10 [jbd2] [237247.027681] [] kthread+0xd1/0xe0 [237247.032649] [] ? insert_kthread_work+0x40/0x40 [237247.038853] [] ret_from_fork_nospec_begin+0xe/0x21 [237247.045381] [] ? insert_kthread_work+0x40/0x40 [237247.051588] INFO: task ll_ost02_005:66899 blocked for more than 120 seconds. [237247.058743] "echo 0 > /proc/sys/kernel/hung_task_timeout_secs" disables this message. [237247.066672] ll_ost02_005 D ffff8912c7616180 0 66899 2 0x00000080 [237247.073875] Call Trace: [237247.076430] [] schedule+0x29/0x70 [237247.081491] [] wait_transaction_locked+0x85/0xd0 [jbd2] [237247.088472] [] ? wake_up_atomic_t+0x30/0x30 [237247.094404] [] add_transaction_credits+0x268/0x2f0 [jbd2] [237247.101577] [] start_this_handle+0x1a1/0x430 [jbd2] [237247.108210] [] ? osd_declare_write+0x350/0x490 [osd_ldiskfs] [237247.115646] [] ? kmem_cache_alloc+0x1c2/0x1f0 [237247.121746] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [237247.128487] [] ? osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [237247.135725] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [237247.143248] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [237247.150305] [] ofd_trans_start+0x75/0xf0 [ofd] [237247.156495] [] ofd_attr_set+0x464/0xb60 [ofd] [237247.162595] [] ofd_setattr_hdl+0x31d/0x8e0 [ofd] [237247.169036] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [237247.176040] [] ? ptlrpc_nrs_req_get_nolock0+0xd1/0x170 [ptlrpc] [237247.183720] [] ? 
ktime_get_real_seconds+0xe/0x10 [libcfs] [237247.190916] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [237247.198730] [] ? ptlrpc_wait_event+0xa5/0x360 [ptlrpc] [237247.205626] [] ? __wake_up+0x44/0x50 [237247.210993] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [237247.217392] [] ? ptlrpc_register_service+0xf80/0xf80 [ptlrpc] [237247.224271] LNet: Service thread pid 67794 was inactive for 200.41s. Watchdog stack traces are limited to 3 per 300 seconds, skipping this one. [237247.224273] LNet: Skipped 11 previous similar messages [237247.224275] LustreError: dumping log to /tmp/lustre-log.1576138267.67794 [237247.249902] [] kthread+0xd1/0xe0 [237247.254877] [] ? insert_kthread_work+0x40/0x40 [237247.261095] [] ret_from_fork_nospec_begin+0xe/0x21 [237247.267641] [] ? insert_kthread_work+0x40/0x40 [237247.273844] INFO: task ll_ost_io00_003:67590 blocked for more than 120 seconds. [237247.281255] "echo 0 > /proc/sys/kernel/hung_task_timeout_secs" disables this message. [237247.289197] ll_ost_io00_003 D ffff8903a99c8000 0 67590 2 0x00000080 [237247.296406] Call Trace: [237247.298950] [] schedule+0x29/0x70 [237247.304052] [] wait_transaction_locked+0x85/0xd0 [jbd2] [237247.311037] [] ? wake_up_atomic_t+0x30/0x30 [237247.316992] [] add_transaction_credits+0x268/0x2f0 [jbd2] [237247.324151] [] ? crypto_mod_get+0x19/0x40 [237247.329921] [] start_this_handle+0x1a1/0x430 [jbd2] [237247.336548] [] ? osd_declare_qid+0x200/0x4a0 [osd_ldiskfs] [237247.343793] [] ? kmem_cache_alloc+0x1c2/0x1f0 [237247.349907] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [237247.356635] [] ? osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [237247.363885] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [237247.371405] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [237247.378481] [] ofd_commitrw_write+0xf1e/0x1d40 [ofd] [237247.385205] [] ofd_commitrw+0x48c/0x9e0 [ofd] [237247.391384] [] tgt_brw_write+0x10cb/0x1cf0 [ptlrpc] [237247.398040] [] ? lustre_msg_buf+0x17/0x60 [ptlrpc] [237247.404643] [] ? __req_capsule_get+0x163/0x740 [ptlrpc] [237247.411612] [] ? __enqueue_entity+0x78/0x80 [237247.417609] [] ? target_send_reply_msg+0x170/0x170 [ptlrpc] [237247.424957] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [237247.431972] [] ? ptlrpc_nrs_req_get_nolock0+0xd1/0x170 [ptlrpc] [237247.439650] [] ? ktime_get_real_seconds+0xe/0x10 [libcfs] [237247.446832] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [237247.454624] [] ? ptlrpc_wait_event+0xa5/0x360 [ptlrpc] [237247.461529] [] ? __wake_up+0x44/0x50 [237247.466896] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [237247.473323] [] ? ptlrpc_register_service+0xf80/0xf80 [ptlrpc] [237247.480864] [] kthread+0xd1/0xe0 [237247.485861] [] ? insert_kthread_work+0x40/0x40 [237247.492049] [] ret_from_fork_nospec_begin+0xe/0x21 [237247.498591] [] ? insert_kthread_work+0x40/0x40 [237247.504796] INFO: task ll_ost_io01_003:67594 blocked for more than 120 seconds. [237247.512214] "echo 0 > /proc/sys/kernel/hung_task_timeout_secs" disables this message. [237247.520135] ll_ost_io01_003 D ffff8912f1b12080 0 67594 2 0x00000080 [237247.527326] Call Trace: [237247.529870] [] schedule+0x29/0x70 [237247.534962] [] wait_transaction_locked+0x85/0xd0 [jbd2] [237247.541940] [] ? wake_up_atomic_t+0x30/0x30 [237247.547888] [] add_transaction_credits+0x268/0x2f0 [jbd2] [237247.555045] [] ? crypto_mod_get+0x19/0x40 [237247.560821] [] start_this_handle+0x1a1/0x430 [jbd2] [237247.567468] [] ? osd_declare_qid+0x200/0x4a0 [osd_ldiskfs] [237247.574710] [] ? 
kmem_cache_alloc+0x1c2/0x1f0 [237247.580829] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [237247.587557] [] ? osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [237247.594794] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [237247.602325] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [237247.609385] [] ofd_commitrw_write+0xf1e/0x1d40 [ofd] [237247.616127] [] ofd_commitrw+0x48c/0x9e0 [ofd] [237247.622290] [] tgt_brw_write+0x10cb/0x1cf0 [ptlrpc] [237247.628940] [] ? lustre_msg_buf+0x17/0x60 [ptlrpc] [237247.635523] [] ? __req_capsule_get+0x163/0x740 [ptlrpc] [237247.642489] [] ? __enqueue_entity+0x78/0x80 [237247.648457] [] ? target_send_reply_msg+0x170/0x170 [ptlrpc] [237247.655804] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [237247.662857] [] ? ptlrpc_nrs_req_get_nolock0+0xd1/0x170 [ptlrpc] [237247.670521] [] ? ktime_get_real_seconds+0xe/0x10 [libcfs] [237247.677729] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [237247.685502] [] ? ptlrpc_wait_event+0xa5/0x360 [ptlrpc] [237247.692381] [] ? __wake_up+0x44/0x50 [237247.697762] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [237247.704154] [] ? ptlrpc_register_service+0xf80/0xf80 [ptlrpc] [237247.711655] [] kthread+0xd1/0xe0 [237247.716644] [] ? insert_kthread_work+0x40/0x40 [237247.722848] [] ret_from_fork_nospec_begin+0xe/0x21 [237247.729411] [] ? insert_kthread_work+0x40/0x40 [237247.735620] INFO: task ll_ost_io01_004:67597 blocked for more than 120 seconds. [237247.736285] LustreError: dumping log to /tmp/lustre-log.1576138268.67917 [237247.749831] "echo 0 > /proc/sys/kernel/hung_task_timeout_secs" disables this message. [237247.757760] ll_ost_io01_004 D ffff8912f4bd9040 0 67597 2 0x00000080 [237247.764963] Call Trace: [237247.767520] [] schedule+0x29/0x70 [237247.772586] [] wait_transaction_locked+0x85/0xd0 [jbd2] [237247.779603] [] ? wake_up_atomic_t+0x30/0x30 [237247.785536] [] add_transaction_credits+0x268/0x2f0 [jbd2] [237247.792692] [] start_this_handle+0x1a1/0x430 [jbd2] [237247.799318] [] ? osd_declare_xattr_set+0xf1/0x3a0 [osd_ldiskfs] [237247.807018] [] ? kmem_cache_alloc+0x1c2/0x1f0 [237247.813123] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [237247.819871] [] ? osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [237247.827108] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [237247.834623] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [237247.841698] [] ofd_trans_start+0x75/0xf0 [ofd] [237247.847884] [] ofd_commitrw_write+0xa31/0x1d40 [ofd] [237247.854608] [] ofd_commitrw+0x48c/0x9e0 [ofd] [237247.860778] [] tgt_brw_write+0x10cb/0x1cf0 [ptlrpc] [237247.867430] [] ? lustre_msg_buf+0x17/0x60 [ptlrpc] [237247.873991] [] ? __req_capsule_get+0x163/0x740 [ptlrpc] [237247.880973] [] ? mutex_lock+0x12/0x2f [237247.886432] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [237247.893448] [] ? ptlrpc_nrs_req_get_nolock0+0xd1/0x170 [ptlrpc] [237247.901127] [] ? ktime_get_real_seconds+0xe/0x10 [libcfs] [237247.908295] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [237247.916082] [] ? ptlrpc_wait_event+0xa5/0x360 [ptlrpc] [237247.922993] [] ? __wake_up+0x44/0x50 [237247.928341] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [237247.934743] [] ? ptlrpc_register_service+0xf80/0xf80 [ptlrpc] [237247.942231] [] kthread+0xd1/0xe0 [237247.947218] [] ? insert_kthread_work+0x40/0x40 [237247.953433] [] ret_from_fork_nospec_begin+0xe/0x21 [237247.959961] [] ? insert_kthread_work+0x40/0x40 [237247.966184] INFO: task ll_ost03_027:67684 blocked for more than 120 seconds. [237247.973326] "echo 0 > /proc/sys/kernel/hung_task_timeout_secs" disables this message. 
[237247.981279] ll_ost03_027 D ffff8922cb341040 0 67684 2 0x00000080 [237247.988473] Call Trace: [237247.991025] [] ? fid_is_on_ost+0x3f4/0x420 [osd_ldiskfs] [237247.998109] [] schedule+0x29/0x70 [237248.003179] [] wait_transaction_locked+0x85/0xd0 [jbd2] [237248.010159] [] ? wake_up_atomic_t+0x30/0x30 [237248.016098] [] add_transaction_credits+0x268/0x2f0 [jbd2] [237248.023269] [] ? lprocfs_counter_add+0xf9/0x160 [obdclass] [237248.030529] [] start_this_handle+0x1a1/0x430 [jbd2] [237248.037185] [] ? osd_declare_qid+0x200/0x4a0 [osd_ldiskfs] [237248.044415] [] ? kmem_cache_alloc+0x1c2/0x1f0 [237248.050545] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [237248.057273] [] ? osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [237248.064524] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [237248.072039] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [237248.079108] [] ofd_precreate_objects+0xa57/0x1d80 [ofd] [237248.086096] [] ofd_create_hdl+0x474/0x20e0 [ofd] [237248.092515] [] ? lustre_pack_reply_v2+0x135/0x290 [ptlrpc] [237248.099797] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [237248.106809] [] ? ptlrpc_nrs_req_get_nolock0+0xd1/0x170 [ptlrpc] [237248.114494] [] ? ktime_get_real_seconds+0xe/0x10 [libcfs] [237248.121697] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [237248.129494] [] ? ptlrpc_wait_event+0xa5/0x360 [ptlrpc] [237248.136392] [] ? __wake_up+0x44/0x50 [237248.141753] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [237248.148156] [] ? ptlrpc_register_service+0xf80/0xf80 [ptlrpc] [237248.155646] [] kthread+0xd1/0xe0 [237248.160645] [] ? insert_kthread_work+0x40/0x40 [237248.166833] [] ret_from_fork_nospec_begin+0xe/0x21 [237248.173395] [] ? insert_kthread_work+0x40/0x40 [237249.272305] LustreError: dumping log to /tmp/lustre-log.1576138269.67928 [237249.784318] LustreError: dumping log to /tmp/lustre-log.1576138270.68019 [237251.320348] LustreError: dumping log to /tmp/lustre-log.1576138271.68044 [237254.392416] LustreError: dumping log to /tmp/lustre-log.1576138274.67962 [237255.416430] LustreError: dumping log to /tmp/lustre-log.1576138275.68025 [237256.440464] LustreError: dumping log to /tmp/lustre-log.1576138276.67720 [237257.464471] LustreError: dumping log to /tmp/lustre-log.1576138277.68037 [237257.976484] LustreError: dumping log to /tmp/lustre-log.1576138278.68000 [237263.608604] LNet: Service thread pid 67732 was inactive for 200.10s. Watchdog stack traces are limited to 3 per 300 seconds, skipping this one. [237263.621573] LNet: Skipped 20 previous similar messages [237263.626807] LustreError: dumping log to /tmp/lustre-log.1576138284.67732 [237267.704683] LustreError: dumping log to /tmp/lustre-log.1576138288.67971 [237272.312775] LustreError: dumping log to /tmp/lustre-log.1576138292.68046 [237273.848807] LustreError: dumping log to /tmp/lustre-log.1576138294.67728 [237274.872830] LustreError: dumping log to /tmp/lustre-log.1576138295.68018 [237275.896850] LustreError: dumping log to /tmp/lustre-log.1576138296.67959 [237278.968904] LustreError: dumping log to /tmp/lustre-log.1576138299.112549 [237284.089008] LustreError: dumping log to /tmp/lustre-log.1576138304.68001 [237285.625037] LustreError: dumping log to /tmp/lustre-log.1576138306.66124 [237286.649056] LustreError: dumping log to /tmp/lustre-log.1576138307.67880 [237290.233133] LustreError: dumping log to /tmp/lustre-log.1576138310.67770 [237299.449316] LNet: Service thread pid 67594 was inactive for 200.23s. Watchdog stack traces are limited to 3 per 300 seconds, skipping this one. 
[237299.462289] LNet: Skipped 17 previous similar messages [237299.467526] LustreError: dumping log to /tmp/lustre-log.1576138319.67594 [237300.473336] LustreError: dumping log to /tmp/lustre-log.1576138320.67713 [237303.033394] LustreError: dumping log to /tmp/lustre-log.1576138323.67951 [237305.593441] LustreError: dumping log to /tmp/lustre-log.1576138326.67912 [237316.857665] LustreError: dumping log to /tmp/lustre-log.1576138337.67722 [237317.881688] LustreError: dumping log to /tmp/lustre-log.1576138338.67976 [237318.905707] LustreError: dumping log to /tmp/lustre-log.1576138339.67887 [237323.513803] LustreError: dumping log to /tmp/lustre-log.1576138343.66899 [237326.073852] LustreError: dumping log to /tmp/lustre-log.1576138346.87151 [237327.097865] LustreError: dumping log to /tmp/lustre-log.1576138347.67983 [237327.609877] LustreError: dumping log to /tmp/lustre-log.1576138348.67684 [237330.169932] LustreError: dumping log to /tmp/lustre-log.1576138350.67715 [237331.193955] LustreError: dumping log to /tmp/lustre-log.1576138351.67590 [237335.802048] LustreError: dumping log to /tmp/lustre-log.1576138356.67945 [237338.362100] LustreError: dumping log to /tmp/lustre-log.1576138358.68035 [237339.386117] LustreError: dumping log to /tmp/lustre-log.1576138359.68013 [237339.898126] LustreError: dumping log to /tmp/lustre-log.1576138360.67859 [237341.699169] LustreError: 67688:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576138062, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST0056_UUID lock: ffff89018ca4ee40/0x7066c9c1908b2888 lrc: 3/0,1 mode: --/PW res: [0x1880000401:0x11129f:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 67688 timeout: 0 lvb_type: 0 [237341.742968] LustreError: dumping log to /tmp/lustre-log.1576138362.67688 [237353.210396] LustreError: dumping log to /tmp/lustre-log.1576138373.67618 [237363.962609] LNet: Service thread pid 67892 was inactive for 236.60s. Watchdog stack traces are limited to 3 per 300 seconds, skipping this one. 
[237363.975560] LNet: Skipped 21 previous similar messages [237363.980831] LustreError: dumping log to /tmp/lustre-log.1576138384.67892 [237364.986631] LustreError: dumping log to /tmp/lustre-log.1576138385.67904 [237378.279904] LustreError: 112549:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576138098, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST0056_UUID lock: ffff8920d77c7740/0x7066c9c1908c0acd lrc: 3/0,1 mode: --/PW res: [0x1880000402:0x2efb73:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 112549 timeout: 0 lvb_type: 0 [237378.323825] LustreError: 112549:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) Skipped 1 previous similar message [237394.683221] LustreError: dumping log to /tmp/lustre-log.1576138415.67940 [237396.731277] LustreError: dumping log to /tmp/lustre-log.1576138417.67754 [237401.851365] LustreError: dumping log to /tmp/lustre-log.1576138422.67970 [237403.899424] LustreError: dumping log to /tmp/lustre-log.1576138424.67894 [237405.947447] LustreError: dumping log to /tmp/lustre-log.1576138426.67982 [237406.971470] LustreError: dumping log to /tmp/lustre-log.1576138427.66134 [237409.019513] LustreError: dumping log to /tmp/lustre-log.1576138429.68042 [237413.115598] LustreError: dumping log to /tmp/lustre-log.1576138433.68031 [237436.668067] LustreError: dumping log to /tmp/lustre-log.1576138457.67989 [237440.744152] LustreError: 67618:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576138161, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST0056_UUID lock: ffff88f482c39f80/0x7066c9c1908d9f29 lrc: 3/0,1 mode: --/PW res: [0x1880000401:0x1112a1:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 67618 timeout: 0 lvb_type: 0 [237468.412705] LustreError: dumping log to /tmp/lustre-log.1576138488.68023 [237477.628890] LustreError: dumping log to /tmp/lustre-log.1576138498.67963 [237487.869097] LustreError: dumping log to /tmp/lustre-log.1576138508.113352 [237496.061263] LNet: Service thread pid 112519 was inactive for 313.06s. Watchdog stack traces are limited to 3 per 300 seconds, skipping this one. [237496.074297] LNet: Skipped 20 previous similar messages [237496.079557] LustreError: dumping log to /tmp/lustre-log.1576138516.112519 [237514.549421] Lustre: fir-OST0054: Connection restored to 687b1eea-b865-b791-9de5-a67096eac725 (at 10.8.23.26@o2ib6) [237514.559886] Lustre: Skipped 2 previous similar messages [237527.781354] Lustre: fir-OST0054: Connection restored to ca09bd61-a4b3-111c-b997-9c7823236764 (at 10.8.22.17@o2ib6) [237527.791798] Lustre: Skipped 4 previous similar messages [237530.102940] Lustre: fir-OST0054: Connection restored to 00850750-7463-78da-94ee-623be2781c44 (at 10.8.22.22@o2ib6) [237530.113398] Lustre: Skipped 4 previous similar messages [237541.118188] LNet: Service thread pid 67986 was inactive for 362.65s. The thread might be hung, or it might only be slow and will resume later. 
Dumping the stack trace for debugging purposes: [237541.135213] Pid: 67986, comm: ll_ost_io01_039 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [237541.145992] Call Trace: [237541.148566] [] wait_transaction_locked+0x85/0xd0 [jbd2] [237541.155561] [] add_transaction_credits+0x268/0x2f0 [jbd2] [237541.162728] [] start_this_handle+0x1a1/0x430 [jbd2] [237541.169368] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [237541.176102] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [237541.183617] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [237541.190700] [] ofd_commitrw_write+0xf1e/0x1d40 [ofd] [237541.197433] [] ofd_commitrw+0x48c/0x9e0 [ofd] [237541.203561] [] tgt_brw_write+0x10cb/0x1cf0 [ptlrpc] [237541.210260] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [237541.217289] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [237541.225106] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [237541.231519] [] kthread+0xd1/0xe0 [237541.236535] [] ret_from_fork_nospec_begin+0xe/0x21 [237541.243098] [] 0xffffffffffffffff [237541.248197] LustreError: dumping log to /tmp/lustre-log.1576138561.67986 [237542.142185] Pid: 67591, comm: ll_ost_io02_003 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [237542.152975] Call Trace: [237542.155545] [] wait_transaction_locked+0x85/0xd0 [jbd2] [237542.162566] [] add_transaction_credits+0x268/0x2f0 [jbd2] [237542.169736] [] start_this_handle+0x1a1/0x430 [jbd2] [237542.176415] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [237542.183151] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [237542.190720] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [237542.197808] [] ofd_commitrw_write+0xf1e/0x1d40 [ofd] [237542.204580] [] ofd_commitrw+0x48c/0x9e0 [ofd] [237542.210728] [] tgt_brw_write+0x10cb/0x1cf0 [ptlrpc] [237542.217443] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [237542.224476] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [237542.232306] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [237542.238722] [] kthread+0xd1/0xe0 [237542.243738] [] ret_from_fork_nospec_begin+0xe/0x21 [237542.250318] [] 0xffffffffffffffff [237542.255444] LustreError: dumping log to /tmp/lustre-log.1576138562.67591 [237546.903790] Lustre: fir-OST0054: Connection restored to a507eb44-8ff1-13e2-fab8-30d1823663f8 (at 10.8.22.24@o2ib6) [237546.914227] Lustre: Skipped 4 previous similar messages [237549.311321] LNet: Service thread pid 67746 was inactive for 362.01s. The thread might be hung, or it might only be slow and will resume later. 
Dumping the stack trace for debugging purposes: [237549.328363] LNet: Skipped 1 previous similar message [237549.333426] Pid: 67746, comm: ll_ost02_036 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [237549.343970] Call Trace: [237549.346539] [] wait_transaction_locked+0x85/0xd0 [jbd2] [237549.353560] [] add_transaction_credits+0x268/0x2f0 [jbd2] [237549.360748] [] start_this_handle+0x1a1/0x430 [jbd2] [237549.367413] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [237549.374169] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [237549.381704] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [237549.388816] [] ofd_trans_start+0x75/0xf0 [ofd] [237549.395050] [] ofd_attr_set+0x464/0xb60 [ofd] [237549.401208] [] ofd_setattr_hdl+0x31d/0x8e0 [ofd] [237549.407622] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [237549.414743] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [237549.422570] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [237549.429009] [] kthread+0xd1/0xe0 [237549.434011] [] ret_from_fork_nospec_begin+0xe/0x21 [237549.440573] [] 0xffffffffffffffff [237549.445687] LustreError: dumping log to /tmp/lustre-log.1576138569.67746 [237553.406410] LNet: Service thread pid 67937 was inactive for 362.21s. The thread might be hung, or it might only be slow and will resume later. Dumping the stack trace for debugging purposes: [237553.423451] Pid: 67937, comm: ll_ost_io03_025 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [237553.434231] Call Trace: [237553.436808] [] wait_transaction_locked+0x85/0xd0 [jbd2] [237553.443803] [] add_transaction_credits+0x268/0x2f0 [jbd2] [237553.450988] [] start_this_handle+0x1a1/0x430 [jbd2] [237553.457639] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [237553.464381] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [237553.471910] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [237553.479017] [] ofd_commitrw_write+0xf1e/0x1d40 [ofd] [237553.485759] [] ofd_commitrw+0x48c/0x9e0 [ofd] [237553.491940] [] tgt_brw_write+0x10cb/0x1cf0 [ptlrpc] [237553.498659] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [237553.505703] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [237553.513505] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [237553.519944] [] kthread+0xd1/0xe0 [237553.524967] [] ret_from_fork_nospec_begin+0xe/0x21 [237553.531529] [] 0xffffffffffffffff [237553.536641] LustreError: dumping log to /tmp/lustre-log.1576138573.67937 [237555.454451] Pid: 67964, comm: ll_ost_io02_043 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [237555.465236] Call Trace: [237555.467815] [] wait_transaction_locked+0x85/0xd0 [jbd2] [237555.474837] [] add_transaction_credits+0x268/0x2f0 [jbd2] [237555.482026] [] start_this_handle+0x1a1/0x430 [jbd2] [237555.488692] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [237555.495444] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [237555.502984] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [237555.510090] [] ofd_commitrw_write+0xf1e/0x1d40 [ofd] [237555.516869] [] ofd_commitrw+0x48c/0x9e0 [ofd] [237555.523014] [] tgt_brw_write+0x10cb/0x1cf0 [ptlrpc] [237555.529756] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [237555.536842] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [237555.544687] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [237555.551136] [] kthread+0xd1/0xe0 [237555.556185] [] ret_from_fork_nospec_begin+0xe/0x21 [237555.562777] [] 0xffffffffffffffff [237555.567895] LustreError: dumping log to /tmp/lustre-log.1576138576.67964 [237557.502496] LustreError: dumping log to 
/tmp/lustre-log.1576138577.67973 [237558.526510] LustreError: dumping log to /tmp/lustre-log.1576138578.67960 [237561.598572] LustreError: dumping log to /tmp/lustre-log.1576138582.68049 [237563.646619] LustreError: dumping log to /tmp/lustre-log.1576138584.113613 [237565.308284] Lustre: fir-OST0056: Export ffff89036eda0800 already connecting from 10.8.23.26@o2ib6 [237578.501849] Lustre: fir-OST0056: Export ffff88fc8fff5800 already connecting from 10.8.22.17@o2ib6 [237580.894384] Lustre: fir-OST0056: Export ffff88e42919e800 already connecting from 10.8.22.22@o2ib6 [237590.271148] LustreError: dumping log to /tmp/lustre-log.1576138610.68026 [237597.632231] Lustre: fir-OST0056: Export ffff8902f7fafc00 already connecting from 10.8.22.24@o2ib6 [237615.485143] Lustre: fir-OST0056: Export ffff89036eda0800 already connecting from 10.8.23.26@o2ib6 [237618.943722] LustreError: dumping log to /tmp/lustre-log.1576138639.68050 [237620.991766] LustreError: dumping log to /tmp/lustre-log.1576138641.67955 [237621.801792] Lustre: 67968:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff890f1b8c4850 x1652591481752896/t0(0) o4->0f4b0f7a-80c1-4@10.9.110.62@o2ib4:647/0 lens 1352/664 e 24 to 0 dl 1576138647 ref 2 fl Interpret:/0/0 rc 0/0 [237622.509804] Lustre: 68038:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff88e4f04ec050 x1652591481755520/t0(0) o4->0f4b0f7a-80c1-4@10.9.110.62@o2ib4:647/0 lens 1352/664 e 24 to 0 dl 1576138647 ref 2 fl Interpret:/0/0 rc 0/0 [237622.536983] Lustre: 68038:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 3 previous similar messages [237623.807832] Lustre: 67953:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff890712200050 x1648846345542000/t0(0) o4->75b6516e-d912-63bd-698a-8f68fc05bdf0@10.9.110.15@o2ib4:649/0 lens 488/448 e 24 to 0 dl 1576138649 ref 2 fl Interpret:/0/0 rc 0/0 [237623.836762] Lustre: 67953:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 1 previous similar message [237625.087842] LustreError: dumping log to /tmp/lustre-log.1576138645.67994 [237627.203908] Lustre: 67877:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff89224f575050 x1649530969658688/t0(0) o4->1c192c26-6a2d-8fff-8f45-c6fac242e547@10.9.104.15@o2ib4:652/0 lens 488/448 e 24 to 0 dl 1576138652 ref 2 fl Interpret:/0/0 rc 0/0 [237627.232835] Lustre: 67877:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 4 previous similar messages [237628.030913] Lustre: fir-OST0056: Client 0f4b0f7a-80c1-4 (at 10.9.110.62@o2ib4) reconnecting [237628.039357] Lustre: Skipped 5 previous similar messages [237628.044707] Lustre: fir-OST0056: Connection restored to d849fafe-3a33-7fd6-08c1-09a87a8abd8b (at 10.9.110.62@o2ib4) [237628.678722] Lustre: fir-OST0056: Export ffff88fc8fff5800 already connecting from 10.8.22.17@o2ib6 [237629.183925] LustreError: dumping log to /tmp/lustre-log.1576138649.67768 [237631.231975] LustreError: dumping log to /tmp/lustre-log.1576138651.67995 [237633.050024] Lustre: 68022:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff8902fd4f3850 x1648532634593856/t0(0) o4->a5082367-d733-7058-3cc5-0eedec6c0c1c@10.8.30.16@o2ib6:658/0 lens 2488/448 e 24 to 0 dl 1576138658 ref 2 fl Interpret:/0/0 rc 0/0 [237633.078952] Lustre: 
68022:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 6 previous similar messages [237633.280013] LustreError: dumping log to /tmp/lustre-log.1576138653.67952 [237634.304028] LustreError: dumping log to /tmp/lustre-log.1576138654.68005 [237635.328055] LustreError: dumping log to /tmp/lustre-log.1576138655.67897 [237637.376092] LustreError: dumping log to /tmp/lustre-log.1576138657.68039 [237638.085392] Lustre: fir-OST0056: Connection restored to (at 10.9.108.56@o2ib4) [237638.092791] Lustre: Skipped 9 previous similar messages [237639.424133] LustreError: dumping log to /tmp/lustre-log.1576138659.68006 [237642.232218] Lustre: 66135:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff891a4f63b050 x1649046890263936/t0(0) o10->7126efc2-9676-1db9-94d0-ae09c1520697@10.9.101.26@o2ib4:667/0 lens 440/432 e 17 to 0 dl 1576138667 ref 2 fl Interpret:/0/0 rc 0/0 [237642.261241] Lustre: 66135:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 10 previous similar messages [237642.629898] Lustre: fir-OST0056: deleting orphan objects from 0x1880000401:1118894 to 0x1880000401:1118913 [237645.568265] LustreError: dumping log to /tmp/lustre-log.1576138666.112531 [237647.809088] Lustre: fir-OST0056: Export ffff8902f7fafc00 already connecting from 10.8.22.24@o2ib6 [237647.818055] Lustre: Skipped 1 previous similar message [237654.608801] Lustre: fir-OST0056: Connection restored to 8d232f07-b6ab-bc70-4dd8-277e82f65db5 (at 10.9.107.58@o2ib4) [237654.619327] Lustre: Skipped 9 previous similar messages [237659.904550] LustreError: dumping log to /tmp/lustre-log.1576138680.67598 [237661.888598] Lustre: 67966:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff8903a9aa0850 x1649656820965840/t0(0) o4->d5b9405e-1c60-945f-2d9d-6a877d61380f@10.8.30.30@o2ib6:687/0 lens 1720/448 e 12 to 0 dl 1576138687 ref 2 fl Interpret:/0/0 rc 0/0 [237661.917553] Lustre: 67966:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 19 previous similar messages [237661.952589] LustreError: dumping log to /tmp/lustre-log.1576138682.68014 [237681.247793] Lustre: fir-OST0056: Export ffff88e42919e800 already connecting from 10.8.22.22@o2ib6 [237681.256757] Lustre: Skipped 2 previous similar messages [237687.211349] Lustre: fir-OST0056: Connection restored to ec8d663e-70c3-0c7c-9511-dfaaba3f32c1 (at 10.9.104.45@o2ib4) [237687.221878] Lustre: Skipped 7 previous similar messages [237693.697223] LustreError: dumping log to /tmp/lustre-log.1576138714.67884 [237694.529253] Lustre: 26939:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff8902569da050 x1649291658560656/t0(0) o4->89038d66-847b-1ff4-ff67-a551d70b6de8@10.9.110.70@o2ib4:719/0 lens 488/448 e 8 to 0 dl 1576138719 ref 2 fl Interpret:/0/0 rc 0/0 [237694.558093] Lustre: 26939:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 11 previous similar messages [237696.573443] Lustre: fir-OST0056: Client 72ec26e6-8490-9625-4bfc-aa584f79f189 (at 10.9.102.25@o2ib4) reconnecting [237696.583710] Lustre: Skipped 29 previous similar messages [237697.793305] LustreError: dumping log to /tmp/lustre-log.1576138718.67730 [237700.865378] LustreError: dumping log to /tmp/lustre-log.1576138721.67820 [237702.913411] LustreError: dumping log to /tmp/lustre-log.1576138723.67907 [237707.009489] LustreError: dumping log to /tmp/lustre-log.1576138727.68003 [237711.105570] LustreError: dumping log to 
/tmp/lustre-log.1576138731.67906 [237740.560319] Lustre: fir-OST0056: haven't heard from client 7e5bcac9-70c5-4 (at ) in 227 seconds. I think it's dead, and I am evicting it. exp ffff89036eda0800, cur 1576138761 expire 1576138611 last 1576138534 [237740.579755] Lustre: Skipped 5 previous similar messages [237748.163230] Lustre: fir-OST0056: Export ffff8902f7fafc00 already connecting from 10.8.22.24@o2ib6 [237748.172193] Lustre: Skipped 4 previous similar messages [237751.479667] Lustre: fir-OST0056: Connection restored to 860089bf-2de2-f0b4-c239-a266e1c756b4 (at 10.9.102.54@o2ib4) [237751.490187] Lustre: Skipped 19 previous similar messages [237760.386572] Lustre: 27017:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff89079a151850 x1648858224189808/t0(0) o4->8e4fe161-7440-1bc3-60cf-ef16452a7501@10.9.105.43@o2ib4:30/0 lens 6576/448 e 4 to 0 dl 1576138785 ref 2 fl Interpret:/0/0 rc 0/0 [237760.415403] Lustre: 27017:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 37 previous similar messages [237775.706527] Lustre: fir-OST0056: deleting orphan objects from 0x0:27479877 to 0x0:27479905 [237776.642884] LNet: Service thread pid 67991 was inactive for 512.09s. Watchdog stack traces are limited to 3 per 300 seconds, skipping this one. [237776.655854] LNet: Skipped 42 previous similar messages [237776.661088] LustreError: dumping log to /tmp/lustre-log.1576138797.67991 [237786.883094] LustreError: dumping log to /tmp/lustre-log.1576138807.67753 [237788.931149] LustreError: dumping log to /tmp/lustre-log.1576138809.67822 [237809.411550] LustreError: dumping log to /tmp/lustre-log.1576138829.68008 [237811.459584] LustreError: dumping log to /tmp/lustre-log.1576138831.67931 [237826.287005] Lustre: fir-OST0056: Client 7520ece1-a22b-161c-9a9c-7f1c99e6d5c6 (at 10.9.108.37@o2ib4) reconnecting [237826.297268] Lustre: Skipped 29 previous similar messages [237860.612574] LNet: Service thread pid 68004 was inactive for 563.65s. The thread might be hung, or it might only be slow and will resume later. 
Dumping the stack trace for debugging purposes: [237860.629597] LNet: Skipped 1 previous similar message [237860.634660] Pid: 68004, comm: ll_ost_io00_034 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [237860.645456] Call Trace: [237860.648027] [] wait_transaction_locked+0x85/0xd0 [jbd2] [237860.655041] [] add_transaction_credits+0x268/0x2f0 [jbd2] [237860.662208] [] start_this_handle+0x1a1/0x430 [jbd2] [237860.668886] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [237860.675626] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [237860.683185] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [237860.690278] [] ofd_commitrw_write+0xf1e/0x1d40 [ofd] [237860.697014] [] ofd_commitrw+0x48c/0x9e0 [ofd] [237860.703141] [] tgt_brw_write+0x10cb/0x1cf0 [ptlrpc] [237860.709830] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [237860.716876] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [237860.724678] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [237860.731107] [] kthread+0xd1/0xe0 [237860.736109] [] ret_from_fork_nospec_begin+0xe/0x21 [237860.742703] [] 0xffffffffffffffff [237860.747812] LustreError: dumping log to /tmp/lustre-log.1576138881.68004 [237870.852778] Pid: 67777, comm: ll_ost01_049 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [237870.863330] Call Trace: [237870.865907] [] wait_transaction_locked+0x85/0xd0 [jbd2] [237870.872901] [] add_transaction_credits+0x268/0x2f0 [jbd2] [237870.880114] [] start_this_handle+0x1a1/0x430 [jbd2] [237870.886779] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [237870.893527] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [237870.901052] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [237870.908193] [] ofd_trans_start+0x75/0xf0 [ofd] [237870.914407] [] ofd_destroy+0x5d0/0x960 [ofd] [237870.920445] [] ofd_destroy_by_fid+0x1f4/0x4a0 [ofd] [237870.927094] [] ofd_destroy_hdl+0x267/0x970 [ofd] [237870.933480] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [237870.940528] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [237870.948336] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [237870.954762] [] kthread+0xd1/0xe0 [237870.959769] [] ret_from_fork_nospec_begin+0xe/0x21 [237870.966329] [] 0xffffffffffffffff [237870.971437] LustreError: dumping log to /tmp/lustre-log.1576138891.67777 [237870.978842] Pid: 67725, comm: ll_ost01_041 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [237870.989398] Call Trace: [237870.991974] [] wait_transaction_locked+0x85/0xd0 [jbd2] [237870.998970] [] add_transaction_credits+0x268/0x2f0 [jbd2] [237871.006135] [] start_this_handle+0x1a1/0x430 [jbd2] [237871.012783] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [237871.019533] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [237871.027059] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [237871.034147] [] ofd_trans_start+0x75/0xf0 [ofd] [237871.040378] [] ofd_destroy+0x5d0/0x960 [ofd] [237871.046436] [] ofd_destroy_by_fid+0x1f4/0x4a0 [ofd] [237871.053092] [] ofd_destroy_hdl+0x267/0x970 [ofd] [237871.059482] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [237871.066575] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [237871.074388] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [237871.080810] [] kthread+0xd1/0xe0 [237871.085811] [] ret_from_fork_nospec_begin+0xe/0x21 [237871.092398] [] 0xffffffffffffffff [237879.563699] Lustre: fir-OST0056: Export ffff88fc8fff5800 already connecting from 10.8.22.17@o2ib6 [237879.572669] Lustre: Skipped 8 previous similar messages [237891.845210] Lustre: 27074:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ 
Couldn't add any time (5/5), not sending early reply req@ffff88f2fbd54050 x1650958282745904/t0(0) o4->0c302cf4-1147-d945-dfa2-e9bc796b3175@10.9.101.32@o2ib4:162/0 lens 7904/448 e 3 to 0 dl 1576138917 ref 2 fl Interpret:/0/0 rc 0/0 [237891.874134] Lustre: 27074:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 52 previous similar messages [237907.467083] Lustre: fir-OST0056: Connection restored to b4c9913c-f59e-b8ac-70a9-c2d8d6c39257 (at 10.9.101.34@o2ib4) [237907.477608] Lustre: Skipped 23 previous similar messages [237917.957722] LNet: Service thread pid 68041 was inactive for 612.09s. The thread might be hung, or it might only be slow and will resume later. Dumping the stack trace for debugging purposes: [237917.974748] LNet: Skipped 2 previous similar messages [237917.979913] Pid: 68041, comm: ll_ost_io00_053 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [237917.990726] Call Trace: [237917.993304] [] wait_transaction_locked+0x85/0xd0 [jbd2] [237918.000304] [] add_transaction_credits+0x268/0x2f0 [jbd2] [237918.007520] [] start_this_handle+0x1a1/0x430 [jbd2] [237918.014166] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [237918.020918] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [237918.028459] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [237918.035563] [] ofd_commitrw_write+0xf1e/0x1d40 [ofd] [237918.042325] [] ofd_commitrw+0x48c/0x9e0 [ofd] [237918.048452] [] tgt_brw_write+0x10cb/0x1cf0 [ptlrpc] [237918.055153] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [237918.062197] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [237918.070008] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [237918.076475] [] kthread+0xd1/0xe0 [237918.081484] [] ret_from_fork_nospec_begin+0xe/0x21 [237918.088079] [] 0xffffffffffffffff [237918.093185] LustreError: dumping log to /tmp/lustre-log.1576138938.68041 [237920.005760] Pid: 67941, comm: ll_ost_io01_029 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [237920.016543] Call Trace: [237920.019118] [] wait_transaction_locked+0x85/0xd0 [jbd2] [237920.026145] [] add_transaction_credits+0x268/0x2f0 [jbd2] [237920.033333] [] start_this_handle+0x1a1/0x430 [jbd2] [237920.040001] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [237920.046772] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [237920.054298] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [237920.061400] [] ofd_trans_start+0x75/0xf0 [ofd] [237920.067637] [] ofd_object_punch+0x73d/0xd30 [ofd] [237920.074111] [] ofd_punch_hdl+0x493/0xa30 [ofd] [237920.080340] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [237920.087405] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [237920.095229] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [237920.101681] [] kthread+0xd1/0xe0 [237920.106680] [] ret_from_fork_nospec_begin+0xe/0x21 [237920.113267] [] 0xffffffffffffffff [237920.118406] LustreError: dumping log to /tmp/lustre-log.1576138940.67941 [237928.197929] LustreError: dumping log to /tmp/lustre-log.1576138948.67750 [238008.071530] LustreError: dumping log to /tmp/lustre-log.1576139028.112488 [238011.731862] Lustre: fir-OST0056: deleting orphan objects from 0x1880000400:11777327 to 0x1880000400:11777377 [238014.215651] LustreError: dumping log to /tmp/lustre-log.1576139034.112522 [238069.512760] LustreError: dumping log to /tmp/lustre-log.1576139089.67896 [238075.656890] LustreError: dumping log to /tmp/lustre-log.1576139096.67930 [238087.489627] Lustre: fir-OST0056: Client dffc1cc0-26ab-9b78-f3a0-8d9b8d410b62 (at 10.9.108.46@o2ib4) reconnecting [238087.499887] Lustre: Skipped 17 
previous similar messages [238142.343776] Lustre: fir-OST0056: Export ffff891d99820800 already connecting from 10.8.23.26@o2ib6 [238142.352760] Lustre: Skipped 26 previous similar messages [238153.610462] Lustre: 27127:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff88f2acc5c050 x1650958603467184/t0(0) o4->717fa73e-8071-a76f-931e-8957a8ca32aa@10.9.101.41@o2ib4:424/0 lens 2056/448 e 2 to 0 dl 1576139179 ref 2 fl Interpret:/0/0 rc 0/0 [238153.639401] Lustre: 27127:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 21 previous similar messages [238155.530496] LustreError: dumping log to /tmp/lustre-log.1576139175.67974 [238159.626567] LustreError: dumping log to /tmp/lustre-log.1576139180.67949 [238173.612853] Lustre: fir-OST0056: Connection restored to bb7d080c-8ae8-f7ed-5d33-d34ca54d93de (at 10.9.108.19@o2ib4) [238173.623374] Lustre: Skipped 16 previous similar messages [238178.058942] LNet: Service thread pid 67807 was inactive for 763.33s. The thread might be hung, or it might only be slow and will resume later. Dumping the stack trace for debugging purposes: [238178.075968] LNet: Skipped 1 previous similar message [238178.081030] Pid: 67807, comm: ll_ost01_056 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [238178.091578] Call Trace: [238178.094150] [] wait_transaction_locked+0x85/0xd0 [jbd2] [238178.101149] [] add_transaction_credits+0x268/0x2f0 [jbd2] [238178.108346] [] start_this_handle+0x1a1/0x430 [jbd2] [238178.114996] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [238178.121761] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [238178.129290] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [238178.136394] [] ofd_trans_start+0x75/0xf0 [ofd] [238178.142611] [] ofd_destroy+0x5d0/0x960 [ofd] [238178.148665] [] ofd_destroy_by_fid+0x1f4/0x4a0 [ofd] [238178.155314] [] ofd_destroy_hdl+0x267/0x970 [ofd] [238178.161718] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [238178.168767] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [238178.176590] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [238178.183006] [] kthread+0xd1/0xe0 [238178.188023] [] ret_from_fork_nospec_begin+0xe/0x21 [238178.194586] [] 0xffffffffffffffff [238178.199699] LustreError: dumping log to /tmp/lustre-log.1576139198.67807 [238221.067805] Pid: 87152, comm: ll_ost_io01_067 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [238221.078590] Call Trace: [238221.081166] [] wait_transaction_locked+0x85/0xd0 [jbd2] [238221.088162] [] add_transaction_credits+0x268/0x2f0 [jbd2] [238221.095345] [] start_this_handle+0x1a1/0x430 [jbd2] [238221.101992] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [238221.108728] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [238221.116254] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [238221.123365] [] ofd_trans_start+0x75/0xf0 [ofd] [238221.129581] [] ofd_commitrw_write+0xa31/0x1d40 [ofd] [238221.136316] [] ofd_commitrw+0x48c/0x9e0 [ofd] [238221.142458] [] tgt_brw_write+0x10cb/0x1cf0 [ptlrpc] [238221.149174] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [238221.156206] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [238221.164013] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [238221.170453] [] kthread+0xd1/0xe0 [238221.175456] [] ret_from_fork_nospec_begin+0xe/0x21 [238221.182057] [] 0xffffffffffffffff [238221.187166] LustreError: dumping log to /tmp/lustre-log.1576139241.87152 [238223.115840] Pid: 67714, comm: ll_ost_io03_004 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 
[238223.126625] Call Trace: [238223.129215] [] wait_transaction_locked+0x85/0xd0 [jbd2] [238223.136213] [] add_transaction_credits+0x268/0x2f0 [jbd2] [238223.143412] [] start_this_handle+0x1a1/0x430 [jbd2] [238223.150064] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [238223.156814] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [238223.164354] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [238223.171463] [] ofd_commitrw_write+0xf1e/0x1d40 [ofd] [238223.178227] [] ofd_commitrw+0x48c/0x9e0 [ofd] [238223.184378] [] tgt_brw_write+0x10cb/0x1cf0 [ptlrpc] [238223.191099] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [238223.198145] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [238223.205958] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [238223.212387] [] kthread+0xd1/0xe0 [238223.217389] [] ret_from_fork_nospec_begin+0xe/0x21 [238223.224000] [] 0xffffffffffffffff [238223.229100] LustreError: dumping log to /tmp/lustre-log.1576139243.67714 [238225.163886] Pid: 67657, comm: ll_ost02_023 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [238225.174436] Call Trace: [238225.177012] [] wait_transaction_locked+0x85/0xd0 [jbd2] [238225.184019] [] add_transaction_credits+0x268/0x2f0 [jbd2] [238225.191282] [] start_this_handle+0x1a1/0x430 [jbd2] [238225.197939] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [238225.204725] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [238225.212282] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [238225.219407] [] ofd_trans_start+0x75/0xf0 [ofd] [238225.225636] [] ofd_destroy+0x5d0/0x960 [ofd] [238225.231696] [] ofd_destroy_by_fid+0x1f4/0x4a0 [ofd] [238225.238360] [] ofd_destroy_hdl+0x267/0x970 [ofd] [238225.244768] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [238225.251844] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [238225.259685] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [238225.266158] [] kthread+0xd1/0xe0 [238225.271178] [] ret_from_fork_nospec_begin+0xe/0x21 [238225.277742] [] 0xffffffffffffffff [238225.282870] LustreError: dumping log to /tmp/lustre-log.1576139245.67657 [238372.622843] LNet: Service thread pid 67943 was inactive for 912.97s. The thread might be hung, or it might only be slow and will resume later. Dumping the stack trace for debugging purposes: [238372.639862] LNet: Skipped 3 previous similar messages [238372.645009] Pid: 67943, comm: ll_ost_io01_030 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [238372.655808] Call Trace: [238372.658383] [] wait_transaction_locked+0x85/0xd0 [jbd2] [238372.665382] [] add_transaction_credits+0x268/0x2f0 [jbd2] [238372.672563] [] start_this_handle+0x1a1/0x430 [jbd2] [238372.679214] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [238372.685964] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [238372.693489] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [238372.700596] [] ofd_trans_start+0x75/0xf0 [ofd] [238372.706810] [] ofd_object_punch+0x73d/0xd30 [ofd] [238372.713299] [] ofd_punch_hdl+0x493/0xa30 [ofd] [238372.719514] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [238372.726575] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [238372.734379] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [238372.740806] [] kthread+0xd1/0xe0 [238372.745809] [] ret_from_fork_nospec_begin+0xe/0x21 [238372.752386] [] 0xffffffffffffffff [238372.757485] LustreError: dumping log to /tmp/lustre-log.1576139393.67943 [238450.448399] LNet: Service thread pid 67762 was inactive for 964.37s. Watchdog stack traces are limited to 3 per 300 seconds, skipping this one. 
[238450.461346] LNet: Skipped 13 previous similar messages [238450.466583] LustreError: dumping log to /tmp/lustre-log.1576139470.67762 [238458.640557] LustreError: dumping log to /tmp/lustre-log.1576139479.67745 [238520.081793] LNet: Service thread pid 66128 was inactive for 1015.91s. The thread might be hung, or it might only be slow and will resume later. Dumping the stack trace for debugging purposes: [238520.098899] Pid: 66128, comm: ll_ost_io01_001 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [238520.109680] Call Trace: [238520.112254] [] wait_transaction_locked+0x85/0xd0 [jbd2] [238520.119251] [] add_transaction_credits+0x268/0x2f0 [jbd2] [238520.126436] [] start_this_handle+0x1a1/0x430 [jbd2] [238520.133085] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [238520.139835] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [238520.147359] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [238520.154463] [] ofd_trans_start+0x75/0xf0 [ofd] [238520.160680] [] ofd_commitrw_write+0xa31/0x1d40 [ofd] [238520.167431] [] ofd_commitrw+0x48c/0x9e0 [ofd] [238520.173560] [] tgt_brw_write+0x10cb/0x1cf0 [ptlrpc] [238520.180275] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [238520.187319] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [238520.195137] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [238520.201552] [] kthread+0xd1/0xe0 [238520.206553] [] ret_from_fork_nospec_begin+0xe/0x21 [238520.213122] [] 0xffffffffffffffff [238520.218222] LustreError: dumping log to /tmp/lustre-log.1576139540.66128 [238528.273957] Pid: 112533, comm: ll_ost02_080 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [238528.284564] Call Trace: [238528.287134] [] wait_transaction_locked+0x85/0xd0 [jbd2] [238528.294131] [] add_transaction_credits+0x268/0x2f0 [jbd2] [238528.301329] [] start_this_handle+0x1a1/0x430 [jbd2] [238528.307979] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [238528.314729] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [238528.322255] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [238528.329349] [] tgt_client_data_update+0x303/0x5e0 [ptlrpc] [238528.336660] [] tgt_client_new+0x41b/0x610 [ptlrpc] [238528.343270] [] ofd_obd_connect+0x3a3/0x4c0 [ofd] [238528.349658] [] target_handle_connect+0xecb/0x2b10 [ptlrpc] [238528.356953] [] tgt_request_handle+0x50a/0x1580 [ptlrpc] [238528.363985] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [238528.371814] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [238528.378225] [] kthread+0xd1/0xe0 [238528.383241] [] ret_from_fork_nospec_begin+0xe/0x21 [238528.389805] [] 0xffffffffffffffff [238528.394917] LustreError: dumping log to /tmp/lustre-log.1576139548.112533 [238540.562197] Pid: 67868, comm: ll_ost01_068 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [238540.572718] Call Trace: [238540.575287] [] wait_transaction_locked+0x85/0xd0 [jbd2] [238540.582297] [] add_transaction_credits+0x268/0x2f0 [jbd2] [238540.589492] [] start_this_handle+0x1a1/0x430 [jbd2] [238540.596161] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [238540.602912] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [238540.610438] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [238540.617541] [] tgt_client_data_update+0x303/0x5e0 [ptlrpc] [238540.624850] [] tgt_client_new+0x41b/0x610 [ptlrpc] [238540.631461] [] ofd_obd_connect+0x3a3/0x4c0 [ofd] [238540.637849] [] target_handle_connect+0xecb/0x2b10 [ptlrpc] [238540.645143] [] tgt_request_handle+0x50a/0x1580 [ptlrpc] [238540.652178] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [238540.660000] [] 
ptlrpc_main+0xb2c/0x1460 [ptlrpc] [238540.666434] [] kthread+0xd1/0xe0 [238540.671448] [] ret_from_fork_nospec_begin+0xe/0x21 [238540.678013] [] 0xffffffffffffffff [238540.683124] LustreError: dumping log to /tmp/lustre-log.1576139561.67868 [238544.658296] Pid: 67671, comm: ll_ost00_024 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [238544.668816] Call Trace: [238544.671388] [] wait_transaction_locked+0x85/0xd0 [jbd2] [238544.678391] [] add_transaction_credits+0x268/0x2f0 [jbd2] [238544.685574] [] start_this_handle+0x1a1/0x430 [jbd2] [238544.692225] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [238544.698987] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [238544.706532] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [238544.713636] [] tgt_client_data_update+0x303/0x5e0 [ptlrpc] [238544.720941] [] tgt_client_new+0x41b/0x610 [ptlrpc] [238544.727567] [] ofd_obd_connect+0x3a3/0x4c0 [ofd] [238544.733956] [] target_handle_connect+0xecb/0x2b10 [ptlrpc] [238544.741259] [] tgt_request_handle+0x50a/0x1580 [ptlrpc] [238544.748283] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [238544.756097] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [238544.762514] [] kthread+0xd1/0xe0 [238544.767543] [] ret_from_fork_nospec_begin+0xe/0x21 [238544.774117] [] 0xffffffffffffffff [238544.779231] LustreError: dumping log to /tmp/lustre-log.1576139565.67671 [238565.138725] Pid: 26830, comm: ll_ost_io03_048 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [238565.149503] Call Trace: [238565.152072] [] wait_transaction_locked+0x85/0xd0 [jbd2] [238565.159070] [] add_transaction_credits+0x268/0x2f0 [jbd2] [238565.166256] [] start_this_handle+0x1a1/0x430 [jbd2] [238565.172919] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [238565.179669] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [238565.187195] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [238565.194299] [] ofd_trans_start+0x75/0xf0 [ofd] [238565.200515] [] ofd_object_punch+0x73d/0xd30 [ofd] [238565.207004] [] ofd_punch_hdl+0x493/0xa30 [ofd] [238565.213221] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [238565.220281] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [238565.228085] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [238565.234513] [] kthread+0xd1/0xe0 [238565.239520] [] ret_from_fork_nospec_begin+0xe/0x21 [238565.246118] [] 0xffffffffffffffff [238565.251220] LustreError: dumping log to /tmp/lustre-log.1576139585.26830 [238610.195609] LustreError: dumping log to /tmp/lustre-log.1576139630.112514 [238610.850728] Lustre: fir-OST0056: Client 8c3ccd99-dc20-24d2-79f4-f8d2c0329cfb (at 10.8.24.19@o2ib6) reconnecting [238610.860907] Lustre: Skipped 92 previous similar messages [238666.472729] Lustre: 112543:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/-150), not sending early reply req@ffff88f2af871850 x1652161335594432/t0(0) o19->ae1d0080-04fa-5436-e145-ffdf0db9990d@10.0.10.3@o2ib7:181/0 lens 336/336 e 0 to 0 dl 1576139691 ref 2 fl Interpret:/0/0 rc 0/0 [238666.501801] Lustre: 112543:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 192 previous similar messages [238675.732910] LustreError: dumping log to /tmp/lustre-log.1576139696.68007 [238679.828995] LustreError: dumping log to /tmp/lustre-log.1576139700.67771 [238682.395663] Lustre: fir-OST0056: Export ffff88fc8fff5800 already connecting from 10.8.22.17@o2ib6 [238682.404631] Lustre: Skipped 52 previous similar messages [238683.925084] LustreError: dumping log to /tmp/lustre-log.1576139704.67958 [238688.549482] Lustre: fir-OST0056: 
Connection restored to dffc1cc0-26ab-9b78-f3a0-8d9b8d410b62 (at 10.9.108.46@o2ib4) [238688.560020] Lustre: Skipped 91 previous similar messages [238745.366307] LustreError: dumping log to /tmp/lustre-log.1576139765.67936 [238749.462393] LustreError: dumping log to /tmp/lustre-log.1576139769.67813 [238753.558481] LustreError: dumping log to /tmp/lustre-log.1576139773.68021 [238757.654552] LustreError: dumping log to /tmp/lustre-log.1576139778.67946 [238761.750633] LustreError: dumping log to /tmp/lustre-log.1576139782.67662 [238765.846715] LustreError: dumping log to /tmp/lustre-log.1576139786.112537 [238806.807531] LustreError: dumping log to /tmp/lustre-log.1576139827.67957 [238831.384032] LNet: Service thread pid 68010 was inactive for 1201.76s. The thread might be hung, or it might only be slow and will resume later. Dumping the stack trace for debugging purposes: [238831.401160] LNet: Skipped 4 previous similar messages [238831.406327] Pid: 68010, comm: ll_ost_io00_036 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [238831.417115] Call Trace: [238831.419670] [] call_rwsem_down_read_failed+0x18/0x30 [238831.426409] [] osd_read_lock+0x5c/0xe0 [osd_ldiskfs] [238831.433181] [] ofd_preprw_write.isra.31+0xd3/0xea0 [ofd] [238831.440274] [] ofd_preprw+0x422/0x11b0 [ofd] [238831.446328] [] tgt_brw_write+0xc7c/0x1cf0 [ptlrpc] [238831.452947] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [238831.459990] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [238831.467792] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [238831.474222] [] kthread+0xd1/0xe0 [238831.479224] [] ret_from_fork_nospec_begin+0xe/0x21 [238831.485800] [] 0xffffffffffffffff [238831.490910] LustreError: dumping log to /tmp/lustre-log.1576139851.68010 [238831.498312] Pid: 67948, comm: ll_ost_io03_026 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [238831.509122] Call Trace: [238831.511670] [] __lock_page+0x74/0x90 [238831.517017] [] __find_lock_page+0x54/0x70 [238831.522805] [] find_or_create_page+0x34/0xa0 [238831.528841] [] osd_bufs_get+0x413/0x870 [osd_ldiskfs] [238831.535684] [] ofd_preprw_write.isra.31+0x476/0xea0 [ofd] [238831.542856] [] ofd_preprw+0x422/0x11b0 [ofd] [238831.548910] [] tgt_brw_write+0xc7c/0x1cf0 [ptlrpc] [238831.555499] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [238831.562550] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [238831.570346] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [238831.576776] [] kthread+0xd1/0xe0 [238831.581769] [] ret_from_fork_nospec_begin+0xe/0x21 [238831.588337] [] 0xffffffffffffffff [238831.593430] Pid: 68051, comm: ll_ost_io01_063 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [238831.604226] Call Trace: [238831.606770] [] __lock_page+0x74/0x90 [238831.612113] [] __find_lock_page+0x54/0x70 [238831.617900] [] find_or_create_page+0x34/0xa0 [238831.623950] [] osd_bufs_get+0x413/0x870 [osd_ldiskfs] [238831.630785] [] ofd_preprw_write.isra.31+0x476/0xea0 [ofd] [238831.637957] [] ofd_preprw+0x422/0x11b0 [ofd] [238831.644016] [] tgt_brw_write+0xc7c/0x1cf0 [ptlrpc] [238831.650602] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [238831.657639] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [238831.665438] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [238831.671867] [] kthread+0xd1/0xe0 [238831.676863] [] ret_from_fork_nospec_begin+0xe/0x21 [238831.683430] [] 0xffffffffffffffff [238831.688529] Pid: 26875, comm: ll_ost_io03_050 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [238831.699317] Call Trace: 
[238831.701860] [] call_rwsem_down_read_failed+0x18/0x30 [238831.708584] [] osd_read_lock+0x5c/0xe0 [osd_ldiskfs] [238831.715335] [] ofd_preprw_write.isra.31+0xd3/0xea0 [ofd] [238831.722415] [] ofd_preprw+0x422/0x11b0 [ofd] [238831.728474] [] tgt_brw_write+0xc7c/0x1cf0 [ptlrpc] [238831.735060] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [238831.742095] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [238831.749898] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [238831.756328] [] kthread+0xd1/0xe0 [238831.761321] [] ret_from_fork_nospec_begin+0xe/0x21 [238831.767887] [] 0xffffffffffffffff [238831.772981] Pid: 67980, comm: ll_ost_io00_027 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [238831.783777] Call Trace: [238831.786323] [] __lock_page+0x74/0x90 [238831.791664] [] __find_lock_page+0x54/0x70 [238831.797452] [] find_or_create_page+0x34/0xa0 [238831.803485] [] osd_bufs_get+0x413/0x870 [osd_ldiskfs] [238831.810322] [] ofd_preprw_write.isra.31+0x476/0xea0 [ofd] [238831.817507] [] ofd_preprw+0x422/0x11b0 [ofd] [238831.823563] [] tgt_brw_write+0xc7c/0x1cf0 [ptlrpc] [238831.830153] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [238831.837187] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [238831.844990] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [238831.851418] [] kthread+0xd1/0xe0 [238831.856414] [] ret_from_fork_nospec_begin+0xe/0x21 [238831.862967] [] 0xffffffffffffffff [238835.480122] LustreError: dumping log to /tmp/lustre-log.1576139855.26895 [238839.576203] LustreError: dumping log to /tmp/lustre-log.1576139859.26898 [238843.672276] LustreError: dumping log to /tmp/lustre-log.1576139864.67947 [238847.768367] LustreError: dumping log to /tmp/lustre-log.1576139868.26831 [238851.864439] LustreError: dumping log to /tmp/lustre-log.1576139872.68047 [238855.960522] LustreError: dumping log to /tmp/lustre-log.1576139876.67817 [238860.056602] LustreError: dumping log to /tmp/lustre-log.1576139880.67942 [238872.344849] LustreError: dumping log to /tmp/lustre-log.1576139892.26937 [238876.440930] LustreError: dumping log to /tmp/lustre-log.1576139896.67966 [238880.537017] LustreError: dumping log to /tmp/lustre-log.1576139900.26919 [238884.633101] LustreError: dumping log to /tmp/lustre-log.1576139905.26943 [238888.729178] LustreError: dumping log to /tmp/lustre-log.1576139909.26936 [238896.921342] LustreError: dumping log to /tmp/lustre-log.1576139917.26948 [238901.017427] LustreError: dumping log to /tmp/lustre-log.1576139921.67987 [238905.113505] LustreError: dumping log to /tmp/lustre-log.1576139925.26969 [238909.209608] LustreError: dumping log to /tmp/lustre-log.1576139929.26907 [238913.305672] LustreError: dumping log to /tmp/lustre-log.1576139933.113359 [238917.401754] LustreError: dumping log to /tmp/lustre-log.1576139937.26971 [238921.497840] LustreError: dumping log to /tmp/lustre-log.1576139941.26973 [238925.593923] LustreError: dumping log to /tmp/lustre-log.1576139946.67758 [238929.689999] LustreError: dumping log to /tmp/lustre-log.1576139950.67755 [238933.234077] LustreError: 67644:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576139653, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST0054_UUID lock: ffff88e87d178480/0x7066c9c190adca24 lrc: 3/0,1 mode: --/PW res: [0x1800000402:0x110c27:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 67644 timeout: 0 lvb_type: 0 [238937.882165] 
LustreError: dumping log to /tmp/lustre-log.1576139958.26985 [238941.978246] LustreError: dumping log to /tmp/lustre-log.1576139962.67881 [238946.074325] LustreError: dumping log to /tmp/lustre-log.1576139966.26952 [238950.170405] LustreError: dumping log to /tmp/lustre-log.1576139970.26989 [238954.266489] LustreError: dumping log to /tmp/lustre-log.1576139974.66132 [238970.650816] LustreError: dumping log to /tmp/lustre-log.1576139991.67834 [238978.842981] LustreError: dumping log to /tmp/lustre-log.1576139999.67698 [238982.939064] LustreError: dumping log to /tmp/lustre-log.1576140003.26966 [238987.035146] LustreError: dumping log to /tmp/lustre-log.1576140007.27043 [238991.131230] LustreError: dumping log to /tmp/lustre-log.1576140011.26987 [238995.227321] LustreError: dumping log to /tmp/lustre-log.1576140015.26997 [239011.611649] LustreError: dumping log to /tmp/lustre-log.1576140032.27049 [239015.707840] LustreError: dumping log to /tmp/lustre-log.1576140036.26916 [239019.803804] LustreError: dumping log to /tmp/lustre-log.1576140040.27044 [239023.899892] LustreError: dumping log to /tmp/lustre-log.1576140044.26918 [239027.995968] LustreError: dumping log to /tmp/lustre-log.1576140048.26944 [239040.284214] LustreError: dumping log to /tmp/lustre-log.1576140060.27021 [239048.476388] LustreError: dumping log to /tmp/lustre-log.1576140068.27004 [239072.928876] LustreError: 67683:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576139793, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST0054_UUID lock: ffff88fc261efbc0/0x7066c9c190add4e3 lrc: 3/0,1 mode: --/PW res: [0x1800000402:0x110c28:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 67683 timeout: 0 lvb_type: 0 [239073.052881] LNet: Service thread pid 26990 was inactive for 1201.65s. Watchdog stack traces are limited to 3 per 300 seconds, skipping this one. 
[239073.065932] LNet: Skipped 178 previous similar messages [239073.071248] LustreError: dumping log to /tmp/lustre-log.1576140093.26990 [239077.148952] LustreError: dumping log to /tmp/lustre-log.1576140097.27090 [239118.109781] LustreError: dumping log to /tmp/lustre-log.1576140138.112535 [239122.205857] LustreError: dumping log to /tmp/lustre-log.1576140142.27112 [239142.686281] Pid: 27079, comm: ll_ost_io03_076 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [239142.697061] Call Trace: [239142.699631] [] wait_transaction_locked+0x85/0xd0 [jbd2] [239142.706638] [] add_transaction_credits+0x268/0x2f0 [jbd2] [239142.713838] [] start_this_handle+0x1a1/0x430 [jbd2] [239142.720485] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [239142.727235] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [239142.734761] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [239142.741866] [] ofd_commitrw_write+0xf1e/0x1d40 [ofd] [239142.748611] [] ofd_commitrw+0x48c/0x9e0 [ofd] [239142.754752] [] tgt_brw_write+0x10cb/0x1cf0 [ptlrpc] [239142.761455] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [239142.768511] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [239142.776310] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [239142.782738] [] kthread+0xd1/0xe0 [239142.787742] [] ret_from_fork_nospec_begin+0xe/0x21 [239142.794317] [] 0xffffffffffffffff [239142.799419] LustreError: dumping log to /tmp/lustre-log.1576140163.27079 [239150.878428] Pid: 27113, comm: ll_ost_io03_080 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [239150.889206] Call Trace: [239150.891762] [] wait_transaction_locked+0x85/0xd0 [jbd2] [239150.898764] [] add_transaction_credits+0x268/0x2f0 [jbd2] [239150.905958] [] start_this_handle+0x1a1/0x430 [jbd2] [239150.912603] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [239150.919352] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [239150.926879] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [239150.933990] [] ofd_commitrw_write+0xf1e/0x1d40 [ofd] [239150.940728] [] ofd_commitrw+0x48c/0x9e0 [ofd] [239150.946884] [] tgt_brw_write+0x10cb/0x1cf0 [ptlrpc] [239150.953571] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [239150.960606] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [239150.968427] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [239150.974839] [] kthread+0xd1/0xe0 [239150.979882] [] ret_from_fork_nospec_begin+0xe/0x21 [239150.986438] [] 0xffffffffffffffff [239150.991541] LustreError: dumping log to /tmp/lustre-log.1576140171.27113 [239187.743171] Pid: 27093, comm: ll_ost_io02_095 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [239187.753955] Call Trace: [239187.756530] [] wait_transaction_locked+0x85/0xd0 [jbd2] [239187.763530] [] add_transaction_credits+0x268/0x2f0 [jbd2] [239187.770712] [] start_this_handle+0x1a1/0x430 [jbd2] [239187.777374] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [239187.784126] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [239187.791652] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [239187.798756] [] ofd_trans_start+0x75/0xf0 [ofd] [239187.804972] [] ofd_commitrw_write+0xa31/0x1d40 [ofd] [239187.811721] [] ofd_commitrw+0x48c/0x9e0 [ofd] [239187.817851] [] tgt_brw_write+0x10cb/0x1cf0 [ptlrpc] [239187.824564] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [239187.831588] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [239187.839418] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [239187.845838] [] kthread+0xd1/0xe0 [239187.850851] [] ret_from_fork_nospec_begin+0xe/0x21 [239187.857418] [] 0xffffffffffffffff [239187.862528] 
LustreError: dumping log to /tmp/lustre-log.1576140208.27093 [239191.839250] Pid: 27066, comm: ll_ost_io03_073 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [239191.850035] Call Trace: [239191.852596] [] __lock_page+0x74/0x90 [239191.857946] [] __find_lock_page+0x54/0x70 [239191.863747] [] find_or_create_page+0x34/0xa0 [239191.869808] [] osd_bufs_get+0x413/0x870 [osd_ldiskfs] [239191.876656] [] ofd_preprw_write.isra.31+0x476/0xea0 [ofd] [239191.883824] [] ofd_preprw+0x422/0x11b0 [ofd] [239191.889878] [] tgt_brw_write+0xc7c/0x1cf0 [ptlrpc] [239191.896494] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [239191.903529] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [239191.911333] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [239191.917762] [] kthread+0xd1/0xe0 [239191.922765] [] ret_from_fork_nospec_begin+0xe/0x21 [239191.929341] [] 0xffffffffffffffff [239191.934464] LustreError: dumping log to /tmp/lustre-log.1576140212.27066 [239191.941836] Pid: 27089, comm: ll_ost_io00_088 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [239191.952648] Call Trace: [239191.955194] [] __lock_page+0x74/0x90 [239191.960543] [] __find_lock_page+0x54/0x70 [239191.966329] [] find_or_create_page+0x34/0xa0 [239191.972373] [] osd_bufs_get+0x413/0x870 [osd_ldiskfs] [239191.979210] [] ofd_preprw_write.isra.31+0x476/0xea0 [ofd] [239191.986379] [] ofd_preprw+0x422/0x11b0 [ofd] [239191.992435] [] tgt_brw_write+0xc7c/0x1cf0 [ptlrpc] [239191.999039] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [239192.006076] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [239192.013883] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [239192.020316] [] kthread+0xd1/0xe0 [239192.025309] [] ret_from_fork_nospec_begin+0xe/0x21 [239192.031887] [] 0xffffffffffffffff [239211.879472] Lustre: fir-OST0056: Client 8c3ccd99-dc20-24d2-79f4-f8d2c0329cfb (at 10.8.24.19@o2ib6) reconnecting [239211.889654] Lustre: Skipped 241 previous similar messages [239212.319668] LustreError: dumping log to /tmp/lustre-log.1576140232.27054 [239216.415741] LustreError: dumping log to /tmp/lustre-log.1576140236.27070 [239252.857485] LustreError: 67592:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576139973, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST0054_UUID lock: ffff88f028303a80/0x7066c9c190addd87 lrc: 3/0,1 mode: --/PW res: [0x1800000401:0xb3d37e:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 67592 timeout: 0 lvb_type: 0 [239265.568733] LustreError: dumping log to /tmp/lustre-log.1576140285.27053 [239266.586759] Lustre: 27252:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff8912fbf3d050 x1651926262561472/t0(0) o4->360200e4-9bb2-dc52-b96e-5f48834c2e13@10.8.27.21@o2ib6:26/0 lens 488/0 e 1 to 0 dl 1576140291 ref 2 fl New:/0/ffffffff rc 0/-1 [239266.615399] Lustre: 27252:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 530 previous similar messages [239284.519561] Lustre: fir-OST0056: Export ffff88fc8fff5800 already connecting from 10.8.22.17@o2ib6 [239284.528524] Lustre: Skipped 59 previous similar messages [239288.861084] Lustre: fir-OST0058: Connection restored to c93954af-761b-f1eb-f651-9881322a7a72 (at 10.9.108.51@o2ib4) [239288.871607] Lustre: Skipped 286 previous similar messages [239290.145220] LustreError: dumping log to /tmp/lustre-log.1576140310.27073 
[239310.625632] LustreError: dumping log to /tmp/lustre-log.1576140331.27186 [239314.721711] LustreError: dumping log to /tmp/lustre-log.1576140335.27126 [239328.033980] LustreError: dumping log to /tmp/lustre-log.1576140348.67696 [239356.626551] LustreError: 112566:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576140077, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST005a_UUID lock: ffff8905d3ecc5c0/0x7066c9c190ade131 lrc: 3/0,1 mode: --/PW res: [0x1980000401:0xb4b138:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 112566 timeout: 0 lvb_type: 0 [239363.874698] LustreError: dumping log to /tmp/lustre-log.1576140384.27076 [239376.162942] LustreError: dumping log to /tmp/lustre-log.1576140396.26988 [239388.451179] LustreError: dumping log to /tmp/lustre-log.1576140408.27185 [239389.256104] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:1117226 to 0x1800000402:1117281 [239404.835509] LustreError: dumping log to /tmp/lustre-log.1576140425.27219 [239427.358966] LustreError: 67696:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576140147, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST005a_UUID lock: ffff88fcfae90000/0x7066c9c190ade2a4 lrc: 3/0,1 mode: --/PW res: [0x1980000402:0x2f245a:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 67696 timeout: 0 lvb_type: 0 [239429.412004] LustreError: dumping log to /tmp/lustre-log.1576140449.27226 [239433.508086] LustreError: dumping log to /tmp/lustre-log.1576140453.27075 [239437.604174] LustreError: dumping log to /tmp/lustre-log.1576140458.27254 [239441.700246] LustreError: dumping log to /tmp/lustre-log.1576140462.27259 [239445.796336] LNet: Service thread pid 27255 was inactive for 1202.86s. The thread might be hung, or it might only be slow and will resume later. 
Dumping the stack trace for debugging purposes: [239445.813444] LNet: Skipped 9 previous similar messages [239445.818595] Pid: 27255, comm: ll_ost_io00_103 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [239445.829372] Call Trace: [239445.831922] [] __lock_page+0x74/0x90 [239445.837275] [] __find_lock_page+0x54/0x70 [239445.843064] [] find_or_create_page+0x34/0xa0 [239445.849096] [] osd_bufs_get+0x413/0x870 [osd_ldiskfs] [239445.855943] [] ofd_preprw_write.isra.31+0x476/0xea0 [ofd] [239445.863114] [] ofd_preprw+0x422/0x11b0 [ofd] [239445.869166] [] tgt_brw_write+0xc7c/0x1cf0 [ptlrpc] [239445.875774] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [239445.882816] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [239445.890620] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [239445.897049] [] kthread+0xd1/0xe0 [239445.902053] [] ret_from_fork_nospec_begin+0xe/0x21 [239445.908636] [] 0xffffffffffffffff [239445.913744] LustreError: dumping log to /tmp/lustre-log.1576140466.27255 [239449.892412] Pid: 27231, comm: ll_ost_io01_097 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [239449.903196] Call Trace: [239449.905748] [] __lock_page+0x74/0x90 [239449.911099] [] __find_lock_page+0x54/0x70 [239449.916898] [] find_or_create_page+0x34/0xa0 [239449.922931] [] osd_bufs_get+0x413/0x870 [osd_ldiskfs] [239449.929775] [] ofd_preprw_write.isra.31+0x476/0xea0 [ofd] [239449.936958] [] ofd_preprw+0x422/0x11b0 [ofd] [239449.943015] [] tgt_brw_write+0xc7c/0x1cf0 [ptlrpc] [239449.949633] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [239449.956667] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [239449.964469] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [239449.970900] [] kthread+0xd1/0xe0 [239449.975903] [] ret_from_fork_nospec_begin+0xe/0x21 [239449.982477] [] 0xffffffffffffffff [239449.987588] LustreError: dumping log to /tmp/lustre-log.1576140470.27231 [239449.994976] Pid: 27258, comm: ll_ost_io00_106 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [239450.005769] Call Trace: [239450.008313] [] __lock_page+0x74/0x90 [239450.013653] [] __find_lock_page+0x54/0x70 [239450.019440] [] find_or_create_page+0x34/0xa0 [239450.025477] [] osd_bufs_get+0x413/0x870 [osd_ldiskfs] [239450.032320] [] ofd_preprw_write.isra.31+0x476/0xea0 [ofd] [239450.039490] [] ofd_preprw+0x422/0x11b0 [ofd] [239450.045546] [] tgt_brw_write+0xc7c/0x1cf0 [ptlrpc] [239450.052145] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [239450.059180] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [239450.067000] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [239450.073428] [] kthread+0xd1/0xe0 [239450.078422] [] ret_from_fork_nospec_begin+0xe/0x21 [239450.084989] [] 0xffffffffffffffff [239450.090081] Pid: 27071, comm: ll_ost_io00_083 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [239450.100868] Call Trace: [239450.103413] [] __lock_page+0x74/0x90 [239450.108755] [] __find_lock_page+0x54/0x70 [239450.114527] [] find_or_create_page+0x34/0xa0 [239450.120583] [] osd_bufs_get+0x413/0x870 [osd_ldiskfs] [239450.127437] [] ofd_preprw_write.isra.31+0x476/0xea0 [ofd] [239450.134623] [] ofd_preprw+0x422/0x11b0 [ofd] [239450.140667] [] tgt_brw_write+0xc7c/0x1cf0 [ptlrpc] [239450.147270] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [239450.154293] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [239450.162105] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [239450.168523] [] kthread+0xd1/0xe0 [239450.173529] [] ret_from_fork_nospec_begin+0xe/0x21 [239450.180084] [] 0xffffffffffffffff 
[239450.185189] Pid: 27072, comm: ll_ost_io03_074 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [239450.195977] Call Trace: [239450.198525] [] __lock_page+0x74/0x90 [239450.203867] [] __find_lock_page+0x54/0x70 [239450.209653] [] find_or_create_page+0x34/0xa0 [239450.215688] [] osd_bufs_get+0x413/0x870 [osd_ldiskfs] [239450.222524] [] ofd_preprw_write.isra.31+0x476/0xea0 [ofd] [239450.229694] [] ofd_preprw+0x422/0x11b0 [ofd] [239450.235748] [] tgt_brw_write+0xc7c/0x1cf0 [ptlrpc] [239450.242338] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [239450.249373] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [239450.257191] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [239450.263622] [] kthread+0xd1/0xe0 [239450.268617] [] ret_from_fork_nospec_begin+0xe/0x21 [239450.275183] [] 0xffffffffffffffff [239453.988496] LustreError: dumping log to /tmp/lustre-log.1576140474.27250 [239457.037560] LustreError: 67629:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576140177, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST0058_UUID lock: ffff890987068fc0/0x7066c9c190ade472 lrc: 3/0,1 mode: --/PW res: [0x1900000401:0xb402ed:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 67629 timeout: 0 lvb_type: 0 [239458.084571] LustreError: dumping log to /tmp/lustre-log.1576140478.27279 [239462.180656] LustreError: dumping log to /tmp/lustre-log.1576140482.27263 [239470.372840] LustreError: dumping log to /tmp/lustre-log.1576140490.27260 [239478.564989] LustreError: dumping log to /tmp/lustre-log.1576140498.27264 [239486.757152] LustreError: dumping log to /tmp/lustre-log.1576140507.27275 [239490.853236] LustreError: dumping log to /tmp/lustre-log.1576140511.27261 [239494.949315] LustreError: dumping log to /tmp/lustre-log.1576140515.27225 [239503.141478] LustreError: dumping log to /tmp/lustre-log.1576140523.27278 [239507.237556] LustreError: dumping log to /tmp/lustre-log.1576140527.27310 [239511.333635] LustreError: dumping log to /tmp/lustre-log.1576140531.27312 [239515.429722] LustreError: dumping log to /tmp/lustre-log.1576140535.27085 [239519.525802] LustreError: dumping log to /tmp/lustre-log.1576140539.27272 [239523.621887] LustreError: dumping log to /tmp/lustre-log.1576140544.27232 [239527.717968] LustreError: dumping log to /tmp/lustre-log.1576140548.27298 [239531.814059] LustreError: dumping log to /tmp/lustre-log.1576140552.27314 [239552.294461] LustreError: dumping log to /tmp/lustre-log.1576140572.27324 [239556.390543] LustreError: dumping log to /tmp/lustre-log.1576140576.27322 [239572.671876] LustreError: 112528:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576140293, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST005e_UUID lock: ffff890020f49440/0x7066c9c190ade74a lrc: 3/0,1 mode: --/PW res: [0x1a80000401:0x111eb5:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 112528 timeout: 0 lvb_type: 0 [239589.159211] LustreError: dumping log to /tmp/lustre-log.1576140609.67861 [239597.351356] LustreError: dumping log to /tmp/lustre-log.1576140617.27336 [239601.447451] LustreError: dumping log to /tmp/lustre-log.1576140621.112525 [239605.543530] LustreError: dumping log to /tmp/lustre-log.1576140625.67921 [239613.735683] LustreError: 
dumping log to /tmp/lustre-log.1576140634.27339 [239621.927847] LustreError: dumping log to /tmp/lustre-log.1576140642.27357 [239675.176916] LNet: Service thread pid 67602 was inactive for 1201.88s. Watchdog stack traces are limited to 3 per 300 seconds, skipping this one. [239675.189983] LNet: Skipped 118 previous similar messages [239675.195311] LustreError: dumping log to /tmp/lustre-log.1576140695.67602 [239709.333597] Lustre: fir-OST0054: deleting orphan objects from 0x1800000401:11785095 to 0x1800000401:11785185 [239716.137736] LustreError: dumping log to /tmp/lustre-log.1576140736.67843 [239736.618140] LustreError: dumping log to /tmp/lustre-log.1576140757.67614 [239811.969335] Lustre: fir-OST005a: Client 882378af-0b41-73ee-5c10-5cc51464645c (at 10.9.108.22@o2ib4) reconnecting [239811.979602] Lustre: Skipped 300 previous similar messages [239813.287543] Lustre: fir-OST005a: deleting orphan objects from 0x1980000401:11841861 to 0x1980000401:11841953 [239815.080705] LustreError: 112521:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576140535, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST005e_UUID lock: ffff890fa9aae9c0/0x7066c9c190adf5ba lrc: 3/0,1 mode: --/PW res: [0x1a31e13:0x0:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 112521 timeout: 0 lvb_type: 0 [239834.924106] Pid: 67644, comm: ll_ost01_022 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [239834.934621] Call Trace: [239834.937173] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [239834.944215] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [239834.951514] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [239834.958160] [] ofd_destroy_hdl+0x267/0x970 [ofd] [239834.964565] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [239834.971602] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [239834.979418] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [239834.985834] [] kthread+0xd1/0xe0 [239834.990835] [] ret_from_fork_nospec_begin+0xe/0x21 [239834.997402] [] 0xffffffffffffffff [239835.002510] LustreError: dumping log to /tmp/lustre-log.1576140855.67644 [239866.600723] Lustre: 27257:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff88f2b0727850 x1650929716577312/t0(0) o10->68d8bbaa-aab1-a78f-323f-aaff4c375c30@10.8.7.4@o2ib6:626/0 lens 440/0 e 0 to 0 dl 1576140891 ref 2 fl New:/2/ffffffff rc 0/-1 [239866.629355] Lustre: 27257:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 710 previous similar messages [239871.788812] Pid: 66094, comm: ll_ost00_001 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [239871.799329] Call Trace: [239871.801896] [] wait_transaction_locked+0x85/0xd0 [jbd2] [239871.808894] [] add_transaction_credits+0x268/0x2f0 [jbd2] [239871.816096] [] start_this_handle+0x1a1/0x430 [jbd2] [239871.822745] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [239871.829494] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [239871.837020] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [239871.844115] [] dqget+0x3fa/0x450 [239871.849119] [] dquot_get_dqblk+0x14/0x1f0 [239871.854927] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [239871.862549] [] lquotactl_slv+0x27d/0x9d0 [lquota] [239871.869040] [] ofd_quotactl+0x13c/0x380 [ofd] [239871.875172] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [239871.882215] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] 
[239871.890017] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [239871.896446] [] kthread+0xd1/0xe0 [239871.901449] [] ret_from_fork_nospec_begin+0xe/0x21 [239871.908025] [] 0xffffffffffffffff [239871.913127] LustreError: dumping log to /tmp/lustre-log.1576140892.66094 [239875.884892] Pid: 67782, comm: ll_ost00_045 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [239875.895408] Call Trace: [239875.897977] [] wait_transaction_locked+0x85/0xd0 [jbd2] [239875.904976] [] add_transaction_credits+0x268/0x2f0 [jbd2] [239875.912158] [] start_this_handle+0x1a1/0x430 [jbd2] [239875.918808] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [239875.925557] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [239875.933082] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [239875.940179] [] dqget+0x3fa/0x450 [239875.945181] [] dquot_get_dqblk+0x14/0x1f0 [239875.950971] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [239875.958594] [] lquotactl_slv+0x27d/0x9d0 [lquota] [239875.965075] [] ofd_quotactl+0x13c/0x380 [ofd] [239875.971226] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [239875.978263] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [239875.986064] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [239875.992490] [] kthread+0xd1/0xe0 [239875.997487] [] ret_from_fork_nospec_begin+0xe/0x21 [239876.004062] [] 0xffffffffffffffff [239876.009155] LustreError: dumping log to /tmp/lustre-log.1576140896.67782 [239883.857031] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3089507 to 0x1980000402:3089569 [239886.643484] Lustre: fir-OST0056: Export ffff88fc8fff5800 already connecting from 10.8.22.17@o2ib6 [239886.652449] Lustre: Skipped 62 previous similar messages [239888.989286] Lustre: fir-OST005a: Connection restored to f5f4e1fb-09a1-cbb0-925c-f94d3727005b (at 10.9.101.44@o2ib4) [239888.999806] Lustre: Skipped 345 previous similar messages [239913.208552] Lustre: fir-OST0058: deleting orphan objects from 0x1900000401:11797241 to 0x1900000401:11797281 [239916.845706] Pid: 67901, comm: ll_ost01_070 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [239916.856230] Call Trace: [239916.858809] [] wait_transaction_locked+0x85/0xd0 [jbd2] [239916.865806] [] add_transaction_credits+0x268/0x2f0 [jbd2] [239916.872991] [] start_this_handle+0x1a1/0x430 [jbd2] [239916.879638] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [239916.886386] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [239916.893911] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [239916.901005] [] dqget+0x3fa/0x450 [239916.906011] [] dquot_get_dqblk+0x14/0x1f0 [239916.911796] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [239916.919410] [] lquotactl_slv+0x27d/0x9d0 [lquota] [239916.925908] [] ofd_quotactl+0x13c/0x380 [ofd] [239916.932051] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [239916.939115] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [239916.946919] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [239916.953348] [] kthread+0xd1/0xe0 [239916.958349] [] ret_from_fork_nospec_begin+0xe/0x21 [239916.964925] [] 0xffffffffffffffff [239916.970027] LustreError: dumping log to /tmp/lustre-log.1576140937.67901 [239920.941801] Pid: 67702, comm: ll_ost03_031 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [239920.952318] Call Trace: [239920.954889] [] wait_transaction_locked+0x85/0xd0 [jbd2] [239920.961886] [] add_transaction_credits+0x268/0x2f0 [jbd2] [239920.969068] [] start_this_handle+0x1a1/0x430 [jbd2] [239920.975717] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [239920.982468] [] 
__ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [239920.989991] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [239920.997096] [] dqget+0x3fa/0x450 [239921.002099] [] dquot_get_dqblk+0x14/0x1f0 [239921.007885] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [239921.015498] [] lquotactl_slv+0x27d/0x9d0 [lquota] [239921.021986] [] ofd_quotactl+0x13c/0x380 [ofd] [239921.028131] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [239921.035196] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [239921.042998] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [239921.049426] [] kthread+0xd1/0xe0 [239921.054430] [] ret_from_fork_nospec_begin+0xe/0x21 [239921.061006] [] 0xffffffffffffffff [239921.066106] LustreError: dumping log to /tmp/lustre-log.1576140941.67702 [239929.133944] LustreError: dumping log to /tmp/lustre-log.1576140949.67900 [239959.099553] LustreError: 67744:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576140679, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST005a_UUID lock: ffff88e670859680/0x7066c9c190ae10c5 lrc: 3/0,1 mode: --/PW res: [0x1980000400:0x112c96:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 67744 timeout: 0 lvb_type: 0 [239970.094745] LustreError: dumping log to /tmp/lustre-log.1576140990.67694 [239974.190821] LustreError: dumping log to /tmp/lustre-log.1576140994.67683 [240028.686582] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000401:1121977 to 0x1a80000401:1122017 [240145.394789] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:1117286 to 0x1800000402:1117313 [240154.418384] LNet: Service thread pid 67592 was inactive for 1201.54s. The thread might be hung, or it might only be slow and will resume later. 
Dumping the stack trace for debugging purposes: [240154.435493] LNet: Skipped 9 previous similar messages [240154.440636] Pid: 67592, comm: ll_ost02_009 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [240154.451171] Call Trace: [240154.453730] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [240154.460772] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [240154.468068] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [240154.474717] [] ofd_destroy_hdl+0x267/0x970 [ofd] [240154.481120] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [240154.488159] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [240154.495974] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [240154.502392] [] kthread+0xd1/0xe0 [240154.507404] [] ret_from_fork_nospec_begin+0xe/0x21 [240154.513970] [] 0xffffffffffffffff [240154.519101] LustreError: dumping log to /tmp/lustre-log.1576141174.67592 [240232.243942] Pid: 112494, comm: ll_ost00_075 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [240232.254546] Call Trace: [240232.257127] [] wait_transaction_locked+0x85/0xd0 [jbd2] [240232.264131] [] add_transaction_credits+0x268/0x2f0 [jbd2] [240232.271314] [] start_this_handle+0x1a1/0x430 [jbd2] [240232.277961] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [240232.284716] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [240232.292237] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [240232.299340] [] dqget+0x3fa/0x450 [240232.304366] [] dquot_get_dqblk+0x14/0x1f0 [240232.310150] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [240232.317777] [] lquotactl_slv+0x27d/0x9d0 [lquota] [240232.324253] [] ofd_quotactl+0x13c/0x380 [ofd] [240232.330395] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [240232.337433] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [240232.345257] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [240232.351674] [] kthread+0xd1/0xe0 [240232.356674] [] ret_from_fork_nospec_begin+0xe/0x21 [240232.363252] [] 0xffffffffffffffff [240232.368350] LustreError: dumping log to /tmp/lustre-log.1576141252.112494 [240236.340028] Pid: 67736, comm: ll_ost02_034 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [240236.350548] Call Trace: [240236.353118] [] wait_transaction_locked+0x85/0xd0 [jbd2] [240236.360121] [] add_transaction_credits+0x268/0x2f0 [jbd2] [240236.367304] [] start_this_handle+0x1a1/0x430 [jbd2] [240236.373954] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [240236.380704] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [240236.388229] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [240236.395323] [] dqget+0x3fa/0x450 [240236.400344] [] dquot_get_dqblk+0x14/0x1f0 [240236.406140] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [240236.413754] [] lquotactl_slv+0x27d/0x9d0 [lquota] [240236.420242] [] ofd_quotactl+0x13c/0x380 [ofd] [240236.426372] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [240236.433433] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [240236.441235] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [240236.447664] [] kthread+0xd1/0xe0 [240236.452667] [] ret_from_fork_nospec_begin+0xe/0x21 [240236.459243] [] 0xffffffffffffffff [240236.464356] LustreError: dumping log to /tmp/lustre-log.1576141256.67736 [240240.436112] Pid: 67785, comm: ll_ost02_040 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [240240.446628] Call Trace: [240240.449190] [] wait_transaction_locked+0x85/0xd0 [jbd2] [240240.456184] [] add_transaction_credits+0x268/0x2f0 [jbd2] [240240.463367] [] start_this_handle+0x1a1/0x430 [jbd2] [240240.470018] [] 
jbd2__journal_start+0xf3/0x1f0 [jbd2] [240240.476766] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [240240.484291] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [240240.491390] [] dqget+0x3fa/0x450 [240240.496391] [] dquot_get_dqblk+0x14/0x1f0 [240240.502199] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [240240.509799] [] lquotactl_slv+0x27d/0x9d0 [lquota] [240240.516286] [] ofd_quotactl+0x13c/0x380 [ofd] [240240.522417] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [240240.529469] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [240240.537271] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [240240.543699] [] kthread+0xd1/0xe0 [240240.548704] [] ret_from_fork_nospec_begin+0xe/0x21 [240240.555269] [] 0xffffffffffffffff [240240.560377] LustreError: dumping log to /tmp/lustre-log.1576141260.67785 [240256.820444] Pid: 112566, comm: ll_ost02_086 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [240256.831052] Call Trace: [240256.833606] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [240256.840646] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [240256.847941] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [240256.854590] [] ofd_destroy_hdl+0x267/0x970 [ofd] [240256.860991] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [240256.868034] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [240256.875847] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [240256.882282] [] kthread+0xd1/0xe0 [240256.887282] [] ret_from_fork_nospec_begin+0xe/0x21 [240256.893850] [] 0xffffffffffffffff [240256.898960] LustreError: dumping log to /tmp/lustre-log.1576141277.112566 [240260.434521] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1125531 to 0x1980000400:1125569 [240270.998164] Lustre: fir-OST005e: deleting orphan objects from 0x0:27467286 to 0x0:27467329 [240314.616600] LustreError: 67741:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576141035, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST005a_UUID lock: ffff88e75d68f740/0x7066c9c190ae6a91 lrc: 3/0,1 mode: --/PW res: [0x1980000400:0x112c98:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 67741 timeout: 0 lvb_type: 0 [240318.261695] LNet: Service thread pid 67675 was inactive for 1203.78s. Watchdog stack traces are limited to 3 per 300 seconds, skipping this one. 
[240318.274731] LNet: Skipped 6 previous similar messages [240318.279879] LustreError: dumping log to /tmp/lustre-log.1576141338.67675 [240346.934237] LustreError: dumping log to /tmp/lustre-log.1576141367.112557 [240355.126422] LustreError: dumping log to /tmp/lustre-log.1576141375.67687 [240359.222485] LustreError: dumping log to /tmp/lustre-log.1576141379.67629 [240363.318573] LustreError: dumping log to /tmp/lustre-log.1576141383.67682 [240371.510737] LustreError: dumping log to /tmp/lustre-log.1576141391.67599 [240375.606814] LustreError: dumping log to /tmp/lustre-log.1576141395.112499 [240414.463197] Lustre: fir-OST0056: Client 8c3ccd99-dc20-24d2-79f4-f8d2c0329cfb (at 10.8.24.19@o2ib6) reconnecting [240414.473370] Lustre: Skipped 412 previous similar messages [240416.567635] LustreError: dumping log to /tmp/lustre-log.1576141436.112543 [240424.759800] LustreError: dumping log to /tmp/lustre-log.1576141445.67709 [240428.855879] LustreError: dumping log to /tmp/lustre-log.1576141449.67814 [240432.952007] LustreError: dumping log to /tmp/lustre-log.1576141453.112517 [240465.432406] Lustre: fir-OST0054: deleting orphan objects from 0x1800000401:11785188 to 0x1800000401:11785217 [240467.576679] Lustre: 26941:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff88fd6e0f9050 x1650929716577312/t0(0) o10->68d8bbaa-aab1-a78f-323f-aaff4c375c30@10.8.7.4@o2ib6:472/0 lens 440/0 e 0 to 0 dl 1576141492 ref 2 fl New:/2/ffffffff rc 0/-1 [240467.605315] Lustre: 26941:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 700 previous similar messages [240473.912790] Pid: 112528, comm: ll_ost01_085 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [240473.923403] Call Trace: [240473.925961] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [240473.933002] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [240473.940317] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [240473.946979] [] ofd_destroy_hdl+0x267/0x970 [ofd] [240473.953396] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [240473.960439] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [240473.968271] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [240473.974688] [] kthread+0xd1/0xe0 [240473.979689] [] ret_from_fork_nospec_begin+0xe/0x21 [240473.986265] [] 0xffffffffffffffff [240473.991373] LustreError: dumping log to /tmp/lustre-log.1576141494.112528 [240473.998923] Pid: 67595, comm: ll_ost00_008 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [240474.009462] Call Trace: [240474.012021] [] wait_transaction_locked+0x85/0xd0 [jbd2] [240474.019018] [] add_transaction_credits+0x268/0x2f0 [jbd2] [240474.026201] [] start_this_handle+0x1a1/0x430 [jbd2] [240474.032851] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [240474.039586] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [240474.047123] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [240474.054216] [] dqget+0x3fa/0x450 [240474.059231] [] dquot_get_dqblk+0x14/0x1f0 [240474.065006] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [240474.072629] [] lquotactl_slv+0x27d/0x9d0 [lquota] [240474.079106] [] ofd_quotactl+0x13c/0x380 [ofd] [240474.085249] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [240474.092279] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [240474.100094] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [240474.106510] [] kthread+0xd1/0xe0 [240474.111503] [] ret_from_fork_nospec_begin+0xe/0x21 [240474.118073] [] 0xffffffffffffffff [240488.767686] Lustre: fir-OST0056: Export ffff88fc8fff5800 already connecting 
from 10.8.22.17@o2ib6 [240488.776652] Lustre: Skipped 71 previous similar messages [240489.986614] Lustre: fir-OST005c: Connection restored to cec884d3-ca4b-8127-2f6b-7762665aa5f8 (at 10.9.0.64@o2ib4) [240489.996967] Lustre: Skipped 365 previous similar messages [240494.393191] Pid: 67651, comm: ll_ost02_021 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [240494.403725] Call Trace: [240494.406297] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [240494.412703] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [240494.419773] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [240494.427610] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [240494.434032] [] kthread+0xd1/0xe0 [240494.439037] [] ret_from_fork_nospec_begin+0xe/0x21 [240494.445631] [] 0xffffffffffffffff [240494.450750] LustreError: dumping log to /tmp/lustre-log.1576141514.67651 [240569.618538] Lustre: fir-OST005a: deleting orphan objects from 0x1980000401:11841959 to 0x1980000401:11841985 [240584.506993] Pid: 67889, comm: ll_ost02_059 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [240584.517517] Call Trace: [240584.520095] [] wait_transaction_locked+0x85/0xd0 [jbd2] [240584.527093] [] add_transaction_credits+0x268/0x2f0 [jbd2] [240584.534273] [] start_this_handle+0x1a1/0x430 [jbd2] [240584.540924] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [240584.547672] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [240584.555212] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [240584.562310] [] dqget+0x3fa/0x450 [240584.567314] [] dquot_get_dqblk+0x14/0x1f0 [240584.573108] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [240584.580721] [] lquotactl_slv+0x27d/0x9d0 [lquota] [240584.587209] [] ofd_quotactl+0x13c/0x380 [ofd] [240584.593339] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [240584.600401] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [240584.608205] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [240584.614634] [] kthread+0xd1/0xe0 [240584.619649] [] ret_from_fork_nospec_begin+0xe/0x21 [240584.626228] [] 0xffffffffffffffff [240584.631329] LustreError: dumping log to /tmp/lustre-log.1576141605.67889 [240592.699158] Pid: 66242, comm: ll_ost03_003 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [240592.709677] Call Trace: [240592.712246] [] wait_transaction_locked+0x85/0xd0 [jbd2] [240592.719242] [] add_transaction_credits+0x268/0x2f0 [jbd2] [240592.726431] [] start_this_handle+0x1a1/0x430 [jbd2] [240592.733078] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [240592.739825] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [240592.747365] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [240592.754463] [] dqget+0x3fa/0x450 [240592.759467] [] dquot_get_dqblk+0x14/0x1f0 [240592.765254] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [240592.772865] [] lquotactl_slv+0x27d/0x9d0 [lquota] [240592.779355] [] ofd_quotactl+0x13c/0x380 [ofd] [240592.785485] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [240592.792544] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [240592.800350] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [240592.806778] [] kthread+0xd1/0xe0 [240592.811794] [] ret_from_fork_nospec_begin+0xe/0x21 [240592.818373] [] 0xffffffffffffffff [240592.823464] LustreError: dumping log to /tmp/lustre-log.1576141613.66242 [240596.795243] LustreError: dumping log to /tmp/lustre-log.1576141617.67716 [240625.467815] LustreError: dumping log to /tmp/lustre-log.1576141645.67677 [240629.563893] LustreError: dumping log to /tmp/lustre-log.1576141649.67878 [240633.659971] LustreError: dumping log to 
/tmp/lustre-log.1576141654.67778 [240637.756058] LustreError: dumping log to /tmp/lustre-log.1576141658.112501 [240640.052001] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3089578 to 0x1980000402:3089601 [240669.116318] Lustre: fir-OST0058: deleting orphan objects from 0x1900000401:11797283 to 0x1900000401:11797313 [240715.581615] LustreError: dumping log to /tmp/lustre-log.1576141735.112521 [240727.869858] LustreError: dumping log to /tmp/lustre-log.1576141748.67607 [240733.302978] LustreError: 112496:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576141453, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST0058_UUID lock: ffff89134edb5580/0x7066c9c190ae820e lrc: 3/0,1 mode: --/PW res: [0x1a3b7cb:0x0:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 112496 timeout: 0 lvb_type: 0 [240784.663581] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000401:1122023 to 0x1a80000401:1122049 [240861.033099] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1125573 to 0x1980000400:1125601 [240863.040457] LNet: Service thread pid 67744 was inactive for 1203.92s. The thread might be hung, or it might only be slow and will resume later. Dumping the stack trace for debugging purposes: [240863.057568] LNet: Skipped 9 previous similar messages [240863.062720] Pid: 67744, comm: ll_ost01_045 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [240863.073256] Call Trace: [240863.075816] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [240863.082855] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [240863.090152] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [240863.096808] [] ofd_destroy_hdl+0x267/0x970 [ofd] [240863.103211] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [240863.110268] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [240863.118083] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [240863.124499] [] kthread+0xd1/0xe0 [240863.129515] [] ret_from_fork_nospec_begin+0xe/0x21 [240863.136080] [] 0xffffffffffffffff [240863.141201] LustreError: dumping log to /tmp/lustre-log.1576141883.67744 [240899.905137] Pid: 67865, comm: ll_ost02_057 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [240899.915661] Call Trace: [240899.918230] [] wait_transaction_locked+0x85/0xd0 [jbd2] [240899.925235] [] add_transaction_credits+0x268/0x2f0 [jbd2] [240899.932424] [] start_this_handle+0x1a1/0x430 [jbd2] [240899.939095] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [240899.945845] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [240899.953371] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [240899.960483] [] tgt_client_data_update+0x303/0x5e0 [ptlrpc] [240899.967802] [] tgt_client_new+0x41b/0x610 [ptlrpc] [240899.974420] [] ofd_obd_connect+0x3a3/0x4c0 [ofd] [240899.980811] [] target_handle_connect+0xecb/0x2b10 [ptlrpc] [240899.988112] [] tgt_request_handle+0x50a/0x1580 [ptlrpc] [240899.995145] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [240900.002973] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [240900.009393] [] kthread+0xd1/0xe0 [240900.014410] [] ret_from_fork_nospec_begin+0xe/0x21 [240900.020973] [] 0xffffffffffffffff [240900.026094] LustreError: dumping log to /tmp/lustre-log.1576141920.67865 [240901.521950] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:1117319 to 0x1800000402:1117345 [240916.289468] Pid: 112529, comm: ll_ost00_086 
3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [240916.300076] Call Trace: [240916.302647] [] wait_transaction_locked+0x85/0xd0 [jbd2] [240916.309643] [] add_transaction_credits+0x268/0x2f0 [jbd2] [240916.316830] [] start_this_handle+0x1a1/0x430 [jbd2] [240916.323491] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [240916.330243] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [240916.337767] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [240916.344864] [] dqget+0x3fa/0x450 [240916.349868] [] dquot_get_dqblk+0x14/0x1f0 [240916.355638] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [240916.363264] [] lquotactl_slv+0x27d/0x9d0 [lquota] [240916.369739] [] ofd_quotactl+0x13c/0x380 [ofd] [240916.375883] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [240916.382922] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [240916.390749] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [240916.397162] [] kthread+0xd1/0xe0 [240916.402176] [] ret_from_fork_nospec_begin+0xe/0x21 [240916.408739] [] 0xffffffffffffffff [240916.413853] LustreError: dumping log to /tmp/lustre-log.1576141936.112529 [240961.346362] Pid: 66097, comm: ll_ost01_000 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [240961.356882] Call Trace: [240961.359434] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [240961.365827] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [240961.372875] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [240961.380680] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [240961.387113] [] kthread+0xd1/0xe0 [240961.392113] [] ret_from_fork_nospec_begin+0xe/0x21 [240961.398689] [] 0xffffffffffffffff [240961.403796] LustreError: dumping log to /tmp/lustre-log.1576141981.66097 [241015.435044] Lustre: fir-OST0056: Client 8c3ccd99-dc20-24d2-79f4-f8d2c0329cfb (at 10.8.24.19@o2ib6) reconnecting [241015.445224] Lustre: Skipped 494 previous similar messages [241028.022137] Lustre: fir-OST005e: deleting orphan objects from 0x0:27467286 to 0x0:27467361 [241048.732841] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1104956 to 0x1a00000402:1105313 [241068.036509] Lustre: 26941:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff88f8ca67e050 x1650929716577312/t0(0) o10->68d8bbaa-aab1-a78f-323f-aaff4c375c30@10.8.7.4@o2ib6:318/0 lens 440/0 e 0 to 0 dl 1576142093 ref 2 fl New:/2/ffffffff rc 0/-1 [241068.065164] Lustre: 26941:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 970 previous similar messages [241071.940600] Pid: 67604, comm: ll_ost00_010 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [241071.951129] Call Trace: [241071.953697] [] wait_transaction_locked+0x85/0xd0 [jbd2] [241071.960713] [] add_transaction_credits+0x268/0x2f0 [jbd2] [241071.967898] [] start_this_handle+0x1a1/0x430 [jbd2] [241071.974544] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [241071.981293] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [241071.988820] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [241071.995916] [] dqget+0x3fa/0x450 [241072.000920] [] dquot_get_dqblk+0x14/0x1f0 [241072.006692] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [241072.014309] [] lquotactl_slv+0x27d/0x9d0 [lquota] [241072.020783] [] ofd_quotactl+0x13c/0x380 [ofd] [241072.026926] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [241072.033966] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [241072.041781] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [241072.048197] [] kthread+0xd1/0xe0 [241072.053213] [] ret_from_fork_nospec_begin+0xe/0x21 
[241072.059790] [] 0xffffffffffffffff
[241072.064906] LustreError: dumping log to /tmp/lustre-log.1576142092.67604
[241090.891516] Lustre: fir-OST0056: Export ffff88fc8fff5800 already connecting from 10.8.22.17@o2ib6
[241090.900478] Lustre: Skipped 71 previous similar messages
[241090.992913] Lustre: fir-OST005c: Connection restored to cec884d3-ca4b-8127-2f6b-7762665aa5f8 (at 10.9.0.64@o2ib4)
[241091.003253] Lustre: Skipped 502 previous similar messages
[241100.613140] LNet: Service thread pid 67739 was inactive for 1201.53s. Watchdog stack traces are limited to 3 per 300 seconds, skipping this one.
[241100.626170] LNet: Skipped 22 previous similar messages
[241100.631400] LustreError: dumping log to /tmp/lustre-log.1576142121.67739
[241188.704342] Lustre: fir-OST0058: deleting orphan objects from 0x0:27506640 to 0x0:27506657
[241190.726934] Pid: 67743, comm: ll_ost03_040 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[241190.737459] Call Trace:
[241190.740019] [] ofd_create_hdl+0xcb3/0x20e0 [ofd]
[241190.746416] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[241190.753481] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[241190.761282] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[241190.767717] [] kthread+0xd1/0xe0
[241190.772738] [] ret_from_fork_nospec_begin+0xe/0x21
[241190.779333] [] 0xffffffffffffffff
[241190.784484] LustreError: dumping log to /tmp/lustre-log.1576142211.67743
[241215.303418] Pid: 67741, comm: ll_ost01_044 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[241215.313938] Call Trace:
[241215.316490] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc]
[241215.323533] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc]
[241215.330828] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd]
[241215.337476] [] ofd_destroy_hdl+0x267/0x970 [ofd]
[241215.343878] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[241215.350937] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[241215.358753] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[241215.365168] [] kthread+0xd1/0xe0
[241215.370167] [] ret_from_fork_nospec_begin+0xe/0x21
[241215.376736] [] 0xffffffffffffffff
[241215.381845] LustreError: dumping log to /tmp/lustre-log.1576142235.67741
[241220.735308] Lustre: fir-OST0054: deleting orphan objects from 0x1800000401:11785223 to 0x1800000401:11785249
[241227.591675] Pid: 112495, comm: ll_ost00_076 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[241227.602275] Call Trace:
[241227.604843] [] wait_transaction_locked+0x85/0xd0 [jbd2]
[241227.611842] [] add_transaction_credits+0x268/0x2f0 [jbd2]
[241227.619027] [] start_this_handle+0x1a1/0x430 [jbd2]
[241227.625674] [] jbd2__journal_start+0xf3/0x1f0 [jbd2]
[241227.632425] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs]
[241227.639949] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs]
[241227.647045] [] dqget+0x3fa/0x450
[241227.652048] [] dquot_get_dqblk+0x14/0x1f0
[241227.657821] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs]
[241227.665445] [] lquotactl_slv+0x27d/0x9d0 [lquota]
[241227.671929] [] ofd_quotactl+0x13c/0x380 [ofd]
[241227.678072] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[241227.685112] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[241227.692927] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[241227.699342] [] kthread+0xd1/0xe0
[241227.704359] [] ret_from_fork_nospec_begin+0xe/0x21
[241227.710923] [] 0xffffffffffffffff
[241227.716033] LustreError: dumping log to /tmp/lustre-log.1576142248.112495
[241248.072071] Pid: 67793, comm: ll_ost02_042 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[241248.082590] Call Trace:
[241248.085141] [] ofd_create_hdl+0xcb3/0x20e0 [ofd]
[241248.091542] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[241248.098620] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[241248.106423] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[241248.112852] [] kthread+0xd1/0xe0
[241248.117854] [] ret_from_fork_nospec_begin+0xe/0x21
[241248.124429] [] 0xffffffffffffffff
[241248.129539] LustreError: dumping log to /tmp/lustre-log.1576142268.67793
[241280.840714] Pid: 113357, comm: ll_ost02_095 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[241280.851317] Call Trace:
[241280.853885] [] wait_transaction_locked+0x85/0xd0 [jbd2]
[241280.860889] [] add_transaction_credits+0x268/0x2f0 [jbd2]
[241280.868082] [] start_this_handle+0x1a1/0x430 [jbd2]
[241280.874750] [] jbd2__journal_start+0xf3/0x1f0 [jbd2]
[241280.881501] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs]
[241280.889027] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs]
[241280.896121] [] dqget+0x3fa/0x450
[241280.901126] [] dquot_get_dqblk+0x14/0x1f0
[241280.906899] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs]
[241280.914517] [] lquotactl_slv+0x27d/0x9d0 [lquota]
[241280.920991] [] ofd_quotactl+0x13c/0x380 [ofd]
[241280.927133] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[241280.934194] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[241280.942014] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[241280.948429] [] kthread+0xd1/0xe0
[241280.953431] [] ret_from_fork_nospec_begin+0xe/0x21
[241280.960000] [] 0xffffffffffffffff
[241280.965098] LustreError: dumping log to /tmp/lustre-log.1576142301.113357
[241284.936832] LustreError: dumping log to /tmp/lustre-log.1576142305.67407
[241289.014892] LustreError: 67855:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576142009, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST005a_UUID lock: ffff890f4781a640/0x7066c9c190aea1b8 lrc: 3/0,1 mode: --/PW res: [0x1980000400:0x112ce2:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 67855 timeout: 0 lvb_type: 0
[241289.032891] LustreError: dumping log to /tmp/lustre-log.1576142309.67795
[241289.065418] LustreError: 67855:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) Skipped 1 previous similar message
[241326.198724] Lustre: fir-OST005a: deleting orphan objects from 0x1980000401:11841995 to 0x1980000401:11842017
[241387.338834] LustreError: dumping log to /tmp/lustre-log.1576142407.67700
[241396.226912] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3089607 to 0x1980000402:3089633
[241425.227585] LustreError: dumping log to /tmp/lustre-log.1576142445.67588
[241425.571294] Lustre: fir-OST0058: deleting orphan objects from 0x1900000401:11797320 to 0x1900000401:11797345
[241462.040565] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1125604 to 0x1980000400:1125633
[241485.644788] LustreError: dumping log to /tmp/lustre-log.1576142506.67790
[241518.413451] LNet: Service thread pid 67699 was inactive for 1203.91s. The thread might be hung, or it might only be slow and will resume later. Dumping the stack trace for debugging purposes:
[241518.430559] LNet: Skipped 9 previous similar messages
[241518.435707] Pid: 67699, comm: ll_ost00_034 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[241518.446243] Call Trace:
[241518.448817] [] wait_transaction_locked+0x85/0xd0 [jbd2]
[241518.455823] [] add_transaction_credits+0x268/0x2f0 [jbd2]
[241518.463006] [] start_this_handle+0x1a1/0x430 [jbd2]
[241518.469657] [] jbd2__journal_start+0xf3/0x1f0 [jbd2]
[241518.476405] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs]
[241518.483945] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs]
[241518.491050] [] dqget+0x3fa/0x450
[241518.496054] [] dquot_get_dqblk+0x14/0x1f0
[241518.501826] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs]
[241518.509447] [] lquotactl_slv+0x27d/0x9d0 [lquota]
[241518.515929] [] ofd_quotactl+0x13c/0x380 [ofd]
[241518.522073] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[241518.529109] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[241518.536911] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[241518.543345] [] kthread+0xd1/0xe0
[241518.548351] [] ret_from_fork_nospec_begin+0xe/0x21
[241518.554920] [] 0xffffffffffffffff
[241518.560034] LustreError: dumping log to /tmp/lustre-log.1576142538.67699
[241540.726529] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000401:1122052 to 0x1a80000401:1122081
[241616.406994] Lustre: fir-OST0056: Client 8c3ccd99-dc20-24d2-79f4-f8d2c0329cfb (at 10.8.24.19@o2ib6) reconnecting
[241616.417195] Lustre: Skipped 536 previous similar messages
[241627.983609] Pid: 67775, comm: ll_ost03_044 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[241627.994133] Call Trace:
[241627.996685] [] ldlm_completion_ast+0x430/0x860 [ptlrpc]
[241628.003726] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc]
[241628.011024] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd]
[241628.017680] [] ofd_destroy_hdl+0x267/0x970 [ofd]
[241628.024081] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[241628.031122] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[241628.038937] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[241628.045351] [] kthread+0xd1/0xe0
[241628.050353] [] ret_from_fork_nospec_begin+0xe/0x21
[241628.056928] [] 0xffffffffffffffff
[241628.062037] LustreError: dumping log to /tmp/lustre-log.1576142648.67775
[241637.199792] Pid: 112496, comm: ll_ost02_068 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[241637.210396] Call Trace:
[241637.212948] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc]
[241637.219990] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc]
[241637.227285] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd]
[241637.233935] [] ofd_destroy_hdl+0x267/0x970 [ofd]
[241637.240335] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[241637.247377] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[241637.255191] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[241637.261609] [] kthread+0xd1/0xe0
[241637.266608] [] ret_from_fork_nospec_begin+0xe/0x21
[241637.273201] [] 0xffffffffffffffff
[241637.278320] LustreError: dumping log to /tmp/lustre-log.1576142657.112496
[241649.488033] Pid: 67845, comm: ll_ost01_063 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[241649.498553] Call Trace:
[241649.501114] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc]
[241649.508154] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc]
[241649.515451] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd]
[241649.522108] [] ofd_destroy_hdl+0x267/0x970 [ofd]
[241649.528513] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[241649.535568] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[241649.543384] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[241649.549799] [] kthread+0xd1/0xe0
[241649.554800] [] ret_from_fork_nospec_begin+0xe/0x21
[241649.561377] [] 0xffffffffffffffff
[241649.566486] LustreError: dumping log to /tmp/lustre-log.1576142669.67845
[241650.328678] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1105317 to 0x1a00000402:1105345
[241657.552831] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:1117354 to 0x1800000402:1117377
[241668.648423] Lustre: 27257:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff88f298f11050 x1650929716577312/t0(0) o10->68d8bbaa-aab1-a78f-323f-aaff4c375c30@10.8.7.4@o2ib6:164/0 lens 440/0 e 0 to 0 dl 1576142694 ref 2 fl New:/2/ffffffff rc 0/-1
[241668.677062] Lustre: 27257:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 1066 previous similar messages
[241674.064536] Pid: 67692, comm: ll_ost00_032 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[241674.075058] Call Trace:
[241674.077624] [] wait_transaction_locked+0x85/0xd0 [jbd2]
[241674.084616] [] add_transaction_credits+0x268/0x2f0 [jbd2]
[241674.091819] [] start_this_handle+0x1a1/0x430 [jbd2]
[241674.098483] [] jbd2__journal_start+0xf3/0x1f0 [jbd2]
[241674.105234] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs]
[241674.112775] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs]
[241674.119871] [] dqget+0x3fa/0x450
[241674.124874] [] dquot_get_dqblk+0x14/0x1f0
[241674.130647] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs]
[241674.138271] [] lquotactl_slv+0x27d/0x9d0 [lquota]
[241674.144757] [] ofd_quotactl+0x13c/0x380 [ofd]
[241674.150899] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[241674.157938] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[241674.165755] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[241674.172170] [] kthread+0xd1/0xe0
[241674.177185] [] ret_from_fork_nospec_begin+0xe/0x21
[241674.183748] [] 0xffffffffffffffff
[241674.188862] LustreError: dumping log to /tmp/lustre-log.1576142694.67692
[241691.962836] Lustre: fir-OST005c: Connection restored to cec884d3-ca4b-8127-2f6b-7762665aa5f8 (at 10.9.0.64@o2ib4)
[241691.973189] Lustre: Skipped 546 previous similar messages
[241693.015409] Lustre: fir-OST0056: Export ffff88fc8fff5800 already connecting from 10.8.22.17@o2ib6
[241693.024375] Lustre: Skipped 71 previous similar messages
[241719.121448] LNet: Service thread pid 67918 was inactive for 1203.27s. Watchdog stack traces are limited to 3 per 300 seconds, skipping this one.
[241719.134481] LNet: Skipped 6 previous similar messages
[241719.139625] LustreError: dumping log to /tmp/lustre-log.1576142739.67918
[241784.612142] Lustre: fir-OST005e: deleting orphan objects from 0x0:27467363 to 0x0:27467393
[241829.715609] Pid: 67652, comm: ll_ost00_020 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[241829.726126] Call Trace:
[241829.728695] [] wait_transaction_locked+0x85/0xd0 [jbd2]
[241829.735694] [] add_transaction_credits+0x268/0x2f0 [jbd2]
[241829.742896] [] start_this_handle+0x1a1/0x430 [jbd2]
[241829.749543] [] jbd2__journal_start+0xf3/0x1f0 [jbd2]
[241829.756296] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs]
[241829.763816] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs]
[241829.770912] [] dqget+0x3fa/0x450
[241829.775929] [] dquot_get_dqblk+0x14/0x1f0
[241829.781733] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs]
[241829.789340] [] lquotactl_slv+0x27d/0x9d0 [lquota]
[241829.795828] [] ofd_quotactl+0x13c/0x380 [ofd]
[241829.801975] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[241829.809029] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[241829.816831] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[241829.823258] [] kthread+0xd1/0xe0
[241829.828255] [] ret_from_fork_nospec_begin+0xe/0x21
[241829.834832] [] 0xffffffffffffffff
[241829.839945] LustreError: dumping log to /tmp/lustre-log.1576142850.67652
[241858.388217] Pid: 112491, comm: ll_ost00_072 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[241858.398823] Call Trace:
[241858.401395] [] wait_transaction_locked+0x85/0xd0 [jbd2]
[241858.408393] [] add_transaction_credits+0x268/0x2f0 [jbd2]
[241858.415577] [] start_this_handle+0x1a1/0x430 [jbd2]
[241858.422217] [] jbd2__journal_start+0xf3/0x1f0 [jbd2]
[241858.428966] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs]
[241858.436498] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs]
[241858.443580] [] dqget+0x3fa/0x450
[241858.448588] [] dquot_get_dqblk+0x14/0x1f0
[241858.454362] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs]
[241858.461986] [] lquotactl_slv+0x27d/0x9d0 [lquota]
[241858.468462] [] ofd_quotactl+0x13c/0x380 [ofd]
[241858.474608] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[241858.481643] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[241858.489444] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[241858.495874] [] kthread+0xd1/0xe0
[241858.500867] [] ret_from_fork_nospec_begin+0xe/0x21
[241858.507435] [] 0xffffffffffffffff
[241858.512539] LustreError: dumping log to /tmp/lustre-log.1576142878.112491
[241944.405877] Pid: 66337, comm: ll_ost03_004 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[241944.416400] Call Trace:
[241944.418953] [] ofd_create_hdl+0xcb3/0x20e0 [ofd]
[241944.425371] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[241944.432432] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[241944.440246] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[241944.446677] [] kthread+0xd1/0xe0
[241944.451682] [] ret_from_fork_nospec_begin+0xe/0x21
[241944.458276] [] 0xffffffffffffffff
[241944.463397] LustreError: dumping log to /tmp/lustre-log.1576142964.66337
[241944.471028] Pid: 112544, comm: ll_ost03_072 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[241944.481667] Call Trace:
[241944.484218] [] wait_transaction_locked+0x85/0xd0 [jbd2]
[241944.491210] [] add_transaction_credits+0x268/0x2f0 [jbd2]
[241944.498399] [] start_this_handle+0x1a1/0x430 [jbd2]
[241944.505049] [] jbd2__journal_start+0xf3/0x1f0 [jbd2]
[241944.511815] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs]
[241944.519343] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs]
[241944.526450] [] dqget+0x3fa/0x450
[241944.531442] [] dquot_get_dqblk+0x14/0x1f0
[241944.537216] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs]
[241944.544832] [] lquotactl_slv+0x27d/0x9d0 [lquota]
[241944.551304] [] ofd_quotactl+0x13c/0x380 [ofd]
[241944.557449] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[241944.564470] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[241944.572286] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[241944.578701] [] kthread+0xd1/0xe0
[241944.583695] [] ret_from_fork_nospec_begin+0xe/0x21
[241944.590277] [] 0xffffffffffffffff
[241945.106104] Lustre: fir-OST0058: deleting orphan objects from 0x0:27506661 to 0x0:27506689
[241948.940996] LustreError: 67630:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576142669, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST0058_UUID lock: ffff88fcea0e69c0/0x7066c9c190aed28e lrc: 3/0,1 mode: --/PW res: [0x1900000400:0x1127a7:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 67630 timeout: 0 lvb_type: 0
[241948.984752] LustreError: 67630:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) Skipped 8 previous similar messages
[241952.598043] Pid: 67707, comm: ll_ost03_033 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[241952.608559] Call Trace:
[241952.611130] [] wait_transaction_locked+0x85/0xd0 [jbd2]
[241952.618128] [] add_transaction_credits+0x268/0x2f0 [jbd2]
[241952.625311] [] start_this_handle+0x1a1/0x430 [jbd2]
[241952.631959] [] jbd2__journal_start+0xf3/0x1f0 [jbd2]
[241952.638709] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs]
[241952.646234] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs]
[241952.653331] [] dqget+0x3fa/0x450
[241952.658333] [] dquot_get_dqblk+0x14/0x1f0
[241952.664119] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs]
[241952.671732] [] lquotactl_slv+0x27d/0x9d0 [lquota]
[241952.678229] [] ofd_quotactl+0x13c/0x380 [ofd]
[241952.684359] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[241952.691421] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[241952.699223] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[241952.705666] [] kthread+0xd1/0xe0
[241952.710672] [] ret_from_fork_nospec_begin+0xe/0x21
[241952.717248] [] 0xffffffffffffffff
[241952.722349] LustreError: dumping log to /tmp/lustre-log.1576142973.67707
[241956.694121] LustreError: dumping log to /tmp/lustre-log.1576142977.67791
[241964.886285] LustreError: dumping log to /tmp/lustre-log.1576142985.67628
[241976.707678] Lustre: fir-OST0054: deleting orphan objects from 0x1800000401:11785256 to 0x1800000401:11785281
[241980.782343] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3048832 to 0x1a00000401:3048865
[241985.366702] LustreError: dumping log to /tmp/lustre-log.1576143005.67871
[242005.847095] LustreError: dumping log to /tmp/lustre-log.1576143026.67797
[242063.108364] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1125639 to 0x1980000400:1125665
[242082.336325] Lustre: fir-OST005a: deleting orphan objects from 0x1980000401:11842023 to 0x1980000401:11842049
[242116.441290] LustreError: dumping log to /tmp/lustre-log.1576143136.66715
[242141.017776] LNet: Service thread pid 67842 was inactive for 1200.82s. The thread might be hung, or it might only be slow and will resume later. Dumping the stack trace for debugging purposes:
[242141.034883] LNet: Skipped 9 previous similar messages
[242141.040025] Pid: 67842, comm: ll_ost00_056 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[242141.050578] Call Trace:
[242141.053143] [] wait_transaction_locked+0x85/0xd0 [jbd2]
[242141.060138] [] add_transaction_credits+0x268/0x2f0 [jbd2]
[242141.067318] [] start_this_handle+0x1a1/0x430 [jbd2]
[242141.073971] [] jbd2__journal_start+0xf3/0x1f0 [jbd2]
[242141.080719] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs]
[242141.088245] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs]
[242141.095339] [] dqget+0x3fa/0x450
[242141.100342] [] dquot_get_dqblk+0x14/0x1f0
[242141.106114] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs]
[242141.113742] [] lquotactl_slv+0x27d/0x9d0 [lquota]
[242141.120217] [] ofd_quotactl+0x13c/0x380 [ofd]
[242141.126371] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[242141.133407] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[242141.141220] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[242141.147638] [] kthread+0xd1/0xe0
[242141.152639] [] ret_from_fork_nospec_begin+0xe/0x21
[242141.159209] [] 0xffffffffffffffff
[242141.164306] LustreError: dumping log to /tmp/lustre-log.1576143161.67842
[242141.171832] Pid: 67619, comm: ll_ost00_013 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[242141.182378] Call Trace:
[242141.184931] [] wait_transaction_locked+0x85/0xd0 [jbd2]
[242141.191924] [] add_transaction_credits+0x268/0x2f0 [jbd2]
[242141.199123] [] start_this_handle+0x1a1/0x430 [jbd2]
[242141.205779] [] jbd2__journal_start+0xf3/0x1f0 [jbd2]
[242141.212532] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs]
[242141.220049] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs]
[242141.227144] [] dqget+0x3fa/0x450
[242141.232144] [] dquot_get_dqblk+0x14/0x1f0
[242141.237920] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs]
[242141.245538] [] lquotactl_slv+0x27d/0x9d0 [lquota]
[242141.252016] [] ofd_quotactl+0x13c/0x380 [ofd]
[242141.258180] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[242141.265216] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[242141.273022] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[242141.279454] [] kthread+0xd1/0xe0
[242141.284445] [] ret_from_fork_nospec_begin+0xe/0x21
[242141.291016] [] 0xffffffffffffffff
[242151.665744] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3089644 to 0x1980000402:3089665
[242180.690249] Lustre: fir-OST0058: deleting orphan objects from 0x1900000401:11797355 to 0x1900000401:11797377
[242183.682367] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3085791 to 0x1900000402:3085857
[242190.170744] Pid: 67855, comm: ll_ost01_065 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[242190.181262] Call Trace:
[242190.183819] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc]
[242190.190863] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc]
[242190.198161] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd]
[242190.204816] [] ofd_destroy_hdl+0x267/0x970 [ofd]
[242190.211220] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[242190.218258] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[242190.226084] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[242190.232514] [] kthread+0xd1/0xe0
[242190.237516] [] ret_from_fork_nospec_begin+0xe/0x21
[242190.244093] [] 0xffffffffffffffff
[242190.249202] LustreError: dumping log to /tmp/lustre-log.1576143210.67855
[242194.266835] Pid: 67672, comm: ll_ost03_024 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[242194.277357] Call Trace:
[242194.279928] [] wait_transaction_locked+0x85/0xd0 [jbd2]
[242194.286932] [] add_transaction_credits+0x268/0x2f0 [jbd2]
[242194.294120] [] start_this_handle+0x1a1/0x430 [jbd2]
[242194.300765] [] jbd2__journal_start+0xf3/0x1f0 [jbd2]
[242194.307515] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs]
[242194.315041] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs]
[242194.322161] [] dqget+0x3fa/0x450
[242194.327165] [] dquot_get_dqblk+0x14/0x1f0
[242194.332936] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs]
[242194.340568] [] lquotactl_slv+0x27d/0x9d0 [lquota]
[242194.347047] [] ofd_quotactl+0x13c/0x380 [ofd]
[242194.353189] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[242194.360236] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[242194.368054] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[242194.374484] [] kthread+0xd1/0xe0
[242194.379501] [] ret_from_fork_nospec_begin+0xe/0x21
[242194.386064] [] 0xffffffffffffffff
[242194.391177] LustreError: dumping log to /tmp/lustre-log.1576143214.67672
[242198.362921] Pid: 67052, comm: ll_ost02_007 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[242198.373438] Call Trace:
[242198.376007] [] wait_transaction_locked+0x85/0xd0 [jbd2]
[242198.383005] [] add_transaction_credits+0x268/0x2f0 [jbd2]
[242198.390186] [] start_this_handle+0x1a1/0x430 [jbd2]
[242198.396840] [] jbd2__journal_start+0xf3/0x1f0 [jbd2]
[242198.403578] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs]
[242198.411118] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs]
[242198.418225] [] dqget+0x3fa/0x450
[242198.423227] [] dquot_get_dqblk+0x14/0x1f0
[242198.429015] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs]
[242198.436626] [] lquotactl_slv+0x27d/0x9d0 [lquota]
[242198.443115] [] ofd_quotactl+0x13c/0x380 [ofd]
[242198.449247] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[242198.456307] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[242198.464110] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[242198.470551] [] kthread+0xd1/0xe0
[242198.475548] [] ret_from_fork_nospec_begin+0xe/0x21
[242198.482125] [] 0xffffffffffffffff
[242198.487227] LustreError: dumping log to /tmp/lustre-log.1576143218.67052
[242202.459007] LustreError: dumping log to /tmp/lustre-log.1576143222.67784
[242217.378925] Lustre: fir-OST0056: Client 8c3ccd99-dc20-24d2-79f4-f8d2c0329cfb (at 10.8.24.19@o2ib6) reconnecting
[242217.389106] Lustre: Skipped 568 previous similar messages
[242243.419813] LustreError: dumping log to /tmp/lustre-log.1576143263.113358
[242250.676570] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1105356 to 0x1a00000402:1105377
[242270.612364] Lustre: 26941:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff88f60b563050 x1650929716577312/t0(0) o10->68d8bbaa-aab1-a78f-323f-aaff4c375c30@10.8.7.4@o2ib6:10/0 lens 440/0 e 0 to 0 dl 1576143295 ref 2 fl New:/2/ffffffff rc 0/-1
[242270.640911] Lustre: 26941:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 1185 previous similar messages
[242272.092384] LustreError: dumping log to /tmp/lustre-log.1576143292.67804
[242284.380627] LustreError: dumping log to /tmp/lustre-log.1576143304.67883
[242292.487208] Lustre: fir-OST0058: Connection restored to a8841932-bc4a-ab11-1ace-8e1fdda46930 (at 10.8.23.23@o2ib6)
[242292.497646] Lustre: Skipped 591 previous similar messages
[242295.139417] Lustre: fir-OST0056: Export ffff88fc8fff5800 already connecting from 10.8.22.17@o2ib6
[242295.148382] Lustre: Skipped 71 previous similar messages
[242296.949481] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000401:1122085 to 0x1a80000401:1122113
[242354.014010] LNet: Service thread pid 67633 was inactive for 1200.75s. Watchdog stack traces are limited to 3 per 300 seconds, skipping this one.
[242354.027048] LNet: Skipped 9 previous similar messages
[242354.032195] LustreError: dumping log to /tmp/lustre-log.1576143374.67633
[242362.206172] LustreError: dumping log to /tmp/lustre-log.1576143382.67841
[242399.070925] LustreError: dumping log to /tmp/lustre-log.1576143419.112508
[242405.463637] Lustre: fir-OST0058: deleting orphan objects from 0x1900000400:1124270 to 0x1900000400:1124353
[242407.263092] LustreError: dumping log to /tmp/lustre-log.1576143427.67849
[242411.359167] LustreError: dumping log to /tmp/lustre-log.1576143431.66108
[242412.759801] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:1117382 to 0x1800000402:1117409
[242423.910203] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3081886 to 0x1a80000402:3081953
[242427.743515] LustreError: dumping log to /tmp/lustre-log.1576143448.67697
[242476.896501] Pid: 67037, comm: ll_ost01_006 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[242476.907023] Call Trace:
[242476.909577] [] ofd_create_hdl+0xcb3/0x20e0 [ofd]
[242476.915976] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[242476.923040] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[242476.930846] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[242476.937273] [] kthread+0xd1/0xe0
[242476.942278] [] ret_from_fork_nospec_begin+0xe/0x21
[242476.948852] [] 0xffffffffffffffff
[242476.953963] LustreError: dumping log to /tmp/lustre-log.1576143497.67037
[242493.280855] Pid: 66105, comm: ll_ost01_001 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[242493.291379] Call Trace:
[242493.293954] [] wait_transaction_locked+0x85/0xd0 [jbd2]
[242493.300954] [] add_transaction_credits+0x268/0x2f0 [jbd2]
[242493.308138] [] start_this_handle+0x1a1/0x430 [jbd2]
[242493.314786] [] jbd2__journal_start+0xf3/0x1f0 [jbd2]
[242493.321534] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs]
[242493.329059] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs]
[242493.336156] [] dqget+0x3fa/0x450
[242493.341161] [] dquot_get_dqblk+0x14/0x1f0
[242493.346931] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs]
[242493.354550] [] lquotactl_slv+0x27d/0x9d0 [lquota]
[242493.361024] [] ofd_quotactl+0x13c/0x380 [ofd]
[242493.367169] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[242493.374214] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[242493.382030] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[242493.388447] [] kthread+0xd1/0xe0
[242493.393461] [] ret_from_fork_nospec_begin+0xe/0x21
[242493.400026] [] 0xffffffffffffffff
[242493.405139] LustreError: dumping log to /tmp/lustre-log.1576143513.66105
[242511.352773] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000400:11800376 to 0x1a80000400:11800449
[242539.832472] Lustre: fir-OST005e: deleting orphan objects from 0x0:27467396 to 0x0:27467425
[242558.818134] Pid: 67831, comm: ll_ost02_049 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[242558.828654] Call Trace:
[242558.831215] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc]
[242558.838253] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc]
[242558.845552] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd]
[242558.852200] [] ofd_destroy_hdl+0x267/0x970 [ofd]
[242558.858614] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[242558.865652] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[242558.873464] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[242558.879881] [] kthread+0xd1/0xe0
[242558.884882] [] ret_from_fork_nospec_begin+0xe/0x21
[242558.891451] [] 0xffffffffffffffff
[242558.896558] LustreError: dumping log to /tmp/lustre-log.1576143579.67831
[242583.394627] Pid: 112493, comm: ll_ost00_074 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[242583.405241] Call Trace:
[242583.407815] [] wait_transaction_locked+0x85/0xd0 [jbd2]
[242583.414814] [] add_transaction_credits+0x268/0x2f0 [jbd2]
[242583.422015] [] start_this_handle+0x1a1/0x430 [jbd2]
[242583.428666] [] jbd2__journal_start+0xf3/0x1f0 [jbd2]
[242583.435414] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs]
[242583.442938] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs]
[242583.450034] [] dqget+0x3fa/0x450
[242583.455038] [] dquot_get_dqblk+0x14/0x1f0
[242583.460818] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs]
[242583.468434] [] lquotactl_slv+0x27d/0x9d0 [lquota]
[242583.474911] [] ofd_quotactl+0x13c/0x380 [ofd]
[242583.481068] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[242583.488118] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[242583.495933] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[242583.502349] [] kthread+0xd1/0xe0
[242583.507349] [] ret_from_fork_nospec_begin+0xe/0x21
[242583.513927] [] 0xffffffffffffffff
[242583.519027] LustreError: dumping log to /tmp/lustre-log.1576143603.112493
[242607.971110] Pid: 67805, comm: ll_ost02_045 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[242607.981631] Call Trace:
[242607.984183] [] ofd_create_hdl+0xcb3/0x20e0 [ofd]
[242607.990576] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[242607.997652] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[242608.005455] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[242608.011883] [] kthread+0xd1/0xe0
[242608.016887] [] ret_from_fork_nospec_begin+0xe/0x21
[242608.023463] [] 0xffffffffffffffff
[242608.028574] LustreError: dumping log to /tmp/lustre-log.1576143628.67805
[242612.067191] LustreError: dumping log to /tmp/lustre-log.1576143632.112505
[242627.572508] LustreError: 67625:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576143347, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST005c_UUID lock: ffff8910b6c8a880/0x7066c9c190af09a7 lrc: 3/0,1 mode: --/PW res: [0x1a00000401:0x2e857d:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 67625 timeout: 0 lvb_type: 0
[242627.616229] LustreError: 67625:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) Skipped 3 previous similar messages
[242632.547610] LustreError: dumping log to /tmp/lustre-log.1576143652.112539
[242664.408412] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1125670 to 0x1980000400:1125697
[242701.134334] Lustre: fir-OST0058: deleting orphan objects from 0x0:27506691 to 0x0:27506721
[242702.181039] LustreError: dumping log to /tmp/lustre-log.1576143722.67765
[242718.565377] LustreError: dumping log to /tmp/lustre-log.1576143738.67767
[242733.250818] Lustre: fir-OST0054: deleting orphan objects from 0x1800000401:11785289 to 0x1800000401:11785313
[242737.645523] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3048873 to 0x1a00000401:3048897
[242739.045809] LustreError: dumping log to /tmp/lustre-log.1576143759.67673
[242818.350993] Lustre: fir-OST0056: Client 8c3ccd99-dc20-24d2-79f4-f8d2c0329cfb (at 10.8.24.19@o2ib6) reconnecting
[242818.361176] Lustre: Skipped 565 previous similar messages
[242837.351756] LNet: Service thread pid 112532 was inactive for 1204.12s. The thread might be hung, or it might only be slow and will resume later. Dumping the stack trace for debugging purposes:
[242837.368972] LNet: Skipped 9 previous similar messages
[242837.374125] Pid: 112532, comm: ll_ost01_086 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[242837.384932] Call Trace:
[242837.387513] [] wait_transaction_locked+0x85/0xd0 [jbd2]
[242837.394522] [] add_transaction_credits+0x268/0x2f0 [jbd2]
[242837.401822] [] start_this_handle+0x1a1/0x430 [jbd2]
[242837.408564] [] jbd2__journal_start+0xf3/0x1f0 [jbd2]
[242837.415305] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs]
[242837.422887] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs]
[242837.429981] [] dqget+0x3fa/0x450
[242837.435086] [] dquot_get_dqblk+0x14/0x1f0
[242837.440882] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs]
[242837.448552] [] lquotactl_slv+0x27d/0x9d0 [lquota]
[242837.455035] [] ofd_quotactl+0x13c/0x380 [ofd]
[242837.461281] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[242837.468357] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[242837.476239] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[242837.482674] [] kthread+0xd1/0xe0
[242837.487708] [] ret_from_fork_nospec_begin+0xe/0x21
[242837.494294] [] 0xffffffffffffffff
[242837.499501] LustreError: dumping log to /tmp/lustre-log.1576143857.112532
[242837.887424] Lustre: fir-OST005a: deleting orphan objects from 0x1980000401:11842056 to 0x1980000401:11842081
[242845.543923] Pid: 67620, comm: ll_ost01_014 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[242845.554442] Call Trace:
[242845.557016] [] wait_transaction_locked+0x85/0xd0 [jbd2]
[242845.564008] [] add_transaction_credits+0x268/0x2f0 [jbd2]
[242845.571198] [] start_this_handle+0x1a1/0x430 [jbd2]
[242845.577866] [] jbd2__journal_start+0xf3/0x1f0 [jbd2]
[242845.584618] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs]
[242845.592143] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs]
[242845.599236] [] dqget+0x3fa/0x450
[242845.604242] [] dquot_get_dqblk+0x14/0x1f0
[242845.610027] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs]
[242845.617639] [] lquotactl_slv+0x27d/0x9d0 [lquota]
[242845.624127] [] ofd_quotactl+0x13c/0x380 [ofd]
[242845.630259] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[242845.637333] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[242845.645131] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[242845.651558] [] kthread+0xd1/0xe0
[242845.656564] [] ret_from_fork_nospec_begin+0xe/0x21
[242845.663147] [] 0xffffffffffffffff
[242845.668257] LustreError: dumping log to /tmp/lustre-log.1576143866.67620
[242849.640011] Pid: 67925, comm: ll_ost00_068 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[242849.650531] Call Trace:
[242849.653101] [] wait_transaction_locked+0x85/0xd0 [jbd2]
[242849.660099] [] add_transaction_credits+0x268/0x2f0 [jbd2]
[242849.667299] [] start_this_handle+0x1a1/0x430 [jbd2]
[242849.673949] [] jbd2__journal_start+0xf3/0x1f0 [jbd2]
[242849.680695] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs]
[242849.688225] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs]
[242849.695320] [] dqget+0x3fa/0x450
[242849.700321] [] dquot_get_dqblk+0x14/0x1f0
[242849.706110] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs]
[242849.713720] [] lquotactl_slv+0x27d/0x9d0 [lquota]
[242849.720219] [] ofd_quotactl+0x13c/0x380 [ofd]
[242849.726349] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[242849.733420] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[242849.741229] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[242849.747643] [] kthread+0xd1/0xe0
[242849.752659] [] ret_from_fork_nospec_begin+0xe/0x21
[242849.759223] [] 0xffffffffffffffff
[242849.764335] LustreError: dumping log to /tmp/lustre-log.1576143870.67925
[242849.771700] Pid: 67630, comm: ll_ost01_018 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[242849.782223] Call Trace:
[242849.784770] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc]
[242849.791792] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc]
[242849.799117] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd]
[242849.805763] [] ofd_destroy_hdl+0x267/0x970 [ofd]
[242849.812178] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[242849.819204] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[242849.827033] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[242849.833445] [] kthread+0xd1/0xe0
[242849.838451] [] ret_from_fork_nospec_begin+0xe/0x21
[242849.845007] [] 0xffffffffffffffff
[242852.570711] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1105381 to 0x1a00000402:1105409
[242871.236451] Lustre: 27257:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff88f103e63850 x1650929716577312/t0(0) o10->68d8bbaa-aab1-a78f-323f-aaff4c375c30@10.8.7.4@o2ib6:611/0 lens 440/0 e 0 to 0 dl 1576143896 ref 2 fl New:/2/ffffffff rc 0/-1
[242871.265086] Lustre: 27257:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 1183 previous similar messages
[242874.216504] Pid: 67631, comm: ll_ost00_016 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[242874.227023] Call Trace:
[242874.229600] [] wait_transaction_locked+0x85/0xd0 [jbd2]
[242874.236597] [] add_transaction_credits+0x268/0x2f0 [jbd2]
[242874.243781] [] start_this_handle+0x1a1/0x430 [jbd2]
[242874.250431] [] jbd2__journal_start+0xf3/0x1f0 [jbd2]
[242874.257179] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs]
[242874.264704] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs]
[242874.271799] [] dqget+0x3fa/0x450
[242874.276819] [] dquot_get_dqblk+0x14/0x1f0
[242874.282607] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs]
[242874.290219] [] lquotactl_slv+0x27d/0x9d0 [lquota]
[242874.296708] [] ofd_quotactl+0x13c/0x380 [ofd]
[242874.302838] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[242874.309882] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[242874.317685] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[242874.324111] [] kthread+0xd1/0xe0
[242874.329117] [] ret_from_fork_nospec_begin+0xe/0x21
[242874.335708] [] 0xffffffffffffffff
[242874.340826] LustreError: dumping log to /tmp/lustre-log.1576143894.67631
[242893.983421] Lustre: fir-OST005c: Connection restored to e2e512e9-5e98-1086-a71a-3e4545e26e0b (at 10.8.25.1@o2ib6)
[242893.993788] Lustre: Skipped 625 previous similar messages
[242897.264212] Lustre: fir-OST0056: Export ffff88fc8fff5800 already connecting from 10.8.22.17@o2ib6
[242897.273187] Lustre: Skipped 71 previous similar messages
[242898.792992] LustreError: dumping log to /tmp/lustre-log.1576143919.66096
[242907.918468] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3089681 to 0x1980000402:3089697
[242936.801426] Lustre: fir-OST0058: deleting orphan objects from 0x1900000401:11797383 to 0x1900000401:11797409
[242939.953635] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3085869 to 0x1900000402:3085889
[242956.138147] LNet: Service thread pid 67663 was inactive for 1201.35s. Watchdog stack traces are limited to 3 per 300 seconds, skipping this one.
[242956.151186] LNet: Skipped 14 previous similar messages
[242956.156422] LustreError: dumping log to /tmp/lustre-log.1576143976.67663
[243001.195060] LustreError: dumping log to /tmp/lustre-log.1576144021.67679
[243024.833392] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3081963 to 0x1a80000402:3081985
[243025.771548] LustreError: dumping log to /tmp/lustre-log.1576144046.67637
[243029.867631] LustreError: dumping log to /tmp/lustre-log.1576144050.67668
[243053.796775] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000401:1122117 to 0x1a80000401:1122145
[243149.189864] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3070575 to 0x1800000400:3070625
[243152.750103] Pid: 112506, comm: ll_ost02_072 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[243152.760730] Call Trace:
[243152.763303] [] wait_transaction_locked+0x85/0xd0 [jbd2]
[243152.770306] [] add_transaction_credits+0x268/0x2f0 [jbd2]
[243152.777498] [] start_this_handle+0x1a1/0x430 [jbd2]
[243152.784148] [] jbd2__journal_start+0xf3/0x1f0 [jbd2]
[243152.790894] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs]
[243152.798431] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs]
[243152.805568] [] ofd_trans_start+0x75/0xf0 [ofd]
[243152.811786] [] ofd_destroy+0x5d0/0x960 [ofd]
[243152.817850] [] ofd_destroy_by_fid+0x1f4/0x4a0 [ofd]
[243152.824499] [] ofd_destroy_hdl+0x267/0x970 [ofd]
[243152.830901] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[243152.837951] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[243152.845766] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[243152.852181] [] kthread+0xd1/0xe0
[243152.857197] [] ret_from_fork_nospec_begin+0xe/0x21
[243152.863762] [] 0xffffffffffffffff
[243152.868887] LustreError: dumping log to /tmp/lustre-log.1576144173.112506
[243152.876872] Pid: 67639, comm: ll_ost02_017 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[243152.887421] Call Trace:
[243152.889970] [] wait_transaction_locked+0x85/0xd0 [jbd2]
[243152.896964] [] add_transaction_credits+0x268/0x2f0 [jbd2]
[243152.904147] [] start_this_handle+0x1a1/0x430 [jbd2]
[243152.910795] [] jbd2__journal_start+0xf3/0x1f0 [jbd2]
[243152.917546] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs]
[243152.925063] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs]
[243152.932157] [] ofd_trans_start+0x75/0xf0 [ofd]
[243152.938388] [] ofd_destroy+0x5d0/0x960 [ofd]
[243152.944446] [] ofd_destroy_by_fid+0x1f4/0x4a0 [ofd]
[243152.951098] [] ofd_destroy_hdl+0x267/0x970 [ofd]
[243152.957499] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[243152.964531] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[243152.972355] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[243152.978778] [] kthread+0xd1/0xe0
[243152.983794] [] ret_from_fork_nospec_begin+0xe/0x21
[243152.990349] [] 0xffffffffffffffff
[243161.526995] Lustre: fir-OST0058: deleting orphan objects from 0x1900000400:1124364 to 0x1900000400:1124385
[243169.631148] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:1117411 to 0x1800000402:1117441
[243185.518767] Pid: 67689, comm: ll_ost00_031 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[243185.529287] Call Trace:
[243185.531853] [] wait_transaction_locked+0x85/0xd0 [jbd2]
[243185.538854] [] add_transaction_credits+0x268/0x2f0 [jbd2]
[243185.546037] [] start_this_handle+0x1a1/0x430 [jbd2]
[243185.552688] [] jbd2__journal_start+0xf3/0x1f0 [jbd2]
[243185.559434] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs]
[243185.566959] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs]
[243185.574058] [] dqget+0x3fa/0x450
[243185.579060] [] dquot_get_dqblk+0x14/0x1f0
[243185.584846] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs]
[243185.592457] [] lquotactl_slv+0x27d/0x9d0 [lquota]
[243185.598947] [] ofd_quotactl+0x13c/0x380 [ofd]
[243185.605077] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[243185.612129] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[243185.619932] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[243185.626361] [] kthread+0xd1/0xe0
[243185.631363] [] ret_from_fork_nospec_begin+0xe/0x21
[243185.637940] [] 0xffffffffffffffff
[243185.643040] LustreError: dumping log to /tmp/lustre-log.1576144205.67689
[243197.807078] Pid: 67863, comm: ll_ost01_067 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[243197.817596] Call Trace:
[243197.820150] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc]
[243197.827189] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc]
[243197.834478] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd]
[243197.841127] [] ofd_destroy_hdl+0x267/0x970 [ofd]
[243197.847527] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[243197.854569] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[243197.862386] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[243197.868800] [] kthread+0xd1/0xe0
[243197.873799] [] ret_from_fork_nospec_begin+0xe/0x21
[243197.880378] [] 0xffffffffffffffff
[243197.885485] LustreError: dumping log to /tmp/lustre-log.1576144218.67863
[243230.575679] Pid: 67666, comm: ll_ost01_030 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[243230.586199] Call Trace:
[243230.588756] [] ofd_create_hdl+0xcb3/0x20e0 [ofd]
[243230.595157] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[243230.602227] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[243230.610042] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[243230.616474] [] kthread+0xd1/0xe0
[243230.621478] [] ret_from_fork_nospec_begin+0xe/0x21
[243230.628053] [] 0xffffffffffffffff
[243230.633161] LustreError: dumping log to /tmp/lustre-log.1576144250.67666
[243264.812787] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1125699 to 0x1980000400:1125729
[243268.243254] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000400:11800455 to 0x1a80000400:11800481
[243296.482338] Lustre: fir-OST005e: deleting orphan objects from 0x0:27467430 to 0x0:27467457
[243315.830430] LustreError: 67608:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576144036, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST0058_UUID lock: ffff88f5fed1ad00/0x7066c9c190af3456 lrc: 3/0,1 mode: --/PW res: [0x1900000400:0x112809:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 67608 timeout: 0 lvb_type: 0
[243315.874153] LustreError: 67608:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) Skipped 9 previous similar messages
[243316.593424] LustreError: dumping log to /tmp/lustre-log.1576144336.66093
[243341.169907] LustreError: dumping log to /tmp/lustre-log.1576144361.67678
[243365.746399] LustreError: dumping log to /tmp/lustre-log.1576144386.112520
[243369.842480] LustreError: dumping log to /tmp/lustre-log.1576144390.67726
[243408.828570] Lustre: fir-OST005c: deleting orphan objects from 0x0:27192412 to 0x0:27192449
[243419.323023] Lustre: fir-OST0056: Client 8c3ccd99-dc20-24d2-79f4-f8d2c0329cfb (at 10.8.24.19@o2ib6) reconnecting
[243419.333200] Lustre: Skipped 710 previous similar messages
[243443.571970] LustreError: dumping log to /tmp/lustre-log.1576144463.67649
[243451.764129] LustreError: dumping log to /tmp/lustre-log.1576144472.67818
[243453.852918] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1105411 to 0x1a00000402:1105441
[243456.869525] Lustre: fir-OST0058: deleting orphan objects from 0x0:27506724 to 0x0:27506753
[243459.956299] LNet: Service thread pid 67858 was inactive for 1201.49s. The thread might be hung, or it might only be slow and will resume later. Dumping the stack trace for debugging purposes:
[243459.973412] LNet: Skipped 9 previous similar messages
[243459.978559] Pid: 67858, comm: ll_ost03_059 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[243459.989099] Call Trace:
[243459.991654] [] ofd_create_hdl+0xcb3/0x20e0 [ofd]
[243459.998055] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[243460.005117] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[243460.012934] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[243460.019365] [] kthread+0xd1/0xe0
[243460.024369] [] ret_from_fork_nospec_begin+0xe/0x21
[243460.030945] [] 0xffffffffffffffff
[243460.036054] LustreError: dumping log to /tmp/lustre-log.1576144480.67858
[243471.880550] Lustre: 27252:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff891237d13050 x1650929716577312/t0(0) o10->68d8bbaa-aab1-a78f-323f-aaff4c375c30@10.8.7.4@o2ib6:457/0 lens 440/0 e 0 to 0 dl 1576144497 ref 2 fl New:/2/ffffffff rc 0/-1
[243471.909187] Lustre: 27252:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 1246 previous similar messages
[243472.244537] Pid: 67670, comm: ll_ost00_023 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[243472.255055] Call Trace:
[243472.257632] [] wait_transaction_locked+0x85/0xd0 [jbd2]
[243472.264640] [] add_transaction_credits+0x268/0x2f0 [jbd2]
[243472.271823] [] start_this_handle+0x1a1/0x430 [jbd2]
[243472.278471] [] jbd2__journal_start+0xf3/0x1f0 [jbd2]
[243472.285223] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs]
[243472.292746] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs]
[243472.299840] [] dqget+0x3fa/0x450
[243472.304845] [] dquot_get_dqblk+0x14/0x1f0
[243472.310640] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs]
[243472.318253] [] lquotactl_slv+0x27d/0x9d0 [lquota]
[243472.324740] [] ofd_quotactl+0x13c/0x380 [ofd]
[243472.330886] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[243472.337942] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[243472.345744] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[243472.352170] [] kthread+0xd1/0xe0
[243472.357175] [] ret_from_fork_nospec_begin+0xe/0x21
[243472.363751] [] 0xffffffffffffffff
[243472.368852] LustreError: dumping log to /tmp/lustre-log.1576144492.67670
[243488.700547] Lustre: fir-OST0054: deleting orphan objects from 0x1800000401:11785320 to 0x1800000401:11785345
[243493.140727] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3048914 to 0x1a00000401:3048929
[243494.959161] Lustre: fir-OST005a: Connection restored to f5f4e1fb-09a1-cbb0-925c-f94d3727005b (at 10.9.101.44@o2ib4)
[243494.969719] Lustre: Skipped 658 previous similar messages
[243496.821040] Pid: 67641, comm: ll_ost00_019 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[243496.831560] Call Trace:
[243496.834130] [] wait_transaction_locked+0x85/0xd0 [jbd2]
[243496.841127] [] add_transaction_credits+0x268/0x2f0 [jbd2]
[243496.848311] [] start_this_handle+0x1a1/0x430 [jbd2]
[243496.854962] [] jbd2__journal_start+0xf3/0x1f0 [jbd2]
[243496.861711] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs]
[243496.869252] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs]
[243496.876347] [] dqget+0x3fa/0x450
[243496.881352] [] dquot_get_dqblk+0x14/0x1f0
[243496.887136] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs]
[243496.894749] [] lquotactl_slv+0x27d/0x9d0 [lquota]
[243496.901239] [] ofd_quotactl+0x13c/0x380 [ofd]
[243496.907368] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[243496.914413] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[243496.922218] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[243496.928645] [] kthread+0xd1/0xe0
[243496.933662] [] ret_from_fork_nospec_begin+0xe/0x21
[243496.940241] [] 0xffffffffffffffff
[243496.945340] LustreError: dumping log to /tmp/lustre-log.1576144517.67641
[243499.387679] Lustre: fir-OST0056: Export ffff88fc8fff5800 already connecting from 10.8.22.17@o2ib6
[243499.396651] Lustre: Skipped 71 previous similar messages
[243529.589697] Pid: 67625, comm: ll_ost03_015 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[243529.600223] Call Trace:
[243529.602789] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc]
[243529.609831] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc]
[243529.617168] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd]
[243529.623835] [] ofd_destroy_hdl+0x267/0x970 [ofd]
[243529.630239] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[243529.637270] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[243529.645103] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[243529.651520] [] kthread+0xd1/0xe0
[243529.656518] [] ret_from_fork_nospec_begin+0xe/0x21
[243529.663088] [] 0xffffffffffffffff
[243529.668198] LustreError: dumping log to /tmp/lustre-log.1576144549.67625
[243594.438729] Lustre: fir-OST005a: deleting orphan objects from 0x1980000401:11842091 to 0x1980000401:11842113
[243595.127017] Pid: 66114, comm: ll_ost03_001 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[243595.137533] Call Trace:
[243595.140111] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc]
[243595.147141] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc]
[243595.154464] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd]
[243595.161125] [] ofd_destroy_hdl+0x267/0x970 [ofd]
[243595.167565] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[243595.174608] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[243595.182430] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[243595.188847] [] kthread+0xd1/0xe0
[243595.193847] [] ret_from_fork_nospec_begin+0xe/0x21
[243595.200415] [] 0xffffffffffffffff
[243595.205522] LustreError: dumping log to /tmp/lustre-log.1576144615.66114
[243595.213232] LNet: Service thread pid 67660 was inactive for 1202.61s. Watchdog stack traces are limited to 3 per 300 seconds, skipping this one.
[243595.226294] LNet: Skipped 9 previous similar messages
[243623.799598] LustreError: dumping log to /tmp/lustre-log.1576144644.67655
[243625.898880] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3081992 to 0x1a80000402:3082017
[243627.895668] LustreError: dumping log to /tmp/lustre-log.1576144648.112554
[243631.991757] LustreError: dumping log to /tmp/lustre-log.1576144652.67731
[243636.087837] LustreError: dumping log to /tmp/lustre-log.1576144656.67615
[243652.472166] LustreError: dumping log to /tmp/lustre-log.1576144672.112492
[243656.568278] LustreError: dumping log to /tmp/lustre-log.1576144676.67686
[243663.760132] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3089704 to 0x1980000402:3089729
[243692.946701] Lustre: fir-OST0058: deleting orphan objects from 0x1900000401:11797424 to 0x1900000401:11797441
[243696.104758] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3085896 to 0x1900000402:3085921
[243754.874206] LustreError: dumping log to /tmp/lustre-log.1576144775.67740
[243783.546787] Pid: 67611, comm: ll_ost00_012 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[243783.557305] Call Trace:
[243783.559881] [] wait_transaction_locked+0x85/0xd0 [jbd2]
[243783.566885] [] add_transaction_credits+0x268/0x2f0 [jbd2]
[243783.574083] [] start_this_handle+0x1a1/0x430 [jbd2]
[243783.580736] [] jbd2__journal_start+0xf3/0x1f0 [jbd2]
[243783.587484] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs]
[243783.595010] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs]
[243783.602104] [] dqget+0x3fa/0x450
[243783.607109] [] dquot_get_dqblk+0x14/0x1f0
[243783.612882] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs]
[243783.620498] [] lquotactl_slv+0x27d/0x9d0 [lquota]
[243783.626975] [] ofd_quotactl+0x13c/0x380 [ofd]
[243783.633129] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[243783.640173] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[243783.647987] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[243783.654404] [] kthread+0xd1/0xe0
[243783.659419] [] ret_from_fork_nospec_begin+0xe/0x21
[243783.665982] [] 0xffffffffffffffff
[243783.671095] LustreError: dumping log to /tmp/lustre-log.1576144803.67611
[243811.012004] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000401:1122155 to 0x1a80000401:1122177
[243820.411516] Pid: 67840, comm: ll_ost01_062 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[243820.422034] Call Trace:
[243820.424595] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc]
[243820.431626] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc]
[243820.438923] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd]
[243820.445572] [] ofd_destroy_hdl+0x267/0x970 [ofd]
[243820.451973] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[243820.459004] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[243820.466820] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[243820.473235] [] kthread+0xd1/0xe0
[243820.478238] [] ret_from_fork_nospec_begin+0xe/0x21
[243820.484822] [] 0xffffffffffffffff
[243820.489930] LustreError: dumping log to /tmp/lustre-log.1576144840.67840
[243857.276255] Pid: 67826, comm: ll_ost02_048 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[243857.286773] Call Trace:
[243857.289325] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc]
[243857.296365] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc]
[243857.303662] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd]
[243857.310309] [] ofd_destroy_hdl+0x267/0x970 [ofd]
[243857.316714] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[243857.323753] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[243857.331575] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[243857.338028] [] kthread+0xd1/0xe0
[243857.343045] [] ret_from_fork_nospec_begin+0xe/0x21
[243857.349638] [] 0xffffffffffffffff
[243857.354764] LustreError: dumping log to /tmp/lustre-log.1576144877.67826
[243866.349098] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1125734 to 0x1980000400:1125761
[243905.236943] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3070636 to 0x1800000400:3070657
[243916.958121] Lustre: fir-OST0058: deleting orphan objects from 0x1900000400:1124391 to 0x1900000400:1124417
[243918.717473] Pid: 112489, comm: ll_ost00_070 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[243918.728081] Call Trace:
[243918.730648] [] wait_transaction_locked+0x85/0xd0 [jbd2]
[243918.737656] [] add_transaction_credits+0x268/0x2f0 [jbd2]
[243918.744856] [] start_this_handle+0x1a1/0x430 [jbd2]
[243918.751506] [] jbd2__journal_start+0xf3/0x1f0 [jbd2]
[243918.758254] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs]
[243918.765780] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs]
[243918.772876] [] dqget+0x3fa/0x450
[243918.777880] [] dquot_get_dqblk+0x14/0x1f0
[243918.783651] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs]
[243918.791268] [] lquotactl_slv+0x27d/0x9d0 [lquota]
[243918.797743] [] ofd_quotactl+0x13c/0x380 [ofd]
[243918.803901] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[243918.810944] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[243918.818742] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[243918.825157] [] kthread+0xd1/0xe0
[243918.830172] [] ret_from_fork_nospec_begin+0xe/0x21
[243918.836737] [] 0xffffffffffffffff
[243918.841847] LustreError: dumping log to /tmp/lustre-log.1576144939.112489
[243924.942253] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:1117450 to 0x1800000402:1117473
[243931.005736] Pid: 67695, comm: ll_ost01_037 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[243931.016255] Call Trace:
[243931.018813] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc]
[243931.025846] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc]
[243931.033145] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd]
[243931.039790] [] ofd_destroy_hdl+0x267/0x970 [ofd]
[243931.046192] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[243931.053233] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[243931.061048] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[243931.067465] [] kthread+0xd1/0xe0
[243931.072465] [] ret_from_fork_nospec_begin+0xe/0x21
[243931.079042] [] 0xffffffffffffffff
[243931.084150] LustreError: dumping log to /tmp/lustre-log.1576144951.67695
[243940.790925] LustreError: 67642:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576144661, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST005e_UUID lock: ffff890020f4f500/0x7066c9c190b32db0 lrc: 3/0,1 mode: --/PW res: [0x1a80000401:0x111f69:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 67642 timeout: 0 lvb_type: 0
[243940.834649] LustreError: 67642:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) Skipped 4 previous similar messages
[243943.293967] LustreError: dumping log to /tmp/lustre-log.1576144963.67216
[243963.774379] LustreError: dumping log to /tmp/lustre-log.1576144984.67405
[243988.350855] LustreError: dumping log to /tmp/lustre-log.1576145008.67627
[244019.723529] Lustre: fir-OST0054: Client fe16bc49-4bbe-dc30-a069-fee92bf3e984 (at 10.9.104.23@o2ib4) reconnecting
[244019.733802] Lustre: Skipped 743 previous similar messages
[244023.943208] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000400:11800494 to 0x1a80000400:11800513
[244025.215578] LustreError: dumping log to /tmp/lustre-log.1576145045.67589
[244052.833348] Lustre: fir-OST005e: deleting orphan objects from 0x0:27467461 to 0x0:27467489
[244055.592844] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1105448 to 0x1a00000402:1105473
[244073.568554] Lustre: 27252:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff890d6ff65050 x1650929716577312/t0(0) o10->68d8bbaa-aab1-a78f-323f-aaff4c375c30@10.8.7.4@o2ib6:303/0 lens 440/0 e 0 to 0 dl 1576145098 ref 2 fl New:/2/ffffffff rc 0/-1
[244073.597191] Lustre: 27252:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 1551 previous similar messages
[244074.368546] LustreError: dumping log to /tmp/lustre-log.1576145094.112526
[244096.361888] Lustre: fir-OST005c: Connection restored to cec884d3-ca4b-8127-2f6b-7762665aa5f8 (at 10.9.0.64@o2ib4)
[244096.372243] Lustre: Skipped 765 previous similar messages
[244098.945044] LNet: Service thread pid 112509 was inactive for 1204.01s. The thread might be hung, or it might only be slow and will resume later. Dumping the stack trace for debugging purposes:
[244098.962237] LNet: Skipped 9 previous similar messages
[244098.967385] Pid: 112509, comm: ll_ost00_081 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[244098.978009] Call Trace:
[244098.980586] [] wait_transaction_locked+0x85/0xd0 [jbd2]
[244098.987583] [] add_transaction_credits+0x268/0x2f0 [jbd2]
[244098.994751] [] start_this_handle+0x1a1/0x430 [jbd2]
[244099.001416] [] jbd2__journal_start+0xf3/0x1f0 [jbd2]
[244099.008150] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs]
[244099.015704] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs]
[244099.022806] [] dqget+0x3fa/0x450
[244099.027820] [] dquot_get_dqblk+0x14/0x1f0
[244099.033606] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs]
[244099.041229] [] lquotactl_slv+0x27d/0x9d0 [lquota]
[244099.047704] [] ofd_quotactl+0x13c/0x380 [ofd]
[244099.053847] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[244099.060887] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[244099.068712] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[244099.075128] [] kthread+0xd1/0xe0
[244099.080129] [] ret_from_fork_nospec_begin+0xe/0x21
[244099.086719] [] 0xffffffffffffffff
[244099.091815] LustreError: dumping log to /tmp/lustre-log.1576145119.112509
[244101.511858] Lustre: fir-OST0056: Export ffff88fc8fff5800 already connecting from 10.8.22.17@o2ib6
[244101.520826] Lustre: Skipped 71 previous similar messages
[244123.521532] Pid: 67603, comm: ll_ost02_010 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[244123.532052] Call Trace:
[244123.534604] [] ofd_create_hdl+0xcb3/0x20e0 [ofd]
[244123.541002] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[244123.548066] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[244123.555866] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[244123.562296] [] kthread+0xd1/0xe0
[244123.567300] [] ret_from_fork_nospec_begin+0xe/0x21
[244123.573874] [] 0xffffffffffffffff
[244123.578983] LustreError: dumping log to /tmp/lustre-log.1576145143.67603
[244127.617601] Pid: 67406, comm: ll_ost00_007 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[244127.628123] Call Trace:
[244127.630693] [] wait_transaction_locked+0x85/0xd0 [jbd2] [244127.637692] [] add_transaction_credits+0x268/0x2f0 [jbd2] [244127.644875] [] start_this_handle+0x1a1/0x430 [jbd2] [244127.651524] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [244127.658287] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [244127.665814] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [244127.672911] [] dqget+0x3fa/0x450 [244127.677913] [] dquot_get_dqblk+0x14/0x1f0 [244127.683701] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [244127.691311] [] lquotactl_slv+0x27d/0x9d0 [lquota] [244127.697801] [] ofd_quotactl+0x13c/0x380 [ofd] [244127.703938] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [244127.710979] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [244127.718792] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [244127.725227] [] kthread+0xd1/0xe0 [244127.730244] [] ret_from_fork_nospec_begin+0xe/0x21 [244127.736806] [] 0xffffffffffffffff [244127.741919] LustreError: dumping log to /tmp/lustre-log.1576145148.67406 [244165.564579] Lustre: fir-OST005c: deleting orphan objects from 0x0:27192451 to 0x0:27192481 [244213.564536] Lustre: fir-OST0058: deleting orphan objects from 0x0:27506755 to 0x0:27506785 [244217.731400] Pid: 67608, comm: ll_ost01_011 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [244217.741917] Call Trace: [244217.744478] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [244217.751519] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [244217.758816] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [244217.765472] [] ofd_destroy_hdl+0x267/0x970 [ofd] [244217.771875] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [244217.778905] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [244217.786718] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [244217.793136] [] kthread+0xd1/0xe0 [244217.798135] [] ret_from_fork_nospec_begin+0xe/0x21 [244217.804706] [] 0xffffffffffffffff [244217.809824] LustreError: dumping log to /tmp/lustre-log.1576145238.67608 [244217.817430] Pid: 112565, comm: ll_ost03_078 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [244217.828059] Call Trace: [244217.830606] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [244217.836998] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [244217.844034] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [244217.851836] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [244217.858256] [] kthread+0xd1/0xe0 [244217.863250] [] ret_from_fork_nospec_begin+0xe/0x21 [244217.869803] [] 0xffffffffffffffff [244226.910827] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3082024 to 0x1a80000402:3082049 [244230.019650] LNet: Service thread pid 67899 was inactive for 1203.17s. Watchdog stack traces are limited to 3 per 300 seconds, skipping this one. 
[244230.032681] LNet: Skipped 15 previous similar messages [244230.037916] LustreError: dumping log to /tmp/lustre-log.1576145250.67899 [244245.307605] Lustre: fir-OST0054: deleting orphan objects from 0x1800000401:11785353 to 0x1800000401:11785377 [244249.307760] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3048936 to 0x1a00000401:3048961 [244254.596126] LustreError: dumping log to /tmp/lustre-log.1576145274.67867 [244264.732052] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3089736 to 0x1980000402:3089761 [244350.109693] Lustre: fir-OST005a: deleting orphan objects from 0x1980000401:11842123 to 0x1980000401:11842145 [244385.670715] LustreError: dumping log to /tmp/lustre-log.1576145405.112513 [244410.247206] Pid: 112497, comm: ll_ost00_077 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [244410.257812] Call Trace: [244410.260387] [] wait_transaction_locked+0x85/0xd0 [jbd2] [244410.267386] [] add_transaction_credits+0x268/0x2f0 [jbd2] [244410.274573] [] start_this_handle+0x1a1/0x430 [jbd2] [244410.281219] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [244410.287967] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [244410.295494] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [244410.302591] [] dqget+0x3fa/0x450 [244410.307609] [] dquot_get_dqblk+0x14/0x1f0 [244410.313382] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [244410.321008] [] lquotactl_slv+0x27d/0x9d0 [lquota] [244410.327483] [] ofd_quotactl+0x13c/0x380 [ofd] [244410.333625] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [244410.340664] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [244410.348481] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [244410.354894] [] kthread+0xd1/0xe0 [244410.359914] [] ret_from_fork_nospec_begin+0xe/0x21 [244410.366475] [] 0xffffffffffffffff [244410.371602] LustreError: dumping log to /tmp/lustre-log.1576145430.112497 [244410.379116] Pid: 112490, comm: ll_ost00_071 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [244410.389754] Call Trace: [244410.392302] [] wait_transaction_locked+0x85/0xd0 [jbd2] [244410.399311] [] add_transaction_credits+0x268/0x2f0 [jbd2] [244410.406480] [] start_this_handle+0x1a1/0x430 [jbd2] [244410.413144] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [244410.419882] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [244410.427418] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [244410.434502] [] dqget+0x3fa/0x450 [244410.439529] [] dquot_get_dqblk+0x14/0x1f0 [244410.445310] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [244410.452923] [] lquotactl_slv+0x27d/0x9d0 [lquota] [244410.459400] [] ofd_quotactl+0x13c/0x380 [ofd] [244410.465544] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [244410.472565] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [244410.480381] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [244410.486798] [] kthread+0xd1/0xe0 [244410.491802] [] ret_from_fork_nospec_begin+0xe/0x21 [244410.498358] [] 0xffffffffffffffff [244449.103660] Lustre: fir-OST0058: deleting orphan objects from 0x1900000401:11797453 to 0x1900000401:11797473 [244452.007748] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3085933 to 0x1900000402:3085953 [244466.964684] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1125767 to 0x1980000400:1125793 [244467.592340] Pid: 113354, comm: ll_ost02_092 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [244467.602943] Call Trace: [244467.605494] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [244467.612527] [] 
ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [244467.619814] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [244467.626480] [] ofd_destroy_hdl+0x267/0x970 [ofd] [244467.632881] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [244467.639913] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [244467.647729] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [244467.654147] [] kthread+0xd1/0xe0 [244467.659148] [] ret_from_fork_nospec_begin+0xe/0x21 [244467.665723] [] 0xffffffffffffffff [244467.670832] LustreError: dumping log to /tmp/lustre-log.1576145487.113354 [244488.072775] Pid: 112524, comm: ll_ost03_068 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [244488.083381] Call Trace: [244488.085941] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [244488.092980] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [244488.100279] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [244488.106927] [] ofd_destroy_hdl+0x267/0x970 [ofd] [244488.113332] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [244488.120370] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [244488.128185] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [244488.134599] [] kthread+0xd1/0xe0 [244488.139601] [] ret_from_fork_nospec_begin+0xe/0x21 [244488.146177] [] 0xffffffffffffffff [244488.151286] LustreError: dumping log to /tmp/lustre-log.1576145508.112524 [244512.649214] Pid: 67654, comm: ll_ost02_022 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [244512.659732] Call Trace: [244512.662292] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [244512.668691] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [244512.675754] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [244512.683555] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [244512.689983] [] kthread+0xd1/0xe0 [244512.694996] [] ret_from_fork_nospec_begin+0xe/0x21 [244512.701584] [] 0xffffffffffffffff [244512.706706] LustreError: dumping log to /tmp/lustre-log.1576145533.67654 [244516.745305] LustreError: dumping log to /tmp/lustre-log.1576145537.67815 [244541.321771] LustreError: dumping log to /tmp/lustre-log.1576145561.112563 [244621.267131] Lustre: fir-OST0056: Client 8c3ccd99-dc20-24d2-79f4-f8d2c0329cfb (at 10.8.24.19@o2ib6) reconnecting [244621.277306] Lustre: Skipped 789 previous similar messages [244635.401272] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000401:1122183 to 0x1a80000401:1122209 [244656.004751] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1105448 to 0x1a00000402:1105505 [244661.355914] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3070661 to 0x1800000400:3070689 [244672.396368] LustreError: dumping log to /tmp/lustre-log.1576145692.67860 [244673.133159] Lustre: fir-OST0058: deleting orphan objects from 0x1900000400:1124428 to 0x1900000400:1124449 [244674.380427] Lustre: 27252:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff890df37ee850 x1650929716577312/t0(0) o10->68d8bbaa-aab1-a78f-323f-aaff4c375c30@10.8.7.4@o2ib6:149/0 lens 440/0 e 0 to 0 dl 1576145699 ref 2 fl New:/2/ffffffff rc 0/-1 [244674.409087] Lustre: 27252:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 1589 previous similar messages [244680.829213] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:1117480 to 0x1800000402:1117505 [244696.972864] LustreError: dumping log to /tmp/lustre-log.1576145717.112502 [244696.984894] Lustre: fir-OST005a: Connection restored to f5f4e1fb-09a1-cbb0-925c-f94d3727005b (at 10.9.101.44@o2ib4) [244696.995421] Lustre: Skipped 794 previous similar 
messages [244703.635682] Lustre: fir-OST0056: Export ffff88fc8fff5800 already connecting from 10.8.22.17@o2ib6 [244703.644647] Lustre: Skipped 71 previous similar messages [244746.125835] LNet: Service thread pid 67779 was inactive for 1200.45s. The thread might be hung, or it might only be slow and will resume later. Dumping the stack trace for debugging purposes: [244746.142959] LNet: Skipped 9 previous similar messages [244746.148106] Pid: 67779, comm: ll_ost01_050 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [244746.158643] Call Trace: [244746.161193] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [244746.168227] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [244746.175513] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [244746.182163] [] ofd_destroy_hdl+0x267/0x970 [ofd] [244746.188565] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [244746.195596] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [244746.203411] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [244746.209844] [] kthread+0xd1/0xe0 [244746.214859] [] ret_from_fork_nospec_begin+0xe/0x21 [244746.221423] [] 0xffffffffffffffff [244746.226545] LustreError: dumping log to /tmp/lustre-log.1576145766.67779 [244746.234164] Pid: 112545, comm: ll_ost01_089 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [244746.244796] Call Trace: [244746.247343] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [244746.253733] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [244746.260769] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [244746.268588] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [244746.275032] [] kthread+0xd1/0xe0 [244746.280038] [] ret_from_fork_nospec_begin+0xe/0x21 [244746.286604] [] 0xffffffffffffffff [244766.606244] Pid: 66903, comm: ll_ost02_006 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [244766.616760] Call Trace: [244766.619312] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [244766.626355] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [244766.633651] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [244766.640310] [] ofd_destroy_hdl+0x267/0x970 [ofd] [244766.646712] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [244766.653768] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [244766.661591] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [244766.668010] [] kthread+0xd1/0xe0 [244766.673009] [] ret_from_fork_nospec_begin+0xe/0x21 [244766.679578] [] 0xffffffffffffffff [244766.684687] LustreError: dumping log to /tmp/lustre-log.1576145786.66903 [244779.766220] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000400:11800522 to 0x1a80000400:11800545 [244796.436850] LustreError: 112547:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576145516, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST005a_UUID lock: ffff88e9f0f4d340/0x7066c9c190b38bea lrc: 3/0,1 mode: --/PW res: [0x1a490d4:0x0:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 112547 timeout: 0 lvb_type: 0 [244796.480062] LustreError: 112547:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) Skipped 9 previous similar messages [244809.056343] Lustre: fir-OST005e: deleting orphan objects from 0x0:27467491 to 0x0:27467521 [244827.882738] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3082051 to 0x1a80000402:3082081 [244828.047456] Pid: 66384, comm: ll_ost00_004 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 
[244828.057974] Call Trace: [244828.060544] [] wait_transaction_locked+0x85/0xd0 [jbd2] [244828.067543] [] add_transaction_credits+0x268/0x2f0 [jbd2] [244828.074733] [] start_this_handle+0x1a1/0x430 [jbd2] [244828.081382] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [244828.088118] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [244828.095669] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [244828.102765] [] dqget+0x3fa/0x450 [244828.107779] [] dquot_get_dqblk+0x14/0x1f0 [244828.113553] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [244828.121178] [] lquotactl_slv+0x27d/0x9d0 [lquota] [244828.127663] [] ofd_quotactl+0x13c/0x380 [ofd] [244828.133812] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [244828.140859] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [244828.148676] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [244828.155122] [] kthread+0xd1/0xe0 [244828.160123] [] ret_from_fork_nospec_begin+0xe/0x21 [244828.166708] [] 0xffffffffffffffff [244828.171807] LustreError: dumping log to /tmp/lustre-log.1576145848.66384 [244844.431793] Pid: 67642, comm: ll_ost01_020 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [244844.442314] Call Trace: [244844.444868] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [244844.451906] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [244844.459202] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [244844.465852] [] ofd_destroy_hdl+0x267/0x970 [ofd] [244844.472255] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [244844.479309] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [244844.487144] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [244844.493577] [] kthread+0xd1/0xe0 [244844.498577] [] ret_from_fork_nospec_begin+0xe/0x21 [244844.505161] [] 0xffffffffffffffff [244844.510274] LustreError: dumping log to /tmp/lustre-log.1576145864.67642 [244852.623954] LNet: Service thread pid 112540 was inactive for 1201.63s. Watchdog stack traces are limited to 3 per 300 seconds, skipping this one. 
[244852.637077] LNet: Skipped 6 previous similar messages [244852.642225] LustreError: dumping log to /tmp/lustre-log.1576145872.112540 [244866.327961] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3089765 to 0x1980000402:3089793 [244873.104370] LustreError: dumping log to /tmp/lustre-log.1576145893.67681 [244877.200447] LustreError: dumping log to /tmp/lustre-log.1576145897.67776 [244881.296515] LustreError: dumping log to /tmp/lustre-log.1576145901.67811 [244921.210547] Lustre: fir-OST005c: deleting orphan objects from 0x0:27192484 to 0x0:27192513 [244926.353407] LustreError: dumping log to /tmp/lustre-log.1576145946.67613 [244930.449508] LustreError: dumping log to /tmp/lustre-log.1576145950.67704 [244934.545583] LustreError: dumping log to /tmp/lustre-log.1576145954.67857 [244955.025978] LustreError: dumping log to /tmp/lustre-log.1576145975.67909 [244963.218142] LustreError: dumping log to /tmp/lustre-log.1576145983.67854 [244968.739568] Lustre: fir-OST0058: deleting orphan objects from 0x0:27506790 to 0x0:27506817 [244975.506392] LustreError: dumping log to /tmp/lustre-log.1576145995.67648 [244983.698547] LustreError: dumping log to /tmp/lustre-log.1576146003.67735 [245000.890591] Lustre: fir-OST0054: deleting orphan objects from 0x1800000401:11785385 to 0x1800000401:11785409 [245004.178952] LustreError: dumping log to /tmp/lustre-log.1576146024.67661 [245004.874736] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3048967 to 0x1a00000401:3048993 [245008.275039] LustreError: dumping log to /tmp/lustre-log.1576146028.67748 [245068.352651] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1125800 to 0x1980000400:1125825 [245105.804728] Lustre: fir-OST005a: deleting orphan objects from 0x1980000401:11842158 to 0x1980000401:11842177 [245110.677064] Pid: 67149, comm: ll_ost01_007 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [245110.687584] Call Trace: [245110.690153] [] wait_transaction_locked+0x85/0xd0 [jbd2] [245110.697151] [] add_transaction_credits+0x268/0x2f0 [jbd2] [245110.704337] [] start_this_handle+0x1a1/0x430 [jbd2] [245110.710976] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [245110.717701] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [245110.725216] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [245110.732310] [] dqget+0x3fa/0x450 [245110.737330] [] dquot_get_dqblk+0x14/0x1f0 [245110.743115] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [245110.750723] [] lquotactl_slv+0x27d/0x9d0 [lquota] [245110.757209] [] ofd_quotactl+0x13c/0x380 [ofd] [245110.763341] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [245110.770400] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [245110.778206] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [245110.784631] [] kthread+0xd1/0xe0 [245110.789634] [] ret_from_fork_nospec_begin+0xe/0x21 [245110.796203] [] 0xffffffffffffffff [245110.801317] LustreError: dumping log to /tmp/lustre-log.1576146131.67149 [245114.773156] Pid: 67601, comm: ll_ost00_009 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [245114.783692] Call Trace: [245114.786264] [] wait_transaction_locked+0x85/0xd0 [jbd2] [245114.793260] [] add_transaction_credits+0x268/0x2f0 [jbd2] [245114.800445] [] start_this_handle+0x1a1/0x430 [jbd2] [245114.807092] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [245114.813841] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [245114.821365] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [245114.828463] [] dqget+0x3fa/0x450 [245114.833481] [] dquot_get_dqblk+0x14/0x1f0 
[245114.839271] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [245114.846902] [] lquotactl_slv+0x27d/0x9d0 [lquota] [245114.853390] [] ofd_quotactl+0x13c/0x380 [ofd] [245114.859548] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [245114.866591] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [245114.874420] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [245114.880837] [] kthread+0xd1/0xe0 [245114.885854] [] ret_from_fork_nospec_begin+0xe/0x21 [245114.892417] [] 0xffffffffffffffff [245114.897544] LustreError: dumping log to /tmp/lustre-log.1576146135.67601 [245118.869229] Pid: 67927, comm: ll_ost01_075 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [245118.879747] Call Trace: [245118.882317] [] wait_transaction_locked+0x85/0xd0 [jbd2] [245118.889312] [] add_transaction_credits+0x268/0x2f0 [jbd2] [245118.896497] [] start_this_handle+0x1a1/0x430 [jbd2] [245118.903146] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [245118.909893] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [245118.917421] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [245118.924516] [] dqget+0x3fa/0x450 [245118.929535] [] dquot_get_dqblk+0x14/0x1f0 [245118.935322] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [245118.942935] [] lquotactl_slv+0x27d/0x9d0 [lquota] [245118.949424] [] ofd_quotactl+0x13c/0x380 [ofd] [245118.955553] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [245118.962615] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [245118.970419] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [245118.976845] [] kthread+0xd1/0xe0 [245118.981851] [] ret_from_fork_nospec_begin+0xe/0x21 [245118.988424] [] 0xffffffffffffffff [245118.993539] LustreError: dumping log to /tmp/lustre-log.1576146139.67927 [245139.349639] Pid: 67852, comm: ll_ost00_059 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [245139.360174] Call Trace: [245139.362753] [] wait_transaction_locked+0x85/0xd0 [jbd2] [245139.369776] [] add_transaction_credits+0x268/0x2f0 [jbd2] [245139.377005] [] start_this_handle+0x1a1/0x430 [jbd2] [245139.383678] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [245139.390454] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [245139.398003] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [245139.405123] [] dqget+0x3fa/0x450 [245139.410137] [] dquot_get_dqblk+0x14/0x1f0 [245139.415917] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [245139.423577] [] lquotactl_slv+0x27d/0x9d0 [lquota] [245139.430078] [] ofd_quotactl+0x13c/0x380 [ofd] [245139.436230] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [245139.443270] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [245139.451097] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [245139.457509] [] kthread+0xd1/0xe0 [245139.462511] [] ret_from_fork_nospec_begin+0xe/0x21 [245139.469079] [] 0xffffffffffffffff [245139.474179] LustreError: dumping log to /tmp/lustre-log.1576146159.67852 [245163.926129] Pid: 67634, comm: ll_ost00_018 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [245163.936663] Call Trace: [245163.939239] [] wait_transaction_locked+0x85/0xd0 [jbd2] [245163.946230] [] add_transaction_credits+0x268/0x2f0 [jbd2] [245163.953406] [] start_this_handle+0x1a1/0x430 [jbd2] [245163.960055] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [245163.966804] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [245163.974345] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [245163.981442] [] dqget+0x3fa/0x450 [245163.986436] [] dquot_get_dqblk+0x14/0x1f0 [245163.992223] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [245163.999835] [] 
lquotactl_slv+0x27d/0x9d0 [lquota] [245164.006323] [] ofd_quotactl+0x13c/0x380 [ofd] [245164.012453] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [245164.019498] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [245164.027300] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [245164.033744] [] kthread+0xd1/0xe0 [245164.038757] [] ret_from_fork_nospec_begin+0xe/0x21 [245164.045350] [] 0xffffffffffffffff [245164.050451] LustreError: dumping log to /tmp/lustre-log.1576146184.67634 [245168.022259] LustreError: dumping log to /tmp/lustre-log.1576146188.67821 [245205.326654] Lustre: fir-OST0058: deleting orphan objects from 0x1900000401:11797482 to 0x1900000401:11797505 [245207.798809] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3085959 to 0x1900000402:3085985 [245217.175196] LustreError: dumping log to /tmp/lustre-log.1576146237.112558 [245222.073473] Lustre: fir-OST005a: Client 717fa73e-8071-a76f-931e-8957a8ca32aa (at 10.9.101.41@o2ib4) reconnecting [245222.083737] Lustre: Skipped 792 previous similar messages [245252.465167] Lustre: fir-OST005a: deleting orphan objects from 0x0:27562200 to 0x0:27562241 [245256.976675] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1105514 to 0x1a00000402:1105537 [245270.424257] LustreError: dumping log to /tmp/lustre-log.1576146290.67691 [245274.520335] LustreError: dumping log to /tmp/lustre-log.1576146294.67632 [245274.584357] Lustre: 27257:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff88ebc457c050 x1648527343518128/t0(0) o10->1d4d1153-82cd-6bbc-4932-1e6a2a506ca0@10.8.30.27@o2ib6:749/0 lens 440/0 e 0 to 0 dl 1576146299 ref 2 fl New:/2/ffffffff rc 0/-1 [245274.613172] Lustre: 27257:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 1630 previous similar messages [245297.957831] Lustre: fir-OST005a: Connection restored to f5f4e1fb-09a1-cbb0-925c-f94d3727005b (at 10.9.101.44@o2ib4) [245297.968354] Lustre: Skipped 811 previous similar messages [245299.096823] LustreError: dumping log to /tmp/lustre-log.1576146319.67916 [245305.759932] Lustre: fir-OST0056: Export ffff88fc8fff5800 already connecting from 10.8.22.17@o2ib6 [245305.768898] Lustre: Skipped 71 previous similar messages [245323.082001] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000401:1122212 to 0x1a80000401:1122241 [245372.826286] LustreError: dumping log to /tmp/lustre-log.1576146393.112536 [245417.682987] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3070697 to 0x1800000400:3070721 [245428.780099] Lustre: fir-OST0058: deleting orphan objects from 0x1900000400:1124451 to 0x1900000400:1124481 [245428.958687] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3082088 to 0x1a80000402:3082113 [245430.171442] LNet: Service thread pid 112567 was inactive for 1203.28s. The thread might be hung, or it might only be slow and will resume later. 
Dumping the stack trace for debugging purposes: [245430.188635] LNet: Skipped 9 previous similar messages [245430.193785] Pid: 112567, comm: ll_ost00_097 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [245430.204406] Call Trace: [245430.206974] [] wait_transaction_locked+0x85/0xd0 [jbd2] [245430.213973] [] add_transaction_credits+0x268/0x2f0 [jbd2] [245430.221158] [] start_this_handle+0x1a1/0x430 [jbd2] [245430.227806] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [245430.234554] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [245430.242080] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [245430.249176] [] dqget+0x3fa/0x450 [245430.254182] [] dquot_get_dqblk+0x14/0x1f0 [245430.259970] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [245430.267602] [] lquotactl_slv+0x27d/0x9d0 [lquota] [245430.274080] [] ofd_quotactl+0x13c/0x380 [ofd] [245430.280222] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [245430.287261] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [245430.295077] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [245430.301493] [] kthread+0xd1/0xe0 [245430.306507] [] ret_from_fork_nospec_begin+0xe/0x21 [245430.313070] [] 0xffffffffffffffff [245430.318184] LustreError: dumping log to /tmp/lustre-log.1576146450.112567 [245436.852255] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:1117514 to 0x1800000402:1117537 [245454.747927] Pid: 66253, comm: ll_ost00_003 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [245454.758449] Call Trace: [245454.761018] [] wait_transaction_locked+0x85/0xd0 [jbd2] [245454.768017] [] add_transaction_credits+0x268/0x2f0 [jbd2] [245454.775203] [] start_this_handle+0x1a1/0x430 [jbd2] [245454.781847] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [245454.788597] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [245454.796124] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [245454.803218] [] dqget+0x3fa/0x450 [245454.808221] [] dquot_get_dqblk+0x14/0x1f0 [245454.813992] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [245454.821621] [] lquotactl_slv+0x27d/0x9d0 [lquota] [245454.828094] [] ofd_quotactl+0x13c/0x380 [ofd] [245454.834238] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [245454.841276] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [245454.849094] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [245454.855508] [] kthread+0xd1/0xe0 [245454.860508] [] ret_from_fork_nospec_begin+0xe/0x21 [245454.867070] [] 0xffffffffffffffff [245454.872182] LustreError: dumping log to /tmp/lustre-log.1576146475.66253 [245466.795979] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3089802 to 0x1980000402:3089825 [245503.900901] Pid: 67788, comm: ll_ost01_051 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [245503.911421] Call Trace: [245503.913974] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [245503.920372] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [245503.927436] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [245503.935237] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [245503.941664] [] kthread+0xd1/0xe0 [245503.946669] [] ret_from_fork_nospec_begin+0xe/0x21 [245503.953243] [] 0xffffffffffffffff [245503.958353] LustreError: dumping log to /tmp/lustre-log.1576146524.67788 [245535.965256] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000400:11800552 to 0x1a80000400:11800577 [245564.735392] Lustre: fir-OST005e: deleting orphan objects from 0x0:27467524 to 0x0:27467553 [245585.822526] Pid: 112550, comm: ll_ost00_091 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 
[245585.833129] Call Trace: [245585.835699] [] wait_transaction_locked+0x85/0xd0 [jbd2] [245585.842697] [] add_transaction_credits+0x268/0x2f0 [jbd2] [245585.849900] [] start_this_handle+0x1a1/0x430 [jbd2] [245585.856547] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [245585.863310] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [245585.870832] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [245585.877938] [] dqget+0x3fa/0x450 [245585.882927] [] dquot_get_dqblk+0x14/0x1f0 [245585.888701] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [245585.896319] [] lquotactl_slv+0x27d/0x9d0 [lquota] [245585.902791] [] ofd_quotactl+0x13c/0x380 [ofd] [245585.908935] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [245585.915967] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [245585.923796] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [245585.930206] [] kthread+0xd1/0xe0 [245585.935213] [] ret_from_fork_nospec_begin+0xe/0x21 [245585.941768] [] 0xffffffffffffffff [245585.946870] LustreError: dumping log to /tmp/lustre-log.1576146606.112550 [245610.399022] Pid: 112552, comm: ll_ost00_092 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [245610.409630] Call Trace: [245610.412209] [] wait_transaction_locked+0x85/0xd0 [jbd2] [245610.419214] [] add_transaction_credits+0x268/0x2f0 [jbd2] [245610.426400] [] start_this_handle+0x1a1/0x430 [jbd2] [245610.433071] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [245610.439845] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [245610.447390] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [245610.454489] [] dqget+0x3fa/0x450 [245610.459496] [] dquot_get_dqblk+0x14/0x1f0 [245610.465268] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [245610.472886] [] lquotactl_slv+0x27d/0x9d0 [lquota] [245610.479361] [] ofd_quotactl+0x13c/0x380 [ofd] [245610.485506] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [245610.492544] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [245610.500344] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [245610.506772] [] kthread+0xd1/0xe0 [245610.511777] [] ret_from_fork_nospec_begin+0xe/0x21 [245610.518351] [] 0xffffffffffffffff [245610.523444] LustreError: dumping log to /tmp/lustre-log.1576146630.112552 [245634.975512] LNet: Service thread pid 66233 was inactive for 1200.91s. Watchdog stack traces are limited to 3 per 300 seconds, skipping this one. 
[245634.988547] LNet: Skipped 19 previous similar messages [245634.993785] LustreError: dumping log to /tmp/lustre-log.1576146655.66233 [245639.071601] LustreError: dumping log to /tmp/lustre-log.1576146659.67676 [245668.810889] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1125800 to 0x1980000400:1125857 [245677.033595] Lustre: fir-OST005c: deleting orphan objects from 0x0:27192484 to 0x0:27192545 [245700.512816] LustreError: dumping log to /tmp/lustre-log.1576146720.112547 [245716.897172] LustreError: dumping log to /tmp/lustre-log.1576146737.112516 [245725.042542] Lustre: fir-OST0058: deleting orphan objects from 0x0:27506790 to 0x0:27506849 [245733.281464] Pid: 67617, comm: ll_ost03_014 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [245733.291986] Call Trace: [245733.294537] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [245733.300927] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [245733.307993] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [245733.315807] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [245733.322237] [] kthread+0xd1/0xe0 [245733.327240] [] ret_from_fork_nospec_begin+0xe/0x21 [245733.333818] [] 0xffffffffffffffff [245733.338927] LustreError: dumping log to /tmp/lustre-log.1576146753.67617 [245741.473756] Pid: 67621, comm: ll_ost00_014 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [245741.484276] Call Trace: [245741.486845] [] wait_transaction_locked+0x85/0xd0 [jbd2] [245741.493852] [] add_transaction_credits+0x268/0x2f0 [jbd2] [245741.501053] [] start_this_handle+0x1a1/0x430 [jbd2] [245741.507702] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [245741.514450] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [245741.521977] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [245741.529072] [] dqget+0x3fa/0x450 [245741.534076] [] dquot_get_dqblk+0x14/0x1f0 [245741.539856] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [245741.547482] [] lquotactl_slv+0x27d/0x9d0 [lquota] [245741.553957] [] ofd_quotactl+0x13c/0x380 [ofd] [245741.560100] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [245741.567157] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [245741.575002] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [245741.581422] [] kthread+0xd1/0xe0 [245741.586423] [] ret_from_fork_nospec_begin+0xe/0x21 [245741.592990] [] 0xffffffffffffffff [245741.598093] LustreError: dumping log to /tmp/lustre-log.1576146761.67621 [245760.825819] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3049000 to 0x1a00000401:3049025 [245766.050105] Pid: 29310, comm: ll_ost00_099 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [245766.060628] Call Trace: [245766.063194] [] wait_transaction_locked+0x85/0xd0 [jbd2] [245766.070193] [] add_transaction_credits+0x268/0x2f0 [jbd2] [245766.077382] [] start_this_handle+0x1a1/0x430 [jbd2] [245766.084032] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [245766.090769] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [245766.098309] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [245766.105391] [] dqget+0x3fa/0x450 [245766.110406] [] dquot_get_dqblk+0x14/0x1f0 [245766.116179] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [245766.123804] [] lquotactl_slv+0x27d/0x9d0 [lquota] [245766.130297] [] ofd_quotactl+0x13c/0x380 [ofd] [245766.136440] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [245766.143479] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [245766.151304] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [245766.157728] [] kthread+0xd1/0xe0 [245766.162745] [] 
ret_from_fork_nospec_begin+0xe/0x21 [245766.169326] [] 0xffffffffffffffff [245766.174437] LustreError: dumping log to /tmp/lustre-log.1576146786.29310 [245782.805115] Lustre: fir-OST0054: deleting orphan objects from 0x1800000401:11785417 to 0x1800000401:11785441 [245822.898922] Lustre: fir-OST0056: Client 8c3ccd99-dc20-24d2-79f4-f8d2c0329cfb (at 10.8.24.19@o2ib6) reconnecting [245822.909101] Lustre: Skipped 768 previous similar messages [245858.540627] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1105542 to 0x1a00000402:1105569 [245862.459717] Lustre: fir-OST005a: deleting orphan objects from 0x1980000401:11842192 to 0x1980000401:11842209 [245872.548215] Pid: 29376, comm: ll_ost00_102 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [245872.558738] Call Trace: [245872.561311] [] wait_transaction_locked+0x85/0xd0 [jbd2] [245872.568308] [] add_transaction_credits+0x268/0x2f0 [jbd2] [245872.575506] [] start_this_handle+0x1a1/0x430 [jbd2] [245872.582157] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [245872.588905] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [245872.596431] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [245872.603528] [] dqget+0x3fa/0x450 [245872.608529] [] dquot_get_dqblk+0x14/0x1f0 [245872.614301] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [245872.621920] [] lquotactl_slv+0x27d/0x9d0 [lquota] [245872.628402] [] ofd_quotactl+0x13c/0x380 [ofd] [245872.634549] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [245872.641584] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [245872.649408] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [245872.655824] [] kthread+0xd1/0xe0 [245872.660841] [] ret_from_fork_nospec_begin+0xe/0x21 [245872.667418] [] 0xffffffffffffffff [245872.672535] LustreError: dumping log to /tmp/lustre-log.1576146892.29376 [245874.826272] Lustre: 26941:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/-150), not sending early reply req@ffff88f42c6cb050 x1651382341323616/t0(0) o4->4cd291bb-d8c0-256c-78b1-5ae56b16acd9@10.9.107.53@o2ib4:595/0 lens 6640/0 e 0 to 0 dl 1576146900 ref 2 fl New:/2/ffffffff rc 0/-1 [245874.855451] Lustre: 26941:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 1556 previous similar messages [245897.124688] Pid: 67787, comm: ll_ost00_047 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [245897.135230] Call Trace: [245897.137818] [] wait_transaction_locked+0x85/0xd0 [jbd2] [245897.144825] [] add_transaction_credits+0x268/0x2f0 [jbd2] [245897.152059] [] start_this_handle+0x1a1/0x430 [jbd2] [245897.158708] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [245897.165457] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [245897.172981] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [245897.180079] [] dqget+0x3fa/0x450 [245897.185081] [] dquot_get_dqblk+0x14/0x1f0 [245897.190853] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [245897.198478] [] lquotactl_slv+0x27d/0x9d0 [lquota] [245897.204953] [] ofd_quotactl+0x13c/0x380 [ofd] [245897.211096] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [245897.218151] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [245897.225976] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [245897.232394] [] kthread+0xd1/0xe0 [245897.237409] [] ret_from_fork_nospec_begin+0xe/0x21 [245897.243973] [] 0xffffffffffffffff [245897.249084] LustreError: dumping log to /tmp/lustre-log.1576146917.67787 [245899.157848] Lustre: fir-OST005a: Connection restored to f5f4e1fb-09a1-cbb0-925c-f94d3727005b (at 10.9.101.44@o2ib4) [245899.168376] Lustre: 
Skipped 822 previous similar messages [245907.883442] Lustre: fir-OST0056: Export ffff88fc8fff5800 already connecting from 10.8.22.17@o2ib6 [245907.892414] Lustre: Skipped 50 previous similar messages [245921.701149] LustreError: dumping log to /tmp/lustre-log.1576146941.29351 [245960.741777] Lustre: fir-OST0058: deleting orphan objects from 0x1900000401:11797513 to 0x1900000401:11797537 [245963.829873] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3085992 to 0x1900000402:3086017 [246008.504171] Lustre: fir-OST005a: deleting orphan objects from 0x0:27562243 to 0x0:27562273 [246028.199246] LustreError: dumping log to /tmp/lustre-log.1576147048.67719 [246030.266665] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3082121 to 0x1a80000402:3082145 [246052.775736] LNet: Service thread pid 67830 was inactive for 1201.66s. The thread might be hung, or it might only be slow and will resume later. Dumping the stack trace for debugging purposes: [246052.792876] LNet: Skipped 9 previous similar messages [246052.798018] Pid: 67830, comm: ll_ost00_054 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [246052.808556] Call Trace: [246052.811129] [] wait_transaction_locked+0x85/0xd0 [jbd2] [246052.818121] [] add_transaction_credits+0x268/0x2f0 [jbd2] [246052.825305] [] start_this_handle+0x1a1/0x430 [jbd2] [246052.831954] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [246052.838703] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [246052.846227] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [246052.853333] [] dqget+0x3fa/0x450 [246052.858337] [] dquot_get_dqblk+0x14/0x1f0 [246052.864125] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [246052.871791] [] lquotactl_slv+0x27d/0x9d0 [lquota] [246052.878287] [] ofd_quotactl+0x13c/0x380 [ofd] [246052.884431] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [246052.891479] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [246052.899294] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [246052.905711] [] kthread+0xd1/0xe0 [246052.910733] [] ret_from_fork_nospec_begin+0xe/0x21 [246052.917297] [] 0xffffffffffffffff [246052.922402] LustreError: dumping log to /tmp/lustre-log.1576147073.67830 [246067.999841] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3089828 to 0x1980000402:3089857 [246079.304980] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000401:1122243 to 0x1a80000401:1122273 [246084.824358] LustreError: 67733:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576146805, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST0054_UUID lock: ffff89070c2b1d40/0x7066c9c190b583dc lrc: 3/0,1 mode: --/PW res: [0x1800000401:0xb3d4c6:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 67733 timeout: 0 lvb_type: 0 [246084.868098] LustreError: 67733:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) Skipped 6 previous similar messages [246173.105904] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3070728 to 0x1800000400:3070753 [246183.850312] Pid: 29356, comm: ll_ost00_101 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [246183.860852] Call Trace: [246183.863430] [] wait_transaction_locked+0x85/0xd0 [jbd2] [246183.870444] [] add_transaction_credits+0x268/0x2f0 [jbd2] [246183.877641] [] start_this_handle+0x1a1/0x430 [jbd2] [246183.884318] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [246183.891076] [] 
__ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [246183.898612] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [246183.905715] [] dqget+0x3fa/0x450 [246183.910742] [] dquot_get_dqblk+0x14/0x1f0 [246183.916517] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [246183.924160] [] lquotactl_slv+0x27d/0x9d0 [lquota] [246183.930635] [] ofd_quotactl+0x13c/0x380 [ofd] [246183.936777] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [246183.943865] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [246183.951693] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [246183.958125] [] kthread+0xd1/0xe0 [246183.963134] [] ret_from_fork_nospec_begin+0xe/0x21 [246183.969720] [] 0xffffffffffffffff [246183.974839] LustreError: dumping log to /tmp/lustre-log.1576147204.29356 [246184.119327] LustreError: 112542:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576146904, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST005e_UUID lock: ffff8906eebcec00/0x7066c9c190b5889e lrc: 3/0,1 mode: --/PW res: [0x1a31f22:0x0:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 112542 timeout: 0 lvb_type: 0 [246184.899052] Lustre: fir-OST0058: deleting orphan objects from 0x1900000400:1124486 to 0x1900000400:1124513 [246192.931229] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:1117544 to 0x1800000402:1117569 [246208.426796] Pid: 112568, comm: ll_ost00_098 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [246208.437400] Call Trace: [246208.439967] [] wait_transaction_locked+0x85/0xd0 [jbd2] [246208.446962] [] add_transaction_credits+0x268/0x2f0 [jbd2] [246208.454154] [] start_this_handle+0x1a1/0x430 [jbd2] [246208.460800] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [246208.467551] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [246208.475075] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [246208.482171] [] dqget+0x3fa/0x450 [246208.487175] [] dquot_get_dqblk+0x14/0x1f0 [246208.492947] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [246208.500589] [] lquotactl_slv+0x27d/0x9d0 [lquota] [246208.507063] [] ofd_quotactl+0x13c/0x380 [ofd] [246208.513204] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [246208.520246] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [246208.528063] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [246208.534477] [] kthread+0xd1/0xe0 [246208.539491] [] ret_from_fork_nospec_begin+0xe/0x21 [246208.546056] [] 0xffffffffffffffff [246208.551167] LustreError: dumping log to /tmp/lustre-log.1576147228.112568 [246228.907208] Pid: 67600, comm: ll_ost03_009 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [246228.917723] Call Trace: [246228.920277] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [246228.927308] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [246228.934609] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [246228.941252] [] ofd_destroy_hdl+0x267/0x970 [ofd] [246228.947655] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [246228.954688] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [246228.962504] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [246228.968918] [] kthread+0xd1/0xe0 [246228.973918] [] ret_from_fork_nospec_begin+0xe/0x21 [246228.980515] [] 0xffffffffffffffff [246228.985621] LustreError: dumping log to /tmp/lustre-log.1576147249.67600 [246228.993077] Pid: 67786, comm: ll_ost03_048 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [246229.003628] Call Trace: 
[246229.006173] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [246229.013212] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [246229.020487] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [246229.027147] [] ofd_destroy_hdl+0x267/0x970 [ofd] [246229.033538] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [246229.040575] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [246229.048408] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [246229.054839] [] kthread+0xd1/0xe0 [246229.059833] [] ret_from_fork_nospec_begin+0xe/0x21 [246229.066424] [] 0xffffffffffffffff [246261.675847] LNet: Service thread pid 112562 was inactive for 1203.66s. Watchdog stack traces are limited to 3 per 300 seconds, skipping this one. [246261.688968] LNet: Skipped 11 previous similar messages [246261.694207] LustreError: dumping log to /tmp/lustre-log.1576147281.112562 [246270.288449] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1125862 to 0x1980000400:1125889 [246292.084197] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000400:11800595 to 0x1a80000400:11800641 [246319.021009] LustreError: dumping log to /tmp/lustre-log.1576147339.29390 [246320.910380] Lustre: fir-OST005e: deleting orphan objects from 0x0:27467558 to 0x0:27467585 [246343.597501] LustreError: dumping log to /tmp/lustre-log.1576147363.29525 [246364.077899] Pid: 29512, comm: ll_ost00_109 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [246364.088416] Call Trace: [246364.090986] [] wait_transaction_locked+0x85/0xd0 [jbd2] [246364.097983] [] add_transaction_credits+0x268/0x2f0 [jbd2] [246364.105185] [] start_this_handle+0x1a1/0x430 [jbd2] [246364.111850] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [246364.118617] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [246364.126144] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [246364.133238] [] dqget+0x3fa/0x450 [246364.138259] [] dquot_get_dqblk+0x14/0x1f0 [246364.144040] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [246364.151663] [] lquotactl_slv+0x27d/0x9d0 [lquota] [246364.158141] [] ofd_quotactl+0x13c/0x380 [ofd] [246364.164282] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [246364.171324] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [246364.179152] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [246364.185571] [] kthread+0xd1/0xe0 [246364.190587] [] ret_from_fork_nospec_begin+0xe/0x21 [246364.197151] [] 0xffffffffffffffff [246364.202263] LustreError: dumping log to /tmp/lustre-log.1576147384.29512 [246392.750468] Pid: 67751, comm: ll_ost02_037 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [246392.760987] Call Trace: [246392.763539] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [246392.769929] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [246392.776985] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [246392.784811] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [246392.791247] [] kthread+0xd1/0xe0 [246392.796251] [] ret_from_fork_nospec_begin+0xe/0x21 [246392.802835] [] 0xffffffffffffffff [246392.807954] LustreError: dumping log to /tmp/lustre-log.1576147413.67751 [246392.815433] Pid: 29402, comm: ll_ost00_105 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [246392.825994] Call Trace: [246392.828546] [] wait_transaction_locked+0x85/0xd0 [jbd2] [246392.835562] [] add_transaction_credits+0x268/0x2f0 [jbd2] [246392.842741] [] start_this_handle+0x1a1/0x430 [jbd2] [246392.849433] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [246392.856165] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [246392.863720] [] ldiskfs_acquire_dquot+0x53/0xb0 
[ldiskfs] [246392.870819] [] dqget+0x3fa/0x450 [246392.875836] [] dquot_get_dqblk+0x14/0x1f0 [246392.881613] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [246392.889244] [] lquotactl_slv+0x27d/0x9d0 [lquota] [246392.895720] [] ofd_quotactl+0x13c/0x380 [ofd] [246392.901880] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [246392.908928] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [246392.916767] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [246392.923186] [] kthread+0xd1/0xe0 [246392.928219] [] ret_from_fork_nospec_begin+0xe/0x21 [246392.934773] [] 0xffffffffffffffff [246422.711081] LustreError: 112551:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576147142, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST0058_UUID lock: ffff891d57f5d100/0x7066c9c190b59785 lrc: 3/0,1 mode: --/PW res: [0x1900000402:0x2f1685:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 112551 timeout: 0 lvb_type: 0 [246423.926791] Lustre: fir-OST0056: Client 8c3ccd99-dc20-24d2-79f4-f8d2c0329cfb (at 10.8.24.19@o2ib6) reconnecting [246423.936960] Lustre: Skipped 970 previous similar messages [246433.812543] Lustre: fir-OST005c: deleting orphan objects from 0x0:27192548 to 0x0:27192577 [246458.920492] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1105575 to 0x1a00000402:1105601 [246474.672103] Pid: 29401, comm: ll_ost00_104 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [246474.682621] Call Trace: [246474.685198] [] wait_transaction_locked+0x85/0xd0 [jbd2] [246474.692193] [] add_transaction_credits+0x268/0x2f0 [jbd2] [246474.699370] [] start_this_handle+0x1a1/0x430 [jbd2] [246474.706015] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [246474.712769] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [246474.720290] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [246474.727387] [] dqget+0x3fa/0x450 [246474.732390] [] dquot_get_dqblk+0x14/0x1f0 [246474.738177] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [246474.745790] [] lquotactl_slv+0x27d/0x9d0 [lquota] [246474.752294] [] ofd_quotactl+0x13c/0x380 [ofd] [246474.758426] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [246474.765468] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [246474.773271] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [246474.779700] [] kthread+0xd1/0xe0 [246474.784696] [] ret_from_fork_nospec_begin+0xe/0x21 [246474.791271] [] 0xffffffffffffffff [246474.796371] LustreError: dumping log to /tmp/lustre-log.1576147495.29401 [246474.900106] Lustre: 27252:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff891136168850 x1648527343518128/t0(0) o10->1d4d1153-82cd-6bbc-4932-1e6a2a506ca0@10.8.30.27@o2ib6:440/0 lens 440/0 e 0 to 0 dl 1576147500 ref 2 fl New:/2/ffffffff rc 0/-1 [246474.928936] Lustre: 27252:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 1785 previous similar messages [246481.169622] Lustre: fir-OST0058: deleting orphan objects from 0x0:27506852 to 0x0:27506881 [246491.056498] Pid: 67667, comm: ll_ost03_022 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [246491.067018] Call Trace: [246491.069568] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [246491.075969] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [246491.083035] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [246491.090841] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [246491.097256] [] 
kthread+0xd1/0xe0 [246491.102256] [] ret_from_fork_nospec_begin+0xe/0x21 [246491.108849] [] 0xffffffffffffffff [246491.113959] LustreError: dumping log to /tmp/lustre-log.1576147511.67667 [246499.248574] LustreError: dumping log to /tmp/lustre-log.1576147519.29501 [246500.060606] Lustre: fir-OST005a: Connection restored to f5f4e1fb-09a1-cbb0-925c-f94d3727005b (at 10.9.101.44@o2ib4) [246500.071124] Lustre: Skipped 918 previous similar messages [246510.007219] Lustre: fir-OST0056: Export ffff88fc8fff5800 already connecting from 10.8.22.17@o2ib6 [246510.016186] Lustre: Skipped 51 previous similar messages [246514.336620] Lustre: fir-OST0054: deleting orphan objects from 0x1800000401:11785450 to 0x1800000401:11785473 [246516.856686] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3049034 to 0x1a00000401:3049057 [246568.881964] LustreError: dumping log to /tmp/lustre-log.1576147589.67836 [246572.978057] LustreError: dumping log to /tmp/lustre-log.1576147593.67693 [246617.946639] Lustre: fir-OST005a: deleting orphan objects from 0x1980000401:11842213 to 0x1980000401:11842241 [246630.918468] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3082149 to 0x1a80000402:3082177 [246668.891693] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3089859 to 0x1980000402:3089889 [246718.044603] Lustre: fir-OST0058: deleting orphan objects from 0x1900000401:11797546 to 0x1900000401:11797569 [246719.892732] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3086025 to 0x1900000402:3086049 [246764.463124] Lustre: fir-OST005a: deleting orphan objects from 0x0:27562243 to 0x0:27562305 [246785.974255] LNet: Service thread pid 67799 was inactive for 1203.85s. The thread might be hung, or it might only be slow and will resume later. 
Dumping the stack trace for debugging purposes: [246785.991385] LNet: Skipped 9 previous similar messages [246785.996533] Pid: 67799, comm: ll_ost02_044 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [246786.007076] Call Trace: [246786.009626] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [246786.016019] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [246786.023080] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [246786.030882] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [246786.037311] [] kthread+0xd1/0xe0 [246786.042330] [] ret_from_fork_nospec_begin+0xe/0x21 [246786.048909] [] 0xffffffffffffffff [246786.054016] LustreError: dumping log to /tmp/lustre-log.1576147806.67799 [246834.975909] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000401:1122275 to 0x1a80000401:1122305 [246870.940342] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1125891 to 0x1980000400:1125921 [246903.620592] LustreError: 112530:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576147623, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST0054_UUID lock: ffff88fef6f3fbc0/0x7066c9c190b5adc7 lrc: 3/0,1 mode: --/PW res: [0x1a2f979:0x0:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 112530 timeout: 0 lvb_type: 0 [246928.984866] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3070765 to 0x1800000400:3070785 [246941.306057] Lustre: fir-OST0058: deleting orphan objects from 0x1900000400:1124515 to 0x1900000400:1124545 [246949.090154] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:1117571 to 0x1800000402:1117601 [246986.682221] Pid: 67733, comm: ll_ost02_033 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [246986.692745] Call Trace: [246986.695302] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [246986.702333] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [246986.709621] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [246986.716267] [] ofd_destroy_hdl+0x267/0x970 [ofd] [246986.722670] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [246986.729713] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [246986.737542] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [246986.743959] [] kthread+0xd1/0xe0 [246986.748961] [] ret_from_fork_nospec_begin+0xe/0x21 [246986.755529] [] 0xffffffffffffffff [246986.760636] LustreError: dumping log to /tmp/lustre-log.1576148007.67733 [247015.354787] Pid: 112512, comm: ll_ost01_079 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [247015.365398] Call Trace: [247015.367960] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [247015.374358] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [247015.381426] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [247015.389261] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [247015.395693] [] kthread+0xd1/0xe0 [247015.400697] [] ret_from_fork_nospec_begin+0xe/0x21 [247015.407271] [] 0xffffffffffffffff [247015.412380] LustreError: dumping log to /tmp/lustre-log.1576148035.112512 [247024.547823] Lustre: fir-OST0058: Client ec8d663e-70c3-0c7c-9511-dfaaba3f32c1 (at 10.9.104.45@o2ib4) reconnecting [247024.558087] Lustre: Skipped 1002 previous similar messages [247049.059147] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000400:11800646 to 0x1a80000400:11800673 [247060.276417] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1105603 to 0x1a00000402:1105633 [247076.236003] Lustre: 
26941:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff88f75c15f850 x1648527343518128/t0(0) o10->1d4d1153-82cd-6bbc-4932-1e6a2a506ca0@10.8.30.27@o2ib6:286/0 lens 440/0 e 0 to 0 dl 1576148101 ref 2 fl New:/2/ffffffff rc 0/-1 [247076.264812] Lustre: 26941:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 1998 previous similar messages [247076.781289] Lustre: fir-OST005e: deleting orphan objects from 0x0:27467558 to 0x0:27467617 [247084.988154] Pid: 112542, comm: ll_ost02_082 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [247084.998762] Call Trace: [247085.001322] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [247085.008364] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [247085.015660] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [247085.022306] [] ofd_destroy_hdl+0x267/0x970 [ofd] [247085.028710] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [247085.035766] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [247085.043582] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [247085.049999] [] kthread+0xd1/0xe0 [247085.054999] [] ret_from_fork_nospec_begin+0xe/0x21 [247085.061566] [] 0xffffffffffffffff [247085.066675] LustreError: dumping log to /tmp/lustre-log.1576148105.112542 [247101.357552] Lustre: fir-OST005a: Connection restored to f5f4e1fb-09a1-cbb0-925c-f94d3727005b (at 10.9.101.44@o2ib4) [247101.368075] Lustre: Skipped 1025 previous similar messages [247112.131401] Lustre: fir-OST0056: Export ffff88fc8fff5800 already connecting from 10.8.22.17@o2ib6 [247112.140368] Lustre: Skipped 49 previous similar messages [247150.525464] Pid: 67919, comm: ll_ost02_063 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [247150.535984] Call Trace: [247150.538537] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [247150.544925] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [247150.551990] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [247150.559798] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [247150.566236] [] kthread+0xd1/0xe0 [247150.571265] [] ret_from_fork_nospec_begin+0xe/0x21 [247150.577849] [] 0xffffffffffffffff [247150.582967] LustreError: dumping log to /tmp/lustre-log.1576148170.67919 [247191.071544] Lustre: fir-OST005c: deleting orphan objects from 0x0:27192579 to 0x0:27192609 [247232.706360] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3082184 to 0x1a80000402:3082209 [247237.376466] Lustre: fir-OST0058: deleting orphan objects from 0x0:27506884 to 0x0:27506913 [247244.735321] Pid: 67847, comm: ll_ost03_055 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [247244.745840] Call Trace: [247244.748401] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [247244.754797] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [247244.761865] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [247244.769672] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [247244.776098] [] kthread+0xd1/0xe0 [247244.781101] [] ret_from_fork_nospec_begin+0xe/0x21 [247244.787677] [] 0xffffffffffffffff [247244.792787] LustreError: dumping log to /tmp/lustre-log.1576148265.67847 [247270.703510] Lustre: fir-OST0054: deleting orphan objects from 0x1800000401:11785478 to 0x1800000401:11785505 [247270.711576] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3089895 to 0x1980000402:3089921 [247272.791635] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3049065 to 0x1a00000401:3049089 [247322.560844] Pid: 67685, comm: ll_ost02_028 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 
[247322.571362] Call Trace: [247322.573938] [] wait_transaction_locked+0x85/0xd0 [jbd2] [247322.580945] [] add_transaction_credits+0x268/0x2f0 [jbd2] [247322.588142] [] start_this_handle+0x1a1/0x430 [jbd2] [247322.594796] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [247322.601544] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [247322.609070] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [247322.616165] [] dqget+0x3fa/0x450 [247322.621169] [] dquot_get_dqblk+0x14/0x1f0 [247322.626940] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [247322.634558] [] lquotactl_slv+0x27d/0x9d0 [lquota] [247322.641033] [] ofd_quotactl+0x13c/0x380 [ofd] [247322.647188] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [247322.654242] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [247322.662065] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [247322.668482] [] kthread+0xd1/0xe0 [247322.673495] [] ret_from_fork_nospec_begin+0xe/0x21 [247322.680060] [] 0xffffffffffffffff [247322.685173] LustreError: dumping log to /tmp/lustre-log.1576148342.67685 [247326.656918] Pid: 112551, comm: ll_ost03_075 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [247326.667521] Call Trace: [247326.670082] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [247326.677121] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [247326.684418] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [247326.691066] [] ofd_destroy_hdl+0x267/0x970 [ofd] [247326.697469] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [247326.704508] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [247326.712326] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [247326.718740] [] kthread+0xd1/0xe0 [247326.723741] [] ret_from_fork_nospec_begin+0xe/0x21 [247326.730311] [] 0xffffffffffffffff [247326.735419] LustreError: dumping log to /tmp/lustre-log.1576148346.112551 [247334.849075] Pid: 67910, comm: ll_ost02_061 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [247334.859594] Call Trace: [247334.862164] [] wait_transaction_locked+0x85/0xd0 [jbd2] [247334.869177] [] add_transaction_credits+0x268/0x2f0 [jbd2] [247334.876368] [] start_this_handle+0x1a1/0x430 [jbd2] [247334.883023] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [247334.889794] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [247334.897324] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [247334.904435] [] dqget+0x3fa/0x450 [247334.909474] [] dquot_get_dqblk+0x14/0x1f0 [247334.915277] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [247334.922900] [] lquotactl_slv+0x27d/0x9d0 [lquota] [247334.929395] [] ofd_quotactl+0x13c/0x380 [ofd] [247334.935531] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [247334.942593] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [247334.950405] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [247334.956849] [] kthread+0xd1/0xe0 [247334.961853] [] ret_from_fork_nospec_begin+0xe/0x21 [247334.968438] [] 0xffffffffffffffff [247334.973552] LustreError: dumping log to /tmp/lustre-log.1576148355.67910 [247359.666862] Lustre: fir-OST0054: deleting orphan objects from 0x0:27457914 to 0x0:27457953 [247367.617723] LNet: Service thread pid 67708 was inactive for 1202.29s. Watchdog stack traces are limited to 3 per 300 seconds, skipping this one. 
[247367.630763] LNet: Skipped 5 previous similar messages [247367.635910] LustreError: dumping log to /tmp/lustre-log.1576148387.67708 [247374.265558] Lustre: fir-OST005a: deleting orphan objects from 0x1980000401:11842243 to 0x1980000401:11842273 [247473.132475] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1125925 to 0x1980000400:1125953 [247474.491521] Lustre: fir-OST0058: deleting orphan objects from 0x1900000401:11797577 to 0x1900000401:11797601 [247475.835612] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3086059 to 0x1900000402:3086081 [247520.054007] Lustre: fir-OST005a: deleting orphan objects from 0x0:27562243 to 0x0:27562337 [247522.773765] LustreError: 112569:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576148243, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST0058_UUID lock: ffff891d57f5a880/0x7066c9c190b5cad1 lrc: 3/0,1 mode: --/PW res: [0x1900000402:0x2f16c8:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 112569 timeout: 0 lvb_type: 0 [247522.817652] LustreError: 112569:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) Skipped 2 previous similar messages [247539.653082] LNet: Service thread pid 113351 was inactive for 1200.50s. The thread might be hung, or it might only be slow and will resume later. Dumping the stack trace for debugging purposes: [247539.670277] LNet: Skipped 8 previous similar messages [247539.675419] Pid: 113351, comm: ll_ost02_089 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [247539.686038] Call Trace: [247539.688590] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [247539.694975] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [247539.702037] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [247539.709855] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [247539.716286] [] kthread+0xd1/0xe0 [247539.721288] [] ret_from_fork_nospec_begin+0xe/0x21 [247539.727863] [] 0xffffffffffffffff [247539.732973] LustreError: dumping log to /tmp/lustre-log.1576148559.113351 [247590.926789] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000401:1122310 to 0x1a80000401:1122337 [247624.714508] Lustre: fir-OST005e: Client 7126efc2-9676-1db9-94d0-ae09c1520697 (at 10.9.101.26@o2ib4) reconnecting [247624.724769] Lustre: Skipped 990 previous similar messages [247661.424196] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1105638 to 0x1a00000402:1105665 [247677.983820] Lustre: 26941:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff8901ad32e850 x1648527343229280/t0(0) o4->1d4d1153-82cd-6bbc-4932-1e6a2a506ca0@10.8.30.27@o2ib6:133/0 lens 488/0 e 0 to 0 dl 1576148703 ref 2 fl New:/2/ffffffff rc 0/-1 [247678.012544] Lustre: 26941:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 1962 previous similar messages [247684.935728] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3070796 to 0x1800000400:3070817 [247697.000874] Lustre: fir-OST0058: deleting orphan objects from 0x1900000400:1124549 to 0x1900000400:1124577 [247701.489301] Lustre: fir-OST0058: Connection restored to 635a05c8-c7a3-e96d-15e7-653531254cf2 (at 10.9.110.38@o2ib4) [247701.499823] Lustre: Skipped 999 previous similar messages [247705.321033] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:1117606 to 0x1800000402:1117633 [247733.385421] Lustre: fir-OST0056: Export 
ffff8902f7fafc00 already connecting from 10.8.22.24@o2ib6 [247733.394387] Lustre: Skipped 46 previous similar messages [247773.129679] Pid: 67766, comm: ll_ost01_048 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [247773.140199] Call Trace: [247773.142751] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [247773.149149] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [247773.156214] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [247773.164014] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [247773.170441] [] kthread+0xd1/0xe0 [247773.175446] [] ret_from_fork_nospec_begin+0xe/0x21 [247773.182020] [] 0xffffffffffffffff [247773.187132] LustreError: dumping log to /tmp/lustre-log.1576148793.67766 [247805.378019] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000400:11800675 to 0x1a80000400:11800705 [247805.898326] Pid: 112530, comm: ll_ost02_079 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [247805.908938] Call Trace: [247805.911496] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [247805.918529] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [247805.925827] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [247805.932474] [] ofd_destroy_hdl+0x267/0x970 [ofd] [247805.938879] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [247805.945924] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [247805.953726] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [247805.960154] [] kthread+0xd1/0xe0 [247805.965156] [] ret_from_fork_nospec_begin+0xe/0x21 [247805.971717] [] 0xffffffffffffffff [247805.976838] LustreError: dumping log to /tmp/lustre-log.1576148826.112530 [247832.820181] Lustre: fir-OST005e: deleting orphan objects from 0x0:27467620 to 0x0:27467649 [247833.886155] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3082211 to 0x1a80000402:3082241 [247871.683423] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3089923 to 0x1980000402:3089953 [247908.300344] Pid: 67645, comm: ll_ost02_019 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [247908.310862] Call Trace: [247908.313422] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [247908.319821] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [247908.326884] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [247908.334685] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [247908.341114] [] kthread+0xd1/0xe0 [247908.346117] [] ret_from_fork_nospec_begin+0xe/0x21 [247908.352677] [] 0xffffffffffffffff [247908.357786] LustreError: dumping log to /tmp/lustre-log.1576148928.67645 [247946.822431] Lustre: fir-OST005c: deleting orphan objects from 0x0:27192611 to 0x0:27192641 [247993.199331] Lustre: fir-OST0058: deleting orphan objects from 0x0:27506916 to 0x0:27506945 [248002.510245] Pid: 67872, comm: ll_ost03_062 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [248002.520766] Call Trace: [248002.523316] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [248002.529716] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [248002.536780] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [248002.544579] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [248002.551010] [] kthread+0xd1/0xe0 [248002.556011] [] ret_from_fork_nospec_begin+0xe/0x21 [248002.562596] [] 0xffffffffffffffff [248002.567706] LustreError: dumping log to /tmp/lustre-log.1576149022.67872 [248014.798469] Pid: 67580, comm: ll_ost03_006 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [248014.808999] Call Trace: [248014.811570] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [248014.818598] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] 
[248014.825933] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [248014.832611] [] ofd_destroy_hdl+0x267/0x970 [ofd] [248014.839015] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [248014.846060] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [248014.853911] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [248014.860329] [] kthread+0xd1/0xe0 [248014.865347] [] ret_from_fork_nospec_begin+0xe/0x21 [248014.871938] [] 0xffffffffffffffff [248014.877057] LustreError: dumping log to /tmp/lustre-log.1576149035.67580 [248025.958371] Lustre: fir-OST0054: deleting orphan objects from 0x1800000401:11785511 to 0x1800000401:11785537 [248029.062530] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3049095 to 0x1a00000401:3049121 [248074.132080] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1125956 to 0x1980000400:1125985 [248115.177715] Lustre: fir-OST0054: deleting orphan objects from 0x0:27457956 to 0x0:27457985 [248130.264457] Lustre: fir-OST005a: deleting orphan objects from 0x1980000401:11842276 to 0x1980000401:11842305 [248224.919314] Lustre: fir-OST005e: Client 7126efc2-9676-1db9-94d0-ae09c1520697 (at 10.9.101.26@o2ib4) reconnecting [248224.929576] Lustre: Skipped 1020 previous similar messages [248227.832679] LustreError: 67772:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576148948, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST005c_UUID lock: ffff89134edb7740/0x7066c9c190b63a22 lrc: 3/0,1 mode: --/PW res: [0x1a00000401:0x2e8666:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 67772 timeout: 0 lvb_type: 0 [248227.876400] LustreError: 67772:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) Skipped 2 previous similar messages [248230.218434] Lustre: fir-OST0058: deleting orphan objects from 0x1900000401:11797607 to 0x1900000401:11797633 [248231.810529] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3086093 to 0x1900000402:3086113 [248262.396088] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1105668 to 0x1a00000402:1105697 [248276.436873] Lustre: fir-OST005a: deleting orphan objects from 0x0:27562339 to 0x0:27562369 [248278.331688] Lustre: 26941:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/-150), not sending early reply req@ffff88ffa5698850 x1648620021418976/t0(0) o4->4cd581b8-382e-0698-9aa0-2f501c687dc8@10.8.25.10@o2ib6:733/0 lens 520/0 e 0 to 0 dl 1576149303 ref 2 fl New:/2/ffffffff rc 0/-1 [248278.360683] Lustre: 26941:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 2027 previous similar messages [248297.428049] LNet: Service thread pid 112559 was inactive for 1201.25s. The thread might be hung, or it might only be slow and will resume later. 
Dumping the stack trace for debugging purposes: [248297.445244] LNet: Skipped 5 previous similar messages [248297.450394] Pid: 112559, comm: ll_ost02_085 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [248297.461016] Call Trace: [248297.463566] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [248297.469960] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [248297.477036] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [248297.484840] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [248297.491270] [] kthread+0xd1/0xe0 [248297.496272] [] ret_from_fork_nospec_begin+0xe/0x21 [248297.502847] [] 0xffffffffffffffff [248297.507965] LustreError: dumping log to /tmp/lustre-log.1576149317.112559 [248302.812188] Lustre: fir-OST005a: Connection restored to f5f4e1fb-09a1-cbb0-925c-f94d3727005b (at 10.9.101.44@o2ib4) [248302.822722] Lustre: Skipped 1035 previous similar messages [248335.509313] Lustre: fir-OST0056: Export ffff8902f7fafc00 already connecting from 10.8.22.24@o2ib6 [248335.518281] Lustre: Skipped 56 previous similar messages [248347.149767] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000401:1122340 to 0x1a80000401:1122369 [248375.253580] Pid: 113355, comm: ll_ost02_093 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [248375.264188] Call Trace: [248375.266749] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [248375.273787] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [248375.281087] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [248375.287733] [] ofd_destroy_hdl+0x267/0x970 [ofd] [248375.294136] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [248375.301167] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [248375.308983] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [248375.315413] [] kthread+0xd1/0xe0 [248375.320415] [] ret_from_fork_nospec_begin+0xe/0x21 [248375.326985] [] 0xffffffffffffffff [248375.332093] LustreError: dumping log to /tmp/lustre-log.1576149395.113355 [248424.406594] Pid: 112569, comm: ll_ost03_079 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [248424.417202] Call Trace: [248424.419763] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [248424.426803] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [248424.434114] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [248424.440773] [] ofd_destroy_hdl+0x267/0x970 [ofd] [248424.447175] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [248424.454232] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [248424.462048] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [248424.468463] [] kthread+0xd1/0xe0 [248424.473480] [] ret_from_fork_nospec_begin+0xe/0x21 [248424.480051] [] 0xffffffffffffffff [248424.485158] LustreError: dumping log to /tmp/lustre-log.1576149444.112569 [248434.962051] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3082250 to 0x1a80000402:3082273 [248441.086639] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3070827 to 0x1800000400:3070849 [248453.489843] Lustre: fir-OST0058: deleting orphan objects from 0x1900000400:1124579 to 0x1900000400:1124609 [248460.880000] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:1117636 to 0x1800000402:1117665 [248469.463444] Pid: 67902, comm: ll_ost02_060 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [248469.473964] Call Trace: [248469.476521] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [248469.483571] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [248469.490868] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [248469.497532] [] ofd_destroy_hdl+0x267/0x970 [ofd] [248469.503935] [] 
tgt_request_handle+0xaea/0x1580 [ptlrpc] [248469.510966] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [248469.518799] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [248469.525230] [] kthread+0xd1/0xe0 [248469.530233] [] ret_from_fork_nospec_begin+0xe/0x21 [248469.536801] [] 0xffffffffffffffff [248469.541911] LustreError: dumping log to /tmp/lustre-log.1576149489.67902 [248472.015269] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3089960 to 0x1980000402:3089985 [248530.904667] Pid: 67605, comm: ll_ost01_010 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [248530.915193] Call Trace: [248530.917771] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [248530.924168] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [248530.931247] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [248530.939047] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [248530.945477] [] kthread+0xd1/0xe0 [248530.950479] [] ret_from_fork_nospec_begin+0xe/0x21 [248530.957054] [] 0xffffffffffffffff [248530.962174] LustreError: dumping log to /tmp/lustre-log.1576149551.67605 [248561.120970] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000400:11800711 to 0x1a80000400:11800737 [248589.691092] Lustre: fir-OST005e: deleting orphan objects from 0x0:27467653 to 0x0:27467681 [248666.075326] Pid: 67914, comm: ll_ost02_062 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [248666.085842] Call Trace: [248666.088392] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [248666.094786] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [248666.101848] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [248666.109649] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [248666.116076] [] kthread+0xd1/0xe0 [248666.121080] [] ret_from_fork_nospec_begin+0xe/0x21 [248666.127664] [] 0xffffffffffffffff [248666.132776] LustreError: dumping log to /tmp/lustre-log.1576149686.67914 [248675.743947] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1125990 to 0x1980000400:1126017 [248703.213365] Lustre: fir-OST005c: deleting orphan objects from 0x0:27192644 to 0x0:27192673 [248743.900878] Pid: 66110, comm: ll_ost03_000 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [248743.911403] Call Trace: [248743.913983] [] wait_transaction_locked+0x85/0xd0 [jbd2] [248743.920977] [] add_transaction_credits+0x268/0x2f0 [jbd2] [248743.928161] [] start_this_handle+0x1a1/0x430 [jbd2] [248743.934810] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [248743.941559] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [248743.949084] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [248743.956186] [] tgt_client_data_update+0x303/0x5e0 [ptlrpc] [248743.963497] [] tgt_client_new+0x41b/0x610 [ptlrpc] [248743.970096] [] ofd_obd_connect+0x3a3/0x4c0 [ofd] [248743.976488] [] target_handle_connect+0xecb/0x2b10 [ptlrpc] [248743.983784] [] tgt_request_handle+0x50a/0x1580 [ptlrpc] [248743.990815] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [248743.998630] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [248744.005044] [] kthread+0xd1/0xe0 [248744.010073] [] ret_from_fork_nospec_begin+0xe/0x21 [248744.016632] [] 0xffffffffffffffff [248744.021745] LustreError: dumping log to /tmp/lustre-log.1576149764.66110 [248750.113278] Lustre: fir-OST0058: deleting orphan objects from 0x0:27506947 to 0x0:27506977 [248760.285186] Pid: 67800, comm: ll_ost03_051 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [248760.295705] Call Trace: [248760.298248] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [248760.304637] [] tgt_request_handle+0xaea/0x1580 
[ptlrpc] [248760.311698] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [248760.319503] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [248760.325930] [] kthread+0xd1/0xe0 [248760.330924] [] ret_from_fork_nospec_begin+0xe/0x21 [248760.337492] [] 0xffffffffffffffff [248760.342584] LustreError: dumping log to /tmp/lustre-log.1576149780.67800 [248782.005337] Lustre: fir-OST0054: deleting orphan objects from 0x1800000401:11785547 to 0x1800000401:11785569 [248784.821470] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3049132 to 0x1a00000401:3049153 [248809.438173] Pid: 67922, comm: ll_ost02_065 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [248809.448693] Call Trace: [248809.451246] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [248809.458286] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [248809.465596] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [248809.472248] [] ofd_destroy_hdl+0x267/0x970 [ofd] [248809.478665] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [248809.485709] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [248809.493523] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [248809.499939] [] kthread+0xd1/0xe0 [248809.504939] [] ret_from_fork_nospec_begin+0xe/0x21 [248809.511521] [] 0xffffffffffffffff [248809.516644] LustreError: dumping log to /tmp/lustre-log.1576149829.67922 [248825.036042] Lustre: fir-OST005c: Client 6fe05dcf-b9e2-99d7-33ce-acbd0a395824 (at 10.9.117.43@o2ib4) reconnecting [248825.046307] Lustre: Skipped 991 previous similar messages [248862.935916] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1105702 to 0x1a00000402:1105729 [248871.200656] Lustre: fir-OST0054: deleting orphan objects from 0x0:27457989 to 0x0:27458017 [248878.857548] Lustre: 27257:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/-150), not sending early reply req@ffff88e71576d850 x1648846800564208/t0(0) o4->9a91b993-1399-1978-f4a8-fbbdfe7e9dbc@10.9.105.36@o2ib4:579/0 lens 488/0 e 0 to 0 dl 1576149904 ref 2 fl New:/2/ffffffff rc 0/-1 [248878.886624] Lustre: 27257:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 1974 previous similar messages [248885.911390] Lustre: fir-OST005a: deleting orphan objects from 0x1980000401:11842313 to 0x1980000401:11842337 [248903.615297] Lustre: fir-OST0058: Connection restored to 60f9d14b-3c44-6ec6-f712-fe240a1f47a0 (at 10.9.104.30@o2ib4) [248903.625821] Lustre: Skipped 1007 previous similar messages [248943.591555] Lustre: fir-OST0056: Export ffff88fc8fff5800 already connecting from 10.8.22.17@o2ib6 [248943.600519] Lustre: Skipped 58 previous similar messages [248986.713378] Lustre: fir-OST0058: deleting orphan objects from 0x1900000401:11797643 to 0x1900000401:11797665 [248987.913454] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3086122 to 0x1900000402:3086145 [249032.355816] Lustre: fir-OST005a: deleting orphan objects from 0x0:27562339 to 0x0:27562401 [249036.597929] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3082282 to 0x1a80000402:3082305 [249055.203014] LNet: Service thread pid 93055 was inactive for 1201.99s. The thread might be hung, or it might only be slow and will resume later. 
Dumping the stack trace for debugging purposes: [249055.220143] LNet: Skipped 8 previous similar messages [249055.225293] Pid: 93055, comm: ll_ost02_067 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [249055.235854] Call Trace: [249055.238404] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [249055.244806] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [249055.251875] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [249055.259682] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [249055.266118] [] kthread+0xd1/0xe0 [249055.271122] [] ret_from_fork_nospec_begin+0xe/0x21 [249055.277693] [] 0xffffffffffffffff [249055.282829] LustreError: dumping log to /tmp/lustre-log.1576150075.93055 [249074.987177] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3089992 to 0x1980000402:3090017 [249102.988655] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000401:1122372 to 0x1a80000401:1122401 [249128.932472] Pid: 67772, comm: ll_ost03_042 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [249128.942990] Call Trace: [249128.945548] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [249128.952580] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [249128.959860] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [249128.966509] [] ofd_destroy_hdl+0x267/0x970 [ofd] [249128.972887] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [249128.979944] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [249128.987758] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [249128.994173] [] kthread+0xd1/0xe0 [249128.999190] [] ret_from_fork_nospec_begin+0xe/0x21 [249129.005747] [] 0xffffffffffffffff [249129.010874] LustreError: dumping log to /tmp/lustre-log.1576150149.67772 [249133.028554] Pid: 67838, comm: ll_ost01_061 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [249133.039079] Call Trace: [249133.041642] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [249133.048038] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [249133.055129] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [249133.062928] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [249133.069357] [] kthread+0xd1/0xe0 [249133.074360] [] ret_from_fork_nospec_begin+0xe/0x21 [249133.080935] [] 0xffffffffffffffff [249133.086053] LustreError: dumping log to /tmp/lustre-log.1576150153.67838 [249196.957637] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3070853 to 0x1800000400:3070881 [249209.766794] Lustre: fir-OST0058: deleting orphan objects from 0x1900000400:1124617 to 0x1900000400:1124641 [249216.814894] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:1117670 to 0x1800000402:1117697 [249276.235748] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1126019 to 0x1980000400:1126049 [249309.160033] Pid: 67809, comm: ll_ost02_046 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [249309.170550] Call Trace: [249309.173103] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [249309.180155] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [249309.187457] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [249309.194124] [] ofd_destroy_hdl+0x267/0x970 [ofd] [249309.200533] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [249309.207588] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [249309.215418] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [249309.221846] [] kthread+0xd1/0xe0 [249309.226849] [] ret_from_fork_nospec_begin+0xe/0x21 [249309.233425] [] 0xffffffffffffffff [249309.238534] LustreError: dumping log to /tmp/lustre-log.1576150329.67809 [249317.823933] Lustre: fir-OST005e: deleting orphan objects 
from 0x1a80000400:11800747 to 0x1a80000400:11800769 [249344.866047] Lustre: fir-OST005e: deleting orphan objects from 0x0:27467684 to 0x0:27467713 [249418.734208] LustreError: 113350:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576150138, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST005e_UUID lock: ffff891b11266e40/0x7066c9c190b6827c lrc: 3/0,1 mode: --/PW res: [0x1a31f84:0x0:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 113350 timeout: 0 lvb_type: 0 [249418.777406] LustreError: 113350:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) Skipped 3 previous similar messages [249423.850298] Pid: 66392, comm: ll_ost02_004 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [249423.860821] Call Trace: [249423.863372] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [249423.869762] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [249423.876823] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [249423.884625] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [249423.891056] [] kthread+0xd1/0xe0 [249423.896057] [] ret_from_fork_nospec_begin+0xe/0x21 [249423.902626] [] 0xffffffffffffffff [249423.907734] LustreError: dumping log to /tmp/lustre-log.1576150444.66392 [249426.031367] Lustre: fir-OST005c: Client 6fe05dcf-b9e2-99d7-33ce-acbd0a395824 (at 10.9.117.43@o2ib4) reconnecting [249426.041627] Lustre: Skipped 1104 previous similar messages [249459.044275] Lustre: fir-OST005c: deleting orphan objects from 0x0:27192644 to 0x0:27192705 [249464.963860] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1105733 to 0x1a00000402:1105761 [249479.839421] Lustre: 26941:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff88fb8d1ee050 x1649407019631840/t0(0) o4->022acf30-b33d-ab48-4fa4-ec70c96ae93e@10.9.114.2@o2ib4:425/0 lens 840/0 e 0 to 0 dl 1576150505 ref 2 fl New:/2/ffffffff rc 0/-1 [249479.868169] Lustre: 26941:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 2153 previous similar messages [249504.860836] Lustre: fir-OST005a: Connection restored to f5f4e1fb-09a1-cbb0-925c-f94d3727005b (at 10.9.101.44@o2ib4) [249504.871363] Lustre: Skipped 1080 previous similar messages [249506.141235] Lustre: fir-OST0058: deleting orphan objects from 0x0:27506982 to 0x0:27507009 [249518.060183] Pid: 67606, comm: ll_ost03_011 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [249518.070700] Call Trace: [249518.073252] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [249518.079651] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [249518.086712] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [249518.094514] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [249518.100945] [] kthread+0xd1/0xe0 [249518.105948] [] ret_from_fork_nospec_begin+0xe/0x21 [249518.112522] [] 0xffffffffffffffff [249518.117649] LustreError: dumping log to /tmp/lustre-log.1576150538.67606 [249539.188283] Lustre: fir-OST0054: deleting orphan objects from 0x1800000401:11785582 to 0x1800000401:11785601 [249540.820447] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3049157 to 0x1a00000401:3049185 [249558.976305] Lustre: fir-OST0056: Export ffff891b8f069c00 already connecting from 10.8.22.14@o2ib6 [249558.985275] Lustre: Skipped 55 previous similar messages [249627.983628] Lustre: fir-OST0054: deleting orphan objects from 0x0:27457989 to 0x0:27458049 [249637.649850] Lustre: 
fir-OST005e: deleting orphan objects from 0x1a80000402:3082310 to 0x1a80000402:3082337 [249642.550332] Lustre: fir-OST005a: deleting orphan objects from 0x1980000401:11842359 to 0x1980000401:11842401 [249676.375086] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3090019 to 0x1980000402:3090049 [249726.960325] LNet: Service thread pid 67839 was inactive for 1203.49s. The thread might be hung, or it might only be slow and will resume later. Dumping the stack trace for debugging purposes: [249726.977429] LNet: Skipped 5 previous similar messages [249726.982578] Pid: 67839, comm: ll_ost03_054 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [249726.993113] Call Trace: [249726.995663] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [249727.002689] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [249727.009977] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [249727.016625] [] ofd_destroy_hdl+0x267/0x970 [ofd] [249727.023028] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [249727.030073] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [249727.037890] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [249727.044308] [] kthread+0xd1/0xe0 [249727.049321] [] ret_from_fork_nospec_begin+0xe/0x21 [249727.055886] [] 0xffffffffffffffff [249727.061008] LustreError: dumping log to /tmp/lustre-log.1576150747.67839 [249727.069834] Pid: 112534, comm: ll_ost03_070 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [249727.080475] Call Trace: [249727.083020] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [249727.090035] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [249727.097333] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [249727.103979] [] ofd_destroy_hdl+0x267/0x970 [ofd] [249727.110380] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [249727.117403] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [249727.125217] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [249727.131635] [] kthread+0xd1/0xe0 [249727.136640] [] ret_from_fork_nospec_begin+0xe/0x21 [249727.143197] [] 0xffffffffffffffff [249741.928311] Lustre: fir-OST0058: deleting orphan objects from 0x1900000401:11797679 to 0x1900000401:11797697 [249743.944442] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3086157 to 0x1900000402:3086177 [249789.250788] Lustre: fir-OST005a: deleting orphan objects from 0x0:27562339 to 0x0:27562433 [249800.689767] Pid: 67850, comm: ll_ost02_054 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [249800.700284] Call Trace: [249800.702860] [] wait_transaction_locked+0x85/0xd0 [jbd2] [249800.709866] [] add_transaction_credits+0x268/0x2f0 [jbd2] [249800.717057] [] start_this_handle+0x1a1/0x430 [jbd2] [249800.723713] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [249800.730469] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [249800.737994] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [249800.745102] [] ofd_trans_start+0x75/0xf0 [ofd] [249800.751325] [] ofd_destroy+0x5d0/0x960 [ofd] [249800.757387] [] ofd_destroy_by_fid+0x1f4/0x4a0 [ofd] [249800.764053] [] ofd_destroy_hdl+0x267/0x970 [ofd] [249800.770459] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [249800.777506] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [249800.785331] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [249800.791747] [] kthread+0xd1/0xe0 [249800.796771] [] ret_from_fork_nospec_begin+0xe/0x21 [249800.803333] [] 0xffffffffffffffff [249800.808444] LustreError: dumping log to /tmp/lustre-log.1576150821.67850 [249812.978007] Pid: 67926, comm: ll_ost02_066 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 
7 15:26:16 PST 2019 [249812.988523] Call Trace: [249812.991076] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [249812.997466] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [249813.004533] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [249813.012339] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [249813.018768] [] kthread+0xd1/0xe0 [249813.023770] [] ret_from_fork_nospec_begin+0xe/0x21 [249813.030345] [] 0xffffffffffffffff [249813.035457] LustreError: dumping log to /tmp/lustre-log.1576150833.67926 [249859.339646] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000401:1122372 to 0x1a80000401:1122433 [249877.023672] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1126052 to 0x1980000400:1126081 [249890.803544] Pid: 67798, comm: ll_ost01_054 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [249890.814064] Call Trace: [249890.816614] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [249890.823015] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [249890.830078] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [249890.837879] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [249890.844309] [] kthread+0xd1/0xe0 [249890.849312] [] ret_from_fork_nospec_begin+0xe/0x21 [249890.855887] [] 0xffffffffffffffff [249890.860995] LustreError: dumping log to /tmp/lustre-log.1576150911.67798 [249952.876534] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3070892 to 0x1800000400:3070913 [249964.941770] Lustre: fir-OST0058: deleting orphan objects from 0x1900000400:1124645 to 0x1900000400:1124673 [249973.325887] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:1117700 to 0x1800000402:1117729 [250026.824362] Lustre: fir-OST005a: Client 8ea9d9cd-8086-16f1-7cea-3b482c5d9f4c (at 10.9.108.20@o2ib4) reconnecting [250026.834629] Lustre: Skipped 1127 previous similar messages [250065.919743] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1105733 to 0x1a00000402:1105793 [250073.806836] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000400:11800781 to 0x1a80000400:11800801 [250080.871297] Lustre: 27252:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/-150), not sending early reply req@ffff8910cd0c1850 x1648858225853520/t0(0) o4->8e4fe161-7440-1bc3-60cf-ef16452a7501@10.9.105.43@o2ib4:271/0 lens 512/0 e 0 to 0 dl 1576151106 ref 2 fl New:/2/ffffffff rc 0/-1 [250080.900372] Lustre: 27252:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 2364 previous similar messages [250100.817003] Lustre: fir-OST005e: deleting orphan objects from 0x0:27467716 to 0x0:27467745 [250105.856716] Lustre: fir-OST005a: Connection restored to f5f4e1fb-09a1-cbb0-925c-f94d3727005b (at 10.9.101.44@o2ib4) [250105.867249] Lustre: Skipped 1149 previous similar messages [250159.733920] Lustre: fir-OST0056: Export ffff891d99820800 already connecting from 10.8.23.26@o2ib6 [250159.742895] Lustre: Skipped 52 previous similar messages [250177.529194] Pid: 67659, comm: ll_ost02_024 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [250177.539718] Call Trace: [250177.542276] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [250177.548677] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [250177.555747] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [250177.563550] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [250177.569977] [] kthread+0xd1/0xe0 [250177.574981] [] ret_from_fork_nospec_begin+0xe/0x21 [250177.581556] [] 0xffffffffffffffff [250177.586668] LustreError: dumping log to /tmp/lustre-log.1576151197.67659 [250215.363202] Lustre: fir-OST005c: deleting orphan 
objects from 0x0:27192708 to 0x0:27192737 [250238.581657] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3082349 to 0x1a80000402:3082369 [250262.364153] Lustre: fir-OST0058: deleting orphan objects from 0x0:27507013 to 0x0:27507105 [250275.835129] Pid: 112555, comm: ll_ost03_077 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [250275.845734] Call Trace: [250275.848288] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [250275.854687] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [250275.861747] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [250275.869552] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [250275.875977] [] kthread+0xd1/0xe0 [250275.880982] [] ret_from_fork_nospec_begin+0xe/0x21 [250275.887559] [] 0xffffffffffffffff [250275.892682] LustreError: dumping log to /tmp/lustre-log.1576151296.112555 [250277.802927] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3090058 to 0x1980000402:3090081 [250295.675238] Lustre: fir-OST0054: deleting orphan objects from 0x1800000401:11785609 to 0x1800000401:11785633 [250296.923336] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3049197 to 0x1a00000401:3049217 [250320.892016] Pid: 113350, comm: ll_ost02_088 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [250320.902620] Call Trace: [250320.905175] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [250320.912218] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [250320.919515] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [250320.926164] [] ofd_destroy_hdl+0x267/0x970 [ofd] [250320.932567] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [250320.939605] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [250320.947421] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [250320.953839] [] kthread+0xd1/0xe0 [250320.958836] [] ret_from_fork_nospec_begin+0xe/0x21 [250320.965413] [] 0xffffffffffffffff [250320.970531] LustreError: dumping log to /tmp/lustre-log.1576151341.113350 [250383.894535] Lustre: fir-OST0054: deleting orphan objects from 0x0:27458051 to 0x0:27458081 [250397.949266] Lustre: fir-OST005a: deleting orphan objects from 0x1980000401:11842407 to 0x1980000401:11842433 [250425.790104] LustreError: 112507:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576151145, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST005e_UUID lock: ffff89135ff21440/0x7066c9c190b6bfae lrc: 3/0,1 mode: --/PW res: [0x1a80000402:0x2f0864:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 112507 timeout: 0 lvb_type: 0 [250425.833993] LustreError: 112507:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) Skipped 4 previous similar messages [250472.447025] LNet: Service thread pid 67864 was inactive for 1201.95s. The thread might be hung, or it might only be slow and will resume later. 
Dumping the stack trace for debugging purposes: [250472.464135] LNet: Skipped 7 previous similar messages [250472.469283] Pid: 67864, comm: ll_ost02_056 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [250472.479822] Call Trace: [250472.482379] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [250472.489423] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [250472.496717] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [250472.503374] [] ofd_destroy_hdl+0x267/0x970 [ofd] [250472.509777] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [250472.516809] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [250472.524622] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [250472.531039] [] kthread+0xd1/0xe0 [250472.536069] [] ret_from_fork_nospec_begin+0xe/0x21 [250472.542637] [] 0xffffffffffffffff [250472.547766] LustreError: dumping log to /tmp/lustre-log.1576151492.67864 [250478.307497] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1126085 to 0x1980000400:1126113 [250492.927428] Pid: 67737, comm: ll_ost01_043 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [250492.937948] Call Trace: [250492.940508] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [250492.946897] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [250492.953961] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [250492.961761] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [250492.968193] [] kthread+0xd1/0xe0 [250492.973194] [] ret_from_fork_nospec_begin+0xe/0x21 [250492.979767] [] 0xffffffffffffffff [250492.984878] LustreError: dumping log to /tmp/lustre-log.1576151513.67737 [250498.495240] Lustre: fir-OST0058: deleting orphan objects from 0x1900000401:11797702 to 0x1900000401:11797729 [250499.855320] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3086185 to 0x1900000402:3086209 [250525.696079] Pid: 67717, comm: ll_ost03_035 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [250525.706597] Call Trace: [250525.709150] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [250525.716189] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [250525.723486] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [250525.730149] [] ofd_destroy_hdl+0x267/0x970 [ofd] [250525.736553] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [250525.743595] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [250525.751410] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [250525.757826] [] kthread+0xd1/0xe0 [250525.762826] [] ret_from_fork_nospec_begin+0xe/0x21 [250525.769403] [] 0xffffffffffffffff [250525.774511] LustreError: dumping log to /tmp/lustre-log.1576151545.67717 [250545.097706] Lustre: fir-OST005a: deleting orphan objects from 0x0:27562438 to 0x0:27562465 [250570.752967] Pid: 112498, comm: ll_ost02_069 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [250570.763571] Call Trace: [250570.766122] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [250570.772525] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [250570.779587] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [250570.787389] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [250570.793816] [] kthread+0xd1/0xe0 [250570.798819] [] ret_from_fork_nospec_begin+0xe/0x21 [250570.805396] [] 0xffffffffffffffff [250570.810529] LustreError: dumping log to /tmp/lustre-log.1576151590.112498 [250591.233377] Pid: 67665, comm: ll_ost02_026 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [250591.243897] Call Trace: [250591.246455] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [250591.253502] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [250591.260784] [] 
ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [250591.267446] [] ofd_destroy_hdl+0x267/0x970 [ofd] [250591.273837] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [250591.280890] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [250591.288692] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [250591.295133] [] kthread+0xd1/0xe0 [250591.300134] [] ret_from_fork_nospec_begin+0xe/0x21 [250591.306706] [] 0xffffffffffffffff [250591.311817] LustreError: dumping log to /tmp/lustre-log.1576151611.67665 [250615.482517] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000401:1122436 to 0x1a80000401:1122465 [250628.269594] Lustre: fir-OST0058: Client ce5ee768-37d0-d480-6e14-e3a25f5ac36c (at 10.9.117.30@o2ib4) reconnecting [250628.279869] Lustre: Skipped 1173 previous similar messages [250667.155551] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1105797 to 0x1a00000402:1105825 [250682.691180] Lustre: 27223:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff8921db043050 x1652298167675392/t0(0) o4->dc9dbc94-e0be-4@10.9.113.14@o2ib4:117/0 lens 488/0 e 1 to 0 dl 1576151707 ref 2 fl New:/0/ffffffff rc 0/-1 [250682.718170] Lustre: 27223:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 2407 previous similar messages [250707.061657] Lustre: fir-OST005a: Connection restored to f5f4e1fb-09a1-cbb0-925c-f94d3727005b (at 10.9.101.44@o2ib4) [250707.072178] Lustre: Skipped 1178 previous similar messages [250709.547455] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3070921 to 0x1800000400:3070945 [250721.236613] Lustre: fir-OST0058: deleting orphan objects from 0x1900000400:1124676 to 0x1900000400:1124705 [250728.860747] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:1117734 to 0x1800000402:1117761 [250769.093211] Lustre: fir-OST0056: Export ffff8902f7fafc00 already connecting from 10.8.22.24@o2ib6 [250769.102174] Lustre: Skipped 61 previous similar messages [250828.806053] Pid: 66109, comm: ll_ost02_002 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [250828.816569] Call Trace: [250828.819127] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [250828.826159] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [250828.833448] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [250828.840109] [] ofd_destroy_hdl+0x267/0x970 [ofd] [250828.846518] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [250828.853557] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [250828.861371] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [250828.867787] [] kthread+0xd1/0xe0 [250828.872789] [] ret_from_fork_nospec_begin+0xe/0x21 [250828.879382] [] 0xffffffffffffffff [250828.884492] LustreError: dumping log to /tmp/lustre-log.1576151849.66109 [250830.789793] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000400:11800805 to 0x1a80000400:11800833 [250839.617532] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3082371 to 0x1a80000402:3082401 [250845.190372] Pid: 67816, comm: ll_ost03_053 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [250845.200886] Call Trace: [250845.203456] [] wait_transaction_locked+0x85/0xd0 [jbd2] [250845.210454] [] add_transaction_credits+0x268/0x2f0 [jbd2] [250845.217637] [] start_this_handle+0x1a1/0x430 [jbd2] [250845.224288] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [250845.231036] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [250845.238561] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [250845.245656] [] dqget+0x3fa/0x450 [250845.250659] [] dquot_get_dqblk+0x14/0x1f0 
[250845.256446] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [250845.264060] [] lquotactl_slv+0x27d/0x9d0 [lquota] [250845.270545] [] ofd_quotactl+0x13c/0x380 [ofd] [250845.276677] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [250845.283738] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [250845.291543] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [250845.297983] [] kthread+0xd1/0xe0 [250845.302982] [] ret_from_fork_nospec_begin+0xe/0x21 [250845.309556] [] 0xffffffffffffffff [250845.314659] LustreError: dumping log to /tmp/lustre-log.1576151865.67816 [250856.903873] Lustre: fir-OST005e: deleting orphan objects from 0x0:27467748 to 0x0:27467777 [250878.318764] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3090085 to 0x1980000402:3090113 [250898.439433] Pid: 67624, comm: ll_ost01_016 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [250898.449956] Call Trace: [250898.452524] [] wait_transaction_locked+0x85/0xd0 [jbd2] [250898.459523] [] add_transaction_credits+0x268/0x2f0 [jbd2] [250898.466708] [] start_this_handle+0x1a1/0x430 [jbd2] [250898.473371] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [250898.480122] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [250898.487646] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [250898.494741] [] dqget+0x3fa/0x450 [250898.499745] [] dquot_get_dqblk+0x14/0x1f0 [250898.505519] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [250898.513143] [] lquotactl_slv+0x27d/0x9d0 [lquota] [250898.519618] [] ofd_quotactl+0x13c/0x380 [ofd] [250898.525760] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [250898.532809] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [250898.540640] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [250898.547059] [] kthread+0xd1/0xe0 [250898.552072] [] ret_from_fork_nospec_begin+0xe/0x21 [250898.558637] [] 0xffffffffffffffff [250898.563751] LustreError: dumping log to /tmp/lustre-log.1576151918.67624 [250935.304148] Pid: 67616, comm: ll_ost02_013 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [250935.314666] Call Trace: [250935.317219] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [250935.323620] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [250935.330680] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [250935.338481] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [250935.344911] [] kthread+0xd1/0xe0 [250935.349915] [] ret_from_fork_nospec_begin+0xe/0x21 [250935.356490] [] 0xffffffffffffffff [250935.361599] LustreError: dumping log to /tmp/lustre-log.1576151955.67616 [250971.263225] Lustre: fir-OST005c: deleting orphan objects from 0x0:27192708 to 0x0:27192769 [251018.571081] Lustre: fir-OST0058: deleting orphan objects from 0x0:27507110 to 0x0:27507137 [251029.513992] Pid: 67610, comm: ll_ost03_012 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [251029.524510] Call Trace: [251029.527061] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [251029.533478] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [251029.540541] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [251029.548358] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [251029.554789] [] kthread+0xd1/0xe0 [251029.559810] [] ret_from_fork_nospec_begin+0xe/0x21 [251029.566383] [] 0xffffffffffffffff [251029.571497] LustreError: dumping log to /tmp/lustre-log.1576152049.67610 [251050.898116] Lustre: fir-OST0054: deleting orphan objects from 0x1800000401:11785637 to 0x1800000401:11785665 [251052.994241] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3049223 to 0x1a00000401:3049249 [251079.279393] Lustre: fir-OST005a: 
deleting orphan objects from 0x1980000400:1126116 to 0x1980000400:1126145 [251095.051283] LNet: Service thread pid 67656 was inactive for 1202.85s. Watchdog stack traces are limited to 3 per 300 seconds, skipping this one. [251095.064318] LustreError: dumping log to /tmp/lustre-log.1576152115.67656 [251139.909431] Lustre: fir-OST0054: deleting orphan objects from 0x0:27458083 to 0x0:27458113 [251153.836208] Lustre: fir-OST005a: deleting orphan objects from 0x1980000401:11842439 to 0x1980000401:11842465 [251228.308143] Lustre: fir-OST005c: Client ff8445d1-f99d-03b2-7c66-3abfa27fa6d1 (at 10.8.27.23@o2ib6) reconnecting [251228.318323] Lustre: Skipped 1166 previous similar messages [251254.758148] Lustre: fir-OST0058: deleting orphan objects from 0x1900000401:11797736 to 0x1900000401:11797761 [251255.910215] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3086214 to 0x1900000402:3086241 [251267.943423] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1105828 to 0x1a00000402:1105857 [251283.815029] Lustre: 27257:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff88e3f7ce9850 x1649407019631840/t0(0) o4->022acf30-b33d-ab48-4fa4-ec70c96ae93e@10.9.114.2@o2ib4:718/0 lens 840/0 e 0 to 0 dl 1576152308 ref 2 fl New:/2/ffffffff rc 0/-1 [251283.843752] Lustre: 27257:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 2557 previous similar messages [251301.248618] Lustre: fir-OST005a: deleting orphan objects from 0x0:27562438 to 0x0:27562497 [251308.051385] Lustre: fir-OST005a: Connection restored to f5f4e1fb-09a1-cbb0-925c-f94d3727005b (at 10.9.101.44@o2ib4) [251308.061923] Lustre: Skipped 1190 previous similar messages [251323.407810] LNet: Service thread pid 67774 was inactive for 200.21s. The thread might be hung, or it might only be slow and will resume later. 
Dumping the stack trace for debugging purposes: [251323.424831] LNet: Skipped 9 previous similar messages [251323.429982] Pid: 67774, comm: ll_ost03_043 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [251323.440516] Call Trace: [251323.443067] [] ldlm_completion_ast+0x430/0x860 [ptlrpc] [251323.450107] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [251323.457404] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [251323.464052] [] ofd_destroy_hdl+0x267/0x970 [ofd] [251323.470457] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [251323.477496] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [251323.485319] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [251323.491734] [] kthread+0xd1/0xe0 [251323.496737] [] ret_from_fork_nospec_begin+0xe/0x21 [251323.503327] [] 0xffffffffffffffff [251323.508447] LustreError: dumping log to /tmp/lustre-log.1576152343.67774 [251324.431822] Pid: 30840, comm: ll_ost02_098 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [251324.442336] Call Trace: [251324.444890] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [251324.451289] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [251324.458349] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [251324.466153] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [251324.472580] [] kthread+0xd1/0xe0 [251324.477586] [] ret_from_fork_nospec_begin+0xe/0x21 [251324.484174] [] 0xffffffffffffffff [251324.489280] LustreError: dumping log to /tmp/lustre-log.1576152344.30840 [251328.527913] Pid: 112507, comm: ll_ost03_066 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [251328.538522] Call Trace: [251328.541071] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [251328.548106] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [251328.555402] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [251328.562051] [] ofd_destroy_hdl+0x267/0x970 [ofd] [251328.568453] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [251328.575484] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [251328.583314] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [251328.589733] [] kthread+0xd1/0xe0 [251328.594734] [] ret_from_fork_nospec_begin+0xe/0x21 [251328.601303] [] 0xffffffffffffffff [251328.606402] LustreError: dumping log to /tmp/lustre-log.1576152348.112507 [251371.217025] Lustre: fir-OST0056: Export ffff8902f7fafc00 already connecting from 10.8.22.24@o2ib6 [251371.225989] Lustre: Skipped 66 previous similar messages [251371.545464] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000401:1122470 to 0x1a80000401:1122497 [251423.194794] LustreError: 67774:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576152143, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST0054_UUID lock: ffff891ba507bf00/0x7066c9c190b6f817 lrc: 3/0,1 mode: --/PW res: [0x1800000400:0x2edbc5:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 67774 timeout: 0 lvb_type: 0 [251423.238515] LustreError: 67774:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) Skipped 4 previous similar messages [251440.533389] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3082408 to 0x1a80000402:3082433 [251464.954358] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3070952 to 0x1800000400:3070977 [251477.443561] Lustre: fir-OST0058: deleting orphan objects from 0x1900000400:1124710 to 0x1900000400:1124737 [251479.402596] Lustre: fir-OST005a: deleting orphan objects from 
0x1980000402:3090117 to 0x1980000402:3090145 [251485.083690] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:1117765 to 0x1800000402:1117793 [251500.563303] Pid: 30854, comm: ll_ost02_099 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [251500.573824] Call Trace: [251500.576375] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [251500.583408] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [251500.590706] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [251500.597370] [] ofd_destroy_hdl+0x267/0x970 [ofd] [251500.603773] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [251500.610812] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [251500.618627] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [251500.625044] [] kthread+0xd1/0xe0 [251500.630043] [] ret_from_fork_nospec_begin+0xe/0x21 [251500.636614] [] 0xffffffffffffffff [251500.641722] LustreError: dumping log to /tmp/lustre-log.1576152520.30854 [251586.140636] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000400:11800839 to 0x1a80000400:11800865 [251612.846821] Lustre: fir-OST005e: deleting orphan objects from 0x0:27467748 to 0x0:27467809 [251680.690394] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1126150 to 0x1980000400:1126177 [251693.079093] Pid: 112510, comm: ll_ost02_073 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [251693.089692] Call Trace: [251693.092245] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [251693.098648] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [251693.105721] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [251693.113527] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [251693.119954] [] kthread+0xd1/0xe0 [251693.124958] [] ret_from_fork_nospec_begin+0xe/0x21 [251693.131533] [] 0xffffffffffffffff [251693.136643] LustreError: dumping log to /tmp/lustre-log.1576152713.112510 [251697.175176] Pid: 67643, comm: ll_ost01_021 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [251697.185697] Call Trace: [251697.188250] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [251697.194649] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [251697.201725] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [251697.209530] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [251697.215957] [] kthread+0xd1/0xe0 [251697.220960] [] ret_from_fork_nospec_begin+0xe/0x21 [251697.227537] [] 0xffffffffffffffff [251697.232646] LustreError: dumping log to /tmp/lustre-log.1576152717.67643 [251727.713043] Lustre: fir-OST005c: deleting orphan objects from 0x0:27192771 to 0x0:27192801 [251729.943809] Pid: 67801, comm: ll_ost01_055 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [251729.954330] Call Trace: [251729.956906] [] wait_transaction_locked+0x85/0xd0 [jbd2] [251729.963903] [] add_transaction_credits+0x268/0x2f0 [jbd2] [251729.971106] [] start_this_handle+0x1a1/0x430 [jbd2] [251729.977754] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [251729.984518] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [251729.992045] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [251729.999155] [] dqget+0x3fa/0x450 [251730.004161] [] dquot_get_dqblk+0x14/0x1f0 [251730.009934] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [251730.017559] [] lquotactl_slv+0x27d/0x9d0 [lquota] [251730.024035] [] ofd_quotactl+0x13c/0x380 [ofd] [251730.030179] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [251730.037242] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [251730.045057] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [251730.051475] [] kthread+0xd1/0xe0 [251730.056488] [] 
ret_from_fork_nospec_begin+0xe/0x21 [251730.063054] [] 0xffffffffffffffff [251730.068166] LustreError: dumping log to /tmp/lustre-log.1576152750.67801 [251742.232048] Pid: 31047, comm: ll_ost02_101 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [251742.242567] Call Trace: [251742.245122] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [251742.252163] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [251742.259472] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [251742.266125] [] ofd_destroy_hdl+0x267/0x970 [ofd] [251742.272525] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [251742.279565] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [251742.287382] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [251742.293798] [] kthread+0xd1/0xe0 [251742.298797] [] ret_from_fork_nospec_begin+0xe/0x21 [251742.305367] [] 0xffffffffffffffff [251742.310474] LustreError: dumping log to /tmp/lustre-log.1576152762.31047 [251742.317918] Pid: 66107, comm: ll_ost02_000 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [251742.328448] Call Trace: [251742.330992] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [251742.338016] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [251742.345304] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [251742.351951] [] ofd_destroy_hdl+0x267/0x970 [ofd] [251742.358352] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [251742.365386] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [251742.373199] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [251742.379618] [] kthread+0xd1/0xe0 [251742.384621] [] ret_from_fork_nospec_begin+0xe/0x21 [251742.391201] [] 0xffffffffffffffff [251742.396310] LNet: Service thread pid 67844 was inactive for 1204.37s. Watchdog stack traces are limited to 3 per 300 seconds, skipping this one. [251773.929968] Lustre: fir-OST0058: deleting orphan objects from 0x0:27507140 to 0x0:27507169 [251787.288941] LustreError: dumping log to /tmp/lustre-log.1576152807.67924 [251808.049072] Lustre: fir-OST0054: deleting orphan objects from 0x1800000401:11785679 to 0x1800000401:11785697 [251809.249153] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3049259 to 0x1a00000401:3049281 [251828.393886] Lustre: fir-OST0054: Client 016cfe19-2250-799b-d8ad-887e11d25409 (at 10.8.30.32@o2ib6) reconnecting [251828.404063] Lustre: Skipped 1157 previous similar messages [251869.835282] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1105859 to 0x1a00000402:1105889 [251884.440856] Lustre: 29439:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/-150), not sending early reply req@ffff88e6a228c050 x1649533346266960/t0(0) o101->3d41db68-318d-91b5-b35b-0dc2e1801091@10.9.114.11@o2ib4:564/0 lens 328/0 e 0 to 0 dl 1576152909 ref 2 fl New:/0/ffffffff rc 0/-1 [251884.470099] Lustre: 29439:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 2434 previous similar messages [251896.924393] Lustre: fir-OST0054: deleting orphan objects from 0x0:27458083 to 0x0:27458145 [251908.876888] Lustre: fir-OST0054: Connection restored to 9a91b993-1399-1978-f4a8-fbbdfe7e9dbc (at 10.9.105.36@o2ib4) [251908.887413] Lustre: Skipped 1157 previous similar messages [251910.003072] Lustre: fir-OST005a: deleting orphan objects from 0x1980000401:11842474 to 0x1980000401:11842497 [251978.275300] Lustre: fir-OST0056: Export ffff88fc8fff5800 already connecting from 10.8.22.17@o2ib6 [251978.284262] Lustre: Skipped 70 previous similar messages [252010.469034] Lustre: fir-OST0058: deleting orphan objects from 0x1900000401:11797774 to 0x1900000401:11797793 
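
The dumps above keep cycling through the same few wait states: object destroys parked in ldlm_completion_ast, precreates parked in ofd_create_hdl, and quota lookups stuck behind jbd2 transaction credits. A rough way to confirm that from the full console log is to tally the top frame of each watchdog trace; the sketch below is illustrative only, and assumes the log was saved with one record per line (as the console emits it) under a hypothetical name like oss-console.log:

import re
from collections import Counter

# Watchdog trace header, e.g.:
#   [251323.429982] Pid: 67774, comm: ll_ost03_043 3.10.0-957.27.2...
PID_RE = re.compile(r'\[\s*\d+\.\d+\] Pid: (\d+), comm: (\S+)')
# Stack frame with the address elided, e.g.:
#   [251323.443067] [] ldlm_completion_ast+0x430/0x860 [ptlrpc]
FRAME_RE = re.compile(r'\[\] (\w+)\+0x')

def tally_watchdog_traces(path='oss-console.log'):
    """Count hung ll_ost* threads by the top frame of each dumped trace."""
    tops = Counter()
    in_trace = False
    with open(path) as log:
        for line in log:
            if PID_RE.search(line):
                in_trace = True            # next frame line is the top of stack
                continue
            frame = FRAME_RE.search(line)
            if frame and in_trace:
                tops[frame.group(1)] += 1  # e.g. wait_transaction_locked
                in_trace = False
    for func, count in tops.most_common():
        print(f'{count:4d} traces blocked in {func}')

Run against this section, the top frames split between ldlm_completion_ast, wait_transaction_locked, and the bare ofd_create_hdl dumps, which is consistent with the journal being the common choke point.
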
[252011.925153] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3086245 to 0x1900000402:3086273 [252040.906257] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3082437 to 0x1a80000402:3082465 [252057.386556] Lustre: fir-OST005a: deleting orphan objects from 0x0:27562499 to 0x0:27562529 [252080.022519] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3090150 to 0x1980000402:3090177 [252082.206789] LNet: Service thread pid 31053 was inactive for 1200.89s. The thread might be hung, or it might only be slow and will resume later. Dumping the stack trace for debugging purposes: [252082.223897] LNet: Skipped 8 previous similar messages [252082.229046] Pid: 31053, comm: ll_ost02_102 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [252082.239581] Call Trace: [252082.242130] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [252082.248525] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [252082.255599] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [252082.263396] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [252082.269824] [] kthread+0xd1/0xe0 [252082.274829] [] ret_from_fork_nospec_begin+0xe/0x21 [252082.281403] [] 0xffffffffffffffff [252082.286513] LustreError: dumping log to /tmp/lustre-log.1576153102.31053 [252094.495042] Pid: 67920, comm: ll_ost02_064 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [252094.505558] Call Trace: [252094.508127] [] wait_transaction_locked+0x85/0xd0 [jbd2] [252094.515123] [] add_transaction_credits+0x268/0x2f0 [jbd2] [252094.522342] [] start_this_handle+0x1a1/0x430 [jbd2] [252094.529010] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [252094.535770] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [252094.543294] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [252094.550403] [] dqget+0x3fa/0x450 [252094.555408] [] dquot_get_dqblk+0x14/0x1f0 [252094.561189] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [252094.568808] [] lquotactl_slv+0x27d/0x9d0 [lquota] [252094.575282] [] ofd_quotactl+0x13c/0x380 [ofd] [252094.581436] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [252094.588480] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [252094.596296] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [252094.602713] [] kthread+0xd1/0xe0 [252094.607729] [] ret_from_fork_nospec_begin+0xe/0x21 [252094.614292] [] 0xffffffffffffffff [252094.619408] LustreError: dumping log to /tmp/lustre-log.1576153114.67920 [252106.783291] Pid: 31124, comm: ll_ost02_105 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [252106.793809] Call Trace: [252106.796384] [] wait_transaction_locked+0x85/0xd0 [jbd2] [252106.803389] [] add_transaction_credits+0x268/0x2f0 [jbd2] [252106.810589] [] start_this_handle+0x1a1/0x430 [jbd2] [252106.817240] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [252106.823989] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [252106.831513] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [252106.838610] [] ofd_trans_start+0x75/0xf0 [ofd] [252106.844826] [] ofd_destroy+0x5d0/0x960 [ofd] [252106.850880] [] ofd_destroy_by_fid+0x1f4/0x4a0 [ofd] [252106.857532] [] ofd_destroy_hdl+0x267/0x970 [ofd] [252106.863934] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [252106.870998] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [252106.878815] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [252106.885231] [] kthread+0xd1/0xe0 [252106.890233] [] ret_from_fork_nospec_begin+0xe/0x21 [252106.896800] [] 0xffffffffffffffff [252106.901900] LustreError: dumping log to /tmp/lustre-log.1576153127.31124 [252127.880420] 
Lustre: fir-OST005e: deleting orphan objects from 0x1a80000401:1122501 to 0x1a80000401:1122529 [252199.053140] LustreError: 31187:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576152919, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST0058_UUID lock: ffff890945339200/0x7066c9c190b76cd9 lrc: 3/0,1 mode: --/PW res: [0x1a3b9c2:0x0:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 31187 timeout: 0 lvb_type: 0 [252220.921297] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3070984 to 0x1800000400:3071009 [252233.618531] Lustre: fir-OST0058: deleting orphan objects from 0x1900000400:1124739 to 0x1900000400:1124769 [252240.858691] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:1117798 to 0x1800000402:1117825 [252281.863168] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1126179 to 0x1980000400:1126209 [252311.587397] Pid: 31102, comm: ll_ost02_103 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [252311.597913] Call Trace: [252311.600483] [] wait_transaction_locked+0x85/0xd0 [jbd2] [252311.607480] [] add_transaction_credits+0x268/0x2f0 [jbd2] [252311.614676] [] start_this_handle+0x1a1/0x430 [jbd2] [252311.621312] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [252311.628051] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [252311.635576] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [252311.642671] [] tgt_client_data_update+0x303/0x5e0 [ptlrpc] [252311.649982] [] tgt_client_new+0x41b/0x610 [ptlrpc] [252311.656582] [] ofd_obd_connect+0x3a3/0x4c0 [ofd] [252311.662973] [] target_handle_connect+0xecb/0x2b10 [ptlrpc] [252311.670260] [] tgt_request_handle+0x50a/0x1580 [ptlrpc] [252311.677283] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [252311.685109] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [252311.691521] [] kthread+0xd1/0xe0 [252311.696536] [] ret_from_fork_nospec_begin+0xe/0x21 [252311.703101] [] 0xffffffffffffffff [252311.708213] LustreError: dumping log to /tmp/lustre-log.1576153331.31102 [252332.067802] Pid: 113356, comm: ll_ost02_094 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [252332.078409] Call Trace: [252332.080982] [] wait_transaction_locked+0x85/0xd0 [jbd2] [252332.087976] [] add_transaction_credits+0x268/0x2f0 [jbd2] [252332.095161] [] start_this_handle+0x1a1/0x430 [jbd2] [252332.101821] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [252332.108574] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [252332.116098] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [252332.123194] [] tgt_client_data_update+0x303/0x5e0 [ptlrpc] [252332.130503] [] tgt_client_new+0x41b/0x610 [ptlrpc] [252332.137104] [] ofd_obd_connect+0x3a3/0x4c0 [ofd] [252332.143496] [] target_handle_connect+0xecb/0x2b10 [ptlrpc] [252332.150766] [] tgt_request_handle+0x50a/0x1580 [ptlrpc] [252332.157801] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [252332.165603] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [252332.172047] [] kthread+0xd1/0xe0 [252332.177052] [] ret_from_fork_nospec_begin+0xe/0x21 [252332.183629] [] 0xffffffffffffffff [252332.188729] LustreError: dumping log to /tmp/lustre-log.1576153352.113356 [252342.235601] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000400:11800880 to 0x1a80000400:11800897 [252428.973807] Lustre: fir-OST0054: Client 016cfe19-2250-799b-d8ad-887e11d25409 (at 10.8.30.32@o2ib6) reconnecting [252428.983982] Lustre: 
Skipped 1246 previous similar messages [252450.854153] Pid: 31229, comm: ll_ost02_109 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [252450.864670] Call Trace: [252450.867221] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [252450.873621] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [252450.880697] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [252450.888504] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [252450.894928] [] kthread+0xd1/0xe0 [252450.899934] [] ret_from_fork_nospec_begin+0xe/0x21 [252450.906509] [] 0xffffffffffffffff [252450.911622] LustreError: dumping log to /tmp/lustre-log.1576153471.31229 [252454.950234] Pid: 112523, comm: ll_ost01_083 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [252454.960837] Call Trace: [252454.963391] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [252454.969781] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [252454.976860] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [252454.984661] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [252454.991088] [] kthread+0xd1/0xe0 [252454.996109] [] ret_from_fork_nospec_begin+0xe/0x21 [252455.002685] [] 0xffffffffffffffff [252455.007795] LustreError: dumping log to /tmp/lustre-log.1576153475.112523 [252470.263219] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1105893 to 0x1a00000402:1105921 [252485.198845] Lustre: 27257:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/-150), not sending early reply req@ffff88f17fb1c050 x1649533345480736/t0(0) o3->3d41db68-318d-91b5-b35b-0dc2e1801091@10.9.114.11@o2ib4:410/0 lens 488/0 e 0 to 0 dl 1576153510 ref 2 fl New:/2/ffffffff rc 0/-1 [252485.227918] Lustre: 27257:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 2588 previous similar messages [252509.350433] Lustre: fir-OST005a: Connection restored to cc43915b-6aa0-7796-18f9-1827e6f9b899 (at 10.8.18.12@o2ib6) [252509.360884] Lustre: Skipped 1225 previous similar messages [252545.064043] Pid: 67806, comm: ll_ost03_052 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [252545.074566] Call Trace: [252545.077125] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [252545.083525] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [252545.090590] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [252545.098388] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [252545.104817] [] kthread+0xd1/0xe0 [252545.109820] [] ret_from_fork_nospec_begin+0xe/0x21 [252545.116396] [] 0xffffffffffffffff [252545.121504] LustreError: dumping log to /tmp/lustre-log.1576153565.67806 [252549.160129] Pid: 66106, comm: ll_ost01_002 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [252549.170646] Call Trace: [252549.173224] [] wait_transaction_locked+0x85/0xd0 [jbd2] [252549.180237] [] add_transaction_credits+0x268/0x2f0 [jbd2] [252549.187436] [] start_this_handle+0x1a1/0x430 [jbd2] [252549.194088] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [252549.200852] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [252549.208379] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [252549.215489] [] dqget+0x3fa/0x450 [252549.220496] [] dquot_get_dqblk+0x14/0x1f0 [252549.226277] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [252549.233901] [] lquotactl_slv+0x27d/0x9d0 [lquota] [252549.240385] [] ofd_quotactl+0x13c/0x380 [ofd] [252549.246528] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [252549.253594] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [252549.261408] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [252549.267840] [] kthread+0xd1/0xe0 [252549.272860] [] 
ret_from_fork_nospec_begin+0xe/0x21 [252549.279437] [] 0xffffffffffffffff [252549.284550] LustreError: dumping log to /tmp/lustre-log.1576153569.66106 [252564.992177] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3049288 to 0x1a00000401:3049313 [252583.815269] Lustre: fir-OST0056: Export ffff88e42919e800 already connecting from 10.8.22.22@o2ib6 [252583.824232] Lustre: Skipped 52 previous similar messages [252642.493198] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3082476 to 0x1a80000402:3082497 [252681.298446] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3090185 to 0x1980000402:3090209 [252768.052197] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3086284 to 0x1900000402:3086305 [252839.981857] LNet: Service thread pid 31123 was inactive for 1201.63s. The thread might be hung, or it might only be slow and will resume later. Dumping the stack trace for debugging purposes: [252839.998986] LNet: Skipped 8 previous similar messages [252840.004146] Pid: 31123, comm: ll_ost02_104 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [252840.014682] Call Trace: [252840.017237] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [252840.023650] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [252840.030729] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [252840.038551] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [252840.045016] [] kthread+0xd1/0xe0 [252840.050017] [] ret_from_fork_nospec_begin+0xe/0x21 [252840.056592] [] 0xffffffffffffffff [252840.061701] LustreError: dumping log to /tmp/lustre-log.1576153860.31123 [252856.366183] Pid: 67923, comm: ll_ost01_074 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [252856.376705] Call Trace: [252856.379283] [] wait_transaction_locked+0x85/0xd0 [jbd2] [252856.386286] [] add_transaction_credits+0x268/0x2f0 [jbd2] [252856.393472] [] start_this_handle+0x1a1/0x430 [jbd2] [252856.400120] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [252856.406869] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [252856.414394] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [252856.421489] [] dqget+0x3fa/0x450 [252856.426493] [] dquot_get_dqblk+0x14/0x1f0 [252856.432287] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [252856.439917] [] lquotactl_slv+0x27d/0x9d0 [lquota] [252856.446428] [] ofd_quotactl+0x13c/0x380 [ofd] [252856.452556] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [252856.459615] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [252856.467418] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [252856.473847] [] kthread+0xd1/0xe0 [252856.478849] [] ret_from_fork_nospec_begin+0xe/0x21 [252856.485425] [] 0xffffffffffffffff [252856.490527] LustreError: dumping log to /tmp/lustre-log.1576153876.67923 [252883.283076] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1126212 to 0x1980000400:1126241 [252884.007411] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000401:1122531 to 0x1a80000401:1122561 [252976.984385] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3071021 to 0x1800000400:3071041 [252989.049543] Lustre: fir-OST0058: deleting orphan objects from 0x1900000400:1124773 to 0x1900000400:1124801 [252996.873667] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:1117830 to 0x1800000402:1117857 [253029.118883] Lustre: fir-OST0054: Client c681c8c8-a3bd-4f09-2cf4-358a58ae71d2 (at 10.9.117.22@o2ib4) reconnecting [253029.129148] Lustre: Skipped 1290 previous similar messages [253057.074156] Pid: 67690, comm: ll_ost01_036 
3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [253057.084673] Call Trace: [253057.087233] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [253057.093633] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [253057.100706] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [253057.108524] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [253057.114951] [] kthread+0xd1/0xe0 [253057.119957] [] ret_from_fork_nospec_begin+0xe/0x21 [253057.126538] [] 0xffffffffffffffff [253057.131650] LustreError: dumping log to /tmp/lustre-log.1576154077.67690 [253071.051174] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1105925 to 0x1a00000402:1105953 [253085.282734] Lustre: 27223:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/-150), not sending early reply req@ffff891cbfca1850 x1649559496365360/t0(0) o4->fe16bc49-4bbe-dc30-a069-fee92bf3e984@10.9.104.23@o2ib4:255/0 lens 488/0 e 0 to 0 dl 1576154110 ref 2 fl New:/2/ffffffff rc 0/-1 [253085.311801] Lustre: 27223:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 2776 previous similar messages [253102.131056] Pid: 31187, comm: ll_ost02_106 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [253102.141578] Call Trace: [253102.144133] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [253102.151173] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [253102.158467] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [253102.165118] [] ofd_destroy_hdl+0x267/0x970 [ofd] [253102.171519] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [253102.178560] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [253102.186375] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [253102.192790] [] kthread+0xd1/0xe0 [253102.197790] [] ret_from_fork_nospec_begin+0xe/0x21 [253102.204368] [] 0xffffffffffffffff [253102.209484] LustreError: dumping log to /tmp/lustre-log.1576154122.31187 [253110.571598] Lustre: fir-OST005a: Connection restored to cc43915b-6aa0-7796-18f9-1827e6f9b899 (at 10.8.18.12@o2ib6) [253110.582033] Lustre: Skipped 1317 previous similar messages [253147.187951] Pid: 112553, comm: ll_ost03_076 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [253147.198557] Call Trace: [253147.201119] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [253147.207516] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [253147.214581] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [253147.222381] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [253147.228810] [] kthread+0xd1/0xe0 [253147.233812] [] ret_from_fork_nospec_begin+0xe/0x21 [253147.240389] [] 0xffffffffffffffff [253147.245497] LustreError: dumping log to /tmp/lustre-log.1576154167.112553 [253207.103912] Lustre: fir-OST0056: Export ffff88f2fa374c00 already connecting from 10.9.113.13@o2ib4 [253207.112958] Lustre: Skipped 52 previous similar messages [253208.629166] Pid: 30965, comm: ll_ost02_100 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [253208.639692] Call Trace: [253208.642250] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [253208.648649] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [253208.655709] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [253208.663511] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [253208.669940] [] kthread+0xd1/0xe0 [253208.674943] [] ret_from_fork_nospec_begin+0xe/0x21 [253208.681518] [] 0xffffffffffffffff [253208.686629] LustreError: dumping log to /tmp/lustre-log.1576154228.30965 [253229.109573] Pid: 31223, comm: ll_ost02_108 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [253229.120088] Call Trace: [253229.122641] [] 
ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [253229.129680] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [253229.136972] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [253229.143625] [] ofd_destroy_hdl+0x267/0x970 [ofd] [253229.150027] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [253229.157070] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [253229.164909] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [253229.171327] [] kthread+0xd1/0xe0 [253229.176343] [] ret_from_fork_nospec_begin+0xe/0x21 [253229.182923] [] 0xffffffffffffffff [253229.188044] LustreError: dumping log to /tmp/lustre-log.1576154249.31223 [253243.121158] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3082505 to 0x1a80000402:3082529 [253282.670429] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3090212 to 0x1980000402:3090241 [253320.967211] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3049319 to 0x1a00000401:3049345 [253327.415533] Pid: 67886, comm: ll_ost03_063 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [253327.426055] Call Trace: [253327.428617] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [253327.435655] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [253327.442951] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [253327.449625] [] ofd_destroy_hdl+0x267/0x970 [ofd] [253327.456029] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [253327.463068] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [253327.470885] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [253327.477299] [] kthread+0xd1/0xe0 [253327.482301] [] ret_from_fork_nospec_begin+0xe/0x21 [253327.488870] [] 0xffffffffffffffff [253327.493979] LustreError: dumping log to /tmp/lustre-log.1576154347.67886 [253331.511606] Pid: 67851, comm: ll_ost03_057 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [253331.522125] Call Trace: [253331.524677] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [253331.531721] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [253331.539026] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [253331.545682] [] ofd_destroy_hdl+0x267/0x970 [ofd] [253331.552085] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [253331.559123] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [253331.566938] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [253331.573353] [] kthread+0xd1/0xe0 [253331.578357] [] ret_from_fork_nospec_begin+0xe/0x21 [253331.584924] [] 0xffffffffffffffff [253331.590032] LustreError: dumping log to /tmp/lustre-log.1576154351.67851 [253484.480326] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1126243 to 0x1980000400:1126273 [253495.354881] LNet: Service thread pid 112556 was inactive for 1200.75s. The thread might be hung, or it might only be slow and will resume later. 
Dumping the stack trace for debugging purposes: [253495.372075] LNet: Skipped 8 previous similar messages [253495.377224] Pid: 112556, comm: ll_ost01_091 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [253495.387850] Call Trace: [253495.390425] [] wait_transaction_locked+0x85/0xd0 [jbd2] [253495.397420] [] add_transaction_credits+0x268/0x2f0 [jbd2] [253495.404606] [] start_this_handle+0x1a1/0x430 [jbd2] [253495.411253] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [253495.418002] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [253495.425527] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [253495.432632] [] tgt_client_data_update+0x303/0x5e0 [ptlrpc] [253495.439941] [] tgt_client_new+0x41b/0x610 [ptlrpc] [253495.446551] [] ofd_obd_connect+0x3a3/0x4c0 [ofd] [253495.452941] [] target_handle_connect+0xecb/0x2b10 [ptlrpc] [253495.460237] [] tgt_request_handle+0x50a/0x1580 [ptlrpc] [253495.467284] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [253495.475101] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [253495.481517] [] kthread+0xd1/0xe0 [253495.486532] [] ret_from_fork_nospec_begin+0xe/0x21 [253495.493094] [] 0xffffffffffffffff [253495.498208] LustreError: dumping log to /tmp/lustre-log.1576154515.112556 [253524.163207] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3086309 to 0x1900000402:3086337 [253538.363714] Pid: 67761, comm: ll_ost01_047 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [253538.374236] Call Trace: [253538.376815] [] wait_transaction_locked+0x85/0xd0 [jbd2] [253538.383812] [] add_transaction_credits+0x268/0x2f0 [jbd2] [253538.391011] [] start_this_handle+0x1a1/0x430 [jbd2] [253538.397661] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [253538.404409] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [253538.411935] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [253538.419040] [] dqget+0x3fa/0x450 [253538.424041] [] dquot_get_dqblk+0x14/0x1f0 [253538.429823] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [253538.437449] [] lquotactl_slv+0x27d/0x9d0 [lquota] [253538.443934] [] ofd_quotactl+0x13c/0x380 [ofd] [253538.450075] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [253538.457137] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [253538.464955] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [253538.471371] [] kthread+0xd1/0xe0 [253538.476386] [] ret_from_fork_nospec_begin+0xe/0x21 [253538.482952] [] 0xffffffffffffffff [253538.488063] LustreError: dumping log to /tmp/lustre-log.1576154558.67761 [253569.084314] Pid: 31222, comm: ll_ost02_107 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [253569.094833] Call Trace: [253569.097395] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [253569.104432] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [253569.111728] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [253569.118377] [] ofd_destroy_hdl+0x267/0x970 [ofd] [253569.124780] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [253569.131820] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [253569.139634] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [253569.146050] [] kthread+0xd1/0xe0 [253569.151052] [] ret_from_fork_nospec_begin+0xe/0x21 [253569.157619] [] 0xffffffffffffffff [253569.162727] LustreError: dumping log to /tmp/lustre-log.1576154589.31222 [253626.429452] Pid: 31584, comm: ll_ost03_081 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [253626.439970] Call Trace: [253626.442521] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [253626.449552] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] 
[253626.456859] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [253626.463507] [] ofd_destroy_hdl+0x267/0x970 [ofd] [253626.469908] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [253626.476950] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [253626.484763] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [253626.491180] [] kthread+0xd1/0xe0 [253626.496180] [] ret_from_fork_nospec_begin+0xe/0x21 [253626.502749] [] 0xffffffffffffffff [253626.507858] LustreError: dumping log to /tmp/lustre-log.1576154646.31584 [253629.377852] Lustre: fir-OST0054: Client a31c4d05-c2c1-d128-e70d-4b9b8b78ea7d (at 10.8.8.22@o2ib6) reconnecting [253629.387943] Lustre: Skipped 1351 previous similar messages [253640.534429] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000401:1122531 to 0x1a80000401:1122593 [253672.655064] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1105956 to 0x1a00000402:1105985 [253685.854640] Lustre: 27257:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff88f0e8215050 x1649533154028048/t0(0) o4->63828333-ab85-9660-a339-05c4e4362ad0@10.9.102.13@o2ib4:100/0 lens 840/0 e 0 to 0 dl 1576154710 ref 2 fl New:/2/ffffffff rc 0/-1 [253685.883448] Lustre: 27257:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 2924 previous similar messages [253710.854945] Lustre: fir-OST0054: Connection restored to (at 10.9.107.70@o2ib4) [253710.862364] Lustre: Skipped 1354 previous similar messages [253733.239358] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3071051 to 0x1800000400:3071073 [253745.224524] Lustre: fir-OST0058: deleting orphan objects from 0x1900000400:1124805 to 0x1900000400:1124833 [253749.311879] Pid: 67490, comm: ll_ost03_005 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [253749.322398] Call Trace: [253749.324951] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [253749.331350] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [253749.338414] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [253749.346230] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [253749.352659] [] kthread+0xd1/0xe0 [253749.357663] [] ret_from_fork_nospec_begin+0xe/0x21 [253749.364238] [] 0xffffffffffffffff [253749.369348] LustreError: dumping log to /tmp/lustre-log.1576154769.67490 [253753.440679] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:1117859 to 0x1800000402:1117889 [253794.368771] LNet: Service thread pid 66252 was inactive for 1201.94s. Watchdog stack traces are limited to 3 per 300 seconds, skipping this one. 
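
The bracketed timestamps are seconds since boot, while each dump target (/tmp/lustre-log.<epoch>.<pid>) carries Unix time, so any record that shows both pins the boot epoch and lets the rest of the log be placed on the wall clock. A minimal sketch using values copied from the records above (it treats the offset as constant, ignoring any clock adjustment since boot):

import re
from datetime import datetime, timezone

DUMP_RE = re.compile(r'\[\s*(\d+\.\d+)\] LustreError: dumping log to '
                     r'/tmp/lustre-log\.(\d+)\.\d+')

def boot_epoch(record):
    """Recover the boot time from one 'dumping log to' record."""
    m = DUMP_RE.search(record)
    if m is None:
        raise ValueError('not a dumping-log record')
    uptime, epoch = float(m.group(1)), int(m.group(2))
    return epoch - uptime

# Record copied from this log:
sample = ('[253749.369348] LustreError: dumping log to '
          '/tmp/lustre-log.1576154769.67490')
boot = boot_epoch(sample)

# Any other bracketed time can now be converted, e.g. the watchdog
# report at [253794.368771] directly above:
when = datetime.fromtimestamp(boot + 253794.368771, tz=timezone.utc)
print(when.isoformat())  # wall-clock time of that report, in UTC

That conversion lands at Unix time ~1576154814, which matches the lustre-log.1576154814.66252 dump named in the very next record, a useful cross-check that the recovered boot epoch is right.
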
[253794.381803] LNet: Skipped 1 previous similar message [253794.386866] LustreError: dumping log to /tmp/lustre-log.1576154814.66252 [253813.151776] Lustre: fir-OST0056: Export ffff88e42919e800 already connecting from 10.8.22.22@o2ib6 [253813.160739] Lustre: Skipped 45 previous similar messages [253814.849177] Pid: 67792, comm: ll_ost01_052 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [253814.859697] Call Trace: [253814.862247] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [253814.868640] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [253814.875700] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [253814.883502] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [253814.889930] [] kthread+0xd1/0xe0 [253814.894935] [] ret_from_fork_nospec_begin+0xe/0x21 [253814.901514] [] 0xffffffffffffffff [253814.906620] LustreError: dumping log to /tmp/lustre-log.1576154835.67792 [253823.169346] LustreError: 112570:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576154543, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST005e_UUID lock: ffff8921e7397bc0/0x7066c9c190b7be78 lrc: 3/0,1 mode: --/PW res: [0x1a80000402:0x2f08e2:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 112570 timeout: 0 lvb_type: 0 [253823.213239] LustreError: 112570:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) Skipped 5 previous similar messages [253844.461075] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3082533 to 0x1a80000402:3082561 [253883.274330] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3090249 to 0x1980000402:3090273 [254076.886196] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3049352 to 0x1a00000401:3049377 [254085.994919] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1126275 to 0x1980000400:1126305 [254229.549668] Lustre: fir-OST005a: Client 84451726-da5e-16d9-ee63-43bfe8a9f835 (at 10.8.27.27@o2ib6) reconnecting [254229.559856] Lustre: Skipped 1342 previous similar messages [254273.346973] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1105988 to 0x1a00000402:1106017 [254279.954169] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3086345 to 0x1900000402:3086369 [254287.146557] Lustre: 31346:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/-746), not sending early reply req@ffff8905584ca050 x1651830967511744/t0(0) o101->12e2c9b6-7b56-574c-526a-a98d62b67a85@10.9.103.31@o2ib4:702/0 lens 328/0 e 0 to 0 dl 1576155312 ref 2 fl New:/0/ffffffff rc 0/-1 [254287.175802] Lustre: 31346:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 2938 previous similar messages [254310.911146] Lustre: fir-OST0056: Connection restored to 35ba350a-bccc-3fd9-39f0-a94eca80785d (at 10.9.107.33@o2ib4) [254310.921678] Lustre: Skipped 1367 previous similar messages [254396.757393] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000401:1122598 to 0x1a80000401:1122625 [254426.144873] Lustre: fir-OST0056: Export ffff891b8f069c00 already connecting from 10.8.22.14@o2ib6 [254426.153831] Lustre: Skipped 46 previous similar messages [254445.425164] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3082576 to 0x1a80000402:3082593 [254484.534247] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3090289 to 0x1980000402:3090305 [254489.254320] Lustre: fir-OST0054: deleting orphan objects from 
0x1800000400:3071085 to 0x1800000400:3071105 [254501.327465] Lustre: fir-OST0058: deleting orphan objects from 0x1900000400:1124836 to 0x1900000400:1124865 [254507.086903] LNet: Service thread pid 31585 was inactive for 1203.21s. The thread might be hung, or it might only be slow and will resume later. Dumping the stack trace for debugging purposes: [254507.104023] LNet: Skipped 5 previous similar messages [254507.109198] Pid: 31585, comm: ll_ost03_082 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [254507.119729] Call Trace: [254507.122308] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [254507.128704] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [254507.135776] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [254507.143597] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [254507.150026] [] kthread+0xd1/0xe0 [254507.155040] [] ret_from_fork_nospec_begin+0xe/0x21 [254507.161604] [] 0xffffffffffffffff [254507.166742] LustreError: dumping log to /tmp/lustre-log.1576155527.31585 [254508.943599] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:1117891 to 0x1800000402:1117921 [254568.528133] Pid: 67650, comm: ll_ost01_024 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [254568.538655] Call Trace: [254568.541207] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [254568.547607] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [254568.554675] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [254568.562481] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [254568.568923] [] kthread+0xd1/0xe0 [254568.573930] [] ret_from_fork_nospec_begin+0xe/0x21 [254568.580505] [] 0xffffffffffffffff [254568.585623] LustreError: dumping log to /tmp/lustre-log.1576155588.67650 [254625.807252] LustreError: 32063:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576155345, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST005a_UUID lock: ffff8921e7392d00/0x7066c9c190b7e266 lrc: 3/0,1 mode: --/PW res: [0x1980000402:0x2f2766:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 32063 timeout: 0 lvb_type: 0 [254688.070865] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1126307 to 0x1980000400:1126337 [254724.179194] Pid: 112570, comm: ll_ost03_080 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [254724.189795] Call Trace: [254724.192349] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [254724.199390] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [254724.206703] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [254724.213351] [] ofd_destroy_hdl+0x267/0x970 [ofd] [254724.219754] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [254724.226801] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [254724.234601] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [254724.241030] [] kthread+0xd1/0xe0 [254724.246034] [] ret_from_fork_nospec_begin+0xe/0x21 [254724.252594] [] 0xffffffffffffffff [254724.257701] LustreError: dumping log to /tmp/lustre-log.1576155744.112570 [254823.335168] LustreError: 31675:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576155543, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST005a_UUID lock: ffff8910f9018000/0x7066c9c190b7ecbc lrc: 3/0,1 mode: --/PW res: [0x1980000402:0x2f276e:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 
pid: 31675 timeout: 0 lvb_type: 0 [254829.642746] Lustre: fir-OST0058: Client be4565a9-8448-ebff-ec7a-065a9a83593c (at 10.8.18.19@o2ib6) reconnecting [254829.652928] Lustre: Skipped 1327 previous similar messages [254832.909252] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3049392 to 0x1a00000401:3049409 [254874.310856] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1106023 to 0x1a00000402:1106049 [254887.438428] Lustre: 27257:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/-150), not sending early reply req@ffff88ebc1f82050 x1649561036324336/t0(0) o4->c1504d4c-7504-c251-de3c-6f26c7b8e7d5@10.9.102.26@o2ib4:547/0 lens 488/0 e 0 to 0 dl 1576155912 ref 2 fl New:/2/ffffffff rc 0/-1 [254887.467507] Lustre: 27257:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 2840 previous similar messages [254911.448127] Lustre: fir-OST005a: Connection restored to 97102c2b-e0e2-553a-c933-88dc912145da (at 10.9.115.11@o2ib4) [254911.458650] Lustre: Skipped 1325 previous similar messages [255028.269015] Lustre: fir-OST0056: Export ffff891b8f069c00 already connecting from 10.8.22.14@o2ib6 [255028.277986] Lustre: Skipped 57 previous similar messages [255036.513165] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3086374 to 0x1900000402:3086401 [255046.397080] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3082601 to 0x1a80000402:3082625 [255086.578210] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3090311 to 0x1980000402:3090337 [255109.210803] LNet: Service thread pid 31710 was inactive for 1203.31s. The thread might be hung, or it might only be slow and will resume later. Dumping the stack trace for debugging purposes: [255109.227932] LNet: Skipped 2 previous similar messages [255109.233077] Pid: 31710, comm: ll_ost03_084 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [255109.243625] Call Trace: [255109.246177] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [255109.252579] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [255109.259674] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [255109.267495] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [255109.273924] [] kthread+0xd1/0xe0 [255109.278927] [] ret_from_fork_nospec_begin+0xe/0x21 [255109.285503] [] 0xffffffffffffffff [255109.290613] LustreError: dumping log to /tmp/lustre-log.1576156129.31710 [255152.980336] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000401:1122628 to 0x1a80000401:1122657 [255244.925272] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3071109 to 0x1800000400:3071137 [255256.982391] Lustre: fir-OST0058: deleting orphan objects from 0x1900000400:1124870 to 0x1900000400:1124897 [255265.014509] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:1117928 to 0x1800000402:1117953 [255289.031011] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1126345 to 0x1980000400:1126369 [255326.303078] Pid: 67646, comm: ll_ost01_023 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [255326.313599] Call Trace: [255326.316150] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [255326.322552] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [255326.329616] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [255326.337432] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [255326.343862] [] kthread+0xd1/0xe0 [255326.348864] [] ret_from_fork_nospec_begin+0xe/0x21 [255326.355439] [] 0xffffffffffffffff [255326.360549] LustreError: dumping log to /tmp/lustre-log.1576156346.67646 [255388.256292] 
Pid: 32390, comm: ll_ost03_092 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [255388.266814] Call Trace: [255388.269384] [] wait_transaction_locked+0x85/0xd0 [jbd2] [255388.276380] [] add_transaction_credits+0x268/0x2f0 [jbd2] [255388.283581] [] start_this_handle+0x1a1/0x430 [jbd2] [255388.290230] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [255388.296979] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [255388.304505] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [255388.311600] [] dqget+0x3fa/0x450 [255388.316603] [] dquot_get_dqblk+0x14/0x1f0 [255388.322383] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [255388.330002] [] lquotactl_slv+0x27d/0x9d0 [lquota] [255388.336486] [] ofd_quotactl+0x13c/0x380 [ofd] [255388.342626] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [255388.349678] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [255388.357490] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [255388.363909] [] kthread+0xd1/0xe0 [255388.368923] [] ret_from_fork_nospec_begin+0xe/0x21 [255388.375487] [] 0xffffffffffffffff [255388.380601] LustreError: dumping log to /tmp/lustre-log.1576156408.32390 [255429.859079] Lustre: fir-OST005c: Client 9ff24344-feb6-8c0e-cb07-a92244c00aa4 (at 10.9.101.21@o2ib4) reconnecting [255429.869362] Lustre: Skipped 1453 previous similar messages [255475.154679] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1106051 to 0x1a00000402:1106081 [255487.942260] Lustre: 29548:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/-150), not sending early reply req@ffff88f1bfb8f050 x1652161380591696/t0(0) o19->ae1d0080-04fa-5436-e145-ffdf0db9990d@10.0.10.3@o2ib7:393/0 lens 336/0 e 0 to 0 dl 1576156513 ref 2 fl New:/0/ffffffff rc 0/-1 [255487.971246] Lustre: 29548:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 3088 previous similar messages [255511.680306] Lustre: fir-OST0058: Connection restored to c2d505e5-30ab-23ac-7017-17a83f00b35d (at 10.9.102.49@o2ib4) [255511.690832] Lustre: Skipped 1442 previous similar messages [255527.011028] Pid: 32063, comm: ll_ost03_087 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [255527.021549] Call Trace: [255527.024102] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [255527.031141] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [255527.038449] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [255527.045112] [] ofd_destroy_hdl+0x267/0x970 [ofd] [255527.051514] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [255527.058548] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [255527.066361] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [255527.072779] [] kthread+0xd1/0xe0 [255527.077779] [] ret_from_fork_nospec_begin+0xe/0x21 [255527.084348] [] 0xffffffffffffffff [255527.089457] LustreError: dumping log to /tmp/lustre-log.1576156547.32063 [255588.964029] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3049415 to 0x1a00000401:3049441 [255636.261890] Lustre: fir-OST0056: Export ffff8902f7fafc00 already connecting from 10.8.22.24@o2ib6 [255636.270853] Lustre: Skipped 44 previous similar messages [255647.000876] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3082631 to 0x1a80000402:3082657 [255688.286025] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3090343 to 0x1980000402:3090369 [255690.854256] Pid: 67747, comm: ll_ost01_046 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [255690.864772] Call Trace: [255690.867348] [] wait_transaction_locked+0x85/0xd0 [jbd2] [255690.874346] [] 
add_transaction_credits+0x268/0x2f0 [jbd2] [255690.881531] [] start_this_handle+0x1a1/0x430 [jbd2] [255690.888180] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [255690.894929] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [255690.902469] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [255690.909574] [] dqget+0x3fa/0x450 [255690.914578] [] dquot_get_dqblk+0x14/0x1f0 [255690.920351] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [255690.927977] [] lquotactl_slv+0x27d/0x9d0 [lquota] [255690.934450] [] ofd_quotactl+0x13c/0x380 [ofd] [255690.940594] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [255690.947643] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [255690.955457] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [255690.961873] [] kthread+0xd1/0xe0 [255690.966901] [] ret_from_fork_nospec_begin+0xe/0x21 [255690.973462] [] 0xffffffffffffffff [255690.978573] LustreError: dumping log to /tmp/lustre-log.1576156711.67747 [255723.622908] LNet: Service thread pid 31675 was inactive for 1200.26s. The thread might be hung, or it might only be slow and will resume later. Dumping the stack trace for debugging purposes: [255723.640018] LNet: Skipped 4 previous similar messages [255723.645166] Pid: 31675, comm: ll_ost03_083 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [255723.655704] Call Trace: [255723.658262] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [255723.665296] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [255723.672592] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [255723.679241] [] ofd_destroy_hdl+0x267/0x970 [ofd] [255723.685644] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [255723.692683] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [255723.700498] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [255723.706914] [] kthread+0xd1/0xe0 [255723.711930] [] ret_from_fork_nospec_begin+0xe/0x21 [255723.718493] [] 0xffffffffffffffff [255723.723617] LustreError: dumping log to /tmp/lustre-log.1576156743.31675 [255723.879912] LustreError: 31953:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576156443, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST0054_UUID lock: ffff8910f901d580/0x7066c9c190b83b91 lrc: 3/0,1 mode: --/PW res: [0x1800000400:0x2edc82:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 31953 timeout: 0 lvb_type: 0 [255723.879914] LustreError: 32246:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) ### lock timed out (enqueued at 1576156443, 300s ago); not entering recovery in server code, just going back to sleep ns: filter-fir-OST005e_UUID lock: ffff89045a9b7500/0x7066c9c190b83b9f lrc: 3/0,1 mode: --/PW res: [0x1a80000402:0x2f0966:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->18446744073709551615) flags: 0x40010080000000 nid: local remote: 0x0 expref: -99 pid: 32246 timeout: 0 lvb_type: 0 [255723.879917] LustreError: 32246:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) Skipped 1 previous similar message [255723.978244] LustreError: 31953:0:(ldlm_request.c:129:ldlm_expired_completion_wait()) Skipped 1 previous similar message [255731.815065] Pid: 31785, comm: ll_ost03_085 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [255731.825581] Call Trace: [255731.828143] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [255731.835173] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [255731.842463] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [255731.849109] [] 
ofd_destroy_hdl+0x267/0x970 [ofd] [255731.855513] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [255731.862550] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [255731.870366] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [255731.876783] [] kthread+0xd1/0xe0 [255731.881783] [] ret_from_fork_nospec_begin+0xe/0x21 [255731.888352] [] 0xffffffffffffffff [255731.893458] LustreError: dumping log to /tmp/lustre-log.1576156751.31785 [255792.880018] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3086406 to 0x1900000402:3086433 [255866.985733] Pid: 67734, comm: ll_ost03_038 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [255866.996266] Call Trace: [255866.998817] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [255867.005251] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [255867.012353] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [255867.020177] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [255867.026621] [] kthread+0xd1/0xe0 [255867.031625] [] ret_from_fork_nospec_begin+0xe/0x21 [255867.038211] [] 0xffffffffffffffff [255867.043335] LustreError: dumping log to /tmp/lustre-log.1576156887.67734 [255890.398680] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1126371 to 0x1980000400:1126401 [255909.043280] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000401:1122663 to 0x1a80000401:1122689 [256000.956173] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3071146 to 0x1800000400:3071169 [256012.981329] Lustre: fir-OST0058: deleting orphan objects from 0x1900000400:1124901 to 0x1900000400:1124929 [256021.109512] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:1117956 to 0x1800000402:1117985 [256030.725939] Lustre: fir-OST005c: Client 9ff24344-feb6-8c0e-cb07-a92244c00aa4 (at 10.9.101.21@o2ib4) reconnecting [256030.736199] Lustre: Skipped 1446 previous similar messages [256076.118574] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1106051 to 0x1a00000402:1106113 [256084.078018] Pid: 112548, comm: ll_ost01_090 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [256084.088619] Call Trace: [256084.091174] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [256084.097574] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [256084.104637] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [256084.112436] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [256084.118865] [] kthread+0xd1/0xe0 [256084.123867] [] ret_from_fork_nospec_begin+0xe/0x21 [256084.130445] [] 0xffffffffffffffff [256084.135570] LustreError: dumping log to /tmp/lustre-log.1576157104.112548 [256088.408112] Lustre: 27223:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/-127), not sending early reply req@ffff891c225b5050 x1649050252742160/t0(0) o4->935b75df-613a-c7ad-95b7-8cbfb8326a67@10.9.101.28@o2ib4:238/0 lens 8632/0 e 0 to 0 dl 1576157113 ref 2 fl New:/2/ffffffff rc 0/-1 [256088.437336] Lustre: 27223:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 3071 previous similar messages [256111.956162] Lustre: fir-OST0058: Connection restored to c2d505e5-30ab-23ac-7017-17a83f00b35d (at 10.9.102.49@o2ib4) [256111.966686] Lustre: Skipped 1444 previous similar messages [256238.385631] Lustre: fir-OST0056: Export ffff8902f7fafc00 already connecting from 10.8.22.24@o2ib6 [256238.394595] Lustre: Skipped 44 previous similar messages [256248.364716] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3082661 to 0x1a80000402:3082689 [256289.801883] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3090383 to 
0x1980000402:3090401 [256327.794829] LNet: Service thread pid 112564 was inactive for 200.17s. The thread might be hung, or it might only be slow and will resume later. Dumping the stack trace for debugging purposes: [256327.811934] LNet: Skipped 3 previous similar messages [256327.817083] Pid: 112564, comm: ll_ost01_094 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [256327.827705] Call Trace: [256327.830272] [] wait_transaction_locked+0x85/0xd0 [jbd2] [256327.837264] [] add_transaction_credits+0x268/0x2f0 [jbd2] [256327.844446] [] start_this_handle+0x1a1/0x430 [jbd2] [256327.851096] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [256327.857846] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [256327.865387] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [256327.872484] [] dqget+0x3fa/0x450 [256327.877488] [] dquot_get_dqblk+0x14/0x1f0 [256327.883259] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [256327.890886] [] lquotactl_slv+0x27d/0x9d0 [lquota] [256327.897361] [] ofd_quotactl+0x13c/0x380 [ofd] [256327.903503] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [256327.910550] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [256327.918366] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [256327.924785] [] kthread+0xd1/0xe0 [256327.929784] [] ret_from_fork_nospec_begin+0xe/0x21 [256327.936370] [] 0xffffffffffffffff [256327.941469] LustreError: dumping log to /tmp/lustre-log.1576157348.112564 [256338.547041] Pid: 67706, comm: ll_ost01_038 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [256338.557555] Call Trace: [256338.560116] [] wait_transaction_locked+0x85/0xd0 [jbd2] [256338.567111] [] add_transaction_credits+0x268/0x2f0 [jbd2] [256338.574295] [] start_this_handle+0x1a1/0x430 [jbd2] [256338.580947] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [256338.587695] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [256338.595236] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [256338.602333] [] dqget+0x3fa/0x450 [256338.607336] [] dquot_get_dqblk+0x14/0x1f0 [256338.613123] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [256338.620734] [] lquotactl_slv+0x27d/0x9d0 [lquota] [256338.627224] [] ofd_quotactl+0x13c/0x380 [ofd] [256338.633353] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [256338.640407] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [256338.648209] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [256338.654638] [] kthread+0xd1/0xe0 [256338.659657] [] ret_from_fork_nospec_begin+0xe/0x21 [256338.666241] [] 0xffffffffffffffff [256338.671334] LustreError: dumping log to /tmp/lustre-log.1576157358.67706 [256345.187961] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3049453 to 0x1a00000401:3049473 [256491.006782] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1126408 to 0x1980000400:1126433 [256548.063021] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3086449 to 0x1900000402:3086465 [256602.880081] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3071175 to 0x1800000400:3071201 [256620.664623] Pid: 67635, comm: ll_ost03_017 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [256620.675138] Call Trace: [256620.677698] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [256620.684096] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [256620.691164] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [256620.698969] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [256620.705399] [] kthread+0xd1/0xe0 [256620.710402] [] ret_from_fork_nospec_begin+0xe/0x21 [256620.716978] [] 0xffffffffffffffff 
[256620.722102] LustreError: dumping log to /tmp/lustre-log.1576157640.67635 [256624.760708] Pid: 32190, comm: ll_ost03_088 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [256624.771247] Call Trace: [256624.773805] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [256624.780846] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [256624.788156] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [256624.794807] [] ofd_destroy_hdl+0x267/0x970 [ofd] [256624.801210] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [256624.808249] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [256624.816063] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [256624.822481] [] kthread+0xd1/0xe0 [256624.827482] [] ret_from_fork_nospec_begin+0xe/0x21 [256624.834059] [] 0xffffffffffffffff [256624.839166] LustreError: dumping log to /tmp/lustre-log.1576157644.32190 [256624.846566] Pid: 32246, comm: ll_ost03_090 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [256624.857098] Call Trace: [256624.859651] [] ldlm_completion_ast+0x4e5/0x860 [ptlrpc] [256624.866672] [] ldlm_cli_enqueue_local+0x231/0x830 [ptlrpc] [256624.873960] [] ofd_destroy_by_fid+0x1dd/0x4a0 [ofd] [256624.880609] [] ofd_destroy_hdl+0x267/0x970 [ofd] [256624.887011] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [256624.894049] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [256624.901869] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [256624.908283] [] kthread+0xd1/0xe0 [256624.913298] [] ret_from_fork_nospec_begin+0xe/0x21 [256624.919853] [] 0xffffffffffffffff [256624.924958] LNet: Service thread pid 31953 was inactive for 1201.02s. Watchdog stack traces are limited to 3 per 300 seconds, skipping this one. [256630.937376] Lustre: fir-OST005c: Client 55158f7b-59ce-2f71-9169-48a899507185 (at 10.9.117.3@o2ib4) reconnecting [256630.947554] Lustre: Skipped 1437 previous similar messages [256665.218237] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000401:1122693 to 0x1a80000401:1122721 [256677.306476] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1106117 to 0x1a00000402:1106145 [256686.201926] Pid: 67658, comm: ll_ost01_027 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [256686.212444] Call Trace: [256686.214996] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [256686.221395] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [256686.228460] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [256686.236260] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [256686.242689] [] kthread+0xd1/0xe0 [256686.247693] [] ret_from_fork_nospec_begin+0xe/0x21 [256686.254270] [] 0xffffffffffffffff [256686.259376] LustreError: dumping log to /tmp/lustre-log.1576157706.67658 [256688.925990] Lustre: 26941:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff88fa20a6a050 x1649516933178880/t0(0) o10->c8791ada-b652-f6ba-7581-332c648ad12e@10.9.103.49@o2ib4:83/0 lens 440/0 e 0 to 0 dl 1576157713 ref 2 fl New:/2/ffffffff rc 0/-1 [256688.954795] Lustre: 26941:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 3096 previous similar messages [256713.288237] Lustre: fir-OST0058: Connection restored to c2d505e5-30ab-23ac-7017-17a83f00b35d (at 10.9.102.49@o2ib4) [256713.298767] Lustre: Skipped 1456 previous similar messages [256768.972297] Lustre: fir-OST0058: deleting orphan objects from 0x1900000400:1124932 to 0x1900000400:1124961 [256777.332433] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:1117988 to 0x1800000402:1118017 [256840.509671] Lustre: fir-OST0056: 
Export ffff8902f7fafc00 already connecting from 10.8.22.24@o2ib6 [256840.518660] Lustre: Skipped 44 previous similar messages [256849.424672] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3082700 to 0x1a80000402:3082721 [256890.197793] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3090413 to 0x1980000402:3090433 [257092.038358] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1126436 to 0x1980000400:1126465 [257101.161968] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3049489 to 0x1a00000401:3049505 [257203.499957] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3071208 to 0x1800000400:3071233 [257222.788550] LNet: Service thread pid 32513 was inactive for 1200.81s. The thread might be hung, or it might only be slow and will resume later. Dumping the stack trace for debugging purposes: [257222.805655] LNet: Skipped 5 previous similar messages [257222.810802] Pid: 32513, comm: ll_ost03_094 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [257222.821338] Call Trace: [257222.823888] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [257222.830281] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [257222.837342] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [257222.845144] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [257222.851574] [] kthread+0xd1/0xe0 [257222.856576] [] ret_from_fork_nospec_begin+0xe/0x21 [257222.863153] [] 0xffffffffffffffff [257222.868261] LustreError: dumping log to /tmp/lustre-log.1576158242.32513 [257231.013228] Lustre: fir-OST0058: Client a5aa8a03-126c-c3c2-1aed-484f379dc83d (at 10.9.105.72@o2ib4) reconnecting [257231.023489] Lustre: Skipped 1454 previous similar messages [257278.742355] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1106149 to 0x1a00000402:1106177 [257289.029868] Lustre: 27223:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff891f457ea050 x1649516933178880/t0(0) o10->c8791ada-b652-f6ba-7581-332c648ad12e@10.9.103.49@o2ib4:684/0 lens 440/0 e 0 to 0 dl 1576158314 ref 2 fl New:/2/ffffffff rc 0/-1 [257289.058772] Lustre: 27223:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 3145 previous similar messages [257304.093938] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3086471 to 0x1900000402:3086497 [257306.295267] LustreError: 66071:0:(ldlm_lockd.c:256:expired_lock_main()) ### lock callback timer expired after 750s: evicting client at 10.9.101.54@o2ib4 ns: filter-fir-OST005c_UUID lock: ffff88fb25939200/0x7066c9c190b7797b lrc: 3/0,0 mode: PR/PR res: [0x1a00000400:0xb1d139:0x0].0x0 rrc: 3 type: EXT [0->18446744073709551615] (req 0->134217727) flags: 0x60000400000020 nid: 10.9.101.54@o2ib4 remote: 0xc717f25cc11d22e8 expref: 46 pid: 67851 timeout: 256700 lvb_type: 1 [257314.516914] Lustre: fir-OST0058: Connection restored to c2d505e5-30ab-23ac-7017-17a83f00b35d (at 10.9.102.49@o2ib4) [257314.527434] Lustre: Skipped 1474 previous similar messages [257421.441152] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000401:1122726 to 0x1a80000401:1122753 [257442.633734] Lustre: fir-OST0056: Export ffff8902f7fafc00 already connecting from 10.8.22.24@o2ib6 [257442.642702] Lustre: Skipped 55 previous similar messages [257443.976915] Pid: 112580, comm: ll_ost01_095 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [257443.987515] Call Trace: [257443.990068] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [257443.996471] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] 
[257444.003541] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [257444.011339] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [257444.017769] [] kthread+0xd1/0xe0 [257444.022772] [] ret_from_fork_nospec_begin+0xe/0x21 [257444.029345] [] 0xffffffffffffffff [257444.034456] LustreError: dumping log to /tmp/lustre-log.1576158464.112580 [257450.340533] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3082733 to 0x1a80000402:3082753 [257491.217646] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3090441 to 0x1980000402:3090465 [257525.515222] Lustre: fir-OST0058: deleting orphan objects from 0x1900000400:1124970 to 0x1900000400:1124993 [257533.107354] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:1118022 to 0x1800000402:1118049 [257786.933250] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1126469 to 0x1980000400:1126497 [257804.823804] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3071242 to 0x1800000400:3071265 [257831.264131] Lustre: fir-OST0058: Client 4380447f-ca0d-2b3f-165d-69d1c6bd4d88 (at 10.9.108.8@o2ib4) reconnecting [257831.274336] Lustre: Skipped 1436 previous similar messages [257857.144848] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3049514 to 0x1a00000401:3049537 [257879.250192] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1106149 to 0x1a00000402:1106209 [257889.131709] Lustre: 29548:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/-150), not sending early reply req@ffff88ecd07af050 x1649447764852256/t0(0) o17->34b263e7-c235-6737-be01-1bc0ec67d622@10.9.117.33@o2ib4:529/0 lens 456/0 e 0 to 0 dl 1576158914 ref 2 fl New:/0/ffffffff rc 0/-1 [257889.160861] Lustre: 29548:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 3017 previous similar messages [257914.913479] Lustre: fir-OST0056: Connection restored to fb9a2d5e-e9b3-4fb9-b988-9954fcfb0920 (at 10.8.0.66@o2ib6) [257914.923838] Lustre: Skipped 1418 previous similar messages [257935.506626] LNet: Service thread pid 67674 was inactive for 1201.22s. The thread might be hung, or it might only be slow and will resume later. 
Dumping the stack trace for debugging purposes: [257935.523741] LNet: Skipped 1 previous similar message [257935.528805] Pid: 67674, comm: ll_ost01_031 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [257935.539336] Call Trace: [257935.541907] [] wait_transaction_locked+0x85/0xd0 [jbd2] [257935.548903] [] add_transaction_credits+0x268/0x2f0 [jbd2] [257935.556086] [] start_this_handle+0x1a1/0x430 [jbd2] [257935.562735] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [257935.569484] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [257935.577009] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [257935.584105] [] dqget+0x3fa/0x450 [257935.589108] [] dquot_get_dqblk+0x14/0x1f0 [257935.594880] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [257935.602522] [] lquotactl_slv+0x27d/0x9d0 [lquota] [257935.609000] [] ofd_quotactl+0x13c/0x380 [ofd] [257935.615142] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [257935.622198] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [257935.630023] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [257935.636441] [] kthread+0xd1/0xe0 [257935.641438] [] ret_from_fork_nospec_begin+0xe/0x21 [257935.648016] [] 0xffffffffffffffff [257935.653132] LustreError: dumping log to /tmp/lustre-log.1576158955.67674 [257980.563536] Pid: 32762, comm: ll_ost03_098 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [257980.574053] Call Trace: [257980.576604] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [257980.582998] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [257980.590063] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [257980.597862] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [257980.604292] [] kthread+0xd1/0xe0 [257980.609292] [] ret_from_fork_nospec_begin+0xe/0x21 [257980.615871] [] 0xffffffffffffffff [257980.620977] LustreError: dumping log to /tmp/lustre-log.1576159000.32762 [258009.236102] Pid: 32728, comm: ll_ost01_096 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [258009.246623] Call Trace: [258009.249195] [] wait_transaction_locked+0x85/0xd0 [jbd2] [258009.256192] [] add_transaction_credits+0x268/0x2f0 [jbd2] [258009.263377] [] start_this_handle+0x1a1/0x430 [jbd2] [258009.270023] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [258009.276771] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [258009.284314] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [258009.291397] [] dqget+0x3fa/0x450 [258009.296424] [] dquot_get_dqblk+0x14/0x1f0 [258009.302205] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [258009.309828] [] lquotactl_slv+0x27d/0x9d0 [lquota] [258009.316303] [] ofd_quotactl+0x13c/0x380 [ofd] [258009.322446] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [258009.329495] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [258009.337310] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [258009.343727] [] kthread+0xd1/0xe0 [258009.348742] [] ret_from_fork_nospec_begin+0xe/0x21 [258009.355306] [] 0xffffffffffffffff [258009.360418] LustreError: dumping log to /tmp/lustre-log.1576159029.32728 [258046.100835] Pid: 32889, comm: ll_ost01_099 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [258046.111360] Call Trace: [258046.113914] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [258046.120303] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [258046.127370] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [258046.135176] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [258046.141605] [] kthread+0xd1/0xe0 [258046.146607] [] ret_from_fork_nospec_begin+0xe/0x21 [258046.153168] [] 0xffffffffffffffff [258046.158289] LustreError: dumping log 
to /tmp/lustre-log.1576159066.32889 [258051.264401] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3082757 to 0x1a80000402:3082785 [258060.317872] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3086503 to 0x1900000402:3086529 [258061.771983] Lustre: fir-OST0056: Export ffff8912f1e21000 already connecting from 10.8.25.17@o2ib6 [258061.780943] Lustre: Skipped 64 previous similar messages [258092.445632] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3090473 to 0x1980000402:3090497 [258177.175943] Pid: 32512, comm: ll_ost03_093 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [258177.186459] Call Trace: [258177.189029] [] wait_transaction_locked+0x85/0xd0 [jbd2] [258177.196035] [] add_transaction_credits+0x268/0x2f0 [jbd2] [258177.203236] [] start_this_handle+0x1a1/0x430 [jbd2] [258177.209884] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [258177.216632] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [258177.224158] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [258177.231253] [] dqget+0x3fa/0x450 [258177.236257] [] dquot_get_dqblk+0x14/0x1f0 [258177.242030] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [258177.249655] [] lquotactl_slv+0x27d/0x9d0 [lquota] [258177.256140] [] ofd_quotactl+0x13c/0x380 [ofd] [258177.262291] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [258177.269355] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [258177.277171] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [258177.283587] [] kthread+0xd1/0xe0 [258177.288601] [] ret_from_fork_nospec_begin+0xe/0x21 [258177.295166] [] 0xffffffffffffffff [258177.300279] LustreError: dumping log to /tmp/lustre-log.1576159197.32512 [258177.664094] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000401:1122756 to 0x1a80000401:1122785 [258255.000980] Pid: 32917, comm: ll_ost01_100 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [258255.011502] Call Trace: [258255.014074] [] wait_transaction_locked+0x85/0xd0 [jbd2] [258255.021069] [] add_transaction_credits+0x268/0x2f0 [jbd2] [258255.028270] [] start_this_handle+0x1a1/0x430 [jbd2] [258255.034918] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [258255.041667] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [258255.049194] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [258255.056298] [] tgt_client_data_update+0x303/0x5e0 [ptlrpc] [258255.063605] [] tgt_client_new+0x41b/0x610 [ptlrpc] [258255.070217] [] ofd_obd_connect+0x3a3/0x4c0 [ofd] [258255.076606] [] target_handle_connect+0xecb/0x2b10 [ptlrpc] [258255.083901] [] tgt_request_handle+0x50a/0x1580 [ptlrpc] [258255.090934] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [258255.098763] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [258255.105182] [] kthread+0xd1/0xe0 [258255.110195] [] ret_from_fork_nospec_begin+0xe/0x21 [258255.116776] [] 0xffffffffffffffff [258255.121892] LustreError: dumping log to /tmp/lustre-log.1576159275.32917 [258281.762177] Lustre: fir-OST0058: deleting orphan objects from 0x1900000400:1124997 to 0x1900000400:1125025 [258289.642312] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:1118054 to 0x1800000402:1118081 [258294.266426] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1126500 to 0x1980000400:1126529 [258377.883423] Pid: 112541, comm: ll_ost01_088 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [258377.894031] Call Trace: [258377.896603] [] wait_transaction_locked+0x85/0xd0 [jbd2] [258377.903599] [] add_transaction_credits+0x268/0x2f0 [jbd2] [258377.910787] [] 
start_this_handle+0x1a1/0x430 [jbd2] [258377.917442] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [258377.924190] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [258377.931714] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [258377.938810] [] dqget+0x3fa/0x450 [258377.943813] [] dquot_get_dqblk+0x14/0x1f0 [258377.949587] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [258377.957219] [] lquotactl_slv+0x27d/0x9d0 [lquota] [258377.963696] [] ofd_quotactl+0x13c/0x380 [ofd] [258377.969836] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [258377.976887] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [258377.984702] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [258377.991117] [] kthread+0xd1/0xe0 [258377.996132] [] ret_from_fork_nospec_begin+0xe/0x21 [258378.002696] [] 0xffffffffffffffff [258378.007809] LustreError: dumping log to /tmp/lustre-log.1576159398.112541 [258406.083759] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3071269 to 0x1800000400:3071297 [258431.390313] Lustre: fir-OST0054: Client 1f1945b8-54db-69d1-6e1e-25c0ee92d4cb (at 10.9.103.47@o2ib4) reconnecting [258431.400577] Lustre: Skipped 1521 previous similar messages [258480.222125] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1106217 to 0x1a00000402:1106241 [258489.213630] Lustre: 27223:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff8916cc359050 x1651840015315792/t0(0) o4->20841216-9d8b-7794-9459-ced18b617ae2@10.9.114.3@o2ib4:374/0 lens 488/0 e 0 to 0 dl 1576159514 ref 2 fl New:/2/ffffffff rc 0/-1 [258489.242352] Lustre: 27223:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 3262 previous similar messages [258515.944870] Lustre: fir-OST0054: Connection restored to cded0104-b7e2-3351-ef3d-a03eb9e0010a (at 10.9.108.66@o2ib4) [258515.955404] Lustre: Skipped 1497 previous similar messages [258613.223861] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3049539 to 0x1a00000401:3049569 [258652.284429] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3082790 to 0x1a80000402:3082817 [258668.598882] Lustre: fir-OST0056: Export ffff8912408a3000 already connecting from 10.9.112.4@o2ib4 [258668.607877] Lustre: Skipped 59 previous similar messages [258693.689478] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3090502 to 0x1980000402:3090529 [258738.338551] LNet: Service thread pid 32514 was inactive for 1202.30s. The thread might be hung, or it might only be slow and will resume later. 
Dumping the stack trace for debugging purposes: [258738.355659] LNet: Skipped 6 previous similar messages [258738.360800] Pid: 32514, comm: ll_ost03_095 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [258738.371340] Call Trace: [258738.373895] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [258738.380277] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [258738.387341] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [258738.395142] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [258738.401570] [] kthread+0xd1/0xe0 [258738.406573] [] ret_from_fork_nospec_begin+0xe/0x21 [258738.413141] [] 0xffffffffffffffff [258738.418251] LustreError: dumping log to /tmp/lustre-log.1576159758.32514 [258750.626794] Pid: 33085, comm: ll_ost03_100 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [258750.637311] Call Trace: [258750.639861] [] wait_transaction_locked+0x85/0xd0 [jbd2] [258750.646859] [] add_transaction_credits+0x268/0x2f0 [jbd2] [258750.654041] [] start_this_handle+0x1a1/0x430 [jbd2] [258750.660684] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [258750.667425] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [258750.674951] [] osd_trans_start+0x20e/0x4e0 [osd_ldiskfs] [258750.682045] [] tgt_client_data_update+0x303/0x5e0 [ptlrpc] [258750.689338] [] tgt_client_new+0x41b/0x610 [ptlrpc] [258750.695936] [] ofd_obd_connect+0x3a3/0x4c0 [ofd] [258750.702328] [] target_handle_connect+0xecb/0x2b10 [ptlrpc] [258750.709613] [] tgt_request_handle+0x50a/0x1580 [ptlrpc] [258750.716654] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [258750.724468] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [258750.730886] [] kthread+0xd1/0xe0 [258750.735891] [] ret_from_fork_nospec_begin+0xe/0x21 [258750.742446] [] 0xffffffffffffffff [258750.747551] LustreError: dumping log to /tmp/lustre-log.1576159770.33085 [258803.875861] Pid: 67636, comm: ll_ost01_019 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [258803.886379] Call Trace: [258803.888940] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [258803.895338] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [258803.902402] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [258803.910202] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [258803.916631] [] kthread+0xd1/0xe0 [258803.921633] [] ret_from_fork_nospec_begin+0xe/0x21 [258803.928208] [] 0xffffffffffffffff [258803.933317] LustreError: dumping log to /tmp/lustre-log.1576159823.67636 [258816.427865] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3086531 to 0x1900000402:3086561 [258895.674050] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1126500 to 0x1980000400:1126561 [258933.935132] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000401:1122787 to 0x1a80000401:1122817 [259007.055663] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3071300 to 0x1800000400:3071329 [259008.679904] Pid: 33111, comm: ll_ost01_103 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [259008.690420] Call Trace: [259008.692988] [] wait_transaction_locked+0x85/0xd0 [jbd2] [259008.699985] [] add_transaction_credits+0x268/0x2f0 [jbd2] [259008.707173] [] start_this_handle+0x1a1/0x430 [jbd2] [259008.713817] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [259008.720568] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [259008.728083] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [259008.735178] [] dqget+0x3fa/0x450 [259008.740183] [] dquot_get_dqblk+0x14/0x1f0 [259008.745969] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [259008.753581] [] 
lquotactl_slv+0x27d/0x9d0 [lquota] [259008.760070] [] ofd_quotactl+0x13c/0x380 [ofd] [259008.766200] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [259008.773259] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [259008.781066] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [259008.787507] [] kthread+0xd1/0xe0 [259008.792503] [] ret_from_fork_nospec_begin+0xe/0x21 [259008.799079] [] 0xffffffffffffffff [259008.804180] LustreError: dumping log to /tmp/lustre-log.1576160028.33111 [259031.699781] Lustre: fir-OST0058: Client ec6f0728-3f9f-b5fd-43eb-c07bc3da43b2 (at 10.9.117.12@o2ib4) reconnecting [259031.710050] Lustre: Skipped 1496 previous similar messages [259037.329181] Lustre: fir-OST0058: deleting orphan objects from 0x1900000400:1125030 to 0x1900000400:1125057 [259045.817318] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:1118084 to 0x1800000402:1118113 [259081.874075] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1106243 to 0x1a00000402:1106273 [259089.621505] Lustre: 29439:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/-150), not sending early reply req@ffff88eb1c5d1850 x1649447765080400/t0(0) o17->34b263e7-c235-6737-be01-1bc0ec67d622@10.9.117.33@o2ib4:219/0 lens 456/0 e 0 to 0 dl 1576160114 ref 2 fl New:/0/ffffffff rc 0/-1 [259089.650655] Lustre: 29439:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 3267 previous similar messages [259116.335315] Lustre: fir-OST005e: Connection restored to 4e97c29c-283b-4253-402d-db9d46beedd7 (at 10.9.101.39@o2ib4) [259116.345834] Lustre: Skipped 1509 previous similar messages [259139.754503] Pid: 32958, comm: ll_ost01_101 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [259139.765022] Call Trace: [259139.767610] [] wait_transaction_locked+0x85/0xd0 [jbd2] [259139.774607] [] add_transaction_credits+0x268/0x2f0 [jbd2] [259139.781790] [] start_this_handle+0x1a1/0x430 [jbd2] [259139.788453] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [259139.795205] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [259139.802730] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [259139.809825] [] dqget+0x3fa/0x450 [259139.814830] [] dquot_get_dqblk+0x14/0x1f0 [259139.820609] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [259139.828227] [] lquotactl_slv+0x27d/0x9d0 [lquota] [259139.834702] [] ofd_quotactl+0x13c/0x380 [ofd] [259139.840843] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [259139.847894] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [259139.855708] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [259139.862138] [] kthread+0xd1/0xe0 [259139.867158] [] ret_from_fork_nospec_begin+0xe/0x21 [259139.873719] [] 0xffffffffffffffff [259139.878833] LustreError: dumping log to /tmp/lustre-log.1576160159.32958 [259253.216257] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3082823 to 0x1a80000402:3082849 [259279.028246] Lustre: fir-OST0056: Export ffff88fc8fff5800 already connecting from 10.8.22.17@o2ib6 [259279.037210] Lustre: Skipped 47 previous similar messages [259295.125385] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3090531 to 0x1980000402:3090561 [259340.462451] LNet: Service thread pid 32897 was inactive for 1202.40s. The thread might be hung, or it might only be slow and will resume later. 
Dumping the stack trace for debugging purposes: [259340.479552] LNet: Skipped 4 previous similar messages [259340.484694] Pid: 32897, comm: ll_ost03_099 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [259340.495231] Call Trace: [259340.497780] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [259340.504172] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [259340.511235] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [259340.519038] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [259340.525465] [] kthread+0xd1/0xe0 [259340.530470] [] ret_from_fork_nospec_begin+0xe/0x21 [259340.537044] [] 0xffffffffffffffff [259340.542155] LustreError: dumping log to /tmp/lustre-log.1576160360.32897 [259369.142825] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3049578 to 0x1a00000401:3049601 [259496.581907] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1126500 to 0x1980000400:1126593 [259557.554758] Pid: 32877, comm: ll_ost01_098 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [259557.565280] Call Trace: [259557.567832] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [259557.574234] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [259557.581312] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [259557.589114] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [259557.595541] [] kthread+0xd1/0xe0 [259557.600544] [] ret_from_fork_nospec_begin+0xe/0x21 [259557.607121] [] 0xffffffffffffffff [259557.612231] LustreError: dumping log to /tmp/lustre-log.1576160577.32877 [259572.106814] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3086572 to 0x1900000402:3086593 [259608.987557] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3071333 to 0x1800000400:3071361 [259632.698181] Lustre: fir-OST0058: Client ec6f0728-3f9f-b5fd-43eb-c07bc3da43b2 (at 10.9.117.12@o2ib4) reconnecting [259632.708437] Lustre: Skipped 1492 previous similar messages [259682.629933] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1106276 to 0x1a00000402:1106305 [259689.667411] Lustre: 27252:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff891091dac850 x1651792518523520/t0(0) o4->f01080a0-cc7c-da9c-568d-51eacd84f956@10.9.114.8@o2ib4:64/0 lens 7808/0 e 0 to 0 dl 1576160714 ref 2 fl New:H/2/ffffffff rc 0/-1 [259689.696243] Lustre: 27252:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 3263 previous similar messages [259690.318084] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000401:1122819 to 0x1a80000401:1122849 [259718.012255] Lustre: fir-OST0058: Connection restored to c2d505e5-30ab-23ac-7017-17a83f00b35d (at 10.9.102.49@o2ib4) [259718.022775] Lustre: Skipped 1509 previous similar messages [259793.264111] Lustre: fir-OST0058: deleting orphan objects from 0x1900000400:1125063 to 0x1900000400:1125089 [259801.168248] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:1118117 to 0x1800000402:1118145 [259854.180093] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3082852 to 0x1a80000402:3082881 [259876.025064] Pid: 32985, comm: ll_ost01_102 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [259876.035587] Call Trace: [259876.038165] [] wait_transaction_locked+0x85/0xd0 [jbd2] [259876.045159] [] add_transaction_credits+0x268/0x2f0 [jbd2] [259876.052347] [] start_this_handle+0x1a1/0x430 [jbd2] [259876.058992] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [259876.065741] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [259876.073268] [] 
ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [259876.080363] [] dqget+0x3fa/0x450 [259876.085367] [] dquot_get_dqblk+0x14/0x1f0 [259876.091152] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [259876.098766] [] lquotactl_slv+0x27d/0x9d0 [lquota] [259876.105254] [] ofd_quotactl+0x13c/0x380 [ofd] [259876.111385] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [259876.118444] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [259876.126248] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [259876.132676] [] kthread+0xd1/0xe0 [259876.137680] [] ret_from_fork_nospec_begin+0xe/0x21 [259876.144266] [] 0xffffffffffffffff [259876.149364] LustreError: dumping log to /tmp/lustre-log.1576160896.32985 [259890.883604] Lustre: fir-OST0056: Export ffff88f4f5726000 already connecting from 10.8.7.5@o2ib6 [259890.892397] Lustre: Skipped 59 previous similar messages [259896.361272] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3090568 to 0x1980000402:3090593 [260097.361839] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1126598 to 0x1980000400:1126625 [260098.237469] LNet: Service thread pid 33368 was inactive for 1203.15s. The thread might be hung, or it might only be slow and will resume later. Dumping the stack trace for debugging purposes: [260098.254576] LNet: Skipped 2 previous similar messages [260098.259727] Pid: 33368, comm: ll_ost03_103 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [260098.270265] Call Trace: [260098.272821] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [260098.279224] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [260098.286285] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [260098.294096] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [260098.300526] [] kthread+0xd1/0xe0 [260098.305543] [] ret_from_fork_nospec_begin+0xe/0x21 [260098.312120] [] 0xffffffffffffffff [260098.317240] LustreError: dumping log to /tmp/lustre-log.1576161118.33368 [260125.325839] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3049604 to 0x1a00000401:3049633 [260209.191454] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3071364 to 0x1800000400:3071393 [260232.885006] Lustre: fir-OST005c: Client d22f0531-865c-6a0a-5b19-ab2316a51d3c (at 10.9.106.13@o2ib4) reconnecting [260232.895271] Lustre: Skipped 1476 previous similar messages [260283.601864] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1106307 to 0x1a00000402:1106337 [260289.749270] Lustre: 29439:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/-150), not sending early reply req@ffff88e80a35b050 x1649447765303904/t0(0) o17->34b263e7-c235-6737-be01-1bc0ec67d622@10.9.117.33@o2ib4:664/0 lens 456/0 e 0 to 0 dl 1576161314 ref 2 fl New:/0/ffffffff rc 0/-1 [260289.778430] Lustre: 29439:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 3304 previous similar messages [260315.329769] Pid: 33363, comm: ll_ost01_106 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [260315.340284] Call Trace: [260315.342839] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [260315.349237] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [260315.356301] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [260315.364101] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [260315.370531] [] kthread+0xd1/0xe0 [260315.375534] [] ret_from_fork_nospec_begin+0xe/0x21 [260315.382111] [] 0xffffffffffffffff [260315.387221] LustreError: dumping log to /tmp/lustre-log.1576161335.33363 [260319.146381] Lustre: fir-OST0054: Connection restored to ef2d362e-15f9-19b6-7dd8-07207d8adffe (at 
10.9.103.50@o2ib4) [260319.156922] Lustre: Skipped 1509 previous similar messages [260328.009799] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3086600 to 0x1900000402:3086625 [260446.157100] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000401:1122852 to 0x1a80000401:1122881 [260455.460321] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3082884 to 0x1a80000402:3082913 [260496.124239] Lustre: fir-OST0056: Export ffff8912f1e21000 already connecting from 10.8.25.17@o2ib6 [260496.133213] Lustre: Skipped 67 previous similar messages [260497.501195] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3090596 to 0x1980000402:3090625 [260549.823153] Lustre: fir-OST0058: deleting orphan objects from 0x1900000400:1125091 to 0x1900000400:1125121 [260557.151283] Lustre: fir-OST0054: deleting orphan objects from 0x1800000402:1118148 to 0x1800000402:1118177 [260698.525861] Lustre: fir-OST005a: deleting orphan objects from 0x1980000400:1126630 to 0x1980000400:1126657 [260811.091393] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3071401 to 0x1800000400:3071425 [260833.153805] Lustre: fir-OST0054: Client 553c79c8-d0b4-823c-4974-01da71803ed2 (at 10.9.116.9@o2ib4) reconnecting [260833.163988] Lustre: Skipped 1501 previous similar messages [260856.012480] LNet: Service thread pid 32280 was inactive for 1203.89s. The thread might be hung, or it might only be slow and will resume later. Dumping the stack trace for debugging purposes: [260856.029593] LNet: Skipped 1 previous similar message [260856.034655] Pid: 32280, comm: ll_ost03_091 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [260856.045195] Call Trace: [260856.047747] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [260856.054141] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [260856.061204] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [260856.069021] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [260856.075450] [] kthread+0xd1/0xe0 [260856.080455] [] ret_from_fork_nospec_begin+0xe/0x21 [260856.087032] [] 0xffffffffffffffff [260856.092140] LustreError: dumping log to /tmp/lustre-log.1576161876.32280 [260881.084793] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3049642 to 0x1a00000401:3049665 [260884.893786] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000402:1106341 to 0x1a00000402:1106369 [260890.131167] Lustre: 26941:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff8900d10f3050 x1649045828695760/t0(0) o4->341fa20c-48d3-b0f6-d4f7-bb2f25ae43ef@10.9.105.12@o2ib4:510/0 lens 11800/0 e 0 to 0 dl 1576161915 ref 2 fl New:H/2/ffffffff rc 0/-1 [260890.160256] Lustre: 26941:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 3119 previous similar messages [260920.088029] Lustre: fir-OST0054: Connection restored to cded0104-b7e2-3351-ef3d-a03eb9e0010a (at 10.9.108.66@o2ib4) [260920.098547] Lustre: Skipped 1468 previous similar messages [261056.163928] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3082922 to 0x1a80000402:3082945 [261073.104777] Pid: 33603, comm: ll_ost01_108 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [261073.115296] Call Trace: [261073.117848] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [261073.124247] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [261073.131312] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [261073.139113] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [261073.145555] [] kthread+0xd1/0xe0 [261073.150561] [] 
ret_from_fork_nospec_begin+0xe/0x21 [261073.157137] [] 0xffffffffffffffff [261073.162255] LustreError: dumping log to /tmp/lustre-log.1576162093.33603 [261084.224805] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3086630 to 0x1900000402:3086657 [261098.577113] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3090637 to 0x1980000402:3090657 [261101.576577] Lustre: fir-OST0056: Export ffff8912f1e21000 already connecting from 10.8.25.17@o2ib6 [261101.585538] Lustre: Skipped 66 previous similar messages [261412.447311] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3071433 to 0x1800000400:3071457 [261433.423105] Lustre: fir-OST0054: Client 6c8c6de8-60d9-5ab1-5f74-dc1e64ab5212 (at 10.8.30.9@o2ib6) reconnecting [261433.433210] Lustre: Skipped 1547 previous similar messages [261449.944241] Pid: 32729, comm: ll_ost01_097 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [261449.954786] Call Trace: [261449.957371] [] wait_transaction_locked+0x85/0xd0 [jbd2] [261449.964375] [] add_transaction_credits+0x268/0x2f0 [jbd2] [261449.971584] [] start_this_handle+0x1a1/0x430 [jbd2] [261449.978267] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [261449.985016] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [261449.992584] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [261449.999681] [] dqget+0x3fa/0x450 [261450.004684] [] dquot_get_dqblk+0x14/0x1f0 [261450.010465] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [261450.018082] [] lquotactl_slv+0x27d/0x9d0 [lquota] [261450.024557] [] ofd_quotactl+0x13c/0x380 [ofd] [261450.030697] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [261450.037764] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [261450.045565] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [261450.051978] [] kthread+0xd1/0xe0 [261450.056980] [] ret_from_fork_nospec_begin+0xe/0x21 [261450.063556] [] 0xffffffffffffffff [261450.068656] LustreError: dumping log to /tmp/lustre-log.1576162470.32729 [261458.136416] LNet: Service thread pid 32245 was inactive for 1203.99s. The thread might be hung, or it might only be slow and will resume later. 
Dumping the stack trace for debugging purposes: [261458.153524] LNet: Skipped 2 previous similar messages [261458.158670] Pid: 32245, comm: ll_ost03_089 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [261458.169222] Call Trace: [261458.171773] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [261458.178176] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [261458.185227] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [261458.193029] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [261458.199460] [] kthread+0xd1/0xe0 [261458.204461] [] ret_from_fork_nospec_begin+0xe/0x21 [261458.211038] [] 0xffffffffffffffff [261458.216146] LustreError: dumping log to /tmp/lustre-log.1576162478.32245 [261462.232495] Pid: 33852, comm: ll_ost01_110 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [261462.243016] Call Trace: [261462.245574] [] wait_transaction_locked+0x85/0xd0 [jbd2] [261462.252570] [] add_transaction_credits+0x268/0x2f0 [jbd2] [261462.259753] [] start_this_handle+0x1a1/0x430 [jbd2] [261462.266402] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [261462.273153] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [261462.280667] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [261462.287764] [] dqget+0x3fa/0x450 [261462.292766] [] dquot_get_dqblk+0x14/0x1f0 [261462.298552] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [261462.306166] [] lquotactl_slv+0x27d/0x9d0 [lquota] [261462.312671] [] ofd_quotactl+0x13c/0x380 [ofd] [261462.318803] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [261462.325847] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [261462.333650] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [261462.340078] [] kthread+0xd1/0xe0 [261462.345081] [] ret_from_fork_nospec_begin+0xe/0x21 [261462.351656] [] 0xffffffffffffffff [261462.356748] LustreError: dumping log to /tmp/lustre-log.1576162482.33852 [261490.467054] Lustre: 31346:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/-150), not sending early reply req@ffff890a40f29050 x1649447765532320/t0(0) o17->34b263e7-c235-6737-be01-1bc0ec67d622@10.9.117.33@o2ib4:355/0 lens 456/0 e 0 to 0 dl 1576162515 ref 2 fl New:/0/ffffffff rc 0/-1 [261490.496211] Lustre: 31346:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 3431 previous similar messages [261520.582993] Lustre: fir-OST005a: Connection restored to 8af5a0e2-950b-b38c-1f8d-81e50d22f3d1 (at 10.9.102.4@o2ib4) [261520.593438] Lustre: Skipped 1548 previous similar messages [261613.787487] Pid: 112560, comm: ll_ost01_092 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [261613.798092] Call Trace: [261613.800666] [] wait_transaction_locked+0x85/0xd0 [jbd2] [261613.807658] [] add_transaction_credits+0x268/0x2f0 [jbd2] [261613.814844] [] start_this_handle+0x1a1/0x430 [jbd2] [261613.821509] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [261613.828258] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [261613.835775] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [261613.842868] [] dqget+0x3fa/0x450 [261613.847872] [] dquot_get_dqblk+0x14/0x1f0 [261613.853647] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [261613.861265] [] lquotactl_slv+0x27d/0x9d0 [lquota] [261613.867740] [] ofd_quotactl+0x13c/0x380 [ofd] [261613.873879] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [261613.880930] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [261613.888758] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [261613.895170] [] kthread+0xd1/0xe0 [261613.900170] [] ret_from_fork_nospec_begin+0xe/0x21 [261613.906747] [] 0xffffffffffffffff 
[261613.911847] LustreError: dumping log to /tmp/lustre-log.1576162633.112560 [261621.979635] Pid: 33700, comm: ll_ost01_109 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [261621.990149] Call Trace: [261621.992704] [] wait_transaction_locked+0x85/0xd0 [jbd2] [261621.999699] [] add_transaction_credits+0x268/0x2f0 [jbd2] [261622.006884] [] start_this_handle+0x1a1/0x430 [jbd2] [261622.013547] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [261622.020288] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [261622.027806] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [261622.034901] [] dqget+0x3fa/0x450 [261622.039904] [] dquot_get_dqblk+0x14/0x1f0 [261622.045691] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [261622.053303] [] lquotactl_slv+0x27d/0x9d0 [lquota] [261622.059792] [] ofd_quotactl+0x13c/0x380 [ofd] [261622.065921] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [261622.072976] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [261622.080793] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [261622.087221] [] kthread+0xd1/0xe0 [261622.092226] [] ret_from_fork_nospec_begin+0xe/0x21 [261622.098792] [] 0xffffffffffffffff [261622.103884] LustreError: dumping log to /tmp/lustre-log.1576162642.33700 [261637.219719] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3049675 to 0x1a00000401:3049697 [261657.343817] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3082953 to 0x1a80000402:3082977 [261700.581002] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3090663 to 0x1980000402:3090689 [261706.915850] Lustre: fir-OST0056: Export ffff890661bf0c00 already connecting from 10.9.113.12@o2ib4 [261706.924895] Lustre: Skipped 18 previous similar messages [261830.879767] Pid: 33217, comm: ll_ost01_105 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [261830.890288] Call Trace: [261830.892842] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [261830.899240] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [261830.906303] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [261830.914103] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [261830.920531] [] kthread+0xd1/0xe0 [261830.925534] [] ret_from_fork_nospec_begin+0xe/0x21 [261830.932112] [] 0xffffffffffffffff [261830.937231] LustreError: dumping log to /tmp/lustre-log.1576162850.33217 [261840.143697] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3086664 to 0x1900000402:3086689 [262013.107106] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3071461 to 0x1800000400:3071489 [262033.981719] Lustre: fir-OST0058: Client f0c93108-cf16-4103-60a6-55732a55eba6 (at 10.8.30.11@o2ib6) reconnecting [262033.991901] Lustre: Skipped 1555 previous similar messages [262060.260308] LNet: Service thread pid 33123 was inactive for 1204.09s. The thread might be hung, or it might only be slow and will resume later. 
Dumping the stack trace for debugging purposes: [262060.277415] LNet: Skipped 4 previous similar messages [262060.282565] Pid: 33123, comm: ll_ost03_102 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [262060.293101] Call Trace: [262060.295660] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [262060.302060] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [262060.309135] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [262060.316942] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [262060.323370] [] kthread+0xd1/0xe0 [262060.328373] [] ret_from_fork_nospec_begin+0xe/0x21 [262060.334949] [] 0xffffffffffffffff [262060.340058] LustreError: dumping log to /tmp/lustre-log.1576163080.33123 [262090.592897] Lustre: 26941:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff88fb95758850 x1648391582529008/t0(0) o10->bb0489d8-99d9-bd6e-c7e4-6c2155fd6f79@10.8.23.36@o2ib6:200/0 lens 440/0 e 0 to 0 dl 1576163115 ref 2 fl New:/2/ffffffff rc 0/-1 [262090.621704] Lustre: 26941:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 3487 previous similar messages [262120.784650] Lustre: fir-OST0054: Connection restored to 41d92224-621a-6fa8-8ee5-70a524b04ee8 (at 10.9.117.6@o2ib4) [262120.795082] Lustre: Skipped 1554 previous similar messages [262179.046609] Pid: 67823, comm: ll_ost01_058 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [262179.057128] Call Trace: [262179.059707] [] wait_transaction_locked+0x85/0xd0 [jbd2] [262179.066704] [] add_transaction_credits+0x268/0x2f0 [jbd2] [262179.073890] [] start_this_handle+0x1a1/0x430 [jbd2] [262179.080537] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [262179.087286] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [262179.094812] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [262179.101915] [] dqget+0x3fa/0x450 [262179.106933] [] dquot_get_dqblk+0x14/0x1f0 [262179.112721] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [262179.120335] [] lquotactl_slv+0x27d/0x9d0 [lquota] [262179.126823] [] ofd_quotactl+0x13c/0x380 [ofd] [262179.132952] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [262179.140014] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [262179.147818] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [262179.154255] [] kthread+0xd1/0xe0 [262179.159257] [] ret_from_fork_nospec_begin+0xe/0x21 [262179.165832] [] 0xffffffffffffffff [262179.170950] LustreError: dumping log to /tmp/lustre-log.1576163199.67823 [262258.315609] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3082989 to 0x1a80000402:3083009 [262301.904801] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3090696 to 0x1980000402:3090721 [262350.719622] Lustre: fir-OST0056: Export ffff8912408a3000 already connecting from 10.9.112.4@o2ib4 [262350.728586] Lustre: Skipped 11 previous similar messages [262387.946751] Pid: 33791, comm: ll_ost03_105 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [262387.957284] Call Trace: [262387.959862] [] wait_transaction_locked+0x85/0xd0 [jbd2] [262387.966868] [] add_transaction_credits+0x268/0x2f0 [jbd2] [262387.974054] [] start_this_handle+0x1a1/0x430 [jbd2] [262387.980707] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [262387.987457] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [262387.994984] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [262388.002085] [] dqget+0x3fa/0x450 [262388.007106] [] dquot_get_dqblk+0x14/0x1f0 [262388.012889] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [262388.020533] [] lquotactl_slv+0x27d/0x9d0 
[lquota] [262388.027015] [] ofd_quotactl+0x13c/0x380 [ofd] [262388.033157] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [262388.040205] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [262388.048030] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [262388.054445] [] kthread+0xd1/0xe0 [262388.059446] [] ret_from_fork_nospec_begin+0xe/0x21 [262388.066023] [] 0xffffffffffffffff [262388.071122] LustreError: dumping log to /tmp/lustre-log.1576163408.33791 [262393.490606] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3049708 to 0x1a00000401:3049729 [262400.234964] Pid: 33161, comm: ll_ost01_104 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [262400.245477] Call Trace: [262400.248044] [] wait_transaction_locked+0x85/0xd0 [jbd2] [262400.255035] [] add_transaction_credits+0x268/0x2f0 [jbd2] [262400.262233] [] start_this_handle+0x1a1/0x430 [jbd2] [262400.268876] [] jbd2__journal_start+0xf3/0x1f0 [jbd2] [262400.275617] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs] [262400.283133] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs] [262400.290230] [] dqget+0x3fa/0x450 [262400.295232] [] dquot_get_dqblk+0x14/0x1f0 [262400.301004] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs] [262400.308630] [] lquotactl_slv+0x27d/0x9d0 [lquota] [262400.315105] [] ofd_quotactl+0x13c/0x380 [ofd] [262400.321248] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [262400.328289] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [262400.336103] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [262400.342518] [] kthread+0xd1/0xe0 [262400.347538] [] ret_from_fork_nospec_begin+0xe/0x21 [262400.354088] [] 0xffffffffffffffff [262400.359195] LustreError: dumping log to /tmp/lustre-log.1576163420.33161 [262596.350622] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3086697 to 0x1900000402:3086721 [262614.846989] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3071495 to 0x1800000400:3071521 [262635.174559] Lustre: fir-OST005a: Client 4ae1953c-c5de-651a-c222-99cb1d82d019 (at 10.8.7.6@o2ib6) reconnecting [262635.184565] Lustre: Skipped 1599 previous similar messages [262662.384148] LNet: Service thread pid 34019 was inactive for 1204.19s. The thread might be hung, or it might only be slow and will resume later. 
Dumping the stack trace for debugging purposes: [262662.401253] LNet: Skipped 3 previous similar messages [262662.406404] Pid: 34019, comm: ll_ost03_107 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [262662.416940] Call Trace: [262662.419498] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [262662.425897] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [262662.432961] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [262662.440762] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [262662.447191] [] kthread+0xd1/0xe0 [262662.452195] [] ret_from_fork_nospec_begin+0xe/0x21 [262662.458770] [] 0xffffffffffffffff [262662.463877] LustreError: dumping log to /tmp/lustre-log.1576163682.34019 [262690.816709] Lustre: 26941:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff88fd4d87b850 x1649047542081856/t0(0) o4->eeedb4d1-a88f-91d4-b517-9794013a9735@10.9.101.23@o2ib4:45/0 lens 8632/0 e 0 to 0 dl 1576163715 ref 2 fl New:/2/ffffffff rc 0/-1 [262690.845522] Lustre: 26941:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 3600 previous similar messages [262720.974042] Lustre: fir-OST0058: Connection restored to 0c302cf4-1147-d945-dfa2-e9bc796b3175 (at 10.9.101.32@o2ib4) [262720.984568] Lustre: Skipped 1631 previous similar messages [262859.687585] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3083011 to 0x1a80000402:3083041 [262902.636753] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3090728 to 0x1980000402:3090753 [262962.699475] Lustre: fir-OST0056: Export ffff8922dee96000 already connecting from 10.9.116.8@o2ib4 [262962.708441] Lustre: Skipped 10 previous similar messages [263149.665690] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3049733 to 0x1a00000401:3049761 [263215.322973] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3071525 to 0x1800000400:3071553 [263236.718264] Lustre: fir-OST005e: Client af0b82f1-b12f-53c0-f83a-3232c3516fc9 (at 10.9.117.18@o2ib4) reconnecting [263236.728524] Lustre: Skipped 1588 previous similar messages [263260.412075] Pid: 33538, comm: ll_ost03_104 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019 [263260.422598] Call Trace: [263260.425149] [] ofd_create_hdl+0xcb3/0x20e0 [ofd] [263260.431552] [] tgt_request_handle+0xaea/0x1580 [ptlrpc] [263260.438611] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc] [263260.446412] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc] [263260.452844] [] kthread+0xd1/0xe0 [263260.457846] [] ret_from_fork_nospec_begin+0xe/0x21 [263260.464420] [] 0xffffffffffffffff [263260.469530] LustreError: dumping log to /tmp/lustre-log.1576164280.33538 [263291.084707] Lustre: 26941:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff88ffd6c21850 x1649047542536512/t0(0) o4->eeedb4d1-a88f-91d4-b517-9794013a9735@10.9.101.23@o2ib4:646/0 lens 488/0 e 0 to 0 dl 1576164316 ref 2 fl New:/2/ffffffff rc 0/-1 [263291.113510] Lustre: 26941:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 3783 previous similar messages [263321.242984] Lustre: fir-OST0058: Connection restored to 0c302cf4-1147-d945-dfa2-e9bc796b3175 (at 10.9.101.32@o2ib4) [263321.253504] Lustre: Skipped 1638 previous similar messages [263352.373715] Lustre: fir-OST0058: deleting orphan objects from 0x1900000402:3086729 to 0x1900000402:3086753 [263460.315550] Lustre: fir-OST005e: deleting orphan objects from 0x1a80000402:3083046 to 0x1a80000402:3083073 [263503.608742] Lustre: 
[263503.608742] Lustre: fir-OST005a: deleting orphan objects from 0x1980000402:3090755 to 0x1980000402:3090785
[263569.480838] Lustre: fir-OST0056: Export ffff890661bf0c00 already connecting from 10.9.113.12@o2ib4
[263569.489891] Lustre: Skipped 11 previous similar messages
[263816.334925] Lustre: fir-OST0054: deleting orphan objects from 0x1800000400:3071556 to 0x1800000400:3071585
[263837.823815] Lustre: fir-OST0054: Client 6c8c6de8-60d9-5ab1-5f74-dc1e64ab5212 (at 10.8.30.9@o2ib6) reconnecting
[263837.833909] Lustre: Skipped 1651 previous similar messages
[263891.110578] Lustre: 27223:0:(service.c:1372:ptlrpc_at_send_early_reply()) @@@ Couldn't add any time (5/5), not sending early reply req@ffff8914fc6b7850 x1649315416807712/t0(0) o4->25827f45-931d-eb26-8907-81e567064f86@10.9.106.11@o2ib4:491/0 lens 488/0 e 1 to 0 dl 1576164916 ref 2 fl New:/0/ffffffff rc 0/-1
[263891.139386] Lustre: 27223:0:(service.c:1372:ptlrpc_at_send_early_reply()) Skipped 3666 previous similar messages
[263905.168644] Lustre: fir-OST005c: deleting orphan objects from 0x1a00000401:3049769 to 0x1a00000401:3049793
[263921.465131] Lustre: fir-OST005c: Connection restored to 592dade7-9f13-e25c-8636-229aef409ea9 (at 10.9.108.38@o2ib4)
[263921.475654] Lustre: Skipped 1598 previous similar messages
[264018.187097] LNet: Service thread pid 33911 was inactive for 1200.94s. The thread might be hung, or it might only be slow and will resume later. Dumping the stack trace for debugging purposes:
[264018.204204] LNet: Skipped 1 previous similar message
[264018.209269] Pid: 33911, comm: ll_ost03_106 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[264018.219804] Call Trace:
[264018.222354] [] ofd_create_hdl+0xcb3/0x20e0 [ofd]
[264018.228754] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[264018.235831] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[264018.243637] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[264018.250067] [] kthread+0xd1/0xe0
[264018.255066] [] ret_from_fork_nospec_begin+0xe/0x21
[264018.261642] [] 0xffffffffffffffff
[264018.266754] LustreError: dumping log to /tmp/lustre-log.1576165038.33911
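By this point the watchdog has flagged threads from both the ll_ost01 and ll_ost03 pools, with inactivity times ranging from roughly 200 s to over 1200 s. On a console log this size, a quick tally of which threads are stuck, and for how long, helps distinguish one slow request from a file-system-wide stall. A sketch of that tally (again an illustrative addition, assuming the same one-message-per-line input on stdin):

    #!/usr/bin/env python3
    # Illustrative tally, not part of the captured log: record the worst
    # reported inactivity per pid from the LNet watchdog messages, and
    # resolve thread names from the "Pid: <pid>, comm: <name>" lines.
    import re
    import sys

    WATCHDOG = re.compile(r"Service thread pid (\d+) was inactive for ([\d.]+)s")
    COMM = re.compile(r"Pid: (\d+), comm: (\S+)")

    inactive = {}   # pid -> longest reported inactivity in seconds
    names = {}      # pid -> thread name (e.g. ll_ost03_106)

    for line in sys.stdin:
        m = WATCHDOG.search(line)
        if m:
            pid, secs = m.group(1), float(m.group(2))
            inactive[pid] = max(secs, inactive.get(pid, 0.0))
        m = COMM.search(line)
        if m:
            names[m.group(1)] = m.group(2)

    for pid, secs in sorted(inactive.items(), key=lambda kv: -kv[1]):
        print(f"{names.get(pid, '?'):>16}  pid {pid}  inactive {secs:7.2f}s")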
[264126.221225] LNet: Service thread pid 34617 was inactive for 200.36s. The thread might be hung, or it might only be slow and will resume later. Dumping the stack trace for debugging purposes:
[264126.238268] Pid: 34617, comm: ll_ost03_111 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[264126.248785] Call Trace:
[264126.251360] [] wait_transaction_locked+0x85/0xd0 [jbd2]
[264126.258376] [] add_transaction_credits+0x268/0x2f0 [jbd2]
[264126.265544] [] start_this_handle+0x1a1/0x430 [jbd2]
[264126.272211] [] jbd2__journal_start+0xf3/0x1f0 [jbd2]
[264126.278951] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs]
[264126.286491] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs]
[264126.293573] [] dqget+0x3fa/0x450
[264126.298588] [] dquot_get_dqblk+0x14/0x1f0
[264126.304371] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs]
[264126.311997] [] lquotactl_slv+0x27d/0x9d0 [lquota]
[264126.318488] [] ofd_quotactl+0x13c/0x380 [ofd]
[264126.324630] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[264126.331681] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[264126.339496] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[264126.345912] [] kthread+0xd1/0xe0
[264126.350926] [] ret_from_fork_nospec_begin+0xe/0x21
[264126.357490] [] 0xffffffffffffffff
[264126.362604] LustreError: dumping log to /tmp/lustre-log.1576165146.34617
[264136.461417] Pid: 34429, comm: ll_ost03_110 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1 SMP Thu Nov 7 15:26:16 PST 2019
[264136.471940] Call Trace:
[264136.474507] [] wait_transaction_locked+0x85/0xd0 [jbd2]
[264136.481514] [] add_transaction_credits+0x268/0x2f0 [jbd2]
[264136.488699] [] start_this_handle+0x1a1/0x430 [jbd2]
[264136.495351] [] jbd2__journal_start+0xf3/0x1f0 [jbd2]
[264136.502103] [] __ldiskfs_journal_start_sb+0x69/0xe0 [ldiskfs]
[264136.509629] [] ldiskfs_acquire_dquot+0x53/0xb0 [ldiskfs]
[264136.516725] [] dqget+0x3fa/0x450
[264136.521728] [] dquot_get_dqblk+0x14/0x1f0
[264136.527514] [] osd_acct_index_lookup+0x235/0x480 [osd_ldiskfs]
[264136.535127] [] lquotactl_slv+0x27d/0x9d0 [lquota]
[264136.541618] [] ofd_quotactl+0x13c/0x380 [ofd]
[264136.547746] [] tgt_request_handle+0xaea/0x1580 [ptlrpc]
[264136.554806] [] ptlrpc_server_handle_request+0x24b/0xab0 [ptlrpc]
[264136.562610] [] ptlrpc_main+0xb2c/0x1460 [ptlrpc]
[264136.569039] [] kthread+0xd1/0xe0
[264136.574042] [] ret_from_fork_nospec_begin+0xe/0x21
[264136.580632] [] 0xffffffffffffffff
[264136.585737] LustreError: dumping log to /tmp/lustre-log.1576165156.34429
[264349.590511] SysRq : Trigger a crash
[264349.594172] BUG: unable to handle kernel NULL pointer dereference at (null)
[264349.602142] IP: [] sysrq_handle_crash+0x16/0x20
[264349.608357] PGD 3344b32067 PUD 31426f2067 PMD 0
[264349.613141] Oops: 0002 [#1] SMP
[264349.616515] Modules linked in: osp(OE) ofd(OE) lfsck(OE) ost(OE) mgc(OE) osd_ldiskfs(OE) lquota(OE) raid456 async_raid6_recov async_memcpy async_pq raid6_pq libcrc32c async_xor xor async_tx ldiskfs(OE) lmv(OE) osc(OE) lov(OE) fid(OE) fld(OE) ko2iblnd(OE) ptlrpc(OE) obdclass(OE) lnet(OE) libcfs(OE) rpcsec_gss_krb5 auth_rpcgss nfsv4 dns_resolver nfs lockd grace fscache rdma_ucm(OE) ib_ucm(OE) rdma_cm(OE) iw_cm(OE) ib_ipoib(OE) ib_cm(OE) ib_umad(OE) mlx4_en(OE) mlx4_ib(OE) mlx4_core(OE) dell_rbu sunrpc vfat fat dcdbas amd64_edac_mod edac_mce_amd kvm_amd kvm irqbypass crc32_pclmul ghash_clmulni_intel aesni_intel lrw gf128mul glue_helper ablk_helper cryptd pcspkr dm_service_time ses enclosure dm_multipath dm_mod ipmi_si ipmi_devintf sg ccp ipmi_msghandler acpi_power_meter k10temp i2c_piix4 ip_tables ext4
[264349.689140] mbcache jbd2 sd_mod crc_t10dif crct10dif_generic mlx5_ib(OE) ib_uverbs(OE) ib_core(OE) i2c_algo_bit drm_kms_helper mlx5_core(OE) syscopyarea sysfillrect sysimgblt mlxfw(OE) fb_sys_fops ahci devlink ttm libahci mpt3sas(OE) mlx_compat(OE) crct10dif_pclmul tg3 raid_class crct10dif_common drm libata crc32c_intel ptp megaraid_sas scsi_transport_sas drm_panel_orientation_quirks pps_core [last unloaded: mdc]
[264349.725354] CPU: 21 PID: 35083 Comm: bash Kdump: loaded Tainted: G OE ------------ 3.10.0-957.27.2.el7_lustre.pl2.x86_64 #1
[264349.737684] Hardware name: Dell Inc. PowerEdge R6415/07YXFK, BIOS 1.10.6 08/15/2019
[264349.745424] task: ffff892252d72080 ti: ffff890e9b504000 task.ti: ffff890e9b504000
[264349.752991] RIP: 0010:[] [] sysrq_handle_crash+0x16/0x20
[264349.761623] RSP: 0018:ffff890e9b507e58 EFLAGS: 00010246
[264349.767020] RAX: ffffffffa7e64430 RBX: ffffffffa86e4f80 RCX: 0000000000000000
[264349.774240] RDX: 0000000000000000 RSI: ffff8902ff753898 RDI: 0000000000000063
[264349.781461] RBP: ffff890e9b507e58 R08: ffffffffa89e38bc R09: ffffffffa8a0e7d7
[264349.788681] R10: 00000000000032da R11: 00000000000032d9 R12: 0000000000000063
[264349.795900] R13: 0000000000000000 R14: 0000000000000007 R15: 0000000000000000
[264349.803120] FS: 00007fe662f8d740(0000) GS:ffff8902ff740000(0000) knlGS:0000000000000000
[264349.811291] CS: 0010 DS: 0000 ES: 0000 CR0: 0000000080050033
[264349.817124] CR2: 0000000000000000 CR3: 0000003d0c2a0000 CR4: 00000000003407e0
[264349.824344] Call Trace:
[264349.826888] [] __handle_sysrq+0x10d/0x170
[264349.832630] [] write_sysrq_trigger+0x28/0x40
[264349.838640] [] proc_reg_write+0x40/0x80
[264349.844210] [] vfs_write+0xc0/0x1f0
[264349.849436] [] SyS_write+0x7f/0xf0
[264349.854578] [] system_call_fastpath+0x22/0x27
[264349.860667] Code: eb 9b 45 01 f4 45 39 65 34 75 e5 4c 89 ef e8 e2 f7 ff ff eb db 66 66 66 66 90 55 48 89 e5 c7 05 91 31 7e 00 01 00 00 00 0f ae f8 04 25 00 00 00 00 01 5d c3 66 66 66 66 90 55 31 c0 c7 05 0e
[264349.881276] RIP [] sysrq_handle_crash+0x16/0x20
[264349.887568] RSP
[264349.891147] CR2: 0000000000000000
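The final oops is not a spontaneous failure: "SysRq : Trigger a crash" together with Comm: bash, write_sysrq_trigger in the call trace, and "Kdump: loaded" indicates an operator deliberately crashed the hung OSS to capture a vmcore for offline analysis. sysrq_handle_crash forces this by writing through a NULL pointer, which is why the oops reports a write fault (Oops: 0002) at address (null) and CR2: 0000000000000000. The trigger itself is a one-line write; a sketch of it follows, purely for illustration. It is destructive and root-only, so run it only on a node you intend to crash:

    #!/usr/bin/env python3
    # DESTRUCTIVE illustrative sketch of the operation the trace records:
    # writing 'c' to /proc/sysrq-trigger invokes sysrq_handle_crash(), which
    # deliberately writes through a NULL pointer so that kdump (loaded here)
    # panics the node and saves a vmcore for offline analysis.
    with open("/proc/sysrq-trigger", "w") as f:
        f.write("c")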