<?xml version="1.0" encoding="UTF-8"?>
<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 02:03:43 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92">
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-6841] replay-single test_30: multiop 20786 failed</title>
                <link>https://jira.whamcloud.com/browse/LU-6841</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;This issue was created by maloo for John Hammond &amp;lt;john.hammond@intel.com&amp;gt;&lt;/p&gt;

&lt;p&gt;This issue relates to the following test suite run: &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/3f0a5c78-27ba-11e5-b37a-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/3f0a5c78-27ba-11e5-b37a-5254006e85c2&lt;/a&gt;.&lt;/p&gt;

&lt;p&gt;The sub-test test_30 failed with the following error:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;multiop 20786 failed
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;Please provide additional information about the failure here.&lt;/p&gt;

&lt;p&gt;Info required for matching: replay-single 30&lt;/p&gt;</description>
                <environment></environment>
        <key id="31062">LU-6841</key>
            <summary>replay-single test_30: multiop 20786 failed</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="2" iconUrl="https://jira.whamcloud.com/images/icons/priorities/critical.svg">Critical</priority>
                        <status id="6" iconUrl="https://jira.whamcloud.com/images/icons/statuses/closed.png" description="The issue is considered finished, the resolution is correct. Issues which are closed can be reopened.">Closed</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="3">Duplicate</resolution>
                                        <assignee username="hongchao.zhang">Hongchao Zhang</assignee>
                                    <reporter username="maloo">Maloo</reporter>
                        <labels>
                            <label>p4hc</label>
                    </labels>
                <created>Mon, 13 Jul 2015 14:23:35 +0000</created>
                <updated>Thu, 28 Jan 2016 08:18:44 +0000</updated>
                            <resolved>Thu, 28 Jan 2016 08:18:44 +0000</resolved>
                                                                        <due></due>
                            <votes>0</votes>
                                    <watches>11</watches>
                                                                            <comments>
                            <comment id="122154" author="jamesanunez" created="Fri, 24 Jul 2015 17:44:15 +0000"  >&lt;p&gt;We&apos;ve hit this several times in review-dne-part-2:&lt;br/&gt;
2015-07-02 22:11:37 - &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/2a34f08c-214e-11e5-b398-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/2a34f08c-214e-11e5-b398-5254006e85c2&lt;/a&gt;&lt;br/&gt;
2015-07-06 01:11:19 - &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/5dfd340e-23c6-11e5-a5be-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/5dfd340e-23c6-11e5-a5be-5254006e85c2&lt;/a&gt;&lt;br/&gt;
2015-07-07 14:35:41 - &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/964e83c2-2505-11e5-9d0f-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/964e83c2-2505-11e5-9d0f-5254006e85c2&lt;/a&gt;&lt;br/&gt;
2015-07-07 16:17:47 - &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/0dc78356-251d-11e5-bf7b-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/0dc78356-251d-11e5-bf7b-5254006e85c2&lt;/a&gt;&lt;br/&gt;
2015-07-08 13:58:58 - &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/bd146d1c-25c5-11e5-a6b1-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/bd146d1c-25c5-11e5-a6b1-5254006e85c2&lt;/a&gt;&lt;br/&gt;
2015-07-11 01:17:04 - &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/3f0a5c78-27ba-11e5-b37a-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/3f0a5c78-27ba-11e5-b37a-5254006e85c2&lt;/a&gt;&lt;br/&gt;
2015-07-23 22:17:07 - &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/c3e8da08-31d5-11e5-84cf-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/c3e8da08-31d5-11e5-84cf-5254006e85c2&lt;/a&gt;&lt;br/&gt;
2015-07-23 14:54:47 - &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/a8bae426-3192-11e5-a788-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/a8bae426-3192-11e5-a788-5254006e85c2&lt;/a&gt;&lt;br/&gt;
2015-07-23 10:36:11 - &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/0da58c80-3178-11e5-9558-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/0da58c80-3178-11e5-9558-5254006e85c2&lt;/a&gt;&lt;br/&gt;
2015-07-23 08:49:47 - &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/54d88a1a-312f-11e5-9558-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/54d88a1a-312f-11e5-9558-5254006e85c2&lt;/a&gt;&lt;br/&gt;
2015-07-20 11:51:53 - &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/9bb5ee50-2f22-11e5-bb5a-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/9bb5ee50-2f22-11e5-bb5a-5254006e85c2&lt;/a&gt;&lt;br/&gt;
2015-07-18 05:32:09 - &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/4abb96d6-2d5a-11e5-a112-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/4abb96d6-2d5a-11e5-a112-5254006e85c2&lt;/a&gt;&lt;br/&gt;
2015-07-17 13:17:20 - &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/56c54190-2db2-11e5-8ad8-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/56c54190-2db2-11e5-8ad8-5254006e85c2&lt;/a&gt;&lt;br/&gt;
2015-07-13 19:14:15 - &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/c30d68dc-2a01-11e5-b04d-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/c30d68dc-2a01-11e5-b04d-5254006e85c2&lt;/a&gt;&lt;/p&gt;
</comment>
                            <comment id="122155" author="jamesanunez" created="Fri, 24 Jul 2015 17:45:25 +0000"  >&lt;p&gt;Di - Would you please review these errors and see if they are related to recent DNE landings?&lt;br/&gt;
Thank you.&lt;/p&gt;</comment>
                            <comment id="122177" author="di.wang" created="Fri, 24 Jul 2015 20:32:28 +0000"  >&lt;p&gt;Hmm, this is replay-single 30, and no DNE operation in this test. So it is unlikely related with DNE changes.&lt;/p&gt;</comment>
                            <comment id="122440" author="pjones" created="Tue, 28 Jul 2015 18:03:14 +0000"  >&lt;p&gt;Hongchao&lt;/p&gt;

&lt;p&gt;Could you please look into this issue?&lt;/p&gt;

&lt;p&gt;Thanks&lt;/p&gt;

&lt;p&gt;Peter&lt;/p&gt;</comment>
                            <comment id="122543" author="hongchao.zhang" created="Wed, 29 Jul 2015 14:35:06 +0000"  >&lt;p&gt;status update: still analyzing the logs, and still need more time to find the cause of it.&lt;/p&gt;</comment>
                            <comment id="122649" author="hongchao.zhang" created="Thu, 30 Jul 2015 07:50:40 +0000"  >&lt;p&gt;this could be related to &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-5319&quot; title=&quot;Support multiple slots per client in last_rcvd file&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-5319&quot;&gt;&lt;del&gt;LU-5319&lt;/del&gt;&lt;/a&gt; (multiple slots per client in last_rcvd), for it was encountered firstly in the test of the patch of it. &lt;a href=&quot;https://testing.hpdd.intel.com/sub_tests/153ee6d4-ffba-11e4-ac6b-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/sub_tests/153ee6d4-ffba-11e4-ac6b-5254006e85c2&lt;/a&gt; (at 2015-05-20)&lt;/p&gt;

&lt;p&gt;then this issue disappeared and occurred again just after the patch (&lt;a href=&quot;http://review.whamcloud.com/#/c/14860/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://review.whamcloud.com/#/c/14860/&lt;/a&gt;) was landed on July 1st.&lt;/p&gt;

&lt;p&gt;the reason of the bug is the committed replay request (open request) is timed out during replaying&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;00000100:00100000:0.0:1437707068.959718:0:6229:0:(client.c:2591:ptlrpc_free_committed()) lustre-MDT0000-mdc-ffff88007c3bb800: committing for last_committed 163208757256 gen 3
00000100:00100000:0.0:1437707068.959727:0:6229:0:(client.c:2622:ptlrpc_free_committed()) @@@ keeping (FL_REPLAY)  req@ffff88007a0c7000 x1507534108223584/t163208757253(163208757253) o101-&amp;gt;lustre-MDT0000-mdc-ffff88007c3bb800@10.1.5.29@tcp:12/10 lens 816/600 e 0 to 0 dl 1437707075 ref 1 fl Complete:RP/4/0 rc 0/0
00000100:00100000:0.0:1437707068.959731:0:6229:0:(client.c:2622:ptlrpc_free_committed()) @@@ keeping (FL_REPLAY)  req@ffff88007a0c5e00 x1507534108223592/t163208757254(163208757254) o101-&amp;gt;lustre-MDT0000-mdc-ffff88007c3bb800@10.1.5.29@tcp:12/10 lens 816/600 e 0 to 0 dl 1437707075 ref 1 fl Complete:RP/4/0 rc 0/0

...

00000100:00080000:0.0:1437707094.279383:0:5882:0:(import.c:1188:ptlrpc_connect_interpret()) ffff88007c3bf000 lustre-MDT0000_UUID: changing import state from CONNECTING to REPLAY
00000100:00080000:0.0:1437707094.279386:0:5882:0:(import.c:1448:ptlrpc_import_recovery_state_machine()) replay requested by lustre-MDT0000_UUID
00000100:00100000:0.0:1437707094.279389:0:5882:0:(client.c:2591:ptlrpc_free_committed()) lustre-MDT0000-mdc-ffff88007c3bb800: committing for last_committed 163208757256 gen 3
00000100:00100000:0.0:1437707094.279394:0:5882:0:(client.c:2617:ptlrpc_free_committed()) @@@ stopping search  req@ffff88007a0c4000 x1507534108223680/t163208757257(163208757257) o36-&amp;gt;lustre-MDT0000-mdc-ffff88007c3bb800@10.1.5.29@tcp:12/10 lens 640/416 e 0 to 0 dl 1437707076 ref 1 fl Complete:R/4/0 rc 0/0
00000100:00080000:0.0:1437707094.279401:0:5882:0:(recover.c:93:ptlrpc_replay_next()) import ffff88007c3bf000 from lustre-MDT0000_UUID committed 163208757256 last 0
00000100:00080000:0.0:1437707094.279408:0:5882:0:(client.c:2928:ptlrpc_replay_req()) @@@ REPLAY  req@ffff88007a0c7000 x1507534108223584/t163208757253(163208757253) o101-&amp;gt;lustre-MDT0000-mdc-ffff88007c3bb800@10.1.5.29@tcp:12/10 lens 816/600 e 0 to 0 dl 1437707075 ref 1 fl New:RP/4/0 rc 0/0

...

00000100:00000400:0.0:1437707100.279594:0:5882:0:(client.c:2020:ptlrpc_expire_one_request()) @@@ Request sent has timed out for slow reply: [sent 1437707094/real 1437707094]  req@ffff88007a0c7000 x1507534108223584/t163208757253(163208757253) o101-&amp;gt;lustre-MDT0000-mdc-ffff88007c3bb800@10.1.5.29@tcp:12/10 lens 816/600 e 1 to 1 dl 1437707096 ref 2 fl Rpc:XP/4/ffffffff rc 0/-1
00000100:00100000:0.0:1437707100.279621:0:5882:0:(client.c:2049:ptlrpc_expire_one_request()) @@@ err -110, sent_state=REPLAY (now=REPLAY)  req@ffff88007a0c7000 x1507534108223584/t163208757253(163208757253) o101-&amp;gt;lustre-MDT0000-mdc-ffff88007c3bb800@10.1.5.29@tcp:12/10 lens 816/600 e 1 to 1 dl 1437707096 ref 2 fl Rpc:XP/4/ffffffff rc 0/-1
00000100:00020000:0.0:1437707100.279637:0:5882:0:(client.c:2816:ptlrpc_replay_interpret()) request replay timed out, restarting recovery
00000100:00080000:0.0:1437707100.282045:0:5882:0:(import.c:673:ptlrpc_connect_import()) ffff88007c3bf000 lustre-MDT0000_UUID: changing import state from REPLAY to CONNECTING
00000100:00080000:0.0:1437707100.282053:0:5882:0:(import.c:518:import_select_connection()) lustre-MDT0000-mdc-ffff88007c3bb800: connect to NID 10.1.5.29@tcp last attempt 4306605556
00000100:00080000:0.0:1437707100.282062:0:5882:0:(import.c:596:import_select_connection()) lustre-MDT0000-mdc-ffff88007c3bb800: import ffff88007c3bf000 using connection 10.1.5.29@tcp/10.1.5.29@tcp
00000100:00100000:0.0:1437707100.282089:0:5882:0:(import.c:760:ptlrpc_connect_import()) @@@ (re)connect request (timeout 5)  req@ffff88007a0c4f00 x0/t0(0) o38-&amp;gt;lustre-MDT0000-mdc-ffff88007c3bb800@10.1.5.29@tcp:12/10 lens 520/544 e 0 to 0 dl 0 ref 1 fl New:N/0/ffffffff rc 0/-1
00000100:00100000:0.0:1437707100.282101:0:5882:0:(client.c:1951:ptlrpc_check_set()) Completed RPC pname:cluuid:pid:xid:nid:opc ptlrpcd_rcv:89a94e57-b8e5-bb46-dc76-c5424df90dcb:5882:1507534108223584:10.1.5.29@tcp:101
00000100:00100000:0.0:1437707100.282113:0:5882:0:(client.c:1574:ptlrpc_send_new_req()) Sending RPC pname:cluuid:pid:xid:nid:opc ptlrpcd_rcv:89a94e57-b8e5-bb46-dc76-c5424df90dcb:5882:1507534108223988:10.1.5.29@tcp:38
00000100:00080000:0.0:1437707100.283658:0:5882:0:(import.c:1004:ptlrpc_connect_interpret()) lustre-MDT0000-mdc-ffff88007c3bb800: connect to target with instance 39
00000100:00080000:0.0:1437707100.283671:0:5882:0:(import.c:1163:ptlrpc_connect_interpret()) reconnected to lustre-MDT0000_UUID@10.1.5.29@tcp after partition
00000100:00080000:0.0:1437707100.283673:0:5882:0:(import.c:1173:ptlrpc_connect_interpret()) lustre-MDT0000-mdc-ffff88007c3bb800: reconnected to lustre-MDT0000_UUID during replay
00000100:00080000:0.0:1437707100.283676:0:5882:0:(import.c:1179:ptlrpc_connect_interpret()) ffff88007c3bf000 lustre-MDT0000_UUID: changing import state from CONNECTING to REPLAY
00000100:00080000:0.0:1437707100.283679:0:5882:0:(import.c:1448:ptlrpc_import_recovery_state_machine()) replay requested by lustre-MDT0000_UUID
00000100:00100000:0.0:1437707100.283682:0:5882:0:(client.c:2591:ptlrpc_free_committed()) lustre-MDT0000-mdc-ffff88007c3bb800: committing for last_committed 163208757256 gen 3
00000100:00100000:0.0:1437707100.283686:0:5882:0:(client.c:2617:ptlrpc_free_committed()) @@@ stopping search  req@ffff88007a0c4000 x1507534108223680/t163208757257(163208757257) o36-&amp;gt;lustre-MDT0000-mdc-ffff88007c3bb800@10.1.5.29@tcp:12/10 lens 640/416 e 0 to 0 dl 1437707076 ref 1 fl Complete:R/4/0 rc 0/0
00000100:00080000:0.0:1437707100.283693:0:5882:0:(recover.c:93:ptlrpc_replay_next()) import ffff88007c3bf000 from lustre-MDT0000_UUID committed 163208757256 last 0
00000100:00080000:0.0:1437707100.283699:0:5882:0:(client.c:2928:ptlrpc_replay_req()) @@@ REPLAY  req@ffff88007a0c5e00 x1507534108223592/t163208757254(163208757254) o101-&amp;gt;lustre-MDT0000-mdc-

..

00000100:00000400:0.0:1437707106.283630:0:5882:0:(client.c:2020:ptlrpc_expire_one_request()) @@@ Request sent has timed out for slow reply: [sent 1437707100/real 1437707100]  req@ffff88007a0c5e00 x1507534108223592/t163208757254(163208757254) o101-&amp;gt;lustre-MDT0000-mdc-ffff88007c3bb800@10.1.5.29@tcp:12/10 lens 816/600 e 1 to 1 dl 1437707102 ref 2 fl Rpc:XP/6/ffffffff rc 0/-1
00000100:00100000:0.0:1437707106.283659:0:5882:0:(client.c:2049:ptlrpc_expire_one_request()) @@@ err -110, sent_state=REPLAY (now=REPLAY)  req@ffff88007a0c5e00 x1507534108223592/t163208757254(163208757254) o101-&amp;gt;lustre-MDT0000-mdc-ffff88007c3bb800@10.1.5.29@tcp:12/10 lens 816/600 e 1 to 1 dl 1437707102 ref 2 fl Rpc:XP/6/ffffffff rc 0/-1
00000100:00020000:0.0:1437707106.283676:0:5882:0:(client.c:2816:ptlrpc_replay_interpret()) request replay timed out, restarting recovery
00000100:00080000:0.0:1437707106.285949:0:5882:0:(import.c:673:ptlrpc_connect_import()) ffff88007c3bf000 lustre-MDT0000_UUID: changing import state from REPLAY to CONNECTING
00000100:00080000:0.0:1437707106.285956:0:5882:0:(import.c:518:import_select_connection()) lustre-MDT0000-mdc-ffff88007c3bb800: connect to NID 10.1.5.29@tcp last attempt 4306611560
00000100:00080000:0.0:1437707106.285965:0:5882:0:(import.c:596:import_select_connection()) lustre-MDT0000-mdc-ffff88007c3bb800: import ffff88007c3bf000 using connection 10.1.5.29@tcp/10.1.5.29@tcp
00000100:00100000:0.0:1437707106.285996:0:5882:0:(import.c:760:ptlrpc_connect_import()) @@@ (re)connect request (timeout 5)  req@ffff88007a0c5800 x0/t0(0) o38-&amp;gt;lustre-MDT0000-mdc-ffff88007c3bb800@10.1.5.29@tcp:12/10 lens 520/544 e 0 to 0 dl 0 ref 1 fl New:N/0/ffffffff rc 0/-1
00000100:00100000:0.0:1437707106.286008:0:5882:0:(client.c:1951:ptlrpc_check_set()) Completed RPC pname:cluuid:pid:xid:nid:opc ptlrpcd_rcv:89a94e57-b8e5-bb46-dc76-c5424df90dcb:5882:1507534108223592:10.1.5.29@tcp:101
00000100:00100000:0.0:1437707106.286020:0:5882:0:(client.c:1574:ptlrpc_send_new_req()) Sending RPC pname:cluuid:pid:xid:nid:opc ptlrpcd_rcv:89a94e57-b8e5-bb46-dc76-c5424df90dcb:5882:1507534108224040:10.1.5.29@tcp:38
00000100:00080000:0.0:1437707106.287815:0:5882:0:(import.c:1004:ptlrpc_connect_interpret()) lustre-MDT0000-mdc-ffff88007c3bb800: connect to target with instance 39
00000100:00080000:0.0:1437707106.287828:0:5882:0:(import.c:1163:ptlrpc_connect_interpret()) reconnected to lustre-MDT0000_UUID@10.1.5.29@tcp after partition
00000100:00080000:0.0:1437707106.287831:0:5882:0:(import.c:1173:ptlrpc_connect_interpret()) lustre-MDT0000-mdc-ffff88007c3bb800: reconnected to lustre-MDT0000_UUID during replay
00000100:00080000:0.0:1437707106.287833:0:5882:0:(import.c:1179:ptlrpc_connect_interpret()) ffff88007c3bf000 lustre-MDT0000_UUID: changing import state from CONNECTING to REPLAY
00000100:00080000:0.0:1437707106.287836:0:5882:0:(import.c:1448:ptlrpc_import_recovery_state_machine()) replay requested by lustre-MDT0000_UUID
00000100:00100000:0.0:1437707106.287839:0:5882:0:(client.c:2591:ptlrpc_free_committed()) lustre-MDT0000-mdc-ffff88007c3bb800: committing for last_committed 163208757256 gen 3
00000100:00100000:0.0:1437707106.287843:0:5882:0:(client.c:2617:ptlrpc_free_committed()) @@@ stopping search  req@ffff88007a0c4000 x1507534108223680/t163208757257(163208757257) o36-&amp;gt;lustre-MDT0000-mdc-ffff88007c3bb800@10.1.5.29@tcp:12/10 lens 640/416 e 0 to 0 dl 1437707076 ref 1 fl Complete:R/4/0 rc 0/0
00000100:00080000:0.0:1437707106.287852:0:5882:0:(recover.c:93:ptlrpc_replay_next()) import ffff88007c3bf000 from lustre-MDT0000_UUID committed 163208757256 last 0
00000100:00080000:0.0:1437707106.287857:0:5882:0:(client.c:2928:ptlrpc_replay_req()) @@@ REPLAY  req@ffff88007a0c5e00 x1507534108223592/t163208757254(163208757254) o101-&amp;gt;lustre-MDT0000-mdc-ffff88007c3bb800@10.1.5.29@tcp:12/10 lens 816/600 e 1 to 1 dl 1437707102 ref 1 fl New:EXP/6/ffffffff rc 0/-1
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;Hi Yujian,&lt;br/&gt;
   In &lt;a href=&quot;https://testing.hpdd.intel.com/sub_tests/61999b00-32d7-11e5-a4fd-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/sub_tests/61999b00-32d7-11e5-a4fd-5254006e85c2&lt;/a&gt; (July 24),&lt;br/&gt;
the debug logs at the MDS side during recovery were dumped to &quot;/tmp/lustre-log.1437707274.9697&quot;,&lt;br/&gt;
and how can I get that log?  Thanks!!&lt;/p&gt;</comment>
                            <comment id="122797" author="yujian" created="Fri, 31 Jul 2015 00:32:55 +0000"  >&lt;p&gt;Hi Hongchao,&lt;br/&gt;
I think the logs were missing because the nodes were re-provisioned for testing other patches.&lt;/p&gt;</comment>
                            <comment id="122816" author="gerrit" created="Fri, 31 Jul 2015 04:47:12 +0000"  >&lt;p&gt;Hongchao Zhang (hongchao.zhang@intel.com) uploaded a new patch: &lt;a href=&quot;http://review.whamcloud.com/15814&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://review.whamcloud.com/15814&lt;/a&gt;&lt;br/&gt;
Subject: &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-6841&quot; title=&quot;replay-single test_30: multiop 20786 failed&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-6841&quot;&gt;&lt;del&gt;LU-6841&lt;/del&gt;&lt;/a&gt; target: debug patch to collect more logs&lt;br/&gt;
Project: fs/lustre-release&lt;br/&gt;
Branch: master&lt;br/&gt;
Current Patch Set: 1&lt;br/&gt;
Commit: d8fa2f729b6dd8fa5c9ae604308f39306ddd6aa9&lt;/p&gt;</comment>
                            <comment id="123614" author="adilger" created="Fri, 7 Aug 2015 18:13:37 +0000"  >&lt;p&gt;Hi Gregoire, have you seen any problems similar to this in your testing?&lt;/p&gt;</comment>
                            <comment id="124544" author="pichong" created="Wed, 19 Aug 2015 12:40:37 +0000"  >&lt;p&gt;I don&apos;t remember having seen this test fail during my local testing.&lt;/p&gt;

&lt;p&gt;However, I have to admit that the kernel I use does not include the &lt;tt&gt;dev_read_only&lt;/tt&gt; patch, which means the MDT device cannot be made readonly by &lt;tt&gt;replay_barrier&lt;/tt&gt;. This means the test environment is not exactly the same.&lt;/p&gt;</comment>
                            <comment id="125053" author="jgmitter" created="Tue, 25 Aug 2015 17:39:44 +0000"  >&lt;p&gt;This issue has not been seen on any branch since 7/24 and may have been resolved by related landings.  We are going to reduce the severity and leave it open.&lt;/p&gt;</comment>
                            <comment id="125577" author="pjones" created="Fri, 28 Aug 2015 18:28:34 +0000"  >&lt;p&gt;Let&apos;s close the ticket as cannot reproduce and reopen if we do see it again&lt;/p&gt;</comment>
                            <comment id="132151" author="utopiabound" created="Fri, 30 Oct 2015 13:00:22 +0000"  >&lt;p&gt;Just encountered on master but test replay-single/test_31&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/7708f4da-7e14-11e5-9c23-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/7708f4da-7e14-11e5-9c23-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="132829" author="hongchao.zhang" created="Fri, 6 Nov 2015 13:06:22 +0000"  >&lt;p&gt;Status update:&lt;br/&gt;
as per the logs, this issue is caused by the resent replay request during recovery, but MDT doesn&apos;t find the corresponding reply&lt;br/&gt;
of the resent request, and it could be related to the Multiple Slots in last_rcvd. Still analyzing the related code lines to find where&lt;br/&gt;
the problem is and will update the status once there is any progress.&lt;/p&gt;</comment>
                            <comment id="134067" author="gerrit" created="Fri, 20 Nov 2015 14:47:18 +0000"  >&lt;p&gt;Hongchao Zhang (hongchao.zhang@intel.com) uploaded a new patch: &lt;a href=&quot;http://review.whamcloud.com/17303&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://review.whamcloud.com/17303&lt;/a&gt;&lt;br/&gt;
Subject: &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-6841&quot; title=&quot;replay-single test_30: multiop 20786 failed&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-6841&quot;&gt;&lt;del&gt;LU-6841&lt;/del&gt;&lt;/a&gt; target: check the last reply for resent&lt;br/&gt;
Project: fs/lustre-release&lt;br/&gt;
Branch: master&lt;br/&gt;
Current Patch Set: 1&lt;br/&gt;
Commit: 21583ba45fefda43841c36ab3609e529dbadda1d&lt;/p&gt;</comment>
                            <comment id="135980" author="hongchao.zhang" created="Thu, 10 Dec 2015 23:57:45 +0000"  >&lt;p&gt;This issue could be marked as a duplicate of &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-5951&quot; title=&quot;sanity test_39k: mtime is lost on close&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-5951&quot;&gt;&lt;del&gt;LU-5951&lt;/del&gt;&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="140316" author="hongchao.zhang" created="Thu, 28 Jan 2016 08:18:44 +0000"  >&lt;p&gt;closed as duplicate of &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-5951&quot; title=&quot;sanity test_39k: mtime is lost on close&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-5951&quot;&gt;&lt;del&gt;LU-5951&lt;/del&gt;&lt;/a&gt;&lt;/p&gt;</comment>
                    </comments>
                <issuelinks>
                            <issuelinktype id="10011">
                    <name>Related</name>
                                            <outwardlinks description="is related to ">
                                        <issuelink>
            <issuekey id="25521">LU-5319</issuekey>
        </issuelink>
                            </outwardlinks>
                                                                <inwardlinks description="is related to">
                                        <issuelink>
            <issuekey id="18287">LU-3127</issuekey>
        </issuelink>
                            </inwardlinks>
                                    </issuelinktype>
                    </issuelinks>
                <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|hzxi1r:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>9223372036854775807</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>