<?xml version="1.0" encoding="UTF-8"?>
<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 01:58:07 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92">
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>
    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-6200] Failover recovery-mds-scale test_failover_ost: test_failover_ost returned 1</title>
                <link>https://jira.whamcloud.com/browse/LU-6200</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;This issue was created by maloo for sarah &amp;lt;sarah@whamcloud.com&amp;gt;&lt;/p&gt;

&lt;p&gt;This issue relates to the following test suite run: &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/be3ebe76-a817-11e4-93dd-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/be3ebe76-a817-11e4-93dd-5254006e85c2&lt;/a&gt;.&lt;/p&gt;

&lt;p&gt;The sub-test test_failover_ost failed with the following error:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;test_failover_ost returned 1
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;client 3 shows&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;tar: etc/sysconfig/quota_nld: Cannot write: No such file or directory
tar: etc/sysconfig/quota_nld: Cannot utime: No such file or directory
tar: etc/sysconfig/sandbox: Cannot write: No such file or directory
tar: etc/sysconfig/nfs: Cannot write: No such file or directory
tar: Exiting with failure status due to previous errors
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;</description>
                <environment>client and server: lustre-master build # 2835 RHEL6</environment>
        <key id="28511">LU-6200</key>
            <summary>Failover recovery-mds-scale test_failover_ost: test_failover_ost returned 1</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="3" iconUrl="https://jira.whamcloud.com/images/icons/priorities/major.svg">Major</priority>
                        <status id="5" iconUrl="https://jira.whamcloud.com/images/icons/statuses/resolved.png" description="A resolution has been taken, and it is awaiting verification by reporter. From here issues are either reopened, or are closed.">Resolved</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="3">Duplicate</resolution>
                                        <assignee username="hongchao.zhang">Hongchao Zhang</assignee>
                                    <reporter username="maloo">Maloo</reporter>
                        <labels>
                            <label>p4hc</label>
                    </labels>
                <created>Tue, 3 Feb 2015 01:40:38 +0000</created>
                <updated>Tue, 26 Mar 2019 07:57:01 +0000</updated>
                            <resolved>Tue, 26 Mar 2019 07:57:01 +0000</resolved>
                                    <version>Lustre 2.7.0</version>
                    <version>Lustre 2.8.0</version>
                    <version>Lustre 2.10.0</version>
                    <version>Lustre 2.11.0</version>
                    <version>Lustre 2.10.4</version>
                                                        <due></due>
                            <votes>0</votes>
                                    <watches>13</watches>
                                                                            <comments>
                            <comment id="105475" author="yujian" created="Tue, 3 Feb 2015 06:23:01 +0000"  >&lt;p&gt;Hi Hongchao,&lt;/p&gt;

&lt;p&gt;Is this similar to &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-4621&quot; title=&quot;recovery-mds-scale: test_failover_ost&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-4621&quot;&gt;&lt;del&gt;LU-4621&lt;/del&gt;&lt;/a&gt;?&lt;/p&gt;</comment>
                            <comment id="105567" author="adilger" created="Tue, 3 Feb 2015 19:02:39 +0000"  >&lt;p&gt;Hongchao, what is the severity of this bug? Is it something that will break normal failover for users or is it only affecting testing?&lt;/p&gt;</comment>
                            <comment id="105568" author="adilger" created="Tue, 3 Feb 2015 19:03:40 +0000"  >&lt;p&gt;Is this a new regression from a recently landed patch?&lt;/p&gt;</comment>
                            <comment id="105655" author="hongchao.zhang" created="Wed, 4 Feb 2015 11:23:26 +0000"  >&lt;p&gt;Hi Andreas, &lt;br/&gt;
this issue should not affecting testing only.&lt;br/&gt;
there is no corresponding object when replaying the &quot;OST_SETATTR&quot;(op=2) request, and return -2(ENOENT).&lt;/p&gt;

&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;11:14:39:LustreError: 2377:0:(client.c:2809:ptlrpc_replay_interpret()) @@@ status -2, old was 0  req@ffff8800448fd980 x1491531070413056/t30064787885(30064787885) o2-&amp;gt;lustre-OST0000-osc-ffff880037b57c00@10.2.4.145@tcp:28/4 lens 440/400 e 0 to 0 dl 1422443475 ref 2 fl Interpret:R/4/0 rc -2/-2
11:14:39:LustreError: 2377:0:(client.c:2809:ptlrpc_replay_interpret()) Skipped 12 previous similar messages
11:14:39:Lustre: lustre-OST0000-osc-ffff880037b57c00: Connection restored to lustre-OST0000 (at 10.2.4.145@tcp)
11:14:39:Lustre: Skipped 2 previous similar messages
11:14:39:Lustre: 2377:0:(client.c:1942:ptlrpc_expire_one_request()) @@@ Request sent has failed due to network error: [sent 1422443458/real 1422443458]  req@ffff88004303c680 x1491531070428380/t0(0) o8-&amp;gt;lustre-OST0001-osc-ffff880037b57c00@10.2.4.141@tcp:28/4 lens 400/544 e 0 to 1 dl 1422443483 ref 1 fl Rpc:XN/0/ffffffff rc 0/-1
11:14:39:Lustre: 2377:0:(client.c:1942:ptlrpc_expire_one_request()) Skipped 26 previous similar messages
11:14:40:LustreError: 2377:0:(client.c:2809:ptlrpc_replay_interpret()) @@@ status -2, old was 0  req@ffff88004293c680 x1491531070413096/t30064787867(30064787867) o2-&amp;gt;lustre-OST0001-osc-ffff880037b57c00@10.2.4.145@tcp:28/4 lens 440/400 e 0 to 0 dl 1422443544 ref 2 fl Interpret:R/4/0 rc -2/-2
11:14:40:LustreError: 2377:0:(client.c:2809:ptlrpc_replay_interpret()) Skipped 7 previous similar messages
11:14:40:Lustre: lustre-OST0001-osc-ffff880037b57c00: Connection restored to lustre-OST0001 (at 10.2.4.145@tcp)
11:14:40:Lustre: 2377:0:(client.c:1942:ptlrpc_expire_one_request()) @@@ Request sent has failed due to network error: [sent 1422443593/real 1422443593]  req@ffff880043240080 x1491531070431228/t0(0) o8-&amp;gt;lustre-OST0005-osc-ffff880037b57c00@10.2.4.141@tcp:28/4 lens 400/544 e 0 to 1 dl 1422443619 ref 1 fl Rpc:XN/0/ffffffff rc 0/-1
11:14:40:Lustre: 2377:0:(client.c:1942:ptlrpc_expire_one_request()) Skipped 46 previous similar messages
11:14:40:LustreError: 2377:0:(client.c:2809:ptlrpc_replay_interpret()) @@@ status -2, old was 0  req@ffff880042d84c80 x1491531070413176/t30064787863(30064787863) o2-&amp;gt;lustre-OST0003-osc-ffff880037b57c00@10.2.4.145@tcp:28/4 lens 440/400 e 0 to 0 dl 1422443660 ref 2 fl Interpret:R/4/0 rc -2/-2
11:14:40:LustreError: 2377:0:(client.c:2809:ptlrpc_replay_interpret()) Skipped 17 previous similar messages
11:14:40:Lustre: lustre-OST0003-osc-ffff880037b57c00: Connection restored to lustre-OST0003 (at 10.2.4.145@tcp)
11:14:40:Lustre: Skipped 1 previous similar message
11:14:40:LustreError: 2377:0:(client.c:2809:ptlrpc_replay_interpret()) @@@ status -2, old was 0  req@ffff880042dbec80 x1491531070412976/t30064787924(30064787924) o2-&amp;gt;lustre-OST0005-osc-ffff880037b57c00@10.2.4.145@tcp:28/4 lens 440/400 e 0 to 0 dl 1422443725 ref 2 fl Interpret:R/4/0 rc -2/-2
11:31:06:LustreError: 2377:0:(client.c:2809:ptlrpc_replay_interpret()) Skipped 16 previous similar messages
11:31:06:Lustre: lustre-OST0005-osc-ffff880037b57c00: Connection restored to lustre-OST0005 (at 10.2.4.145@tcp)
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;how about recreating those missing objects in &quot;ofd_setattr_hdl&quot; just like &quot;ofd_preprw_write&quot;? &lt;/p&gt;</comment>
                            <comment id="105999" author="gerrit" created="Fri, 6 Feb 2015 06:52:19 +0000"  >&lt;p&gt;Hongchao Zhang (hongchao.zhang@intel.com) uploaded a new patch: &lt;a href=&quot;http://review.whamcloud.com/13668&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://review.whamcloud.com/13668&lt;/a&gt;&lt;br/&gt;
Subject: &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-6200&quot; title=&quot;Failover recovery-mds-scale test_failover_ost: test_failover_ost returned 1&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-6200&quot;&gt;&lt;del&gt;LU-6200&lt;/del&gt;&lt;/a&gt; ofd: recreate objects for setattr&lt;br/&gt;
Project: fs/lustre-release&lt;br/&gt;
Branch: master&lt;br/&gt;
Current Patch Set: 1&lt;br/&gt;
Commit: c22faf7b7a1721ca3da24f31f2164b18d0cfb666&lt;/p&gt;</comment>
                            <comment id="108215" author="hongchao.zhang" created="Fri, 27 Feb 2015 09:34:49 +0000"  >&lt;p&gt;the patch for this ticket has been merged with the patch for &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-5526&quot; title=&quot;recovery-mds-scale test failover_mds: dd: No space left on device&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-5526&quot;&gt;&lt;del&gt;LU-5526&lt;/del&gt;&lt;/a&gt;, and the &quot;No such file or directory&quot; seems to have been&lt;br/&gt;
fixed by the patch, and the issue of &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-5526&quot; title=&quot;recovery-mds-scale test failover_mds: dd: No space left on device&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-5526&quot;&gt;&lt;del&gt;LU-5526&lt;/del&gt;&lt;/a&gt;(No space left on device) still occurs, and its cause is known but the best way to fix it&lt;br/&gt;
is still under way. &lt;/p&gt;

&lt;p&gt;&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/09d2e80a-b798-11e4-9d63-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/09d2e80a-b798-11e4-9d63-5254006e85c2&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/78c3db02-b5d1-11e4-a70c-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/78c3db02-b5d1-11e4-a70c-5254006e85c2&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/815c59b4-b3a7-11e4-add6-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/815c59b4-b3a7-11e4-add6-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="135918" author="standan" created="Thu, 10 Dec 2015 19:48:31 +0000"  >&lt;p&gt;master, build# 3264, 2.7.64 tag&lt;br/&gt;
Hard Failover: EL6.7 Server/Client&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/7b412132-9edd-11e5-87a9-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/7b412132-9edd-11e5-87a9-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="136413" author="sarah" created="Tue, 15 Dec 2015 19:29:35 +0000"  >&lt;p&gt;Hit this issue for every hard failover configs(6), if the patch for &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-5526&quot; title=&quot;recovery-mds-scale test failover_mds: dd: No space left on device&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-5526&quot;&gt;&lt;del&gt;LU-5526&lt;/del&gt;&lt;/a&gt; can not be landed soon, could we have a separated patch for this particular problem? &lt;/p&gt;

&lt;p&gt;Per the discussion in triage call,  with separated patch the test would still hit &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-5526&quot; title=&quot;recovery-mds-scale test failover_mds: dd: No space left on device&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-5526&quot;&gt;&lt;del&gt;LU-5526&lt;/del&gt;&lt;/a&gt; every time,  so no improvement in this case with separated patch&lt;/p&gt;</comment>
                            <comment id="139382" author="standan" created="Wed, 20 Jan 2016 01:07:15 +0000"  >&lt;p&gt;Another instance found for hardfailover: EL6.7 Server/Client&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/3e92c154-bc93-11e5-8f65-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/3e92c154-bc93-11e5-8f65-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="139388" author="standan" created="Wed, 20 Jan 2016 01:29:53 +0000"  >&lt;p&gt;Another instance found for hardfailover: EL6.7 Server/SLES11 SP3 Clients&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/762762d0-ba4c-11e5-9a07-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/762762d0-ba4c-11e5-9a07-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="141699" author="standan" created="Tue, 9 Feb 2016 23:51:37 +0000"  >&lt;p&gt;Another instance found for hardfailover : EL6.7 Server/Client, tag 2.7.66, master build 3314&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sessions/7c5e8006-cb2d-11e5-b3e8-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sessions/7c5e8006-cb2d-11e5-b3e8-5254006e85c2&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;Another instance found for hardfailover : EL6.7 Server/Client - ZFS, tag 2.7.66, master build 3314&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sessions/766ea3ec-cb55-11e5-b49e-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sessions/766ea3ec-cb55-11e5-b49e-5254006e85c2&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;Another instance found for hardfailover : EL7 Server/Client, tag 2.7.66, master build 3314&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sessions/8d13249a-ca8f-11e5-9609-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sessions/8d13249a-ca8f-11e5-9609-5254006e85c2&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;Another instance found for hardfailover : EL7 Server/SLES11 SP3 Client, tag 2.7.66, master build 3316&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sessions/2fbf67e4-cd4c-11e5-b1fa-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sessions/2fbf67e4-cd4c-11e5-b1fa-5254006e85c2&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;Another instance found for hardfailover : EL7 Server/Client - ZFS, tag 2.7.66, master build 3314&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sessions/f0dd9616-ca6e-11e5-9609-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sessions/f0dd9616-ca6e-11e5-9609-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="143556" author="standan" created="Wed, 24 Feb 2016 16:47:15 +0000"  >&lt;p&gt;Another instance found on b2_8 for failover testing , build# 6.&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sessions/0aed3028-da39-11e5-a8a6-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sessions/0aed3028-da39-11e5-a8a6-5254006e85c2&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sessions/eaf85780-d65e-11e5-afe8-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sessions/eaf85780-d65e-11e5-afe8-5254006e85c2&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sessions/54ec62da-d99d-11e5-9ebe-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sessions/54ec62da-d99d-11e5-9ebe-5254006e85c2&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sessions/eb9f29ec-d8da-11e5-83e2-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sessions/eb9f29ec-d8da-11e5-83e2-5254006e85c2&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sessions/2f0aa9f6-d5a5-11e5-9cc2-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sessions/2f0aa9f6-d5a5-11e5-9cc2-5254006e85c2&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sessions/c5a8e44c-d9c7-11e5-85dd-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sessions/c5a8e44c-d9c7-11e5-85dd-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="185644" author="hongchao.zhang" created="Tue, 21 Feb 2017 13:27:07 +0000"  >&lt;p&gt;the patch &lt;a href=&quot;https://review.whamcloud.com/#/c/13668/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://review.whamcloud.com/#/c/13668/&lt;/a&gt; has been updated.&lt;/p&gt;</comment>
                            <comment id="196577" author="shadow" created="Mon, 22 May 2017 04:27:02 +0000"  >&lt;p&gt;Hongchao,&lt;/p&gt;

&lt;p&gt;i don&apos;t have an access to gerrit now, but you patch &lt;br/&gt;
&lt;a href=&quot;https://git.hpdd.intel.com/?p=fs/lustre-release.git;a=commitdiff;h=daa98c46817c98d6fbf70dafa9fbdde678f8b9ba;hp=32d1a1c5d610d054ad4609c1cf332172e8310805&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://git.hpdd.intel.com/?p=fs/lustre-release.git;a=commitdiff;h=daa98c46817c98d6fbf70dafa9fbdde678f8b9ba;hp=32d1a1c5d610d054ad4609c1cf332172e8310805&lt;/a&gt;&lt;br/&gt;
is bad.&lt;/p&gt;

&lt;p&gt;Looks, You can&apos;t use a &lt;/p&gt;

&lt;p&gt;+               /* Do sync create if the seq is about to used up */&lt;br/&gt;
+               if (fid_seq_is_idif(seq) || fid_seq_is_mdt0(seq)) {&lt;br/&gt;
+                       if (unlikely(oid &amp;gt;= IDIF_MAX_OID - 1))&lt;br/&gt;
+                               sync = 1;&lt;/p&gt;

&lt;p&gt;because ost id in this case need to account lower 16 bits from seq, please look to the ost id macros.&lt;/p&gt;</comment>
                            <comment id="197031" author="hongchao.zhang" created="Thu, 25 May 2017 10:16:46 +0000"  >&lt;p&gt;the patch has been updated as per the review feedback.&lt;/p&gt;</comment>
                            <comment id="200062" author="hongchao.zhang" created="Fri, 23 Jun 2017 12:19:56 +0000"  >&lt;p&gt;the patch &lt;a href=&quot;https://review.whamcloud.com/#/c/13668/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://review.whamcloud.com/#/c/13668/&lt;/a&gt; has been updated.&lt;/p&gt;</comment>
                            <comment id="209669" author="casperjx" created="Tue, 26 Sep 2017 22:04:46 +0000"  >&lt;p&gt;2.11.0:&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sessions/e6578085-2eed-486d-8601-e5214bac4bb0&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sessions/e6578085-2eed-486d-8601-e5214bac4bb0&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="210793" author="hongchao.zhang" created="Wed, 11 Oct 2017 10:30:44 +0000"  >&lt;p&gt;the patch &lt;a href=&quot;https://review.whamcloud.com/#/c/13668/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://review.whamcloud.com/#/c/13668/&lt;/a&gt; has been updated&lt;/p&gt;</comment>
                            <comment id="244106" author="sergey" created="Mon, 18 Mar 2019 09:46:27 +0000"  >&lt;p&gt;Hi,&lt;/p&gt;

&lt;p&gt;Description of the problem looks similar with I&apos;ve already fixed in &lt;a href=&quot;https://review.whamcloud.com/#/c/33836/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://review.whamcloud.com/#/c/33836/&lt;/a&gt;&#160;.&lt;br/&gt;
Please look carefully and If I am right this could be resolved as a dup of &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-11765&quot; title=&quot;during failover test run, mdtest job fails, numerous stat failures &amp;#39;No such file or directory&amp;#39;&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-11765&quot;&gt;&lt;del&gt;LU-11765&lt;/del&gt;&lt;/a&gt;.&lt;/p&gt;</comment>
                            <comment id="244111" author="hongchao.zhang" created="Mon, 18 Mar 2019 11:18:28 +0000"  >&lt;p&gt;Hi Sergey,&lt;/p&gt;

&lt;p&gt;Thanks!&lt;br/&gt;
This should be the same as &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-11765&quot; title=&quot;during failover test run, mdtest job fails, numerous stat failures &amp;#39;No such file or directory&amp;#39;&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-11765&quot;&gt;&lt;del&gt;LU-11765&lt;/del&gt;&lt;/a&gt;, only the fixing ways are different, the patch in this ticket recreates the object&lt;br/&gt;
if it doesn&apos;t exist and the patch in &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-11765&quot; title=&quot;during failover test run, mdtest job fails, numerous stat failures &amp;#39;No such file or directory&amp;#39;&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-11765&quot;&gt;&lt;del&gt;LU-11765&lt;/del&gt;&lt;/a&gt; returns -EAGAIN to notify the caller to retry.&lt;/p&gt;</comment>
                            <comment id="244650" author="hongchao.zhang" created="Tue, 26 Mar 2019 07:57:01 +0000"  >&lt;p&gt;Resolved as duplicate of &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-11765&quot; title=&quot;during failover test run, mdtest job fails, numerous stat failures &amp;#39;No such file or directory&amp;#39;&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-11765&quot;&gt;&lt;del&gt;LU-11765&lt;/del&gt;&lt;/a&gt;&lt;/p&gt;</comment>
                    </comments>
                <issuelinks>
                            <issuelinktype id="10010">
                    <name>Duplicate</name>
                                                                <inwardlinks description="is duplicated by">
                                        <issuelink>
            <issuekey id="11241">LU-463</issuekey>
        </issuelink>
                            </inwardlinks>
                                    </issuelinktype>
                            <issuelinktype id="10011">
                    <name>Related</name>
                                            <outwardlinks description="is related to ">
                                        <issuelink>
            <issuekey id="26005">LU-5483</issuekey>
        </issuelink>
            <issuelink>
            <issuekey id="26094">LU-5526</issuekey>
        </issuelink>
                            </outwardlinks>
                                                                <inwardlinks description="is related to">
                                                        </inwardlinks>
                                    </issuelinktype>
                    </issuelinks>
                <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|hzx5gv:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>17329</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>