<?xml version="1.0" encoding="UTF-8"?>
<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 02:02:13 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92" >
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-6669] Hard Failover recovery-mds-scale test_failover_mds: test_failover_mds returned 3</title>
                <link>https://jira.whamcloud.com/browse/LU-6669</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;This issue was created by maloo for sarah_lw &amp;lt;wei3.liu@intel.com&amp;gt;&lt;/p&gt;

&lt;p&gt;This issue relates to the following test suite run: &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/f446d474-007b-11e5-9650-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/f446d474-007b-11e5-9650-5254006e85c2&lt;/a&gt;.&lt;/p&gt;

&lt;p&gt;The sub-test test_failover_mds failed with the following error:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;test_failover_mds returned 3
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;test log&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;CMD: shadow-21vm1,shadow-21vm2 cat /tmp/client-load.pid
shadow-21vm2: cat: /tmp/client-load.pid: No such file or directory
shadow-21vm1: cat: /tmp/client-load.pid: No such file or directory
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;</description>
                <environment></environment>
        <key id="30451">LU-6669</key>
            <summary>Hard Failover recovery-mds-scale test_failover_mds: test_failover_mds returned 3</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="4" iconUrl="https://jira.whamcloud.com/images/icons/priorities/minor.svg">Minor</priority>
                        <status id="5" iconUrl="https://jira.whamcloud.com/images/icons/statuses/resolved.png" description="A resolution has been taken, and it is awaiting verification by reporter. From here issues are either reopened, or are closed.">Resolved</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="5">Cannot Reproduce</resolution>
                                        <assignee username="wc-triage">WC Triage</assignee>
                                    <reporter username="maloo">Maloo</reporter>
                        <labels>
                    </labels>
                <created>Mon, 1 Jun 2015 18:54:06 +0000</created>
                <updated>Tue, 14 Dec 2021 22:36:49 +0000</updated>
                            <resolved>Tue, 14 Dec 2021 22:36:49 +0000</resolved>
                                    <version>Lustre 2.8.0</version>
                                                        <due></due>
                            <votes>0</votes>
                                    <watches>5</watches>
                                                                            <comments>
                            <comment id="118310" author="sarah" created="Fri, 12 Jun 2015 00:22:03 +0000"  >
&lt;p&gt;The failure is due to failed writing pid into LOAD_PID_FILE in run_*.sh&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;# recovery-*-scale scripts use this to signal the client loads to die
echo $$ &amp;gt;$LOAD_PID_FILE
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;</comment>
                            <comment id="118321" author="green" created="Fri, 12 Jun 2015 04:12:45 +0000"  >&lt;p&gt;To me it looks like something wiped all or most of /tmp.&lt;br/&gt;
This is supported by lack of various log files that would have been there otherwise.&lt;/p&gt;

&lt;p&gt;This could be some external force or a bug in one of the test scripts.&lt;/p&gt;</comment>
                            <comment id="119974" author="sarah" created="Tue, 30 Jun 2015 21:31:14 +0000"  >&lt;p&gt;recovery-*-scale.sh are all affected by this failure&lt;/p&gt;</comment>
                            <comment id="123927" author="sarah" created="Wed, 12 Aug 2015 06:58:17 +0000"  >&lt;p&gt;lustre-master build 3118&lt;/p&gt;

&lt;p&gt;&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/057df5c4-35c4-11e5-8c30-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/057df5c4-35c4-11e5-8c30-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="135915" author="standan" created="Thu, 10 Dec 2015 19:41:45 +0000"  >&lt;p&gt;master, build# 3264, 2.7.64 tag&lt;br/&gt;
Hard Failover: EL6.7 Server/Client&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/7b412132-9edd-11e5-87a9-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/7b412132-9edd-11e5-87a9-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="135917" author="standan" created="Thu, 10 Dec 2015 19:46:49 +0000"  >&lt;p&gt;This test has failed with the same error around 14 times in the past month.&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/sub_tests/5c07fe72-9e2d-11e5-87a9-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/sub_tests/5c07fe72-9e2d-11e5-87a9-5254006e85c2&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/sub_tests/cf49392e-9e9a-11e5-b163-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/sub_tests/cf49392e-9e9a-11e5-b163-5254006e85c2&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/sub_tests/7b4609e0-9edd-11e5-87a9-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/sub_tests/7b4609e0-9edd-11e5-87a9-5254006e85c2&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/sub_tests/2c618474-9ebc-11e5-98a4-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/sub_tests/2c618474-9ebc-11e5-98a4-5254006e85c2&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/sub_tests/0c64a484-9d37-11e5-8e88-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/sub_tests/0c64a484-9d37-11e5-8e88-5254006e85c2&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/sub_tests/3b5e3922-9c64-11e5-9866-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/sub_tests/3b5e3922-9c64-11e5-9866-5254006e85c2&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/sub_tests/ab3c95d2-9b7d-11e5-9930-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/sub_tests/ab3c95d2-9b7d-11e5-9930-5254006e85c2&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/sub_tests/c1059ce0-9a8a-11e5-8b28-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/sub_tests/c1059ce0-9a8a-11e5-8b28-5254006e85c2&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/sub_tests/d5af25c2-99b2-11e5-9bd2-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/sub_tests/d5af25c2-99b2-11e5-9bd2-5254006e85c2&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/sub_tests/fae19aac-9938-11e5-802b-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/sub_tests/fae19aac-9938-11e5-802b-5254006e85c2&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/sub_tests/b2f329fe-98fc-11e5-8079-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/sub_tests/b2f329fe-98fc-11e5-8079-5254006e85c2&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/sub_tests/7d5e3812-9909-11e5-aeec-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/sub_tests/7d5e3812-9909-11e5-aeec-5254006e85c2&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/sub_tests/e1399bd6-95f0-11e5-a6d2-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/sub_tests/e1399bd6-95f0-11e5-a6d2-5254006e85c2&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/sub_tests/f7c7dc36-8a70-11e5-ba42-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/sub_tests/f7c7dc36-8a70-11e5-ba42-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="135931" author="standan" created="Thu, 10 Dec 2015 20:23:59 +0000"  >&lt;p&gt;master, build# 3264, 2.7.64 tag&lt;br/&gt;
Hard Failover: EL6.7 Server/Client - ZFS&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/2c5961c2-9ebc-11e5-98a4-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/2c5961c2-9ebc-11e5-98a4-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="135935" author="standan" created="Thu, 10 Dec 2015 20:36:28 +0000"  >&lt;p&gt;master, build# 3264, 2.7.64 tag&lt;br/&gt;
Hard Failover: EL6.7 Server/Client - ZFS&lt;br/&gt;
recovery-double-scale test_pairwise_fail failed with the same issue.&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/2ce07428-9ebc-11e5-98a4-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/2ce07428-9ebc-11e5-98a4-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="135955" author="standan" created="Thu, 10 Dec 2015 22:06:49 +0000"  >&lt;p&gt;master, build# 3264, 2.7.64 tag&lt;br/&gt;
Hard Failover: EL7 Server/Client&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/cf43bf1c-9e9a-11e5-b163-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/cf43bf1c-9e9a-11e5-b163-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="136038" author="standan" created="Fri, 11 Dec 2015 16:35:27 +0000"  >&lt;p&gt;master, build# 3264, 2.7.64 tag&lt;br/&gt;
Hard Failover: EL7 Server/Client - ZFS&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/572d25ba-9e20-11e5-91b0-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/572d25ba-9e20-11e5-91b0-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="136359" author="standan" created="Tue, 15 Dec 2015 16:36:43 +0000"  >&lt;p&gt;master, build# 3266, 2.7.64 tag&lt;br/&gt;
Hard Failover: EL6.7 Server/SLES11 SP3 Clients&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/b71e8f10-a080-11e5-85ed-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/b71e8f10-a080-11e5-85ed-5254006e85c2&lt;/a&gt;&lt;br/&gt;
Hard Failover: EL7 Server/SLES11 SP3 Client&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/a39034e8-a077-11e5-8d69-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/a39034e8-a077-11e5-8d69-5254006e85c2&lt;/a&gt;&lt;br/&gt;
Hard Failover: EL7 Server/SLES11 SP3 Client&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/a403d43e-a077-11e5-8d69-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/a403d43e-a077-11e5-8d69-5254006e85c2&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;Also, recovery-mds-scale test_failover_ost failed with same error.&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/b6b0f202-a080-11e5-85ed-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/b6b0f202-a080-11e5-85ed-5254006e85c2&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;recovery-double-scale test_pairwise_fail failing with same issue.&lt;br/&gt;
Hard Failover: EL7 Server/SLES11 SP3 Client&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/a428828e-a077-11e5-8d69-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/a428828e-a077-11e5-8d69-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="139387" author="standan" created="Wed, 20 Jan 2016 01:29:14 +0000"  >&lt;p&gt;Another instance found for hardfailover: EL6.7 Server/SLES11 SP3 Clients&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/762762d0-ba4c-11e5-9a07-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/762762d0-ba4c-11e5-9a07-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                    </comments>
                    <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|hzxemn:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>9223372036854775807</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>