<?xml version="1.0" encoding="UTF-8"?>
<!--
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 01:51:25 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92">
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-5429] insanity test_1: FAIL: Start of /dev/lvm-Role_MDS/P1 on mds1 failed 5</title>
                <link>https://jira.whamcloud.com/browse/LU-5429</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;While running insanity test with MDSCOUNT=4 on master branch, test 1 failed as follows:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;== insanity test 1: MDS/MDS failure == 22:28:38 (1406672918)
CMD: shadow-23vm4 grep -c /mnt/mds1&apos; &apos; /proc/mounts
Stopping /mnt/mds1 (opts:) on shadow-23vm4
CMD: shadow-23vm4 umount -d /mnt/mds1
CMD: shadow-23vm4 lsmod | grep lnet &amp;gt; /dev/null &amp;amp;&amp;amp; lctl dl | grep &apos; ST &apos;
Failover mds1 to shadow-23vm4
CMD: shadow-23vm4 grep -c /mnt/mds2&apos; &apos; /proc/mounts
Stopping /mnt/mds2 (opts:) on shadow-23vm4
CMD: shadow-23vm4 umount -d /mnt/mds2
CMD: shadow-23vm4 lsmod | grep lnet &amp;gt; /dev/null &amp;amp;&amp;amp; lctl dl | grep &apos; ST &apos;
Reintegrating MDS2
22:29:17 (1406672957) waiting for shadow-23vm4 network 900 secs ...
22:29:17 (1406672957) network interface is UP
CMD: shadow-23vm4 hostname
CMD: shadow-23vm4 mkdir -p /mnt/mds2
CMD: shadow-23vm4 test -b /dev/lvm-Role_MDS/P2
Starting mds2:   /dev/lvm-Role_MDS/P2 /mnt/mds2
CMD: shadow-23vm4 mkdir -p /mnt/mds2; mount -t lustre   		                   /dev/lvm-Role_MDS/P2 /mnt/mds2
CMD: shadow-23vm4 PATH=/usr/lib64/lustre/tests:/usr/lib/lustre/tests:/usr/lib64/lustre/tests:/opt/iozone/bin:/opt/iozone/bin:/usr/lib64/lustre/tests/mpi:/usr/lib64/lustre/tests/racer:/usr/lib64/lustre/../lustre-iokit/sgpdd-survey:/usr/lib64/lustre/tests:/usr/lib64/lustre/utils/gss:/usr/lib64/lustre/utils:/usr/lib64/openmpi/bin:/usr/bin:/bin:/sbin:/usr/sbin::/sbin:/bin:/usr/sbin: NAME=autotest_config sh rpc.sh set_default_debug \&quot;vfstrace rpctrace dlmtrace neterror ha config ioctl super lfsck\&quot; \&quot;all -lnet -lnd -pinger\&quot; 4 
CMD: shadow-23vm4 e2label /dev/lvm-Role_MDS/P2 2&amp;gt;/dev/null
Started lustre-MDT0001
22:29:39 (1406672979) waiting for shadow-23vm4 network 900 secs ...
22:29:39 (1406672979) network interface is UP
CMD: shadow-23vm4 hostname
CMD: shadow-23vm4 mkdir -p /mnt/mds1
CMD: shadow-23vm4 test -b /dev/lvm-Role_MDS/P1
Starting mds1:   /dev/lvm-Role_MDS/P1 /mnt/mds1
CMD: shadow-23vm4 mkdir -p /mnt/mds1; mount -t lustre   		                   /dev/lvm-Role_MDS/P1 /mnt/mds1
shadow-23vm4: mount.lustre: mount /dev/mapper/lvm--Role_MDS-P1 at /mnt/mds1 failed: Input/output error
shadow-23vm4: Is the MGS running?
Start of /dev/lvm-Role_MDS/P1 on mds1 failed 5
 insanity test_1: @@@@@@ FAIL: test_1 failed with 5
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;Console log on MDS node:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;16:29:54:LustreError: 15c-8: MGC10.1.5.25@tcp: The configuration from log &apos;lustre-MDT0000&apos; failed (-5). This may be the result of communication errors between this node and the MGS, a bad configuration, or other errors. See the syslog for more information.
16:29:54:Lustre: Evicted from MGS (at MGC10.1.5.25@tcp_0) after server handle changed from 0xcc4f15973e89079d to 0xcc4f15973e891c96
16:29:54:LustreError: 11730:0:(obd_mount_server.c:1299:server_start_targets()) failed to start server lustre-MDT0000: -5
16:29:54:LustreError: 11730:0:(obd_mount_server.c:1771:server_fill_super()) Unable to start targets: -5
16:29:54:LustreError: 11730:0:(obd_mount_server.c:1498:server_put_super()) no obd lustre-MDT0000
16:45:01:Lustre: server umount lustre-MDT0000 complete
16:45:01:LustreError: 11730:0:(obd_mount.c:1342:lustre_fill_super()) Unable to mount  (-5)
16:45:01:Lustre: DEBUG MARKER: lctl set_param -n mdt.lustre*.enable_remote_dir=1
16:45:01:Lustre: DEBUG MARKER: /usr/sbin/lctl mark  insanity test_1: @@@@@@ FAIL: test_1 failed with 5 
16:45:01:Lustre: DEBUG MARKER: insanity test_1: @@@@@@ FAIL: test_1 failed with 5
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;Maloo report: &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/f68fd6dc-177d-11e4-a76a-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/f68fd6dc-177d-11e4-a76a-5254006e85c2&lt;/a&gt;&lt;/p&gt;</description>
                <environment>Lustre Branch: master&lt;br/&gt;
MDSCOUNT=4</environment>
        <key id="25805">LU-5429</key>
            <summary>insanity test_1: FAIL: Start of /dev/lvm-Role_MDS/P1 on mds1 failed 5</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="3" iconUrl="https://jira.whamcloud.com/images/icons/priorities/major.svg">Major</priority>
                        <status id="5" iconUrl="https://jira.whamcloud.com/images/icons/statuses/resolved.png" description="A resolution has been taken, and it is awaiting verification by reporter. From here issues are either reopened, or are closed.">Resolved</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="5">Cannot Reproduce</resolution>
                                        <assignee username="wc-triage">WC Triage</assignee>
                                    <reporter username="yujian">Jian Yu</reporter>
                        <labels>
                    </labels>
                <created>Wed, 30 Jul 2014 01:35:44 +0000</created>
                <updated>Thu, 22 Nov 2018 04:28:54 +0000</updated>
                            <resolved>Thu, 22 Nov 2018 04:28:54 +0000</resolved>
                                    <version>Lustre 2.7.0</version>
                                                        <due></due>
                            <votes>0</votes>
                                    <watches>6</watches>
                                                                            <comments>
                            <comment id="90410" author="yujian" created="Wed, 30 Jul 2014 01:38:26 +0000"  >&lt;p&gt;More instances on master branch:&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/ac193a30-177d-11e4-a76a-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/ac193a30-177d-11e4-a76a-5254006e85c2&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/e0f03cfa-177c-11e4-a76a-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/e0f03cfa-177c-11e4-a76a-5254006e85c2&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/03f91294-177d-11e4-a76a-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/03f91294-177d-11e4-a76a-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="90467" author="adilger" created="Wed, 30 Jul 2014 17:44:38 +0000"  >&lt;p&gt;It looks like the first two tests (for patch &lt;a href=&quot;http://review.whamcloud.com/11268&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://review.whamcloud.com/11268&lt;/a&gt; which is just master) are actually hitting &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-5420&quot; title=&quot;Failure on test suite sanity test_17m: mount MDS failed, Input/output error&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-5420&quot;&gt;&lt;del&gt;LU-5420&lt;/del&gt;&lt;/a&gt;.  The second tests (for &lt;a href=&quot;http://review.whamcloud.com/11269&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://review.whamcloud.com/11269&lt;/a&gt; with the &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-4877&quot; title=&quot;mdt_fix_reply()) ASSERTION( md_packed &amp;gt; 0 ) failed&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-4877&quot;&gt;&lt;del&gt;LU-4877&lt;/del&gt;&lt;/a&gt; patch reverted) are possibly hitting a different problem as described here.&lt;/p&gt;</comment>
                    </comments>
                <issuelinks>
                            <issuelinktype id="10011">
                    <name>Related</name>
                                            <outwardlinks description="is related to ">
                                        <issuelink>
            <issuekey id="24148">LU-4877</issuekey>
        </issuelink>
                            </outwardlinks>
                                                        </issuelinktype>
                    </issuelinks>
                <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|hzwslj:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>15115</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>