<?xml version="1.0" encoding="UTF-8"?>
<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 02:12:39 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92">
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
<language>en-us</language>
    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-7872] conf-sanity: test_50i &apos;test failed to respond and timed out&apos;</title>
                <link>https://jira.whamcloud.com/browse/LU-7872</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;Canary patch failed, during &apos;review-dne-part-1&apos;&lt;/p&gt;

&lt;p&gt;This issue was created by maloo for Richard Henwood &amp;lt;richard.henwood@intel.com&amp;gt;&lt;/p&gt;

&lt;p&gt;Please provide additional information about the failure here.&lt;/p&gt;

&lt;p&gt;This issue relates to the following test suite run: &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/5e327dde-e86b-11e5-be76-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/5e327dde-e86b-11e5-be76-5254006e85c2&lt;/a&gt;.&lt;/p&gt;

&lt;p&gt;looks happy enough until:&lt;/p&gt;

&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;...
CMD: trevis-45vm1.trevis.hpdd.intel.com /usr/sbin/lctl get_param -n mdc.lustre-MDT0001-mdc-[!M]*.active
CMD: trevis-45vm1.trevis.hpdd.intel.com /usr/sbin/lctl get_param -n mdc.lustre-MDT0001-mdc-[!M]*.active
Updated after 7s: wanted &apos;0&apos; got &apos;0&apos;
error on LL_IOC_LMV_SETSTRIPE &apos;/mnt/lustre/d50i.conf-sanity/2&apos; (3): No such device
error: mkdir: create stripe dir &apos;/mnt/lustre/d50i.conf-sanity/2&apos; failed
umount lustre on /mnt/lustre.....
CMD: trevis-45vm1.trevis.hpdd.intel.com grep -c /mnt/lustre&apos; &apos; /proc/mounts
Stopping client trevis-45vm1.trevis.hpdd.intel.com /mnt/lustre (opts:)
CMD: trevis-45vm1.trevis.hpdd.intel.com lsof -t /mnt/lustre
CMD: trevis-45vm1.trevis.hpdd.intel.com umount  /mnt/lustre 2&amp;gt;&amp;amp;1
stop mds service on trevis-45vm7
CMD: trevis-45vm7 grep -c /mnt/mds1&apos; &apos; /proc/mounts
...
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;</description>
                <environment></environment>
        <key id="35313">LU-7872</key>
            <summary>conf-sanity: test_50i &apos;test failed to respond and timed out&apos;</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="4" iconUrl="https://jira.whamcloud.com/images/icons/priorities/minor.svg">Minor</priority>
                        <status id="1" iconUrl="https://jira.whamcloud.com/images/icons/statuses/open.png" description="The issue is open and ready for the assignee to start work on it.">Open</status>
                    <statusCategory id="2" key="new" colorName="default"/>
                                    <resolution id="-1">Unresolved</resolution>
                                        <assignee username="wc-triage">WC Triage</assignee>
                                    <reporter username="maloo">Maloo</reporter>
                        <labels>
                    </labels>
                <created>Mon, 14 Mar 2016 15:24:28 +0000</created>
                <updated>Mon, 22 Oct 2018 21:42:28 +0000</updated>
                                                                                <due></due>
                            <votes>0</votes>
                                    <watches>4</watches>
                                                                            <comments>
                            <comment id="145495" author="jamesanunez" created="Mon, 14 Mar 2016 22:05:28 +0000"  >&lt;p&gt;From the MDS1 and MDS3 console log, we see:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;13:37:35:LustreError: 18333:0:(osp_dev.c:1259:osp_device_free()) } header@ffff8800451c2b40
13:37:35:
13:37:35:LustreError: 18333:0:(osp_dev.c:1259:osp_device_free()) header@ffff880040837b00[0x1, 1, [0x200000001:0x1017:0x0] hash exist]{
13:37:35:
13:37:35:LustreError: 18333:0:(osp_dev.c:1259:osp_device_free()) ....local_storage@ffff880040837b50
13:37:35:
13:37:35:LustreError: 18333:0:(osp_dev.c:1259:osp_device_free()) ....osd-ldiskfs@ffff8800451e5480osd-ldiskfs-object@ffff8800451e5480(i:ffff88007bb7f6e0:25001/3959569064)[plain]
13:37:35:
13:37:35:LustreError: 18333:0:(osp_dev.c:1259:osp_device_free()) } header@ffff880040837b00
13:37:35:
13:37:35:LustreError: 18333:0:(lu_object.c:1224:lu_device_fini()) ASSERTION( atomic_read(&amp;amp;d-&amp;gt;ld_ref) == 0 ) failed: Refcount is 1
13:37:35:LustreError: 18333:0:(lu_object.c:1224:lu_device_fini()) LBUG
13:37:35:Pid: 18333, comm: obd_zombid
13:37:35:
13:37:35:Call Trace:
13:37:35: [&amp;lt;ffffffffa06b6875&amp;gt;] libcfs_debug_dumpstack+0x55/0x80 [libcfs]
13:37:35: [&amp;lt;ffffffffa06b6e77&amp;gt;] lbug_with_loc+0x47/0xb0 [libcfs]
13:37:35: [&amp;lt;ffffffffa0fadd38&amp;gt;] lu_device_fini+0xb8/0xc0 [obdclass]
13:37:35: [&amp;lt;ffffffffa0fb36ce&amp;gt;] dt_device_fini+0xe/0x10 [obdclass]
13:37:35: [&amp;lt;ffffffffa185f196&amp;gt;] osp_device_free+0x96/0x180 [osp]
13:37:35: [&amp;lt;ffffffffa0f98a2d&amp;gt;] class_decref+0x3dd/0x4c0 [obdclass]
13:37:35: [&amp;lt;ffffffffa0f84b21&amp;gt;] obd_zombie_impexp_cull+0x611/0x970 [obdclass]
13:37:35: [&amp;lt;ffffffffa0f84ee5&amp;gt;] obd_zombie_impexp_thread+0x65/0x190 [obdclass]
13:37:35: [&amp;lt;ffffffff810672b0&amp;gt;] ? default_wake_function+0x0/0x20
13:37:35: [&amp;lt;ffffffffa0f84e80&amp;gt;] ? obd_zombie_impexp_thread+0x0/0x190 [obdclass]
13:37:35: [&amp;lt;ffffffff810a0fce&amp;gt;] kthread+0x9e/0xc0
13:37:35: [&amp;lt;ffffffff8100c28a&amp;gt;] child_rip+0xa/0x20
13:37:35: [&amp;lt;ffffffff810a0f30&amp;gt;] ? kthread+0x0/0xc0
13:37:35: [&amp;lt;ffffffff8100c280&amp;gt;] ? child_rip+0x0/0x20
13:37:35:
13:37:35:LustreError: 4510:0:(mdt_handler.c:4395:mdt_fini()) ASSERTION( atomic_read(&amp;amp;d-&amp;gt;ld_ref) == 0 ) failed: 
13:37:35:LustreError: 4510:0:(mdt_handler.c:4395:mdt_fini()) LBUG
13:37:35:Pid: 4510, comm: umount
13:37:35:
13:37:35:Call Trace:
13:37:35: [&amp;lt;ffffffffa06b6875&amp;gt;] libcfs_debug_dumpstack+0x55/0x80 [libcfs]
13:37:35: [&amp;lt;ffffffffa06b6e77&amp;gt;] lbug_with_loc+0x47/0xb0 [libcfs]
13:37:35: [&amp;lt;ffffffffa17141ba&amp;gt;] mdt_device_fini+0x121a/0x12e0 [mdt]
13:37:35: [&amp;lt;ffffffffa0f85b1d&amp;gt;] ? class_disconnect_exports+0x17d/0x2f0 [obdclass]
13:37:35: [&amp;lt;ffffffffa0f9e302&amp;gt;] class_cleanup+0x572/0xd20 [obdclass]
13:37:35: [&amp;lt;ffffffffa0f81336&amp;gt;] ? class_name2dev+0x56/0xe0 [obdclass]
13:37:35: [&amp;lt;ffffffffa0fa0616&amp;gt;] class_process_config+0x1b66/0x24c0 [obdclass]
13:37:35: [&amp;lt;ffffffffa06c1cf1&amp;gt;] ? libcfs_debug_msg+0x41/0x50 [libcfs]
13:37:35: [&amp;lt;ffffffffa0fa142f&amp;gt;] class_manual_cleanup+0x4bf/0xc90 [obdclass]
13:37:35: [&amp;lt;ffffffffa0f81336&amp;gt;] ? class_name2dev+0x56/0xe0 [obdclass]
13:37:35: [&amp;lt;ffffffffa0fd29ec&amp;gt;] server_put_super+0x8bc/0xcd0 [obdclass]
13:37:35: [&amp;lt;ffffffff811946eb&amp;gt;] generic_shutdown_super+0x5b/0xe0
13:37:35: [&amp;lt;ffffffff811947d6&amp;gt;] kill_anon_super+0x16/0x60
13:37:35: [&amp;lt;ffffffffa0fa4616&amp;gt;] lustre_kill_super+0x36/0x60 [obdclass]
13:37:35: [&amp;lt;ffffffff81194f77&amp;gt;] deactivate_super+0x57/0x80
13:37:35: [&amp;lt;ffffffff811b4f5f&amp;gt;] mntput_no_expire+0xbf/0x110
13:37:35: [&amp;lt;ffffffff811b5aab&amp;gt;] sys_umount+0x7b/0x3a0
13:37:35: [&amp;lt;ffffffff8100b0d2&amp;gt;] system_call_fastpath+0x16/0x1b
13:37:35:
13:37:35:Kernel panic - not syncing: LBUG
13:37:35:Pid: 4510, comm: umount Not tainted 2.6.32-573.18.1.el6_lustre.ge5f28dc.x86_64 #1
13:37:35:Call Trace:
13:37:35: [&amp;lt;ffffffff81539011&amp;gt;] ? panic+0xa7/0x16f
13:37:35: [&amp;lt;ffffffffa06b6ecb&amp;gt;] ? lbug_with_loc+0x9b/0xb0 [libcfs]
13:37:35: [&amp;lt;ffffffffa17141ba&amp;gt;] ? mdt_device_fini+0x121a/0x12e0 [mdt]
13:37:35: [&amp;lt;ffffffffa0f85b1d&amp;gt;] ? class_disconnect_exports+0x17d/0x2f0 [obdclass]
13:37:35: [&amp;lt;ffffffffa0f9e302&amp;gt;] ? class_cleanup+0x572/0xd20 [obdclass]
13:37:35: [&amp;lt;ffffffffa0f81336&amp;gt;] ? class_name2dev+0x56/0xe0 [obdclass]
13:37:35: [&amp;lt;ffffffffa0fa0616&amp;gt;] ? class_process_config+0x1b66/0x24c0 [obdclass]
13:37:35: [&amp;lt;ffffffffa06c1cf1&amp;gt;] ? libcfs_debug_msg+0x41/0x50 [libcfs]
13:37:35: [&amp;lt;ffffffffa0fa142f&amp;gt;] ? class_manual_cleanup+0x4bf/0xc90 [obdclass]
13:37:35: [&amp;lt;ffffffffa0f81336&amp;gt;] ? class_name2dev+0x56/0xe0 [obdclass]
13:37:35: [&amp;lt;ffffffffa0fd29ec&amp;gt;] ? server_put_super+0x8bc/0xcd0 [obdclass]
13:37:35: [&amp;lt;ffffffff811946eb&amp;gt;] ? generic_shutdown_super+0x5b/0xe0
13:37:35: [&amp;lt;ffffffff811947d6&amp;gt;] ? kill_anon_super+0x16/0x60
13:37:35: [&amp;lt;ffffffffa0fa4616&amp;gt;] ? lustre_kill_super+0x36/0x60 [obdclass]
13:37:35: [&amp;lt;ffffffff81194f77&amp;gt;] ? deactivate_super+0x57/0x80
13:37:35: [&amp;lt;ffffffff811b4f5f&amp;gt;] ? mntput_no_expire+0xbf/0x110
13:37:35: [&amp;lt;ffffffff811b5aab&amp;gt;] ? sys_umount+0x7b/0x3a0
13:37:35: [&amp;lt;ffffffff8100b0d2&amp;gt;] ? system_call_fastpath+0x16/0x1b
13:37:35:Initializing cgroup subsys cpuset
13:37:35:Initializing cgroup subsys cpu
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;</comment>
                            <comment id="145907" author="yong.fan" created="Thu, 17 Mar 2016 08:07:07 +0000"  >&lt;p&gt;Another failure instance on master:&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sessions/fdcbeeb6-ebb8-11e5-93cc-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sessions/fdcbeeb6-ebb8-11e5-93cc-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                    </comments>
                <issuelinks>
                            <issuelinktype id="10011">
                    <name>Related</name>
                                                                <inwardlinks description="is related to">
                                        <issuelink>
            <issuekey id="53689">LU-11556</issuekey>
        </issuelink>
                            </inwardlinks>
                                    </issuelinktype>
                    </issuelinks>
                <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|hzy4c7:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>9223372036854775807</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>