<?xml version="1.0" encoding="UTF-8"?>
<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 02:37:14 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92" >
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-10678] LBUG: osd_handler.c:2353:osd_read_lock()) ASSERTION( obj-&gt;oo_owner == ((void *)0) ) failed:</title>
                <link>https://jira.whamcloud.com/browse/LU-10678</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;Soak MDT was in normal operation. Sudden LBUG&lt;/p&gt;
&lt;div class=&quot;code panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;codeContent panelContent&quot;&gt;
&lt;pre class=&quot;code-java&quot;&gt;Feb 16 09:28:39 soak-8 kernel: LustreError: 2688:0:(osd_handler.c:2353:osd_read_lock()) ASSERTION( obj-&amp;gt;oo_owner == ((void *)0) ) failed:
Feb 16 09:28:39 soak-8 kernel: LustreError: 2688:0:(osd_handler.c:2353:osd_read_lock()) LBUG
Feb 16 09:28:39 soak-8 kernel: Pid: 2688, comm: mdt00_028
Feb 16 09:28:39 soak-8 kernel: #012Call Trace:
Feb 16 09:28:39 soak-8 kernel: [&amp;lt;ffffffffc0dbc7ae&amp;gt;] libcfs_call_trace+0x4e/0x60 [libcfs]
Feb 16 09:28:39 soak-8 kernel: [&amp;lt;ffffffffc0dbc83c&amp;gt;] lbug_with_loc+0x4c/0xb0 [libcfs]
Feb 16 09:28:39 soak-8 kernel: [&amp;lt;ffffffffc140599a&amp;gt;] osd_read_lock+0xda/0xe0 [osd_ldiskfs]
Feb 16 09:28:40 soak-8 kernel: [&amp;lt;ffffffffc1691287&amp;gt;] lod_read_lock+0x37/0xd0 [lod]
Feb 16 09:28:40 soak-8 kernel: [&amp;lt;ffffffffc17125c7&amp;gt;] mdd_read_lock+0x37/0xd0 [mdd]
Feb 16 09:28:40 soak-8 kernel: [&amp;lt;ffffffffc1715dcc&amp;gt;] mdd_xattr_get+0x6c/0x390 [mdd]
Feb 16 09:28:40 soak-8 kernel: [&amp;lt;ffffffffc1593c3f&amp;gt;] mdt_pack_acl2body+0x1af/0x800 [mdt]
Feb 16 09:28:40 soak-8 kernel: [&amp;lt;ffffffffc15beaf9&amp;gt;] mdt_finish_open+0x289/0x690 [mdt]
Feb 16 09:28:40 soak-8 kernel: [&amp;lt;ffffffffc15c120b&amp;gt;] mdt_reint_open+0x230b/0x3260 [mdt]
Feb 16 09:28:40 soak-8 kernel: [&amp;lt;ffffffffc0f27d2e&amp;gt;] ? upcall_cache_get_entry+0x20e/0x8f0 [obdclass]
Feb 16 09:28:40 soak-8 kernel: [&amp;lt;ffffffffc15a4b43&amp;gt;] ? ucred_set_jobid+0x53/0x70 [mdt]
Feb 16 09:28:40 soak-8 kernel: [&amp;lt;ffffffffc15b5400&amp;gt;] mdt_reint_rec+0x80/0x210 [mdt]
Feb 16 09:28:40 soak-8 kernel: [&amp;lt;ffffffffc1594f8b&amp;gt;] mdt_reint_internal+0x5fb/0x9c0 [mdt]
Feb 16 09:28:40 soak-8 kernel: [&amp;lt;ffffffffc15a1437&amp;gt;] mdt_intent_reint+0x157/0x420 [mdt]
Feb 16 09:28:40 soak-8 kernel: [&amp;lt;ffffffffc15980b2&amp;gt;] mdt_intent_opc+0x442/0xad0 [mdt]
Feb 16 09:28:41 soak-8 kernel: [&amp;lt;ffffffffc1144470&amp;gt;] ? lustre_swab_ldlm_intent+0x0/0x20 [ptlrpc]
Feb 16 09:28:41 soak-8 kernel: [&amp;lt;ffffffffc159fc63&amp;gt;] mdt_intent_policy+0x1a3/0x360 [mdt]
Feb 16 09:28:41 soak-8 kernel: [&amp;lt;ffffffffc10f4202&amp;gt;] ldlm_lock_enqueue+0x382/0x8f0 [ptlrpc]
Feb 16 09:28:41 soak-8 kernel: [&amp;lt;ffffffffc111c753&amp;gt;] ldlm_handle_enqueue0+0x8f3/0x13e0 [ptlrpc]
Feb 16 09:28:41 soak-8 kernel: [&amp;lt;ffffffffc11444f0&amp;gt;] ? lustre_swab_ldlm_request+0x0/0x30 [ptlrpc]
Feb 16 09:28:41 soak-8 kernel: [&amp;lt;ffffffffc11a2202&amp;gt;] tgt_enqueue+0x62/0x210 [ptlrpc]
Feb 16 09:28:41 soak-8 kernel: [&amp;lt;ffffffffc11aa405&amp;gt;] tgt_request_handle+0x925/0x13b0 [ptlrpc]
Feb 16 09:28:41 soak-8 kernel: [&amp;lt;ffffffffc114e58e&amp;gt;] ptlrpc_server_handle_request+0x24e/0xab0 [ptlrpc]
Feb 16 09:28:41 soak-8 kernel: [&amp;lt;ffffffffc114b448&amp;gt;] ? ptlrpc_wait_event+0x98/0x340 [ptlrpc]
Feb 16 09:28:41 soak-8 kernel: [&amp;lt;ffffffff810c6440&amp;gt;] ? default_wake_function+0x0/0x20
Feb 16 09:28:41 soak-8 kernel: [&amp;lt;ffffffffc1151d42&amp;gt;] ptlrpc_main+0xa92/0x1e40 [ptlrpc]
Feb 16 09:28:41 soak-8 kernel: [&amp;lt;ffffffffc11512b0&amp;gt;] ? ptlrpc_main+0x0/0x1e40 [ptlrpc]
Feb 16 09:28:41 soak-8 kernel: [&amp;lt;ffffffff810b252f&amp;gt;] kthread+0xcf/0xe0
Feb 16 09:28:42 soak-8 kernel: [&amp;lt;ffffffff810b2460&amp;gt;] ? kthread+0x0/0xe0
Feb 16 09:28:42 soak-8 kernel: [&amp;lt;ffffffff816b8798&amp;gt;] ret_from_fork+0x58/0x90
Feb 16 09:28:42 soak-8 kernel: [&amp;lt;ffffffff810b2460&amp;gt;] ? kthread+0x0/0xe0
Feb 16 09:28:42 soak-8 kernel:
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;</description>
                <environment>Soak stress cluster  - Lustre version=2.10.57_58_gf24340c. </environment>
        <key id="50852">LU-10678</key>
            <summary>LBUG: osd_handler.c:2353:osd_read_lock()) ASSERTION( obj-&gt;oo_owner == ((void *)0) ) failed:</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="3" iconUrl="https://jira.whamcloud.com/images/icons/priorities/major.svg">Major</priority>
                        <status id="5" iconUrl="https://jira.whamcloud.com/images/icons/statuses/resolved.png" description="A resolution has been taken, and it is awaiting verification by reporter. From here issues are either reopened, or are closed.">Resolved</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="6">Not a Bug</resolution>
                                        <assignee username="ys">Yang Sheng</assignee>
                                    <reporter username="cliffw">Cliff White</reporter>
                        <labels>
                            <label>soak</label>
                    </labels>
                <created>Fri, 16 Feb 2018 16:08:26 +0000</created>
                <updated>Wed, 27 Oct 2021 16:58:18 +0000</updated>
                            <resolved>Tue, 11 Jun 2019 15:37:43 +0000</resolved>
                                    <version>Lustre 2.11.0</version>
                                                        <due></due>
                            <votes>1</votes>
                                    <watches>15</watches>
                                                                            <comments>
                            <comment id="221182" author="pjones" created="Fri, 16 Feb 2018 18:08:56 +0000"  >&lt;p&gt;Yang Sheng&lt;/p&gt;

&lt;p&gt;Can you please advise&lt;/p&gt;

&lt;p&gt;Peter&lt;/p&gt;</comment>
                            <comment id="221429" author="spiechurski" created="Thu, 22 Feb 2018 10:35:17 +0000"  >&lt;p&gt;Hi,&lt;/p&gt;

&lt;p&gt;We have seen this a couple of times on 2.7.21.2 recently.&lt;/p&gt;

&lt;p&gt;The crashdump collection has failed on the first occurrence, and I am waiting for confirmation about the second occurrence.&lt;/p&gt;

&lt;p&gt;Would you be interested in a dump if we get one ?&lt;/p&gt;</comment>
                            <comment id="221433" author="ys" created="Thu, 22 Feb 2018 13:00:01 +0000"  >&lt;p&gt;Hi, Sebastien,&lt;/p&gt;

&lt;p&gt;This is helpful if got a crash dump. TIA.&lt;/p&gt;

&lt;p&gt;Thanks,&lt;br/&gt;
YangSheng&lt;/p&gt;</comment>
                            <comment id="221553" author="spiechurski" created="Fri, 23 Feb 2018 09:48:42 +0000"  >&lt;p&gt;Unfortunately, the dump collection failed because the crashkernel=auto parameter does not reserve enough memory for our configuration. I have requested this to be adjusted. Let&apos;s hope we can get a dump at next crash.&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;

&lt;p&gt;Regards,&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;

&lt;p&gt;Sebastien.&lt;/p&gt;</comment>
                            <comment id="235199" author="jpeyrard" created="Mon, 22 Oct 2018 14:58:17 +0000"  >&lt;p&gt;Hi,&lt;/p&gt;

&lt;p&gt;I have this LBUG on server running : lustre-el7.3-2.7.21.3-255.ddn20.g10dd357.el7.x86_64&lt;/p&gt;

&lt;p&gt;&lt;span class=&quot;error&quot;&gt;&amp;#91;75345.251740&amp;#93;&lt;/span&gt; LustreError: 7714:0:(osd_handler.c:1751:osd_object_read_lock()) ASSERTION( obj-&amp;gt;oo_owner == ((void *)0) ) failed:&#160;&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;75345.265763&amp;#93;&lt;/span&gt; LustreError: 7714:0:(osd_handler.c:1751:osd_object_read_lock()) LBUG&#160;&lt;/p&gt;

&lt;p&gt;Do I need to open a Jira ticket for this one, or we can use this one ? Seems to be similar, but I prefer to ask.&lt;/p&gt;

&lt;p&gt;I will try to get the crash file this week and the whole dmesg.&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;

&lt;p&gt;Regards,&lt;/p&gt;

&lt;p&gt;Johann&lt;/p&gt;</comment>
                            <comment id="237148" author="liu" created="Sat, 17 Nov 2018 15:45:45 +0000"  >&lt;p&gt;We have a few similar crashes on version 2.10.1, kernel 3.10.0-693.2.2.el7_lustre.x86_64.&lt;/p&gt;

&lt;p&gt;We have an incomplete kernel dump and I uploaded to ftp.whamcloud.com in /uploads/&lt;a href=&quot;https://jira.whamcloud.com/browse/LU-10678&quot; title=&quot;LBUG: osd_handler.c:2353:osd_read_lock()) ASSERTION( obj-&amp;gt;oo_owner == ((void *)0) ) failed:&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-10678&quot;&gt;&lt;del&gt;LU-10678&lt;/del&gt;&lt;/a&gt; directory. Not sure if this helps.&lt;/p&gt;

&lt;p&gt;Thanks.&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;

&lt;p&gt;Lixin Liu&lt;/p&gt;

&lt;p&gt;Simon Fraser University&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;</comment>
                            <comment id="237176" author="spiechurski" created="Mon, 19 Nov 2018 10:24:32 +0000"  >&lt;p&gt;We have one complete vmcore from an MDS running kernel 3.10.0-693.11.1.el7 and lustre 2.7.21.2.&lt;/p&gt;

&lt;p&gt;I have uploaded it to ftp.whamcloud.com/uploads/&lt;a href=&quot;https://jira.whamcloud.com/browse/LU-10678&quot; title=&quot;LBUG: osd_handler.c:2353:osd_read_lock()) ASSERTION( obj-&amp;gt;oo_owner == ((void *)0) ) failed:&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-10678&quot;&gt;&lt;del&gt;LU-10678&lt;/del&gt;&lt;/a&gt;/vmcore-3.10.0-693.11.1.el7.x86_64_lustre-2.7.21.2&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;</comment>
                            <comment id="237194" author="ys" created="Mon, 19 Nov 2018 17:59:57 +0000"  >&lt;p&gt;Hi, Sebastien,&lt;/p&gt;

&lt;p&gt;Looks like you use a non-standard combination of lustre &amp;amp; kernel? 2.7.21.2 should use 3.10.0.514.xx kernel. Can you provide debuginfo rpms?&lt;/p&gt;

&lt;p&gt;Thanks,&lt;br/&gt;
YangSheng&lt;/p&gt;</comment>
                            <comment id="237223" author="spiechurski" created="Tue, 20 Nov 2018 00:17:12 +0000"  >&lt;p&gt;Hi Yang Sheng,&lt;/p&gt;

&lt;p&gt;I have just uploaded the corresponding lustre and kernel debuginfo packages to the same directory.&lt;/p&gt;

&lt;p&gt;Regards,&lt;/p&gt;

&lt;p&gt;&#160;&lt;/p&gt;

&lt;p&gt;Sebastien.&lt;/p&gt;</comment>
                            <comment id="246424" author="raj gautam" created="Sat, 27 Apr 2019 10:28:00 +0000"  >&lt;p&gt;Also seen in server running&#160; lustre version 2.11.0.201 and kernel version 3.10.0-693.21.1.x3.1.11.x86_64&#160;&lt;/p&gt;

&lt;p&gt;Apr 25 11:49:02 hostname-n03 kernel: Pid: 26722, comm: mdt03_004&lt;br/&gt;
 Apr 25 11:49:02 hostname-n03 kernel: IEC: 026000003: LASSERT: { &quot;pid&quot;: &quot;26722&quot;, &quot;ext_pid&quot;: &quot;0&quot;, &quot;filename&quot;: &quot;osd_handler.c&quot;, &quot;line&quot;: &quot;2382&quot;, &quot;func_name&quot;: &quot;osd_read_lock&quot;, &quot;assert_info&quot;: &quot;( obj-&amp;gt;oo_owner == ((void *)0) ) failed: &quot; }&lt;br/&gt;
 Apr 25 11:49:02 hostname-n03 kernel: IEC: 026000004: LBUG: { &quot;pid&quot;: &quot;26722&quot;, &quot;ext_pid&quot;: &quot;0&quot;, &quot;filename&quot;: &quot;osd_handler.c&quot;, &quot;line&quot;: &quot;2382&quot;, &quot;func_name&quot;: &quot;osd_read_lock&quot; }&lt;br/&gt;
 Apr 25 11:49:02 hostname-n03 kernel:&lt;br/&gt;
 Apr 25 11:49:02 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffc0a407ae&amp;gt;&amp;#93;&lt;/span&gt; libcfs_call_trace+0x4e/0x60 &lt;span class=&quot;error&quot;&gt;&amp;#91;libcfs&amp;#93;&lt;/span&gt;&lt;br/&gt;
 Apr 25 11:49:02 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffc0a4083c&amp;gt;&amp;#93;&lt;/span&gt; lbug_with_loc+0x4c/0xb0 &lt;span class=&quot;error&quot;&gt;&amp;#91;libcfs&amp;#93;&lt;/span&gt;&lt;br/&gt;
 Apr 25 11:49:02 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffc12d4800&amp;gt;&amp;#93;&lt;/span&gt; ? mdd_xattr_get+0x0/0x5c0 &lt;span class=&quot;error&quot;&gt;&amp;#91;mdd&amp;#93;&lt;/span&gt;&lt;br/&gt;
 Apr 25 11:49:02 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffc15569ca&amp;gt;&amp;#93;&lt;/span&gt; osd_read_lock+0xda/0xe0 &lt;span class=&quot;error&quot;&gt;&amp;#91;osd_ldiskfs&amp;#93;&lt;/span&gt;&lt;br/&gt;
 Apr 25 11:49:02 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffc141256a&amp;gt;&amp;#93;&lt;/span&gt; lod_read_lock+0x3a/0xd0 &lt;span class=&quot;error&quot;&gt;&amp;#91;lod&amp;#93;&lt;/span&gt;&lt;br/&gt;
 Apr 25 11:49:02 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffc12ce82a&amp;gt;&amp;#93;&lt;/span&gt; mdd_read_lock+0x3a/0xd0 &lt;span class=&quot;error&quot;&gt;&amp;#91;mdd&amp;#93;&lt;/span&gt;&lt;br/&gt;
 Apr 25 11:49:02 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffc12d4870&amp;gt;&amp;#93;&lt;/span&gt; mdd_xattr_get+0x70/0x5c0 &lt;span class=&quot;error&quot;&gt;&amp;#91;mdd&amp;#93;&lt;/span&gt;&lt;br/&gt;
 Apr 25 11:49:02 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffc1413b0b&amp;gt;&amp;#93;&lt;/span&gt; ? lod_attr_get+0xab/0x130 &lt;span class=&quot;error&quot;&gt;&amp;#91;lod&amp;#93;&lt;/span&gt;&lt;br/&gt;
 Apr 25 11:49:02 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffc1370490&amp;gt;&amp;#93;&lt;/span&gt; mdt_get_som+0x90/0x210 &lt;span class=&quot;error&quot;&gt;&amp;#91;mdt&amp;#93;&lt;/span&gt;&lt;br/&gt;
 Apr 25 11:49:02 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffc133ea95&amp;gt;&amp;#93;&lt;/span&gt; mdt_attr_get_complex+0x955/0xb10 &lt;span class=&quot;error&quot;&gt;&amp;#91;mdt&amp;#93;&lt;/span&gt;&lt;br/&gt;
 Apr 25 11:49:02 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffc1368248&amp;gt;&amp;#93;&lt;/span&gt; mdt_reint_open+0x898/0x3190 &lt;span class=&quot;error&quot;&gt;&amp;#91;mdt&amp;#93;&lt;/span&gt;&lt;br/&gt;
 Apr 25 11:49:02 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffc0c41261&amp;gt;&amp;#93;&lt;/span&gt; ? upcall_cache_get_entry+0x211/0x8d0 &lt;span class=&quot;error&quot;&gt;&amp;#91;obdclass&amp;#93;&lt;/span&gt;&lt;br/&gt;
 Apr 25 11:49:02 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffc0c46f0e&amp;gt;&amp;#93;&lt;/span&gt; ? lu_ucred+0x1e/0x30 &lt;span class=&quot;error&quot;&gt;&amp;#91;obdclass&amp;#93;&lt;/span&gt;&lt;br/&gt;
 Apr 25 11:49:02 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffc134ccc5&amp;gt;&amp;#93;&lt;/span&gt; ? mdt_ucred+0x15/0x20 &lt;span class=&quot;error&quot;&gt;&amp;#91;mdt&amp;#93;&lt;/span&gt;&lt;br/&gt;
 Apr 25 11:49:02 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffc134d551&amp;gt;&amp;#93;&lt;/span&gt; ? mdt_root_squash+0x21/0x430 &lt;span class=&quot;error&quot;&gt;&amp;#91;mdt&amp;#93;&lt;/span&gt;&lt;br/&gt;
 Apr 25 11:49:02 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffc135dd93&amp;gt;&amp;#93;&lt;/span&gt; mdt_reint_rec+0x83/0x210 &lt;span class=&quot;error&quot;&gt;&amp;#91;mdt&amp;#93;&lt;/span&gt;&lt;br/&gt;
 Apr 25 11:49:02 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffc133d1bb&amp;gt;&amp;#93;&lt;/span&gt; mdt_reint_internal+0x5fb/0x9c0 &lt;span class=&quot;error&quot;&gt;&amp;#91;mdt&amp;#93;&lt;/span&gt;&lt;br/&gt;
 Apr 25 11:49:02 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffc1349737&amp;gt;&amp;#93;&lt;/span&gt; mdt_intent_reint+0x157/0x420 &lt;span class=&quot;error&quot;&gt;&amp;#91;mdt&amp;#93;&lt;/span&gt;&lt;br/&gt;
 Apr 25 11:49:02 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffc1340315&amp;gt;&amp;#93;&lt;/span&gt; mdt_intent_opc+0x455/0xae0 &lt;span class=&quot;error&quot;&gt;&amp;#91;mdt&amp;#93;&lt;/span&gt;&lt;br/&gt;
 Apr 25 11:49:03 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffc0e69d10&amp;gt;&amp;#93;&lt;/span&gt; ? lustre_swab_ldlm_intent+0x0/0x20 &lt;span class=&quot;error&quot;&gt;&amp;#91;ptlrpc&amp;#93;&lt;/span&gt;&lt;br/&gt;
 Apr 25 11:49:03 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffc1347f63&amp;gt;&amp;#93;&lt;/span&gt; mdt_intent_policy+0x1a3/0x360 &lt;span class=&quot;error&quot;&gt;&amp;#91;mdt&amp;#93;&lt;/span&gt;&lt;br/&gt;
 Apr 25 11:49:03 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffc0e1af9e&amp;gt;&amp;#93;&lt;/span&gt; ldlm_lock_enqueue+0x34e/0xa50 &lt;span class=&quot;error&quot;&gt;&amp;#91;ptlrpc&amp;#93;&lt;/span&gt;&lt;br/&gt;
 Apr 25 11:49:03 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffc0a5674e&amp;gt;&amp;#93;&lt;/span&gt; ? cfs_hash_add+0xbe/0x1a0 &lt;span class=&quot;error&quot;&gt;&amp;#91;libcfs&amp;#93;&lt;/span&gt;&lt;br/&gt;
 Apr 25 11:49:03 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffc0e43843&amp;gt;&amp;#93;&lt;/span&gt; ldlm_handle_enqueue0+0x8f3/0x13e0 &lt;span class=&quot;error&quot;&gt;&amp;#91;ptlrpc&amp;#93;&lt;/span&gt;&lt;br/&gt;
 Apr 25 11:49:03 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffc0e69d90&amp;gt;&amp;#93;&lt;/span&gt; ? lustre_swab_ldlm_request+0x0/0x30 &lt;span class=&quot;error&quot;&gt;&amp;#91;ptlrpc&amp;#93;&lt;/span&gt;&lt;br/&gt;
 Apr 25 11:49:03 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffc0ec8572&amp;gt;&amp;#93;&lt;/span&gt; tgt_enqueue+0x62/0x210 &lt;span class=&quot;error&quot;&gt;&amp;#91;ptlrpc&amp;#93;&lt;/span&gt;&lt;br/&gt;
 Apr 25 11:49:03 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffc0ece8ba&amp;gt;&amp;#93;&lt;/span&gt; tgt_request_handle+0x92a/0x13b0 &lt;span class=&quot;error&quot;&gt;&amp;#91;ptlrpc&amp;#93;&lt;/span&gt;&lt;br/&gt;
 Apr 25 11:49:03 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffc0e73f13&amp;gt;&amp;#93;&lt;/span&gt; ptlrpc_server_handle_request+0x253/0xab0 &lt;span class=&quot;error&quot;&gt;&amp;#91;ptlrpc&amp;#93;&lt;/span&gt;&lt;br/&gt;
 Apr 25 11:49:03 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffc0e71aa5&amp;gt;&amp;#93;&lt;/span&gt; ? ptlrpc_wait_event+0xa5/0x360 &lt;span class=&quot;error&quot;&gt;&amp;#91;ptlrpc&amp;#93;&lt;/span&gt;&lt;br/&gt;
 Apr 25 11:49:03 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffff810c7c92&amp;gt;&amp;#93;&lt;/span&gt; ? default_wake_function+0x12/0x20&lt;br/&gt;
 Apr 25 11:49:03 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffff810bdc4b&amp;gt;&amp;#93;&lt;/span&gt; ? __wake_up_common+0x5b/0x90&lt;br/&gt;
 Apr 25 11:49:03 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffc0e77862&amp;gt;&amp;#93;&lt;/span&gt; ptlrpc_main+0xab2/0x1f70 &lt;span class=&quot;error&quot;&gt;&amp;#91;ptlrpc&amp;#93;&lt;/span&gt;&lt;br/&gt;
 Apr 25 11:49:03 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffc0e76db0&amp;gt;&amp;#93;&lt;/span&gt; ? ptlrpc_main+0x0/0x1f70 &lt;span class=&quot;error&quot;&gt;&amp;#91;ptlrpc&amp;#93;&lt;/span&gt;&lt;br/&gt;
 Apr 25 11:49:03 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffff810b4031&amp;gt;&amp;#93;&lt;/span&gt; kthread+0xd1/0xe0&lt;br/&gt;
 Apr 25 11:49:03 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffff810b3f60&amp;gt;&amp;#93;&lt;/span&gt; ? kthread+0x0/0xe0&lt;br/&gt;
 Apr 25 11:49:03 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffff816c155d&amp;gt;&amp;#93;&lt;/span&gt; ret_from_fork+0x5d/0xb0&lt;br/&gt;
 Apr 25 11:49:03 hostname-n03 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffff810b3f60&amp;gt;&amp;#93;&lt;/span&gt; ? kthread+0x0/0xe0&lt;br/&gt;
 Apr 25 11:49:03 hostname-n03 kernel:&lt;br/&gt;
 Apr 25 11:49:03 hostname-n03 kernel: Kernel panic - not syncing: LBUG&lt;/p&gt;</comment>
                            <comment id="247969" author="panda" created="Wed, 29 May 2019 16:05:46 +0000"  >&lt;p&gt;We, at Cray, encountered a bunch of similar crashes which we associated with the broken rwsem implementation in certain RHEL7 kernels.&lt;/p&gt;

&lt;p&gt;&lt;a href=&quot;https://access.redhat.com/solutions/3393611&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://access.redhat.com/solutions/3393611&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;Resolution&lt;br/&gt;
Red Hat Enterprise Linux 7.6&lt;br/&gt;
The issue was fixed in kernel-3.10.0-957.12.1.el7 from Errata RHSA-2019:0818&lt;br/&gt;
Red Hat Enterprise Linux 7.4.z (EUS)&lt;br/&gt;
The issue was fixed in kernel-3.10.0-693.47.2.el7 from Errata RHSA-2019:1170&lt;/p&gt;</comment>
                            <comment id="247972" author="ys" created="Wed, 29 May 2019 16:36:48 +0000"  >&lt;p&gt;Hi, Andrew,&lt;/p&gt;

&lt;p&gt;Thanks for the info. It is really a tricky one.&lt;/p&gt;

&lt;p&gt;Thanks,&lt;br/&gt;
YangSheng&lt;/p&gt;</comment>
                            <comment id="249008" author="pjones" created="Tue, 11 Jun 2019 15:37:43 +0000"  >&lt;p&gt;Red Hat bug not Lustre bug&lt;/p&gt;</comment>
                    </comments>
                <issuelinks>
                            <issuelinktype id="10010">
                    <name>Duplicate</name>
                                                                <inwardlinks description="is duplicated by">
                                        <issuelink>
            <issuekey id="54309">LU-11786</issuekey>
        </issuelink>
            <issuelink>
            <issuekey id="56271">LU-12508</issuekey>
        </issuelink>
                            </inwardlinks>
                                    </issuelinktype>
                            <issuelinktype id="10011">
                    <name>Related</name>
                                                                <inwardlinks description="is related to">
                                        <issuelink>
            <issuekey id="66806">LU-15156</issuekey>
        </issuelink>
                            </inwardlinks>
                                    </issuelinktype>
                    </issuelinks>
                <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|hzzsxj:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>9223372036854775807</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                                                                                </customfields>
    </item>
</channel>
</rss>