<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 02:59:47 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92" >
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-13262] &quot;(lu_ref.c:257:lu_ref_del()) ASSERTION( 0 ) failed&quot; triggered by jbd2/ldlm tasks, with recent master configured with USE_LU_REF defined </title>
                <link>https://jira.whamcloud.com/browse/LU-13262</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;This problem has been discovered when trying to run with recent master configured with USE_LU_REF defined (&quot;configure --enable-lu_ref&quot;).&lt;/p&gt;

&lt;p&gt;After re-starting/mounting this specific version with an existing FileSystem, I have tried to clean up its content using a &quot;rm -rf /mnt/lustre/*&quot; command and have experienced the following LBUG:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;crash&amp;gt; dmesg | less
.......................................
[84168.041219] Lustre: lustre-OST0001: Connection restored to 73f99590-7799-4 (at 10.8.1.5@tcp)
[84168.053607] Lustre: Skipped 4 previous similar messages
[84247.829852] LustreError: 94510:0:(lu_ref.c:96:lu_ref_print()) lu_ref: ffff8dc74be5c5a0 1 0 ldlm_lock_new:495
[84247.843091] LustreError: 94510:0:(lu_ref.c:98:lu_ref_print())      link: handle ffff8dc7abb78000
[84247.881978] LustreError: 94510:0:(lu_ref.c:257:lu_ref_del()) ASSERTION( 0 ) failed: 
[84247.892251] LustreError: 94510:0:(lu_ref.c:257:lu_ref_del()) LBUG
[84247.900585] Pid: 94510, comm: jbd2/dm-0-8 3.10.0-862.14.4.el7_lustre_ClientSymlink_279c264.x86_64 #1 SMP Thu Oct 17 10:54:24 UTC 2019
[84247.917060] Call Trace:
[84247.921267]  [&amp;lt;ffffffffc0d4f0ec&amp;gt;] libcfs_call_trace+0x8c/0xc0 [libcfs]
[84247.930052]  [&amp;lt;ffffffffc0d4f19c&amp;gt;] lbug_with_loc+0x4c/0xa0 [libcfs]
[84247.938357]  [&amp;lt;ffffffffc0df9ab0&amp;gt;] lu_ref_set_at+0x0/0x160 [obdclass]
[84247.946908]  [&amp;lt;ffffffffc10d5373&amp;gt;] tgt_cancel_slc_locks+0x183/0x1f0 [ptlrpc]
[84247.956058]  [&amp;lt;ffffffffc10d733f&amp;gt;] tgt_cb_last_committed+0x2cf/0x3d0 [ptlrpc]
[84247.965195]  [&amp;lt;ffffffffc1438a43&amp;gt;] osd_trans_commit_cb+0xd3/0x3c0 [osd_ldiskfs]
[84247.974493]  [&amp;lt;ffffffffc13cbf23&amp;gt;] ldiskfs_journal_commit_callback+0x93/0xd0 [ldiskfs]
[84247.984413]  [&amp;lt;ffffffffc03a36a5&amp;gt;] jbd2_journal_commit_transaction+0x1635/0x19b0 [jbd2]
[84247.994415]  [&amp;lt;ffffffffc03a8ab9&amp;gt;] kjournald2+0xc9/0x260 [jbd2]
[84248.002069]  [&amp;lt;ffffffffb02bdf21&amp;gt;] kthread+0xd1/0xe0
[84248.008622]  [&amp;lt;ffffffffb09255f7&amp;gt;] ret_from_fork_nospec_end+0x0/0x39
[84248.016717]  [&amp;lt;ffffffffffffffff&amp;gt;] 0xffffffffffffffff
[84248.023336] Kernel panic - not syncing: LBUG
[84248.029116] CPU: 23 PID: 94510 Comm: jbd2/dm-0-8 Kdump: loaded Tainted: G          IOE  ------------   3.10.0-862.14.4.el7_lustre_ClientSymlink_279c264.x86_64 #1
[84248.047299] Hardware name: Intel Corporation S2600WTT/S2600WTT, BIOS SE5C610.86B.01.01.0008.021120151325 02/11/2015
[84248.060028] Call Trace:
[84248.063811]  [&amp;lt;ffffffffb0913754&amp;gt;] dump_stack+0x19/0x1b
[84248.070599]  [&amp;lt;ffffffffb090d29f&amp;gt;] panic+0xe8/0x21f
[84248.076984]  [&amp;lt;ffffffffc0d4f1eb&amp;gt;] lbug_with_loc+0x9b/0xa0 [libcfs]
[84248.084924]  [&amp;lt;ffffffffc0df9ab0&amp;gt;] lu_ref_del+0x230/0x230 [obdclass]
[84248.092967]  [&amp;lt;ffffffffc10d5373&amp;gt;] tgt_cancel_slc_locks+0x183/0x1f0 [ptlrpc]
[84248.101777]  [&amp;lt;ffffffffc10d733f&amp;gt;] tgt_cb_last_committed+0x2cf/0x3d0 [ptlrpc]
[84248.110644]  [&amp;lt;ffffffffc1438a43&amp;gt;] osd_trans_commit_cb+0xd3/0x3c0 [osd_ldiskfs]
[84248.119399] LustreError: 94525:0:(lu_ref.c:257:lu_ref_del()) ASSERTION( 0 ) failed: 
[84248.119402] LustreError: 94525:0:(lu_ref.c:257:lu_ref_del()) LBUG
[84248.119405] Pid: 94525, comm: ldlm_bl_02 3.10.0-862.14.4.el7_lustre_ClientSymlink_279c264.x86_64 #1 SMP Thu Oct 17 10:54:24 UTC 2019
[84248.119406] Call Trace:
[84248.119442]  [&amp;lt;ffffffffc0d4f0ec&amp;gt;] libcfs_call_trace+0x8c/0xc0 [libcfs]
[84248.119453]  [&amp;lt;ffffffffc0d4f19c&amp;gt;] lbug_with_loc+0x4c/0xa0 [libcfs]
[84248.119513]  [&amp;lt;ffffffffc0df9ab0&amp;gt;] lu_ref_set_at+0x0/0x160 [obdclass]
[84248.119614]  [&amp;lt;ffffffffc10d308c&amp;gt;] tgt_discard_slc_lock+0x8c/0x120 [ptlrpc]
[84248.119642]  [&amp;lt;ffffffffc15c4ac7&amp;gt;] mdt_remote_blocking_ast+0x97/0x5a0 [mdt]
[84248.119695]  [&amp;lt;ffffffffc102efaa&amp;gt;] ldlm_cancel_callback+0x8a/0x2e0 [ptlrpc]
[84248.119754]  [&amp;lt;ffffffffc1046891&amp;gt;] ldlm_cli_cancel_local+0xd1/0x420 [ptlrpc]
[84248.119811]  [&amp;lt;ffffffffc104c76c&amp;gt;] ldlm_cli_cancel+0x12c/0x560 [ptlrpc]
[84248.119838]  [&amp;lt;ffffffffc15c4bb7&amp;gt;] mdt_remote_blocking_ast+0x187/0x5a0 [mdt]
[84248.119896]  [&amp;lt;ffffffffc1058668&amp;gt;] ldlm_handle_bl_callback+0xc8/0x3e0 [ptlrpc]
[84248.119953]  [&amp;lt;ffffffffc105911d&amp;gt;] ldlm_bl_thread_main+0x79d/0x9c0 [ptlrpc]
[84248.119959]  [&amp;lt;ffffffffb02bdf21&amp;gt;] kthread+0xd1/0xe0
[84248.119965]  [&amp;lt;ffffffffb09255f7&amp;gt;] ret_from_fork_nospec_end+0x0/0x39
[84248.119997]  [&amp;lt;ffffffffffffffff&amp;gt;] 0xffffffffffffffff
[84248.269937]  [&amp;lt;ffffffffc13cbf23&amp;gt;] ldiskfs_journal_commit_callback+0x93/0xd0 [ldiskfs]
[84248.279610]  [&amp;lt;ffffffffc03a36a5&amp;gt;] jbd2_journal_commit_transaction+0x1635/0x19b0 [jbd2]
[84248.289367]  [&amp;lt;ffffffffc03a8ab9&amp;gt;] kjournald2+0xc9/0x260 [jbd2]
[84248.296764]  [&amp;lt;ffffffffb02bef10&amp;gt;] ? wake_up_atomic_t+0x30/0x30
[84248.304160]  [&amp;lt;ffffffffc03a89f0&amp;gt;] ? commit_timeout+0x10/0x10 [jbd2]
[84248.312030]  [&amp;lt;ffffffffb02bdf21&amp;gt;] kthread+0xd1/0xe0
[84248.318355]  [&amp;lt;ffffffffb02bde50&amp;gt;] ? insert_kthread_work+0x40/0x40
[84248.326046]  [&amp;lt;ffffffffb09255f7&amp;gt;] ret_from_fork_nospec_begin+0x21/0x21
[84248.334218]  [&amp;lt;ffffffffb02bde50&amp;gt;] ? insert_kthread_work+0x40/0x40
(END)
crash&amp;gt; 
crash&amp;gt; bt
PID: 94510  TASK: ffff8dc8d9761fa0  CPU: 23  COMMAND: &quot;jbd2/dm-0-8&quot;
 #0 [ffff8dc743b17998] machine_kexec at ffffffffb0262a0a
 #1 [ffff8dc743b179f8] __crash_kexec at ffffffffb03166c2
 #2 [ffff8dc743b17ac8] panic at ffffffffb090d2aa
 #3 [ffff8dc743b17b48] lbug_with_loc at ffffffffc0d4f1eb [libcfs]
 #4 [ffff8dc743b17ba0] tgt_cancel_slc_locks at ffffffffc10d5373 [ptlrpc]
 #5 [ffff8dc743b17c08] tgt_cb_last_committed at ffffffffc10d733f [ptlrpc]
 #6 [ffff8dc743b17c20] osd_trans_commit_cb at ffffffffc1438a43 [osd_ldiskfs]
 #7 [ffff8dc743b17c68] ldiskfs_journal_commit_callback at ffffffffc13cbf23 [ldiskfs]
 #8 [ffff8dc743b17ca8] jbd2_journal_commit_transaction at ffffffffc03a36a5 [jbd2]
 #9 [ffff8dc743b17e48] kjournald2 at ffffffffc03a8ab9 [jbd2]
#10 [ffff8dc743b17ec8] kthread at ffffffffb02bdf21
crash&amp;gt; 
crash&amp;gt; bt 94525
PID: 94525  TASK: ffff8dc8862e6eb0  CPU: 11  COMMAND: &quot;ldlm_bl_02&quot;
 #0 [ffff8dc885f1bae0] panic at ffffffffb090d223
 #1 [ffff8dc885f1bb60] lbug_with_loc at ffffffffc0d4f1eb [libcfs]
 #2 [ffff8dc885f1bbb8] tgt_discard_slc_lock at ffffffffc10d308c [ptlrpc]
 #3 [ffff8dc885f1bbe0] mdt_remote_blocking_ast at ffffffffc15c4ac7 [mdt]
 #4 [ffff8dc885f1bc60] ldlm_cancel_callback at ffffffffc102efaa [ptlrpc]
 #5 [ffff8dc885f1bcd0] ldlm_cli_cancel_local at ffffffffc1046891 [ptlrpc]
 #6 [ffff8dc885f1bcf8] ldlm_cli_cancel at ffffffffc104c76c [ptlrpc]
 #7 [ffff8dc885f1bd78] mdt_remote_blocking_ast at ffffffffc15c4bb7 [mdt]
 #8 [ffff8dc885f1bdf8] ldlm_handle_bl_callback at ffffffffc1058668 [ptlrpc]
 #9 [ffff8dc885f1be28] ldlm_bl_thread_main at ffffffffc105911d [ptlrpc]
#10 [ffff8dc885f1bec8] kthread at ffffffffb02bdf21
crash&amp;gt; 
crash&amp;gt; lu_ref_link 0xffff8dc74be64398
struct lu_ref_link {
  ll_ref = 0xffff8dc74be5c5a0, 
  ll_linkage = {
    next = 0xffff8dc74be5c5a8, 
    prev = 0xffff8dc74be5c5a8
  }, 
  ll_scope = 0xffffffffc1134dc0 &quot;handle&quot;, 
  ll_source = 0xffff8dc7abb78000
}
crash&amp;gt; 
crash&amp;gt; bt ffff8dc7abb78000
PID: 95382  TASK: ffff8dc7abb78000  CPU: 26  COMMAND: &quot;mdt01_008&quot;
 #0 [ffff8dc72568bcb8] __schedule at ffffffffb09188c4
 #1 [ffff8dc72568bd40] schedule at ffffffffb0918f39
 #2 [ffff8dc72568bd50] ptlrpc_wait_event at ffffffffc1086c45 [ptlrpc]
 #3 [ffff8dc72568bdf0] ptlrpc_main at ffffffffc108ed6d [ptlrpc]
 #4 [ffff8dc72568bec8] kthread at ffffffffb02bdf21
crash&amp;gt; 
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;
&lt;p&gt;Further crash-dump debugging and associated source code browsing points to the fact that lu_ref_del()/REFASSERT() triggers the !(ref-&amp;gt;lf_failed &amp;gt; 0) error/LBUG condition because lu_ref_find() has not been able to find the original lu_ref_link: it has been tagged using the &quot;current&quot; task_struct pointer, but it is being deleted within another jbd2/ldlm task context.&lt;/p&gt;

&lt;p&gt;So a possible fix could be to not use &quot;current&quot; as the tag, in both the concerned __ldlm_handle2lock() function and the LDLM_LOCK_REF_DEL() macro, but the ldlm_lock address instead... Will push a patch soon to implement this fix and see how it works.&lt;/p&gt;
                <environment></environment>
        <key id="58121">LU-13262</key>
            <summary>&quot;(lu_ref.c:257:lu_ref_del()) ASSERTION( 0 ) failed&quot; triggered by jbd2/ldlm tasks, with recent master configured with USE_LU_REF defined </summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="4" iconUrl="https://jira.whamcloud.com/images/icons/priorities/minor.svg">Minor</priority>
                        <status id="5" iconUrl="https://jira.whamcloud.com/images/icons/statuses/resolved.png" description="A resolution has been taken, and it is awaiting verification by reporter. From here issues are either reopened, or are closed.">Resolved</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="1">Fixed</resolution>
                                        <assignee username="bruno">Bruno Faccini</assignee>
                                    <reporter username="bruno">Bruno Faccini</reporter>
                        <labels>
                    </labels>
                <created>Wed, 19 Feb 2020 13:24:29 +0000</created>
                <updated>Wed, 27 May 2020 14:56:18 +0000</updated>
                            <resolved>Wed, 27 May 2020 14:56:18 +0000</resolved>
                                                    <fixVersion>Lustre 2.14.0</fixVersion>
                                        <due></due>
                            <votes>0</votes>
                                    <watches>2</watches>
                                                                            <comments>
                            <comment id="263563" author="gerrit" created="Wed, 19 Feb 2020 13:51:14 +0000"  >&lt;p&gt;Faccini Bruno (bruno.faccini@intel.com) uploaded a new patch: &lt;a href=&quot;https://review.whamcloud.com/37624&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://review.whamcloud.com/37624&lt;/a&gt;&lt;br/&gt;
Subject: &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-13262&quot; title=&quot;&amp;quot;(lu_ref.c:257:lu_ref_del()) ASSERTION( 0 ) failed&amp;quot; triggered by jbd2/ldlm tasks, with recent master configured with USE_LU_REF defined &quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-13262&quot;&gt;&lt;del&gt;LU-13262&lt;/del&gt;&lt;/a&gt; ldlm: no current source if lu_ref_del not in same tsk&lt;br/&gt;
Project: fs/lustre-release&lt;br/&gt;
Branch: master&lt;br/&gt;
Current Patch Set: 1&lt;br/&gt;
Commit: d6e38b0c63de996e7cb8fc3109c358fa0132eca0&lt;/p&gt;</comment>
                            <comment id="271238" author="gerrit" created="Wed, 27 May 2020 05:04:32 +0000"  >&lt;p&gt;Oleg Drokin (green@whamcloud.com) merged in patch &lt;a href=&quot;https://review.whamcloud.com/37624/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://review.whamcloud.com/37624/&lt;/a&gt;&lt;br/&gt;
Subject: &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-13262&quot; title=&quot;&amp;quot;(lu_ref.c:257:lu_ref_del()) ASSERTION( 0 ) failed&amp;quot; triggered by jbd2/ldlm tasks, with recent master configured with USE_LU_REF defined &quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-13262&quot;&gt;&lt;del&gt;LU-13262&lt;/del&gt;&lt;/a&gt; ldlm: no current source if lu_ref_del not in same tsk&lt;br/&gt;
Project: fs/lustre-release&lt;br/&gt;
Branch: master&lt;br/&gt;
Current Patch Set: &lt;br/&gt;
Commit: 419325a2c9a29e9b372d071992cef5ea9def8cc0&lt;/p&gt;</comment>
                            <comment id="271293" author="pjones" created="Wed, 27 May 2020 14:56:18 +0000"  >&lt;p&gt;Landed for 2.14&lt;/p&gt;</comment>
                    </comments>
                    <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|i00ty7:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>9223372036854775807</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>