<?xml version="1.0" encoding="UTF-8"?>
<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 02:15:52 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92">
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>
    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-8244] BUG: unable to handle kernel paging request at remove_proc_subtree</title>
                <link>https://jira.whamcloud.com/browse/LU-8244</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;I hit this crash twice running Lustre and kernel RPMs from master commit ae6fc0156d1.  It appears to be during the cleanup at the start of conf-sanity.sh.  This is the first I&apos;ve been running any testing in this VM, so it may be caused by some configuration problem.&lt;/p&gt;

&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;[17289.106395] Lustre: DEBUG MARKER: -----============= acceptance-small: conf-sanity ============----- Mon Jun 6 22:44:28 MDT 2016
[17290.211370] Lustre: DEBUG MARKER: excepting tests: 32newtarball 84
[17290.235576] Lustre: DEBUG MARKER: skipping tests SLOW=no: 45 69
[17290.326440] BUG: unable to handle kernel paging request at 000000200000003f
[17290.326703] IP: [&amp;lt;ffffffff8124fd87&amp;gt;] remove_proc_subtree+0x67/0x190
[17290.326870] PGD 0
[17290.327025] Oops: 0000 [#1] SMP
[17290.335098] CPU: 1 PID: 29352 Comm: umount Tainted: P           OE  ------------   3.10.0-327.13.1.el7_lustre.x86_64 #1
[17290.335278] RIP: 0010:[&amp;lt;ffffffff8124fd87&amp;gt;]  [&amp;lt;ffffffff8124fd87&amp;gt;] remove_proc_subtree+0x67/0x190
[17290.336912] Call Trace:
[17290.336987]  [&amp;lt;ffffffff8124fecb&amp;gt;] proc_remove+0x1b/0x20
[17290.337079]  [&amp;lt;ffffffffa0631d65&amp;gt;] lprocfs_remove+0x15/0x20 [obdclass]
[17290.337155]  [&amp;lt;ffffffffa0ff2236&amp;gt;] osp_device_fini+0x186/0x540 [osp]
[17290.337230]  [&amp;lt;ffffffffa064737c&amp;gt;] class_cleanup+0x8dc/0xd70 [obdclass]
[17290.337321]  [&amp;lt;ffffffffa0649d5c&amp;gt;] class_process_config+0x1e2c/0x2f70 [obdclass]
[17290.337542]  [&amp;lt;ffffffffa064af8f&amp;gt;] class_manual_cleanup+0xef/0x810 [obdclass]
[17290.337608]  [&amp;lt;ffffffffa0ff1268&amp;gt;] osp_obd_disconnect+0x178/0x210 [osp]
[17290.337674]  [&amp;lt;ffffffffa0f93ef8&amp;gt;] lod_putref+0x258/0x960 [lod]
[17290.337738]  [&amp;lt;ffffffffa0f9608d&amp;gt;] lod_fini_tgt+0xdd/0x3a0 [lod]
[17290.337802]  [&amp;lt;ffffffffa0f8ac70&amp;gt;] lod_device_fini+0x60/0x1f0 [lod]
[17290.337875]  [&amp;lt;ffffffffa064737c&amp;gt;] class_cleanup+0x8dc/0xd70 [obdclass]
[17290.337949]  [&amp;lt;ffffffffa0649d5c&amp;gt;] class_process_config+0x1e2c/0x2f70 [obdclass]
[17290.338159]  [&amp;lt;ffffffffa064af8f&amp;gt;] class_manual_cleanup+0xef/0x810 [obdclass]
[17290.338224]  [&amp;lt;ffffffffa0f8abff&amp;gt;] lod_obd_disconnect+0x1bf/0x1d0 [lod]
[17290.338289]  [&amp;lt;ffffffffa0e820ba&amp;gt;] mdd_process_config+0x2ba/0x5c0 [mdd]
[17290.338359]  [&amp;lt;ffffffffa0edc4a3&amp;gt;] mdt_stack_fini+0x1b3/0xe00 [mdt]
[17290.338420]  [&amp;lt;ffffffffa0edd816&amp;gt;] mdt_device_fini+0x726/0xfc0 [mdt]
[17290.338496]  [&amp;lt;ffffffffa064737c&amp;gt;] class_cleanup+0x8dc/0xd70 [obdclass]
[17290.338569]  [&amp;lt;ffffffffa0649d5c&amp;gt;] class_process_config+0x1e2c/0x2f70 [obdclass]
[17290.338776]  [&amp;lt;ffffffffa064af8f&amp;gt;] class_manual_cleanup+0xef/0x810 [obdclass]
[17290.338854]  [&amp;lt;ffffffffa067c26e&amp;gt;] server_put_super+0x8de/0xcd0 [obdclass]
[17290.338917]  [&amp;lt;ffffffff811e0ad6&amp;gt;] generic_shutdown_super+0x56/0xe0
[17290.338976]  [&amp;lt;ffffffff811e0eb2&amp;gt;] kill_anon_super+0x12/0x20
[17290.339049]  [&amp;lt;ffffffffa064e6f2&amp;gt;] lustre_kill_super+0x32/0x50 [obdclass]
[17290.339109]  [&amp;lt;ffffffff811e1269&amp;gt;] deactivate_locked_super+0x49/0x60
[17290.339168]  [&amp;lt;ffffffff811e1866&amp;gt;] deactivate_super+0x46/0x60
[17290.339229]  [&amp;lt;ffffffff811fe7d5&amp;gt;] mntput_no_expire+0xc5/0x120
[17290.339289]  [&amp;lt;ffffffff811ff90f&amp;gt;] SyS_umount+0x9f/0x3c0
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;</description>
                <environment>Parallels VM: RHEL7.2 kernel-3.10.0-327.13.1.el7_lustre&lt;br/&gt;
Lustre: 2.8.53_27_gae6fc01-3.10.0-327.13.1.el7_lustre.x86_64</environment>
        <key id="37421">LU-8244</key>
            <summary>BUG: unable to handle kernel paging request at remove_proc_subtree</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="4" iconUrl="https://jira.whamcloud.com/images/icons/priorities/minor.svg">Minor</priority>
                        <status id="5" iconUrl="https://jira.whamcloud.com/images/icons/statuses/resolved.png" description="A resolution has been taken, and it is awaiting verification by reporter. From here issues are either reopened, or are closed.">Resolved</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="5">Cannot Reproduce</resolution>
                                        <assignee username="wc-triage">WC Triage</assignee>
                                    <reporter username="adilger">Andreas Dilger</reporter>
                        <labels>
                    </labels>
                <created>Tue, 7 Jun 2016 05:24:24 +0000</created>
                <updated>Fri, 18 Jun 2021 20:16:16 +0000</updated>
                            <resolved>Fri, 18 Jun 2021 20:16:16 +0000</resolved>
                                    <version>Lustre 2.9.0</version>
                                                        <due></due>
                            <votes>0</votes>
                                    <watches>1</watches>
                                                                            <comments>
                            <comment id="154865" author="adilger" created="Tue, 7 Jun 2016 05:25:44 +0000"  >&lt;p&gt;&lt;a href=&quot;https://jira.whamcloud.com/browse/LU-4532&quot; title=&quot;Test failure on test suite sanity, subtest test_17n (remove_proc_entry: removing non-empty directory &amp;#39;lustre/osc&amp;#39;, leaking at least &amp;#39;lustre-OST0007-osc-MDT0003&amp;#39;)&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-4532&quot;&gt;&lt;del&gt;LU-4532&lt;/del&gt;&lt;/a&gt; has the same stack.&lt;/p&gt;</comment>
                            <comment id="155034" author="adilger" created="Wed, 8 Jun 2016 04:59:50 +0000"  >&lt;p&gt;I checked the other crash, and it had the same bad pointer:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;BUG: unable to handle kernel paging request at 000000200000003f
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;</comment>
                            <comment id="304908" author="adilger" created="Fri, 18 Jun 2021 20:16:16 +0000"  >&lt;p&gt;Have not seen this since initial reports.&lt;/p&gt;</comment>
                    </comments>
                <issuelinks>
                            <issuelinktype id="10011">
                    <name>Related</name>
                                            <outwardlinks description="is related to ">
                                        <issuelink>
            <issuekey id="22858">LU-4532</issuekey>
        </issuelink>
                            </outwardlinks>
                                                        </issuelinktype>
                    </issuelinks>
                <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|hzydtj:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>9223372036854775807</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>