<?xml version="1.0" encoding="UTF-8"?>
<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 01:52:04 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92">
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>
    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-5507] sanity-quota test_18: Oops: IP: lustre_msg_get_opc+0xe/0x110 [ptlrpc]</title>
                <link>https://jira.whamcloud.com/browse/LU-5507</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;While running sanity-quota test 18, one of the client nodes hit the following error:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;[60756.462327] BUG: unable to handle kernel NULL pointer dereference at 0000000000000007^M
[60756.465418] IP: [&amp;lt;ffffffffa088a9d1&amp;gt;] lustre_msg_get_opc+0x1/0x100 [ptlrpc]^M
[60756.466234] PGD 0 ^M
[60756.466234] Oops: 0000 [#1] SMP ^M
[60756.466234] CPU 0 ^M
[60756.466234] Modules linked in: lustre(EN) obdecho(EN) mgc(EN) lov(EN) osc(EN) mdc(EN) lmv(EN) fid(EN) fld(EN) ptlrpc(EN) obdclass(EN) lvfs(EN) ksocklnd(EN) lnet(EN) libcfs(EN) ext2 sha512_generic sha1_generic md5 crc32c nfs lockd fscache auth_rpcgss nfs_acl sunrpc rdma_ucm rdma_cm iw_cm ib_addr ib_srp scsi_transport_srp scsi_tgt ib_ipoib ib_cm ib_uverbs ib_umad iw_cxgb3 cxgb3 mdio mlx4_en mlx4_ib ib_sa mlx4_core ib_mthca ib_mad ib_core mperf loop dm_mod floppy 8139too ipv6 ipv6_lib rtc_cmos pcspkr virtio_balloon i2c_piix4 8139cp mii button ttm drm_kms_helper drm i2c_core sysimgblt sysfillrect syscopyarea uhci_hcd ehci_hcd usbcore usb_common intel_agp intel_gtt scsi_dh_emc scsi_dh_rdac scsi_dh_alua scsi_dh_hp_sw scsi_dh virtio_pci ata_generic virtio_blk virtio virtio_ring ata_piix edd ext3 mbcache jbd fan processor ahci libahci libata scsi_mod thermal thermal_sys hwmon [last unloaded: libcfs]^M
[60756.466234] Supported: No, Unsupported modules are loaded^M
[60756.466234] ^M
[60756.466234] Pid: 12735, comm: ptlrpcd_rcv Tainted: G           EN  3.0.101-0.35-default #1 Red Hat KVM^M
[60756.466234] RIP: 0010:[&amp;lt;ffffffffa088a9d1&amp;gt;]  [&amp;lt;ffffffffa088a9d1&amp;gt;] lustre_msg_get_opc+0x1/0x100 [ptlrpc]^M
[60756.466234] RSP: 0018:ffff880078f3dcb0  EFLAGS: 00010286^M
[60756.466234] RAX: ffff8800201efa08 RBX: 0000000000000000 RCX: 0000000000000002^M
[60756.466234] RDX: 0000000000000002 RSI: 0000000000000000 RDI: ffffffffffffffff^M
[60756.466234] RBP: ffff88006a655500 R08: ffff8800201efa08 R09: 00000000000000d8^M
[60756.466234] R10: 000000000000000a R11: 0000000000000000 R12: ffff88006295c800^M
[60756.466234] R13: ffff8800201efa08 R14: ffff880079dcbee0 R15: ffff88006e9838f0^M
[60756.466234] FS:  0000000000000000(0000) GS:ffff88007fc00000(0000) knlGS:0000000000000000^M
[60756.494980] CS:  0010 DS: 0000 ES: 0000 CR0: 000000008005003b^M
[60756.494980] CR2: 0000000000000007 CR3: 000000007ae8a000 CR4: 00000000000006f0^M
[60756.494980] DR0: 0000000000000000 DR1: 0000000000000000 DR2: 0000000000000000^M
[60756.494980] DR3: 0000000000000000 DR6: 00000000ffff0ff0 DR7: 0000000000000400^M
[60756.494980] Process ptlrpcd_rcv (pid: 12735, threadinfo ffff880078f3c000, task ffff880017dde540)^M
[60756.494980] Stack:^M
[60756.494980]  0000000000000000 ffffffffa099d04b ffff880079dcbc00 000000c10002ee7e^M
[60756.494980]  ffff880079dcbc00 000000c10002ee7e ffff880079dcbc00 ffff8800290c0a88^M
[60756.494980]  ffff8800290c0800 ffffffffa087eb5a 00000000ebc0de01 ffff880079dcbc00^M
[60756.494980] Call Trace:^M
[60756.494980]  [&amp;lt;ffffffffa099d04b&amp;gt;] mdc_replay_open+0xab/0x430 [mdc]^M
[60756.494980]  [&amp;lt;ffffffffa087eb5a&amp;gt;] ptlrpc_replay_interpret+0x14a/0x740 [ptlrpc]^M
[60756.494980]  [&amp;lt;ffffffffa0880452&amp;gt;] ptlrpc_check_set+0x532/0x1b30 [ptlrpc]^M
[60756.494980]  [&amp;lt;ffffffffa08abdcb&amp;gt;] ptlrpcd_check+0x52b/0x550 [ptlrpc]^M
[60756.494980]  [&amp;lt;ffffffffa08ac32b&amp;gt;] ptlrpcd+0x24b/0x3b0 [ptlrpc]^M
[60756.494980]  [&amp;lt;ffffffff810829a6&amp;gt;] kthread+0x96/0xa0^M
[60756.494980]  [&amp;lt;ffffffff8146b164&amp;gt;] kernel_thread_helper+0x4/0x10^M
[60756.494980] Code: 89 44 24 48 48 83 c4 58 4c 89 e0 5b 5d 41 5c 41 5d 41 5e 41 5f c3 45 31 ed e9 fb fe ff ff 66 66 66 2e 0f 1f 84 00 00 00 00 00 53 ^M
[60756.494980]  7f 08 d3 0b d0 0b 48 89 fb 74 73 c7 05 49 0[    0.000000] Initializing cgroup subsys cpuset^M
[    0.000000] Initializing cgroup subsys cpu^M
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;Maloo report: &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/4f4c437a-268b-11e4-84f2-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/4f4c437a-268b-11e4-84f2-5254006e85c2&lt;/a&gt;&lt;/p&gt;</description>
                <environment>Lustre Build: &lt;a href=&quot;https://build.hpdd.intel.com/job/lustre-b2_5/80/&quot;&gt;https://build.hpdd.intel.com/job/lustre-b2_5/80/&lt;/a&gt;&lt;br/&gt;
Distro/Arch: SLES11SP3/x86_64 (client), RHEL6.5/x86_64 (server)</environment>
        <key id="26074">LU-5507</key>
            <summary>sanity-quota test_18: Oops: IP: lustre_msg_get_opc+0xe/0x110 [ptlrpc]</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="1" iconUrl="https://jira.whamcloud.com/images/icons/priorities/blocker.svg">Blocker</priority>
                        <status id="5" iconUrl="https://jira.whamcloud.com/images/icons/statuses/resolved.png" description="A resolution has been taken, and it is awaiting verification by reporter. From here issues are either reopened, or are closed.">Resolved</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="1">Fixed</resolution>
                                        <assignee username="niu">Niu Yawei</assignee>
                                    <reporter username="yujian">Jian Yu</reporter>
                        <labels>
                    </labels>
                <created>Wed, 20 Aug 2014 06:38:28 +0000</created>
                <updated>Tue, 9 Jun 2015 17:17:24 +0000</updated>
                            <resolved>Mon, 5 Jan 2015 03:50:23 +0000</resolved>
                                    <version>Lustre 2.5.3</version>
                                    <fixVersion>Lustre 2.7.0</fixVersion>
                                        <due></due>
                            <votes>0</votes>
                                    <watches>7</watches>
                                                                            <comments>
                            <comment id="92013" author="yujian" created="Wed, 20 Aug 2014 07:27:31 +0000"  >&lt;p&gt;Lustre client build: &lt;a href=&quot;https://build.hpdd.intel.com/job/lustre-b2_5/80/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://build.hpdd.intel.com/job/lustre-b2_5/80/&lt;/a&gt;&lt;br/&gt;
Lustre server build: &lt;a href=&quot;https://build.hpdd.intel.com/job/lustre-b2_4/73/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://build.hpdd.intel.com/job/lustre-b2_4/73/&lt;/a&gt; (2.4.3)&lt;br/&gt;
Distro/Arch: RHEL6.5/x86_64&lt;/p&gt;

&lt;p&gt;The same failure occurred: &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/ea35137e-266f-11e4-8ee8-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/ea35137e-266f-11e4-8ee8-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="92114" author="yujian" created="Thu, 21 Aug 2014 07:11:06 +0000"  >&lt;p&gt;So far, the failure has not occurred in Lustre b2_5 build #82 and #83.&lt;/p&gt;</comment>
                            <comment id="92899" author="yujian" created="Sun, 31 Aug 2014 07:51:09 +0000"  >&lt;p&gt;Lustre Build: &lt;a href=&quot;https://build.hpdd.intel.com/job/lustre-b2_5/86/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://build.hpdd.intel.com/job/lustre-b2_5/86/&lt;/a&gt; (2.5.3 RC1)&lt;/p&gt;

&lt;p&gt;The same failure occurred: &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/651d9592-30da-11e4-b503-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/651d9592-30da-11e4-b503-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="98264" author="pjones" created="Tue, 4 Nov 2014 04:12:31 +0000"  >&lt;p&gt;This seems to occur sometimes. Any idea why?&lt;/p&gt;</comment>
                            <comment id="98850" author="niu" created="Tue, 11 Nov 2014 02:33:23 +0000"  >&lt;p&gt;Seems it&apos;s a race of close vs. open replay, that&apos;s introduced in the fix of &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-2613&quot; title=&quot;opening and closing file can generate &amp;#39;unreclaimable slab&amp;#39; space&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-2613&quot;&gt;&lt;del&gt;LU-2613&lt;/del&gt;&lt;/a&gt; (4322e0f9): To free the queued open &amp;amp; close requests promptly, we free them on file close, however, replay open may jump in at this time to fix the stale open handle on the open &amp;amp; close requests. I&apos;m going to post a patch soon.&lt;/p&gt;</comment>
                            <comment id="98855" author="niu" created="Tue, 11 Nov 2014 05:57:22 +0000"  >&lt;p&gt;&lt;a href=&quot;http://review.whamcloud.com/12667&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://review.whamcloud.com/12667&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="100532" author="gerrit" created="Wed, 3 Dec 2014 02:25:12 +0000"  >&lt;p&gt;Oleg Drokin (oleg.drokin@intel.com) merged in patch &lt;a href=&quot;http://review.whamcloud.com/12667/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://review.whamcloud.com/12667/&lt;/a&gt;&lt;br/&gt;
Subject: &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-5507&quot; title=&quot;sanity-quota test_18: Oops: IP: lustre_msg_get_opc+0xe/0x110 [ptlrpc]&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-5507&quot;&gt;&lt;del&gt;LU-5507&lt;/del&gt;&lt;/a&gt; recovery: don&apos;t replay closed open&lt;br/&gt;
Project: fs/lustre-release&lt;br/&gt;
Branch: master&lt;br/&gt;
Current Patch Set: &lt;br/&gt;
Commit: cfbfcc6ad9ebb5893be2d1e85fc959794fd914ed&lt;/p&gt;</comment>
                            <comment id="102516" author="niu" created="Mon, 5 Jan 2015 03:50:23 +0000"  >&lt;p&gt;patch landed on master.&lt;/p&gt;</comment>
                    </comments>
                <issuelinks>
                            <issuelinktype id="10011">
                    <name>Related</name>
                                            <outwardlinks description="is related to ">
                                                        </outwardlinks>
                                                                <inwardlinks description="is related to">
                                        <issuelink>
            <issuekey id="25083">LU-5169</issuekey>
        </issuelink>
                            </inwardlinks>
                                    </issuelinktype>
                    </issuelinks>
                <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|hzwu2v:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>15364</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>