<?xml version="1.0" encoding="UTF-8"?>
<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 02:25:31 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92">
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>
    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-9360]  lustre-initialization-1 failed, MDS crash</title>
                <link>https://jira.whamcloud.com/browse/LU-9360</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;ldiskfs: &lt;a href=&quot;https://testing.hpdd.intel.com/test_sessions/3158fa00-d534-4820-84a0-ab9e2f179306&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sessions/3158fa00-d534-4820-84a0-ab9e2f179306&lt;/a&gt;&lt;br/&gt;
zfs: &lt;a href=&quot;https://testing.hpdd.intel.com/test_sessions/be2a47a0-e2ef-485e-952e-d11206fe309c&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sessions/be2a47a0-e2ef-485e-952e-d11206fe309c&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;The latest master build #3558,  el6.8 server/client, MDS crash during the initialization for ldiskfs and zfs&lt;/p&gt;

&lt;p&gt;MDS console&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;01:48:26:Lustre: DEBUG MARKER: /usr/sbin/lctl conf_param lustre.sys.jobid_var=&apos;procname_uid&apos;
01:48:26:Lustre: DEBUG MARKER: /usr/sbin/lctl get_param -n osd-ldiskfs.lustre-MDT0000.quota_slave.enabled
01:48:26:Lustre: DEBUG MARKER: /usr/sbin/lctl conf_param lustre.quota.mdt=ug3
01:48:26:Lustre: DEBUG MARKER: /usr/sbin/lctl conf_param lustre.quota.ost=ug3
01:48:26:BUG: unable to handle kernel NULL pointer dereference at (null)
01:48:26:IP: [&amp;lt;ffffffffa041fca9&amp;gt;] cfs_hash_lookup+0x29/0xa0 [libcfs]
01:48:26:PGD 0 
01:48:26:Oops: 0000 [#1] SMP 
01:48:26:last sysfs file: /sys/devices/system/cpu/online
01:48:26:CPU 0 
01:48:26:Modules linked in: osp(U) mdd(U) lod(U) mdt(U) lfsck(U) mgs(U) mgc(U) osd_ldiskfs(U) lquota(U) lustre(U) lov(U) mdc(U) fid(U) lmv(U) fld(U) ksocklnd(U) ptlrpc(U) obdclass(U) lnet(U) ldiskfs(U) jbd2 crc32c_intel libcfs(U) nfsd exportfs autofs4 nfs lockd fscache auth_rpcgss nfs_acl sunrpc ib_ipoib rdma_ucm ib_ucm ib_uverbs ib_umad rdma_cm ib_cm iw_cm ib_sa ib_mad ib_core ib_addr ipv6 microcode virtio_balloon 8139too 8139cp mii i2c_piix4 i2c_core ext3 jbd mbcache virtio_blk virtio_pci virtio_ring virtio pata_acpi ata_generic ata_piix dm_mirror dm_region_hash dm_log dm_mod [last unloaded: speedstep_lib]
01:48:26:
01:48:26:Pid: 4252, comm: mdt00_002 Not tainted 2.6.32-642.13.1.el6_lustre.x86_64 #1 Red Hat KVM
01:48:26:RIP: 0010:[&amp;lt;ffffffffa041fca9&amp;gt;]  [&amp;lt;ffffffffa041fca9&amp;gt;] cfs_hash_lookup+0x29/0xa0 [libcfs]
01:48:26:RSP: 0018:ffff880059fa7b00  EFLAGS: 00010246
01:48:26:RAX: 0000000000000000 RBX: ffff880041ffe490 RCX: 0000000000000000
01:48:26:RDX: ffff880066263e20 RSI: 0000000000000000 RDI: ffff880041ffe490
01:48:26:RBP: ffff880059fa7b40 R08: 0000000000000003 R09: ffff880066263e20
01:48:26:R10: ffff880045058000 R11: 0000000000000400 R12: ffff880059fa7b00
01:48:26:R13: ffff880066263e20 R14: ffff880066263e20 R15: ffff880066270bc0
01:48:26:FS:  0000000000000000(0000) GS:ffff880002200000(0000) knlGS:0000000000000000
01:48:26:CS:  0010 DS: 0018 ES: 0018 CR0: 000000008005003b
01:48:26:CR2: 0000000000000000 CR3: 0000000037d9f000 CR4: 00000000000406f0
01:48:26:DR0: 0000000000000000 DR1: 0000000000000000 DR2: 0000000000000000
01:48:26:DR3: 0000000000000000 DR6: 00000000ffff0ff0 DR7: 0000000000000400
01:48:26:Process mdt00_002 (pid: 4252, threadinfo ffff880059fa4000, task ffff880059fc6ab0)
01:48:26:Stack:
01:48:26: ffff880065eb1b40 0000000a00000000 0000000000000000 0000000000000000
01:48:26:&amp;lt;d&amp;gt; ffff880059fa7b60 ffff880065f3cb00 ffff880066263e20 ffff880067fdfdc0
01:48:26:&amp;lt;d&amp;gt; ffff880059fa7b80 ffffffffa0c8dc18 0000000000000150 ffff880065f3cb00
01:48:26:Call Trace:
01:48:26: [&amp;lt;ffffffffa0c8dc18&amp;gt;] lqe_locate+0x48/0x7b0 [lquota]
01:48:26: [&amp;lt;ffffffffa0caf07b&amp;gt;] qmt_pool_lqe_lookup+0x1ab/0x260 [lquota]
01:48:26: [&amp;lt;ffffffffa0ca5747&amp;gt;] qmt_set.clone.0+0x67/0x700 [lquota]
01:48:26: [&amp;lt;ffffffffa080e06b&amp;gt;] ? lustre_pack_reply_v2+0x1eb/0x280 [ptlrpc]
01:48:26: [&amp;lt;ffffffffa080ce15&amp;gt;] ? lustre_msg_buf+0x55/0x60 [ptlrpc]
01:48:26: [&amp;lt;ffffffffa0834732&amp;gt;] ? __req_capsule_get+0x162/0x6e0 [ptlrpc]
01:48:26: [&amp;lt;ffffffffa0ca6270&amp;gt;] qmt_quotactl+0x490/0x5b0 [lquota]
01:48:26: [&amp;lt;ffffffffa0eeb9b1&amp;gt;] mdt_quotactl+0x611/0x780 [mdt]
01:48:26: [&amp;lt;ffffffffa0874a3c&amp;gt;] tgt_request_handle+0x8ec/0x1440 [ptlrpc]
01:48:26: [&amp;lt;ffffffffa081d83b&amp;gt;] ptlrpc_server_handle_request+0x2eb/0xbd0 [ptlrpc]
01:48:26: [&amp;lt;ffffffffa0818639&amp;gt;] ? ptlrpc_wait_event+0xa9/0x2e0 [ptlrpc]
01:48:26: [&amp;lt;ffffffffa081ebe1&amp;gt;] ptlrpc_main+0xac1/0x18d0 [ptlrpc]
01:48:26: [&amp;lt;ffffffffa081e120&amp;gt;] ? ptlrpc_main+0x0/0x18d0 [ptlrpc]
01:48:26: [&amp;lt;ffffffff810a640e&amp;gt;] kthread+0x9e/0xc0
01:48:26: [&amp;lt;ffffffff8100c28a&amp;gt;] child_rip+0xa/0x20
01:48:26: [&amp;lt;ffffffff810a6370&amp;gt;] ? kthread+0x0/0xc0
01:48:26: [&amp;lt;ffffffff8100c280&amp;gt;] ? child_rip+0x0/0x20
01:48:26:Code: 00 00 55 48 89 e5 48 83 ec 40 48 89 5d e8 4c 89 65 f0 4c 89 6d f8 0f 1f 44 00 00 48 8b 47 10 4c 8d 65 c0 48 89 fb 49 89 f5 31 f6 &amp;lt;ff&amp;gt; 10 4c 89 ee 4c 89 e2 48 89 df e8 77 e8 ff ff 31 d2 4c 89 e6 
01:48:26:RIP  [&amp;lt;ffffffffa041fca9&amp;gt;] cfs_hash_lookup+0x29/0xa0 [libcfs]
01:48:26: RSP &amp;lt;ffff880059fa7b00&amp;gt;
01:48:26:CR2: 0000000000000000
01:48:26:Initializing cgroup subsys cpuset
01:48:26:Initializing cgroup subsys cpu
01:48:26:Linux version 2.6.32-642.13.1.el6_lustre.x86_64 (jenkins@trevis-306-el6-x8664-1.trevis.hpdd.intel.com) (gcc version 4.4.7 20120313 (Red Hat 4.4.7-17) (GCC) ) #1 SMP Wed Apr 5 06:19:32 UTC 2017
01:48:26:Command line: ro root=UUID=48d1ab93-ef14-4cf3-888d-bcd55104d5e8 rd_NO_LUKS rd_NO_LVM LANG=en_US.UTF-8 rd_NO_MD console=tty0 SYSFONT=latarcyrheb-sun16 KEYBOARDTYPE=pc KEYTABLE=us rd_NO_DM console=ttyS0,115200 irqpoll nr_cpus=1 reset_devices cgroup_disable=memory mce=off acpi_no_memhotplug disable_cpu_apicid=0 memmap=exactmap memmap=627K@4K memmap=131449K@49779K elfcorehdr=181228K memmap=4K$0K memmap=9K$631K memmap=64K$960K memmap=12K$2097140K memmap=272K$4194032K
01:48:26:KERNEL supported cpus:
01:48:26:  Intel GenuineIntel
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;</description>
                <environment>master build #3558,  el6.8 server/client&lt;br/&gt;
</environment>
        <key id="45561">LU-9360</key>
            <summary> lustre-initialization-1 failed, MDS crash</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="4" iconUrl="https://jira.whamcloud.com/images/icons/priorities/minor.svg">Minor</priority>
                        <status id="5" iconUrl="https://jira.whamcloud.com/images/icons/statuses/resolved.png" description="A resolution has been taken, and it is awaiting verification by reporter. From here issues are either reopened, or are closed.">Resolved</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="3">Duplicate</resolution>
                                        <assignee username="wc-triage">WC Triage</assignee>
                                    <reporter username="sarah">Sarah Liu</reporter>
                        <labels>
                    </labels>
                <created>Tue, 18 Apr 2017 17:50:25 +0000</created>
                <updated>Tue, 25 Apr 2017 08:23:46 +0000</updated>
                            <resolved>Tue, 25 Apr 2017 08:23:46 +0000</resolved>
                                    <version>Lustre 2.10.0</version>
                                                        <due></due>
                            <votes>0</votes>
                                    <watches>4</watches>
                                                                            <comments>
                            <comment id="193275" author="adilger" created="Mon, 24 Apr 2017 20:14:41 +0000"  >&lt;p&gt;This looks like it is quota related.&lt;/p&gt;</comment>
                            <comment id="193331" author="wangshilong" created="Tue, 25 Apr 2017 03:34:22 +0000"  >&lt;p&gt;I think the problem has been fixed by:&lt;br/&gt;
 &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-9339&quot; title=&quot;fix RHEL 7.2 project quota build error&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-9339&quot;&gt;&lt;del&gt;LU-9339&lt;/del&gt;&lt;/a&gt; quota: Replace MAXQUOTAS usage with LL_MAXQUOTAS&lt;/p&gt;</comment>
                            <comment id="193346" author="adilger" created="Tue, 25 Apr 2017 08:22:35 +0000"  >&lt;p&gt;The tests that have failed with this bug are:&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/145ed7bc-23e2-11e7-9de9-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/145ed7bc-23e2-11e7-9de9-5254006e85c2&lt;/a&gt;  e9b0c8b55c100e1&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/3b685118-2472-11e7-b742-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/3b685118-2472-11e7-b742-5254006e85c2&lt;/a&gt;  e9b0c8b55c100e1&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/7da97e64-2452-11e7-b742-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/7da97e64-2452-11e7-b742-5254006e85c2&lt;/a&gt;  e9b0c8b55c100e1&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/f1b0561c-23fc-11e7-b742-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/f1b0561c-23fc-11e7-b742-5254006e85c2&lt;/a&gt;  e9b0c8b55c100e1&lt;/p&gt;

&lt;p&gt;Which are all for the same commit, which is one patch before the &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-9339&quot; title=&quot;fix RHEL 7.2 project quota build error&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-9339&quot;&gt;&lt;del&gt;LU-9339&lt;/del&gt;&lt;/a&gt; fix.&lt;/p&gt;</comment>
                    </comments>
                <issuelinks>
                            <issuelinktype id="10010">
                    <name>Duplicate</name>
                                            <outwardlinks description="duplicates">
                                        <issuelink>
            <issuekey id="45496">LU-9339</issuekey>
        </issuelink>
                            </outwardlinks>
                                                        </issuelinktype>
                    </issuelinks>
                <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|hzzaj3:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>9223372036854775807</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>