<?xml version="1.0" encoding="UTF-8"?>
<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 03:15:15 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92">
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
<language>en-us</language>
    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-15078] sanity-sec test_26: nodemap_config_set_active()) ASSERTION( config-&gt;nmc_default_nodemap ) failed</title>
                <link>https://jira.whamcloud.com/browse/LU-15078</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;div class=&quot;code panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;codeContent panelContent&quot;&gt;
&lt;pre class=&quot;code-java&quot;&gt;
[  394.285494] Lustre: DEBUG MARKER: == sanity-sec test 26: test transferring very large nodemap ========================================================== 17:02:01 (1633539721)
[ 1774.903911] LustreError: 11647:0:(nodemap_handler.c:1695:nodemap_config_set_active()) ASSERTION( config-&amp;gt;nmc_default_nodemap ) failed: 
[ 1774.908810] LustreError: 11647:0:(nodemap_handler.c:1695:nodemap_config_set_active()) LBUG
[ 1774.911038] Pid: 11647, comm: ll_cfg_requeue 4.18.0-193.19.1.el8_2.x86_64 #1 SMP Mon Sep 14 14:37:00 UTC 2020
[ 1774.913568] Call Trace TBD:
[ 1774.915018] [&amp;lt;0&amp;gt;] libcfs_call_trace+0x6f/0x90 [libcfs]
[ 1774.916666] [&amp;lt;0&amp;gt;] lbug_with_loc+0x43/0x80 [libcfs]
[ 1774.918650] [&amp;lt;0&amp;gt;] nodemap_config_set_active+0x2a6/0x2b0 [ptlrpc]
[ 1774.920492] [&amp;lt;0&amp;gt;] nodemap_config_set_active_mgc+0x3a/0x210 [ptlrpc]
[ 1774.922355] [&amp;lt;0&amp;gt;] mgc_process_recover_nodemap_log+0x10d1/0x1200 [mgc]
[ 1774.924222] [&amp;lt;0&amp;gt;] mgc_process_log+0x76e/0x800 [mgc]
[ 1774.925804] [&amp;lt;0&amp;gt;] mgc_requeue_thread+0x29e/0x740 [mgc]
[ 1774.927470] [&amp;lt;0&amp;gt;] kthread+0x112/0x130
[ 1774.928857] [&amp;lt;0&amp;gt;] ret_from_fork+0x35/0x40
[ 1774.930299] Kernel panic - not syncing: LBUG
[ 1774.931747] CPU: 0 PID: 11647 Comm: ll_cfg_requeue Tainted: G           OE    ---------r-t - 4.18.0-193.19.1.el8_2.x86_64 #1
[ 1774.934471] Hardware name: Red Hat KVM, BIOS 0.5.1 01/01/2011
[ 1774.936179] Call Trace:
[ 1774.937345]  dump_stack+0x5c/0x80
[ 1774.938612]  panic+0xe7/0x2a9
[ 1774.939788]  ? ret_from_fork+0x35/0x40
[ 1774.941091]  lbug_with_loc.cold.10+0x18/0x18 [libcfs]
[ 1774.942677]  ? nodemap_config_set_active+0x29a/0x2b0 [ptlrpc]
[ 1774.944384]  nodemap_config_set_active+0x2a6/0x2b0 [ptlrpc]
[ 1774.946044]  nodemap_config_set_active_mgc+0x3a/0x210 [ptlrpc]
[ 1774.947719]  ? kmem_cache_free+0x18c/0x1b0
[ 1774.949043]  mgc_process_recover_nodemap_log+0x10d1/0x1200 [mgc]
[ 1774.950710]  mgc_process_log+0x76e/0x800 [mgc]
[ 1774.952075]  mgc_requeue_thread+0x29e/0x740 [mgc]
[ 1774.953524]  ? finish_wait+0x80/0x80
[ 1774.954730]  ? mgc_process_config+0xe60/0xe60 [mgc]
[ 1774.956172]  kthread+0x112/0x130
[ 1774.957304]  ? kthread_flush_work_fn+0x10/0x10
[ 1774.958636]  ret_from_fork+0x35/0x40
[ 1774.960354] Kernel Offset: 0x2cc00000 from 0xffffffff81000000 (relocation range: 0xffffffff80000000-0xffffffffbfffffff)
[ 1774.962825] ---[ end Kernel panic - not syncing: LBUG ]---
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;</description>
                <environment></environment>
        <key id="66575">LU-15078</key>
            <summary>sanity-sec test_26: nodemap_config_set_active()) ASSERTION( config-&gt;nmc_default_nodemap ) failed</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="4" iconUrl="https://jira.whamcloud.com/images/icons/priorities/minor.svg">Minor</priority>
                        <status id="1" iconUrl="https://jira.whamcloud.com/images/icons/statuses/open.png" description="The issue is open and ready for the assignee to start work on it.">Open</status>
                    <statusCategory id="2" key="new" colorName="default"/>
                                    <resolution id="-1">Unresolved</resolution>
                                        <assignee username="sebastien">Sebastien Buisson</assignee>
                                    <reporter username="egryaznova">Elena Gryaznova</reporter>
                        <labels>
                    </labels>
                <created>Mon, 11 Oct 2021 11:40:55 +0000</created>
                <updated>Thu, 30 Nov 2023 22:37:51 +0000</updated>
                                            <version>Lustre 2.12.8</version>
                    <version>Lustre 2.15.0</version>
                                                        <due></due>
                            <votes>0</votes>
                                    <watches>6</watches>
                                                                            <comments>
                            <comment id="318999" author="JIRAUSER17102" created="Tue, 23 Nov 2021 18:06:21 +0000"  >&lt;p&gt;I found similar crash in 2.12.8 tests&#160;&lt;a href=&quot;https://testing.whamcloud.com/test_sets/a6e88cc1-a192-40de-8a3a-1afe126db426&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.whamcloud.com/test_sets/a6e88cc1-a192-40de-8a3a-1afe126db426&lt;/a&gt;&lt;/p&gt;
&lt;div class=&quot;code panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;codeContent panelContent&quot;&gt;
&lt;pre class=&quot;code-java&quot;&gt;
[22430.721269] LustreError: 1203:0:(nodemap_handler.c:1663:nodemap_config_set_active()) ASSERTION( config-&amp;gt;nmc_default_nodemap ) failed: 
[22430.723320] LustreError: 1203:0:(nodemap_handler.c:1663:nodemap_config_set_active()) LBUG
[22430.724640] Pid: 1203, comm: ll_cfg_requeue 3.10.0-1160.45.1.el7_lustre.x86_64 #1 SMP Thu Nov 18 19:16:15 UTC 2021
[22430.726275] Call Trace:
[22430.726741]  [&amp;lt;ffffffffc09a87cc&amp;gt;] libcfs_call_trace+0x8c/0xc0 [libcfs]
[22430.727910]  [&amp;lt;ffffffffc09a887c&amp;gt;] lbug_with_loc+0x4c/0xa0 [libcfs]
[22430.728940]  [&amp;lt;ffffffffc0f1a497&amp;gt;] nodemap_config_set_active+0x2a7/0x2e0 [ptlrpc]
[22430.730566]  [&amp;lt;ffffffffc0f24728&amp;gt;] nodemap_config_set_active_mgc+0x38/0x1e0 [ptlrpc]
[22430.731873]  [&amp;lt;ffffffffc114a796&amp;gt;] mgc_process_recover_nodemap_log+0x316/0xe70 [mgc]
[22430.733151]  [&amp;lt;ffffffffc114d8de&amp;gt;] mgc_process_log+0x7be/0x8f0 [mgc]
[22430.734184]  [&amp;lt;ffffffffc114f988&amp;gt;] mgc_requeue_thread+0x2c8/0x870 [mgc]
[22430.735270]  [&amp;lt;ffffffffabec5e61&amp;gt;] kthread+0xd1/0xe0
[22430.736131]  [&amp;lt;ffffffffac595df7&amp;gt;] ret_from_fork_nospec_end+0x0/0x39
[22430.737192]  [&amp;lt;ffffffffffffffff&amp;gt;] 0xffffffffffffffff
[22430.738233] Kernel panic - not syncing: LBUG
[22430.738932] CPU: 1 PID: 1203 Comm: ll_cfg_requeue Kdump: loaded Tainted: G           OE  ------------   3.10.0-1160.45.1.el7_lustre.x86_64 #1
[22430.740918] Hardware name: Red Hat KVM, BIOS 0.5.1 01/01/2011
[22430.741827] Call Trace:
[22430.742248]  [&amp;lt;ffffffffac583539&amp;gt;] dump_stack+0x19/0x1b
[22430.743084]  [&amp;lt;ffffffffac57d241&amp;gt;] panic+0xe8/0x21f
[22430.743859]  [&amp;lt;ffffffffc09a88cb&amp;gt;] lbug_with_loc+0x9b/0xa0 [libcfs]
[22430.744865]  [&amp;lt;ffffffffc0f1a497&amp;gt;] nodemap_config_set_active+0x2a7/0x2e0 [ptlrpc]
[22430.746064]  [&amp;lt;ffffffffc0f24728&amp;gt;] nodemap_config_set_active_mgc+0x38/0x1e0 [ptlrpc]
[22430.747310]  [&amp;lt;ffffffffc0e77d80&amp;gt;] ? ptlrpc_request_cache_free+0x90/0x1d0 [ptlrpc]
[22430.748525]  [&amp;lt;ffffffffc0e79045&amp;gt;] ? __ptlrpc_req_finished+0x515/0x790 [ptlrpc]
[22430.749681]  [&amp;lt;ffffffffc114a796&amp;gt;] mgc_process_recover_nodemap_log+0x316/0xe70 [mgc]
[22430.750888]  [&amp;lt;ffffffffc114d8de&amp;gt;] mgc_process_log+0x7be/0x8f0 [mgc]
[22430.751878]  [&amp;lt;ffffffffac586d90&amp;gt;] ? schedule_timeout+0x170/0x2d0
[22430.752845]  [&amp;lt;ffffffffc09af177&amp;gt;] ? libcfs_debug_msg+0x57/0x80 [libcfs]
[22430.753912]  [&amp;lt;ffffffffc114f988&amp;gt;] mgc_requeue_thread+0x2c8/0x870 [mgc]
[22430.754965]  [&amp;lt;ffffffffabedadf0&amp;gt;] ? wake_up_state+0x20/0x20
[22430.755857]  [&amp;lt;ffffffffc114f6c0&amp;gt;] ? mgc_process_config+0x13f0/0x13f0 [mgc]
[22430.756935]  [&amp;lt;ffffffffabec5e61&amp;gt;] kthread+0xd1/0xe0
[22430.757712]  [&amp;lt;ffffffffabec5d90&amp;gt;] ? insert_kthread_work+0x40/0x40
[22430.758675]  [&amp;lt;ffffffffac595df7&amp;gt;] ret_from_fork_nospec_begin+0x21/0x21
[22430.759703]  [&amp;lt;ffffffffabec5d90&amp;gt;] ? insert_kthread_work+0x40/0x40 &lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;</comment>
                            <comment id="319265" author="pjones" created="Fri, 26 Nov 2021 22:42:27 +0000"  >&lt;p&gt;Sebastien&lt;/p&gt;

&lt;p&gt;Should this issue be a concern?&lt;/p&gt;

&lt;p&gt;Peter&lt;/p&gt;</comment>
                            <comment id="319313" author="sebastien" created="Mon, 29 Nov 2021 08:33:10 +0000"  >&lt;p&gt;I do not think this issue should be considered a blocker for 2.15. On master, it occurred only once in the past 4 weeks, and only when running full-dne-part-2 test session. All other sessions that run sanity-sec, like review-dne-part-2, review-dne-zfs-part-2 and review-dne-selinux-ssk-part-2, did not hit this bug.&lt;br/&gt;
So I would tend to think this issue is provoked by something not totally clean left by a predecessor in the test list of full-dne-part-2 test session.&lt;/p&gt;</comment>
                            <comment id="375796" author="xinliang" created="Mon, 19 Jun 2023 03:38:22 +0000"  >&lt;p&gt;I found that I can hit this issue easily on arm AlmaLinux 8.8 with 4k page size kernel , master v2.15.56.9.&lt;/p&gt;

&lt;p&gt;```&lt;/p&gt;

&lt;p&gt;&lt;span class=&quot;error&quot;&gt;&amp;#91;28814.645272&amp;#93;&lt;/span&gt; Lustre: DEBUG MARKER: == sanity-sec test 26: test transferring very large nodemap ========================================================== 18:02:24 (1687024944)&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.906885&amp;#93;&lt;/span&gt; LustreError: 272997:0:(nodemap_handler.c:1819:nodemap_config_set_active()) ASSERTION( config-&amp;gt;nmc_default_nodemap ) failed:&#160;&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.910595&amp;#93;&lt;/span&gt; LustreError: 272997:0:(nodemap_handler.c:1819:nodemap_config_set_active()) LBUG&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.912690&amp;#93;&lt;/span&gt; Pid: 272997, comm: ll_cfg_requeue 4.18.0-477.10.1.el8_4k.aarch64 #1 SMP Mon Jun 12 10:35:59 UTC 2023&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.915128&amp;#93;&lt;/span&gt; Call Trace:&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.916038&amp;#93;&lt;/span&gt; &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;0&amp;gt;&amp;#93;&lt;/span&gt; libcfs_call_trace+0xac/0x120 &lt;span class=&quot;error&quot;&gt;&amp;#91;libcfs&amp;#93;&lt;/span&gt;&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.917554&amp;#93;&lt;/span&gt; &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;0&amp;gt;&amp;#93;&lt;/span&gt; lbug_with_loc+0x50/0x90 &lt;span class=&quot;error&quot;&gt;&amp;#91;libcfs&amp;#93;&lt;/span&gt;&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.918914&amp;#93;&lt;/span&gt; &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;0&amp;gt;&amp;#93;&lt;/span&gt; nodemap_config_set_active+0x2d0/0x3c0 &lt;span class=&quot;error&quot;&gt;&amp;#91;ptlrpc&amp;#93;&lt;/span&gt;&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.920759&amp;#93;&lt;/span&gt; &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;0&amp;gt;&amp;#93;&lt;/span&gt; nodemap_config_set_active_mgc+0x54/0x360 &lt;span class=&quot;error&quot;&gt;&amp;#91;ptlrpc&amp;#93;&lt;/span&gt;&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.922581&amp;#93;&lt;/span&gt; &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;0&amp;gt;&amp;#93;&lt;/span&gt; mgc_process_nodemap_log+0x758/0xef8 &lt;span class=&quot;error&quot;&gt;&amp;#91;mgc&amp;#93;&lt;/span&gt;&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.924065&amp;#93;&lt;/span&gt; &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;0&amp;gt;&amp;#93;&lt;/span&gt; mgc_process_log+0xabc/0xb90 &lt;span class=&quot;error&quot;&gt;&amp;#91;mgc&amp;#93;&lt;/span&gt;&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.925422&amp;#93;&lt;/span&gt; &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;0&amp;gt;&amp;#93;&lt;/span&gt; mgc_requeue_thread+0x338/0xc38 &lt;span class=&quot;error&quot;&gt;&amp;#91;mgc&amp;#93;&lt;/span&gt;&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.926928&amp;#93;&lt;/span&gt; &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;0&amp;gt;&amp;#93;&lt;/span&gt; kthread+0x150/0x160&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.928030&amp;#93;&lt;/span&gt; &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;0&amp;gt;&amp;#93;&lt;/span&gt; ret_from_fork+0x10/0x18&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.929201&amp;#93;&lt;/span&gt; Kernel panic - not syncing: LBUG&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.930462&amp;#93;&lt;/span&gt; CPU: 0 PID: 272997 Comm: ll_cfg_requeue Tainted: G &#160; &#160; &#160; &#160; &#160; OE &#160; &#160;--------- - &#160;- 4.18.0-477.10.1.el8_4k.aarch64 #1&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.934355&amp;#93;&lt;/span&gt; Hardware name: QEMU KVM Virtual Machine, BIOS 0.0.0 02/06/2015&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.936202&amp;#93;&lt;/span&gt; Call trace:&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.937113&amp;#93;&lt;/span&gt; &#160;dump_backtrace+0x0/0x178&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.938250&amp;#93;&lt;/span&gt; &#160;show_stack+0x28/0x38&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.939301&amp;#93;&lt;/span&gt; &#160;dump_stack+0x5c/0x74&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.940360&amp;#93;&lt;/span&gt; &#160;panic+0x140/0x30c&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.941355&amp;#93;&lt;/span&gt; &#160;param_set_delay_minmax.isra.0+0x0/0xd8 &lt;span class=&quot;error&quot;&gt;&amp;#91;libcfs&amp;#93;&lt;/span&gt;&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.942934&amp;#93;&lt;/span&gt; &#160;nodemap_config_set_active+0x2d0/0x3c0 &lt;span class=&quot;error&quot;&gt;&amp;#91;ptlrpc&amp;#93;&lt;/span&gt;&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.944670&amp;#93;&lt;/span&gt; &#160;nodemap_config_set_active_mgc+0x54/0x360 &lt;span class=&quot;error&quot;&gt;&amp;#91;ptlrpc&amp;#93;&lt;/span&gt;&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.946430&amp;#93;&lt;/span&gt; &#160;mgc_process_nodemap_log+0x758/0xef8 &lt;span class=&quot;error&quot;&gt;&amp;#91;mgc&amp;#93;&lt;/span&gt;&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.947865&amp;#93;&lt;/span&gt; &#160;mgc_process_log+0xabc/0xb90 &lt;span class=&quot;error&quot;&gt;&amp;#91;mgc&amp;#93;&lt;/span&gt;&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.949142&amp;#93;&lt;/span&gt; &#160;mgc_requeue_thread+0x338/0xc38 &lt;span class=&quot;error&quot;&gt;&amp;#91;mgc&amp;#93;&lt;/span&gt;&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.950455&amp;#93;&lt;/span&gt; &#160;kthread+0x150/0x160&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.951454&amp;#93;&lt;/span&gt; &#160;ret_from_fork+0x10/0x18&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.952507&amp;#93;&lt;/span&gt; SMP: stopping secondary CPUs&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.953675&amp;#93;&lt;/span&gt; Kernel Offset: 0x3c8886500000 from 0xffff800008000000&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.955245&amp;#93;&lt;/span&gt; PHYS_OFFSET: 0xffff92a3c0000000&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.956493&amp;#93;&lt;/span&gt; CPU features: 0x00400408,8c804201&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.957918&amp;#93;&lt;/span&gt; Memory Limit: none&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;35582.959017&amp;#93;&lt;/span&gt; &lt;del&gt;{&lt;/del&gt;}{&lt;del&gt;}[ end Kernel panic - not syncing: LBUG ]{&lt;/del&gt;}{&lt;del&gt;}&lt;/del&gt;&lt;/p&gt;

&lt;p&gt;```&lt;/p&gt;

&lt;p&gt;Note that, the test start running time is &lt;span class=&quot;error&quot;&gt;&amp;#91;28814.645272&amp;#93;&lt;/span&gt;s and&#160; crash happen time is &lt;span class=&quot;error&quot;&gt;&amp;#91;35582.906885&amp;#93;&lt;/span&gt;s , it seems stuck for quite a long time 112.8 Mins before crash.&lt;/p&gt;</comment>
                    </comments>
                <issuelinks>
                            <issuelinktype id="10011">
                    <name>Related</name>
                                                                <inwardlinks description="is related to">
                                                        </inwardlinks>
                                    </issuelinktype>
                    </issuelinks>
                <attachments>
                            <attachment id="40849" name="1633704046-sanity-sec-dectet_L300-26__2.zip.zip" size="400748" author="egryaznova" created="Mon, 11 Oct 2021 11:40:46 +0000"/>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|i026tz:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>9223372036854775807</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>