<?xml version="1.0" encoding="UTF-8"?>
<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 01:21:20 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92" >
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-1979] SWL - MDS crash after recovery osd_iam_lfix.c:190:iam_lfix_init()) Wrong magic in node 81689 (#56): 0x0 != 0x1976 or wrong count</title>
                <link>https://jira.whamcloud.com/browse/LU-1979</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;Mds crashes hard, after completing recovery. &lt;/p&gt;

&lt;p&gt;2012-09-19 07:40:19 Lustre: MDS mdd_obd-lustre-MDT0000: lustre-OST000e_UUID now active, resetting orphans&lt;br/&gt;
2012-09-19 07:40:19 Lustre: MDS mdd_obd-lustre-MDT0000: lustre-OST0026_UUID now active, resetting orphans&lt;br/&gt;
2012-09-19 07:40:19 Lustre: Skipped 16 previous similar messages&lt;br/&gt;
2012-09-19 07:40:28 LustreError: 4748:0:(osd_iam_lfix.c:190:iam_lfix_init()) Wrong magic in node 81689 (#56): 0x0 != 0x1976 or wrong count: 0Initializing cgroup subsys cpuset&lt;/p&gt;

&lt;p&gt;Backtrace:&lt;/p&gt;
&lt;div class=&quot;code panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;codeContent panelContent&quot;&gt;
&lt;pre class=&quot;code-java&quot;&gt;
 bt
PID: 4439   TASK: ffff88032a1ee040  CPU: 1   COMMAND: &lt;span class=&quot;code-quote&quot;&gt;&quot;mdt02_000&quot;&lt;/span&gt;
 #0 [ffff8802cf7756f0] machine_kexec at ffffffff8103281b
 #1 [ffff8802cf775750] crash_kexec at ffffffff810ba792
 #2 [ffff8802cf775820] oops_end at ffffffff81501700
 #3 [ffff8802cf775850] no_context at ffffffff81043bab
 #4 [ffff8802cf7758a0] __bad_area_nosemaphore at ffffffff81043e35
 #5 [ffff8802cf7758f0] bad_area_nosemaphore at ffffffff81043f03
 #6 [ffff8802cf775900] __do_page_fault at ffffffff81044661
 #7 [ffff8802cf775a20] do_page_fault at ffffffff815036de
 #8 [ffff8802cf775a50] page_fault at ffffffff81500a95
    [exception RIP: lu_context_key_get+27]
    RIP: ffffffffa072f00b  RSP: ffff8802cf775b00  RFLAGS: 00010246
    RAX: 0000000000000015  RBX: ffff88014362c8c0  RCX: ffffffffa076546f
    RDX: 0000000000000000  RSI: ffffffffa0ee14e0  RDI: ffff880116f9f4c0
    RBP: ffff8802cf775b00   R8: fffffffffffffffe   R9: 0000000000000000
    R10: 0000000000000000  R11: 0000000000000004  R12: ffff8802cf775b60
    R13: ffff880116f9f4c0  R14: ffffffffa076546f  R15: ffff88012f4436f0
    ORIG_RAX: ffffffffffffffff  CS: 0010  SS: 0018
 #9 [ffff8802cf775b08] osd_xattr_get at ffffffffa0ebaf8f [osd_ldiskfs]
#10 [ffff8802cf775b58] dt_version_get at ffffffffa07330d4 [obdclass]
#11 [ffff8802cf775b88] mdt_obj_version_get at ffffffffa0e297cc [mdt]
#12 [ffff8802cf775bb8] mdt_version_get_check_save at ffffffffa0e29d0f [mdt]
#13 [ffff8802cf775be8] mdt_md_create at ffffffffa0e2a03d [mdt]
#14 [ffff8802cf775c68] mdt_reint_create at ffffffffa0e2a6b3 [mdt]
#15 [ffff8802cf775ca8] mdt_reint_rec at ffffffffa0e28151 [mdt]
#16 [ffff8802cf775cc8] mdt_reint_internal at ffffffffa0e219aa [mdt]
#17 [ffff8802cf775d18] mdt_reint at ffffffffa0e21cf4 [mdt]
#18 [ffff8802cf775d38] mdt_handle_common at ffffffffa0e15802 [mdt]
#19 [ffff8802cf775d88] mdt_regular_handle at ffffffffa0e166f5 [mdt]
#20 [ffff8802cf775d98] ptlrpc_server_handle_request at ffffffffa08b199d [ptlrpc]
#21 [ffff8802cf775e98] ptlrpc_main at ffffffffa08b2f89 [ptlrpc]
#22 [ffff8802cf775f48] kernel_thread at ffffffff8100c14a
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;</description>
                <environment>LLNL Hyperion </environment>
        <key id="16040">LU-1979</key>
            <summary>SWL - MDS crash after recovery osd_iam_lfix.c:190:iam_lfix_init()) Wrong magic in node 81689 (#56): 0x0 != 0x1976 or wrong count</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="1" iconUrl="https://jira.whamcloud.com/images/icons/priorities/blocker.svg">Blocker</priority>
                        <status id="5" iconUrl="https://jira.whamcloud.com/images/icons/statuses/resolved.png" description="A resolution has been taken, and it is awaiting verification by reporter. From here issues are either reopened, or are closed.">Resolved</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="3">Duplicate</resolution>
                                        <assignee username="yong.fan">nasf</assignee>
                                    <reporter username="cliffw">Cliff White</reporter>
                        <labels>
                    </labels>
                <created>Wed, 19 Sep 2012 10:56:58 +0000</created>
                <updated>Thu, 20 Sep 2012 23:01:17 +0000</updated>
                            <resolved>Thu, 20 Sep 2012 23:01:17 +0000</resolved>
                                    <version>Lustre 2.3.0</version>
                                                        <due></due>
                            <votes>0</votes>
                                    <watches>2</watches>
                                                                            <comments>
                            <comment id="45226" author="pjones" created="Wed, 19 Sep 2012 11:13:24 +0000"  >&lt;p&gt;Fanyong&lt;/p&gt;

&lt;p&gt;Could you please comment on this one too?&lt;/p&gt;

&lt;p&gt;Peter&lt;/p&gt;</comment>
                            <comment id="45227" author="cliffw" created="Wed, 19 Sep 2012 11:19:35 +0000"  >&lt;p&gt;The MDS is now in a state where all it does is crash, every time recovery completes. &lt;br/&gt;
Latest:&lt;/p&gt;
&lt;div class=&quot;code panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;codeContent panelContent&quot;&gt;
&lt;pre class=&quot;code-java&quot;&gt;
2012-09-19 08:14:08 BUG: unable to handle kernel paging request at 00000000cee88efc
2012-09-19 08:14:08 IP: [&amp;lt;ffffffffa086168e&amp;gt;] _ldlm_lock_debug+0x7e/0x5d0 [ptlrpc]
2012-09-19 08:14:08 PGD 0
2012-09-19 08:14:08 Oops: 0000 [#1]
2012-09-19 08:14:08 LustreError: 4138:0:(llog_lvfs.c:430:llog_lvfs_next_block()) Cant read llog block at log id 7340386/1716600893 offset 2048000
2012-09-19 08:14:08 LustreError: 4551:0:(llog_lvfs.c:430:llog_lvfs_next_block()) Cant read llog block at log id 7340336/1716602012 offset 2056192
2012-09-19 08:14:08 SMP
2012-09-19 08:14:08 last sysfs file: /sys/devices/system/cpu/cpu15/cache/index2/shared_cpu_map
2012-09-19 08:14:08 CPU 3
2012-09-19 08:14:08 Modules linked in: cmm(U) osd_ldiskfs(U) mdt(U) mdd(U) mds(U) fsfilt_ldiskfs(U) exportfs mgs(U) mgc(U) ldiskfs(U) mbcache jbd2 lustre(U) lquota(U) lov(U) osc(U) mdc(U) fid(U) fld(U) ptlrpc(U) obdclass(U) lvfs(U) zfs(P)(U) zcommon(P)(U) znvpair(P)(U) zavl(P)(U) zunicode(P)(U) spl(U) zlib_deflate ko2iblnd(U) lnet(U) sha512_generic sha256_generic libcfs(U) cpufreq_ondemand acpi_cpufreq freq_table mperf ib_ipoib rdma_ucm ib_ucm ib_uverbs ib_umad rdma_cm ib_cm iw_cm ib_addr ib_sa mlx4_ib ib_mad ib_core dm_mirror dm_region_hash dm_log dm_mod vhost_net macvtap macvlan tun kvm raid0 sg sr_mod cdrom sd_mod crc_t10dif dcdbas serio_raw ata_generic pata_acpi ata_piix iTCO_wdt iTCO_vendor_support mptsas mptscsih mptbase scsi_transport_sas i7core_edac edac_core ipv6 nfs lockd fscache nfs_acl auth_rpcgss sunrpc mlx4_en mlx4_core bnx2 [last unloaded: scsi_wait_scan]
2012-09-19 08:14:08
2012-09-19 08:14:08 Pid: 4610, comm: mdt02_029 Tainted: P           ---------------    2.6.32-279.5.1.el6_lustre.gb4cc145.x86_64 #1 Dell Inc. PowerEdge R610/0K399H
2012-09-19 08:14:08 RIP: 0010:[&amp;lt;ffffffffa086168e&amp;gt;]  [&amp;lt;ffffffffa086168e&amp;gt;] _ldlm_lock_debug+0x7e/0x5d0 [ptlrpc]
2012-09-19 08:14:08 RSP: 0018:ffff8801613b3620  EFLAGS: 00010202
2012-09-19 08:14:08 RAX: ffffffffa09071d1 RBX: ffff88015f442d40 RCX: 0000000000000000
2012-09-19 08:14:08 RDX: ffffffffa09075ef RSI: ffffffffa092b4e0 RDI: ffff88015f442d40
2012-09-19 08:14:08 RBP: ffff8801613b3740 R08: 00000000fffffffb R09: 00000000fffffffe
2012-09-19 08:14:08 R10: 0000000000000000 R11: 0000000000000004 R12: ffffffffa09075ef
2012-09-19 08:14:08 R13: ffffffffa092b4e0 R14: 00000000cee88ea8 R15: 0000000000000000
2012-09-19 08:14:08 FS:  00002aaaab47e700(0000) GS:ffff880028220000(0000) knlGS:0000000000000000
2012-09-19 08:14:08 CS:  0010 DS: 0000 ES: 0000 CR0: 000000008005003b
2012-09-19 08:14:08 CR2: 00000000cee88efc CR3: 000000032ab2f000 CR4: 00000000000006e0
2012-09-19 08:14:08 DR0: 0000000000000000 DR1: 0000000000000000 DR2: 0000000000000000
2012-09-19 08:14:08 DR3: 0000000000000000 DR6: 00000000ffff0ff0 DR7: 0000000000000400
2012-09-19 08:14:08 &lt;span class=&quot;code-object&quot;&gt;Process&lt;/span&gt; mdt02_029 (pid: 4610, threadinfo ffff8801613b2000, task ffff88013a370ae0)
2012-09-19 08:14:08 Stack:
2012-09-19 08:14:08  24f793cecee88ecb 0000000000000002 0000120200000001 0000052a00000000
2012-09-19 08:14:08 &amp;lt;d&amp;gt; ffffffffa09075c9 ffffffffa09075c9 000000020000346e 0000000000002bf9
2012-09-19 08:14:08 &amp;lt;d&amp;gt; 0000000000000002 ffff880100000038 ffffffffa09075eb 0000000004004000
2012-09-19 08:14:08 Call Trace:

2012-09-19 08:14:08  [&amp;lt;ffffffffa03a85b1&amp;gt;] ? libcfs_debug_msg+0x41/0x50 [libcfs]
2012-09-19 08:14:08  [&amp;lt;ffffffffa086979c&amp;gt;] ldlm_resource_dump+0x12c/0x480 [ptlrpc]
2012-09-19 08:14:08  [&amp;lt;ffffffffa0861c3f&amp;gt;] ldlm_granted_list_add_lock+0x5f/0x360 [ptlrpc]
2012-09-19 08:14:08  [&amp;lt;ffffffffa08651bc&amp;gt;] ldlm_grant_lock+0x38c/0x6f0 [ptlrpc]
2012-09-19 08:14:08  [&amp;lt;ffffffffa08927c2&amp;gt;] ldlm_process_inodebits_lock+0x212/0x400 [ptlrpc]
2012-09-19 08:14:08  [&amp;lt;ffffffffa0865925&amp;gt;] ldlm_lock_enqueue+0x405/0x8f0 [ptlrpc]
2012-09-19 08:14:08  [&amp;lt;ffffffffa08848c9&amp;gt;] ldlm_cli_enqueue_local+0x179/0x560 [ptlrpc]
2012-09-19 08:14:08  [&amp;lt;ffffffffa0884cb0&amp;gt;] ? ldlm_completion_ast+0x0/0x730 [ptlrpc]
2012-09-19 08:14:08  [&amp;lt;ffffffffa0f65ab0&amp;gt;] ? mdt_blocking_ast+0x0/0x2a0 [mdt]
2012-09-19 08:14:08  [&amp;lt;ffffffffa0f687c0&amp;gt;] mdt_object_lock+0x320/0xb70 [mdt]
2012-09-19 08:14:08  [&amp;lt;ffffffffa0f65ab0&amp;gt;] ? mdt_blocking_ast+0x0/0x2a0 [mdt]
2012-09-19 08:14:08  [&amp;lt;ffffffffa0884cb0&amp;gt;] ? ldlm_completion_ast+0x0/0x730 [ptlrpc]
2012-09-19 08:14:08  [&amp;lt;ffffffffa0f69071&amp;gt;] mdt_object_find_lock+0x61/0x170 [mdt]
2012-09-19 08:14:08  [&amp;lt;ffffffffa0f96fa9&amp;gt;] mdt_reint_open+0x499/0x18a0 [mdt]
2012-09-19 08:14:08  [&amp;lt;ffffffffa03a85b1&amp;gt;] ? libcfs_debug_msg+0x41/0x50 [libcfs]
2012-09-19 08:14:08  [&amp;lt;ffffffffa0f81151&amp;gt;] mdt_reint_rec+0x41/0xe0 [mdt]
2012-09-19 08:14:08  [&amp;lt;ffffffffa0f7a9aa&amp;gt;] mdt_reint_internal+0x50a/0x810 [mdt]
2012-09-19 08:14:08  [&amp;lt;ffffffffa0f7af7d&amp;gt;] mdt_intent_reint+0x1ed/0x500 [mdt]
2012-09-19 08:14:08  [&amp;lt;ffffffffa0f77191&amp;gt;] mdt_intent_policy+0x371/0x6a0 [mdt]
2012-09-19 08:14:08  [&amp;lt;ffffffffa0865881&amp;gt;] ldlm_lock_enqueue+0x361/0x8f0 [ptlrpc]
2012-09-19 08:14:08  [&amp;lt;ffffffffa088d9bf&amp;gt;] ldlm_handle_enqueue0+0x48f/0xf70 [ptlrpc]
2012-09-19 08:14:08  [&amp;lt;ffffffffa0f77506&amp;gt;] mdt_enqueue+0x46/0x130 [mdt]
2012-09-19 08:14:08  [&amp;lt;ffffffffa0f6e802&amp;gt;] mdt_handle_common+0x922/0x1740 [mdt]
2012-09-19 08:14:08  [&amp;lt;ffffffffa0f6f6f5&amp;gt;] mdt_regular_handle+0x15/0x20 [mdt]
2012-09-19 08:14:09  [&amp;lt;ffffffffa08bd99d&amp;gt;] ptlrpc_server_handle_request+0x40d/0xea0 [ptlrpc]
2012-09-19 08:14:09  [&amp;lt;ffffffffa08b4f37&amp;gt;] ? ptlrpc_wait_event+0xa7/0x2a0 [ptlrpc]
2012-09-19 08:14:09  [&amp;lt;ffffffffa03a85b1&amp;gt;] ? libcfs_debug_msg+0x41/0x50 [libcfs]
2012-09-19 08:14:09  [&amp;lt;ffffffff810533f3&amp;gt;] ? __wake_up+0x53/0x70
2012-09-19 08:14:09  [&amp;lt;ffffffffa08bef89&amp;gt;] ptlrpc_main+0xb59/0x1860 [ptlrpc]
2012-09-19 08:14:09  [&amp;lt;ffffffffa08be430&amp;gt;] ? ptlrpc_main+0x0/0x1860 [ptlrpc]
2012-09-19 08:14:09  [&amp;lt;ffffffff8100c14a&amp;gt;] child_rip+0xa/0x20
2012-09-19 08:14:09  [&amp;lt;ffffffffa08be430&amp;gt;] ? ptlrpc_main+0x0/0x1860 [ptlrpc]
2012-09-19 08:14:09  [&amp;lt;ffffffffa08be430&amp;gt;] ? ptlrpc_main+0x0/0x1860 [ptlrpc]
2012-09-19 08:14:09  [&amp;lt;ffffffff8100c140&amp;gt;] ? child_rip+0x0/0x20
2012-09-19 08:14:09 Code: 48 c7 c0 d1 71 90 a0 74 19 49 8b 87 d8 00 00 00 48 85 c0 0f 84 34 02 00 00 48 8b 78 18 e8 5b 2b b4 ff 4d 85 f6 0f 84 a2 04 00 00 &amp;lt;41&amp;gt; 8b 76 54 83 fe 0c 0f 84 45 02 00 00 83 fe 0d 0f 84 fc 00 00
2012-09-19 08:14:09 RIP  [&amp;lt;ffffffffa086168e&amp;gt;] _ldlm_lock_debug+0x7e/0x5d0 [ptlrpc]
2012-09-19 08:14:09  RSP &amp;lt;ffff8801613b3620&amp;gt;
2012-09-19 08:14:09 CR2: 00000000cee88efc
2012-09-19 08:14:09 Initializing cgroup subsys cpuset
2012-09-19 08:14:09 Initializing cgroup subsys cpu
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;</comment>
                            <comment id="45228" author="cliffw" created="Wed, 19 Sep 2012 12:33:09 +0000"  >&lt;p&gt;The system now appears to be in a state where it cannot complete recovery.&lt;br/&gt;
Latest crash:&lt;/p&gt;
&lt;div class=&quot;code panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;codeContent panelContent&quot;&gt;
&lt;pre class=&quot;code-java&quot;&gt;
crash&amp;gt; bt
PID: 4610   TASK: ffff88013a370ae0  CPU: 3   COMMAND: &lt;span class=&quot;code-quote&quot;&gt;&quot;mdt02_029&quot;&lt;/span&gt;
 #0 [ffff8801613b3210] machine_kexec at ffffffff8103281b
 #1 [ffff8801613b3270] crash_kexec at ffffffff810ba792
 #2 [ffff8801613b3340] oops_end at ffffffff81501700
 #3 [ffff8801613b3370] no_context at ffffffff81043bab
 #4 [ffff8801613b33c0] __bad_area_nosemaphore at ffffffff81043e35
 #5 [ffff8801613b3410] bad_area_nosemaphore at ffffffff81043f03
 #6 [ffff8801613b3420] __do_page_fault at ffffffff81044661
 #7 [ffff8801613b3540] do_page_fault at ffffffff815036de
 #8 [ffff8801613b3570] page_fault at ffffffff81500a95
    [exception RIP: _ldlm_lock_debug+126]
    RIP: ffffffffa086168e  RSP: ffff8801613b3620  RFLAGS: 00010202
    RAX: ffffffffa09071d1  RBX: ffff88015f442d40  RCX: 0000000000000000
    RDX: ffffffffa09075ef  RSI: ffffffffa092b4e0  RDI: ffff88015f442d40
    RBP: ffff8801613b3740   R8: 00000000fffffffb   R9: 00000000fffffffe
    R10: 0000000000000000  R11: 0000000000000004  R12: ffffffffa09075ef
    R13: ffffffffa092b4e0  R14: 00000000cee88ea8  R15: 0000000000000000
    ORIG_RAX: ffffffffffffffff  CS: 0010  SS: 0018
 #9 [ffff8801613b3748] ldlm_resource_dump at ffffffffa086979c [ptlrpc]
#10 [ffff8801613b37a8] ldlm_granted_list_add_lock at ffffffffa0861c3f [ptlrpc]
#11 [ffff8801613b37d8] ldlm_grant_lock at ffffffffa08651bc [ptlrpc]
#12 [ffff8801613b3838] ldlm_process_inodebits_lock at ffffffffa08927c2 [ptlrpc]
#13 [ffff8801613b38b8] ldlm_lock_enqueue at ffffffffa0865925 [ptlrpc]
#14 [ffff8801613b3918] ldlm_cli_enqueue_local at ffffffffa08848c9 [ptlrpc]
#15 [ffff8801613b39a8] mdt_object_lock at ffffffffa0f687c0 [mdt]
#16 [ffff8801613b3a48] mdt_object_find_lock at ffffffffa0f69071 [mdt]
#17 [ffff8801613b3a78] mdt_reint_open at ffffffffa0f96fa9 [mdt]
#18 [ffff8801613b3b48] mdt_reint_rec at ffffffffa0f81151 [mdt]
#19 [ffff8801613b3b68] mdt_reint_internal at ffffffffa0f7a9aa [mdt]
#20 [ffff8801613b3bb8] mdt_intent_reint at ffffffffa0f7af7d [mdt]
#21 [ffff8801613b3c08] mdt_intent_policy at ffffffffa0f77191 [mdt]
#22 [ffff8801613b3c48] ldlm_lock_enqueue at ffffffffa0865881 [ptlrpc]
#23 [ffff8801613b3ca8] ldlm_handle_enqueue0 at ffffffffa088d9bf [ptlrpc]
#24 [ffff8801613b3d18] mdt_enqueue at ffffffffa0f77506 [mdt]
#25 [ffff8801613b3d38] mdt_handle_common at ffffffffa0f6e802 [mdt]
#26 [ffff8801613b3d88] mdt_regular_handle at ffffffffa0f6f6f5 [mdt]
#27 [ffff8801613b3d98] ptlrpc_server_handle_request at ffffffffa08bd99d [ptlrpc]
#28 [ffff8801613b3e98] ptlrpc_main at ffffffffa08bef89 [ptlrpc]
#29 [ffff8801613b3f48] kernel_thread at ffffffff8100c14a
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;</comment>
                            <comment id="45313" author="yong.fan" created="Thu, 20 Sep 2012 23:01:17 +0000"  >&lt;p&gt;It is duplication of &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-1976&quot; title=&quot;SWL - mds hard crash &quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-1976&quot;&gt;&lt;del&gt;LU-1976&lt;/del&gt;&lt;/a&gt;&lt;/p&gt;</comment>
                    </comments>
                    <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|hzvgh3:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>6318</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>