<?xml version="1.0" encoding="UTF-8"?>
<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 02:10:27 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92">
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-7617] replay-single test_70b hangs ASSERTION( lo_header-&gt;loh_attr &amp; LOHA_EXISTS ) in rename</title>
                <link>https://jira.whamcloud.com/browse/LU-7617</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;replay-single test_70b hangs with the following error from the test_log&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;shadow-23vm1:  rpc : @@@@@@ FAIL: can&apos;t put import for mdc.lustre-MDT0001-mdc-*.mds_server_uuid into FULL state after 1475 sec, have REPLAY 
shadow-23vm1:   Trace dump:
shadow-23vm1:   = /usr/lib64/lustre/tests/test-framework.sh:4767:error_noexit()
shadow-23vm1:   = /usr/lib64/lustre/tests/test-framework.sh:4798:error()
shadow-23vm1:   = /usr/lib64/lustre/tests/test-framework.sh:5860:_wait_import_state()
shadow-23vm1:   = /usr/lib64/lustre/tests/test-framework.sh:5882:wait_import_state()
shadow-23vm1:   = /usr/lib64/lustre/tests/test-framework.sh:5891:wait_import_state_mount()
shadow-23vm1:   = rpc.sh:20:main()
shadow-23vm1: CMD: shadow-23vm1.shadow.whamcloud.com,shadow-23vm3,shadow-23vm4,shadow-23vm5,shadow-23vm8 PATH=/usr/lib64/lustre/tests:/usr/lib/lustre/tests:/usr/lib64/lustre/tests:/opt/iozone/bin:./../utils:/usr/lib64/lustre/tests:/usr/lib/lustre/tests:/usr/lib64/lustre/tests:/opt/iozone/bin:/opt/iozone/bin:/usr/lib64/lustre/tests/mpi:/usr/lib64/lustre/tests/racer:/usr/lib64/lustre/../lustre-iokit/sgpdd-survey:/usr/lib64/lustre/tests:/usr/lib64/lustre/utils/gss:/usr/lib64/lustre/utils:/usr/lib64/qt-3.3/bin:/usr/lib64/openmpi/bin:/usr/bin:/bin:/usr/sbin:/sbin::/sbin:/bin:/usr/sbin::/sbin:/bin:/usr/sbin: NAME=autotest_config sh rpc.sh check_logdir /tmp/test_logs/1451294686 
shadow-23vm5:  rpc : @@@@@@ FAIL: can&apos;t put import for mdc.lustre-MDT0001-mdc-*.mds_server_uuid into FULL state after 1475 sec, have REPLAY 
shadow-23vm5:   Trace dump:
shadow-23vm5:   = /usr/lib64/lustre/tests/test-framework.sh:4767:error_noexit()
shadow-23vm5:   = /usr/lib64/lustre/tests/test-framework.sh:4798:error()
shadow-23vm5:   = /usr/lib64/lustre/tests/test-framework.sh:5860:_wait_import_state()
shadow-23vm5:   = /usr/lib64/lustre/tests/test-framework.sh:5882:wait_import_state()
shadow-23vm5:   = /usr/lib64/lustre/tests/test-framework.sh:5891:wait_import_state_mount()
shadow-23vm5:   = rpc.sh:20:main()
shadow-23vm5: CMD: shadow-23vm3,shadow-23vm4,shadow-23vm5,shadow-23vm5.shadow.whamcloud.com,shadow-23vm8 PATH=/usr/lib64/lustre/tests:/usr/lib/lustre/tests:/usr/lib64/lustre/tests:/opt/iozone/bin:./../utils:/usr/lib64/lustre/tests:/usr/lib/lustre/tests:/usr/lib64/lustre/tests:/opt/iozone/bin:/opt/iozone/bin:/usr/lib64/lustre/tests/mpi:/usr/lib64/lustre/tests/racer:/usr/lib64/lustre/../lustre-iokit/sgpdd-survey:/usr/lib64/lustre/tests:/usr/lib64/lustre/utils/gss:/usr/lib64/lustre/utils:/usr/lib64/qt-3.3/bin:/usr/lib64/openmpi/bin:/usr/bin:/bin:/usr/sbin:/sbin::/sbin:/bin:/usr/sbin::/sbin:/bin:/usr/sbin: NAME=autotest_config sh rpc.sh check_logdir /tmp/test_logs/1451294686 
shadow-23vm1: CMD: shadow-23vm1.shadow.whamcloud.com uname -n
shadow-23vm1: CMD: shadow-23vm3 uname -n
shadow-23vm1: Dumping lctl log to /tmp/test_logs/1451294686/rpc..*.1451296167.log
shadow-23vm1: CMD: shadow-23vm1.shadow.whamcloud.com,shadow-23vm3,shadow-23vm4,shadow-23vm5,shadow-23vm8 /usr/sbin/lctl dk &amp;gt; /tmp/test_logs/1451294686/rpc..debug_log.\$(hostname -s).1451296167.log;
shadow-23vm1:          dmesg &amp;gt; /tmp/test_logs/1451294686/rpc..dmesg.\$(hostname -s).1451296167.log
shadow-23vm1: shadow-23vm4: open /proc/sys/lnet/dump_kernel failed: No such file or directory
shadow-23vm1: shadow-23vm4: open(dump_kernel) failed: No such file or directory
shadow-23vm5: CMD: shadow-23vm3 uname -n
shadow-23vm5: Dumping lctl log to /tmp/test_logs/1451294686/rpc..*.1451296168.log
shadow-23vm5: CMD: shadow-23vm3,shadow-23vm4,shadow-23vm5,shadow-23vm5.shadow.whamcloud.com,shadow-23vm8 /usr/sbin/lctl dk &amp;gt; /tmp/test_logs/1451294686/rpc..debug_log.\$(hostname -s).1451296168.log;
shadow-23vm5:          dmesg &amp;gt; /tmp/test_logs/1451294686/rpc..dmesg.\$(hostname -s).1451296168.log
shadow-23vm1: CMD: shadow-23vm1.shadow.whamcloud.com,shadow-23vm3,shadow-23vm4,shadow-23vm5,shadow-23vm8 rsync -az /tmp/test_logs/1451294686/rpc..*.1451296167.log shadow-23vm1.shadow.whamcloud.com:/tmp/test_logs/1451294686
shadow-23vm5: shadow-23vm4: open /proc/sys/lnet/dump_kernel failed: No such file or directory
shadow-23vm5: shadow-23vm4: open(dump_kernel) failed: No such file or directory
shadow-23vm5: CMD: shadow-23vm3,shadow-23vm4,shadow-23vm5,shadow-23vm5.shadow.whamcloud.com,shadow-23vm8 rsync -az /tmp/test_logs/1451294686/rpc..*.1451296168.log shadow-23vm5.shadow.whamcloud.com:/tmp/test_logs/1451294686
replay-single test_70b: @@@@@@ FAIL: import is not in FULL state 
  Trace dump:
  = /usr/lib64/lustre/tests/test-framework.sh:4767:error_noexit()
  = /usr/lib64/lustre/tests/test-framework.sh:4798:error()
  = /usr/lib64/lustre/tests/test-framework.sh:6114:wait_clients_import_state()
  = /usr/lib64/lustre/tests/test-framework.sh:2621:fail()
  = /usr/lib64/lustre/tests/replay-single.sh:2088:test_70b()
  = /usr/lib64/lustre/tests/test-framework.sh:5045:run_one()
  = /usr/lib64/lustre/tests/test-framework.sh:5082:run_one_logged()
  = /usr/lib64/lustre/tests/test-framework.sh:4947:run_test()
  = /usr/lib64/lustre/tests/replay-single.sh:2099:main()
Dumping lctl log to /logdir/test_logs/2015-12-28/lustre-reviews-el6_7-x86_64--review-dne-part-2--1_7_1__36516__-70048159267740-004235/replay-single.test_70b.*.1451296171.log
CMD: shadow-23vm1.shadow.whamcloud.com,shadow-23vm3,shadow-23vm4,shadow-23vm5,shadow-23vm8 /usr/sbin/lctl dk &amp;gt; /logdir/test_logs/2015-12-28/lustre-reviews-el6_7-x86_64--review-dne-part-2--1_7_1__36516__-70048159267740-004235/replay-single.test_70b.debug_log.\$(hostname -s).1451296171.log;
         dmesg &amp;gt; /logdir/test_logs/2015-12-28/lustre-reviews-el6_7-x86_64--review-dne-part-2--1_7_1__36516__-70048159267740-004235/replay-single.test_70b.dmesg.\$(hostname -s).1451296171.log
shadow-23vm4: open /proc/sys/lnet/dump_kernel failed: No such file or directory
shadow-23vm4: open(dump_kernel) failed: No such file or directory
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;


&lt;p&gt;From the console on MDS0, we see the following stack trace:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;02:24:52:LustreError: 8467:0:(lu_object.h:862:lu_object_attr()) ASSERTION( ((o)-&amp;gt;lo_header-&amp;gt;loh_attr &amp;amp; LOHA_EXISTS) != 0 ) failed: 
02:24:52:LustreError: 8467:0:(lu_object.h:862:lu_object_attr()) LBUG
02:24:52:Pid: 8467, comm: mdt00_000
02:24:52:
02:24:52:Call Trace:
02:24:52: [&amp;lt;ffffffffa0467875&amp;gt;] libcfs_debug_dumpstack+0x55/0x80 [libcfs]
02:24:52: [&amp;lt;ffffffffa0467e77&amp;gt;] lbug_with_loc+0x47/0xb0 [libcfs]
02:24:52: [&amp;lt;ffffffffa0fb3ed3&amp;gt;] mdd_rename+0x1713/0x1dc0 [mdd]
02:24:52: [&amp;lt;ffffffffa0fa2ecc&amp;gt;] ? __mdd_lookup+0x28c/0x450 [mdd]
02:24:52: [&amp;lt;ffffffffa0e66c45&amp;gt;] mdt_reint_rename_internal+0x1305/0x1a60 [mdt]
02:24:52: [&amp;lt;ffffffffa075da76&amp;gt;] ? ldlm_lock_enqueue+0x2d6/0x970 [ptlrpc]
02:24:52: [&amp;lt;ffffffffa0e6759d&amp;gt;] mdt_reint_rename_or_migrate+0x1fd/0x7e0 [mdt]
02:24:52: [&amp;lt;ffffffffa077d7a0&amp;gt;] ? ldlm_blocking_ast+0x0/0x180 [ptlrpc]
02:24:52: [&amp;lt;ffffffffa077f110&amp;gt;] ? ldlm_completion_ast+0x0/0x9b0 [ptlrpc]
02:24:52: [&amp;lt;ffffffffa0e5bdaf&amp;gt;] ? ucred_set_jobid+0x5f/0x70 [mdt]
02:24:52: [&amp;lt;ffffffffa0e67bb3&amp;gt;] mdt_reint_rename+0x13/0x20 [mdt]
02:24:52: [&amp;lt;ffffffffa0e5fa5d&amp;gt;] mdt_reint_rec+0x5d/0x200 [mdt]
02:24:52: [&amp;lt;ffffffffa0e4b84b&amp;gt;] mdt_reint_internal+0x62b/0xb80 [mdt]
02:24:52: [&amp;lt;ffffffffa0e4c23b&amp;gt;] mdt_reint+0x6b/0x120 [mdt]
02:24:52: [&amp;lt;ffffffffa08174cc&amp;gt;] tgt_request_handle+0x8ec/0x1470 [ptlrpc]
02:24:52: [&amp;lt;ffffffffa07beb41&amp;gt;] ptlrpc_main+0xe41/0x1910 [ptlrpc]
02:24:52: [&amp;lt;ffffffffa07bdd00&amp;gt;] ? ptlrpc_main+0x0/0x1910 [ptlrpc]
02:24:52: [&amp;lt;ffffffff810a0fce&amp;gt;] kthread+0x9e/0xc0
02:24:52: [&amp;lt;ffffffff8100c28a&amp;gt;] child_rip+0xa/0x20
02:24:52: [&amp;lt;ffffffff810a0f30&amp;gt;] ? kthread+0x0/0xc0
02:24:52: [&amp;lt;ffffffff8100c280&amp;gt;] ? child_rip+0x0/0x20
02:24:52:
02:24:52:Kernel panic - not syncing: LBUG
02:24:52:Pid: 8467, comm: mdt00_000 Not tainted 2.6.32-573.8.1.el6_lustre.gbd3d354.x86_64 #1
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;We&#8217;ve seen this failure about eight times since early October. Logs for this failure are at:&lt;br/&gt;
2015-10-23 18:45:53 - &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/f4909c1c-7a10-11e5-b618-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/f4909c1c-7a10-11e5-b618-5254006e85c2&lt;/a&gt;&lt;br/&gt;
2015-10-24 09:06:03 - &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/2e3667b2-7a75-11e5-b618-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/2e3667b2-7a75-11e5-b618-5254006e85c2&lt;/a&gt;&lt;br/&gt;
2015-11-27 08:05:42 - &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/77835c1e-9522-11e5-bdeb-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/77835c1e-9522-11e5-bdeb-5254006e85c2&lt;/a&gt;&lt;br/&gt;
2015-11-29 04:56:08 - &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/139e4958-96a2-11e5-aba1-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/139e4958-96a2-11e5-aba1-5254006e85c2&lt;/a&gt;&lt;br/&gt;
2015-12-10 13:56:36 - &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/ccf059d4-9f7e-11e5-9f74-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/ccf059d4-9f7e-11e5-9f74-5254006e85c2&lt;/a&gt;&lt;br/&gt;
2015-12-14 03:15:43 - &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/0dd3b23a-a258-11e5-afd0-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/0dd3b23a-a258-11e5-afd0-5254006e85c2&lt;/a&gt;&lt;br/&gt;
2015-12-20 06:59:17 - &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/5517883e-a72f-11e5-a6c1-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/5517883e-a72f-11e5-a6c1-5254006e85c2&lt;/a&gt; &lt;br/&gt;
2015-12-28 08:43:43 - &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/21213400-ad81-11e5-bf32-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/21213400-ad81-11e5-bf32-5254006e85c2&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;There are several tickets open for replay-single test 70b failing with the same assertion, but the stack trace for those include mdd_migrate and were seen running racer. Two examples are &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-5163&quot; title=&quot;(lu_object.h:852:lu_object_attr()) ASSERTION( ((o)-&amp;gt;lo_header-&amp;gt;loh_attr &amp;amp; LOHA_EXISTS) != 0 ) failed&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-5163&quot;&gt;&lt;del&gt;LU-5163&lt;/del&gt;&lt;/a&gt; and &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-6963&quot; title=&quot;racer test_1: lu_object_attr() ASSERTION(lo_header-&amp;gt;loh_attr &amp;amp; LOHA_EXISTS) failed in rename&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-6963&quot;&gt;LU-6963&lt;/a&gt;.&lt;/p&gt;</description>
                <environment>autotest review-dne-part-2</environment>
        <key id="33882">LU-7617</key>
            <summary>replay-single test_70b hangs ASSERTION( lo_header-&gt;loh_attr &amp; LOHA_EXISTS ) in rename</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="4" iconUrl="https://jira.whamcloud.com/images/icons/priorities/minor.svg">Minor</priority>
                        <status id="5" iconUrl="https://jira.whamcloud.com/images/icons/statuses/resolved.png" description="A resolution has been taken, and it is awaiting verification by reporter. From here issues are either reopened, or are closed.">Resolved</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="5">Cannot Reproduce</resolution>
                                        <assignee username="wc-triage">WC Triage</assignee>
                                    <reporter username="jamesanunez">James Nunez</reporter>
                        <labels>
                    </labels>
                <created>Tue, 29 Dec 2015 19:15:41 +0000</created>
                <updated>Wed, 22 Dec 2021 20:58:56 +0000</updated>
                            <resolved>Wed, 22 Dec 2021 20:58:56 +0000</resolved>
                                    <version>Lustre 2.8.0</version>
                    <version>Lustre 2.9.0</version>
                                                        <due></due>
                            <votes>0</votes>
                                    <watches>2</watches>
                                                                            <comments>
                            <comment id="137731" author="jamesanunez" created="Thu, 31 Dec 2015 15:32:51 +0000"  >&lt;p&gt;Another failure on master:&lt;br/&gt;
2015-12-30 04:13:47 - &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/dc7e85aa-aeed-11e5-9134-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/dc7e85aa-aeed-11e5-9134-5254006e85c2&lt;/a&gt;&lt;br/&gt;
2016-01-25 21:46:23 - &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/96d1b54c-c408-11e5-8866-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/96d1b54c-c408-11e5-8866-5254006e85c2&lt;/a&gt;&lt;br/&gt;
2016-02-02 10:14:26 - &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/380e4988-c9d7-11e5-9e6a-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/380e4988-c9d7-11e5-9e6a-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="137832" author="adilger" created="Mon, 4 Jan 2016 18:48:07 +0000"  >&lt;p&gt;There are a number of different failures for replay-single 70b, which may be related if there is some kind of random memory corruption during this test - &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-7309&quot; title=&quot;replay-single test_70b: no space left on device&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-7309&quot;&gt;&lt;del&gt;LU-7309&lt;/del&gt;&lt;/a&gt;, &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-7298&quot; title=&quot;replay-single test_70b: ASSERTION( dt-&amp;gt;do_body_ops-&amp;gt;dbo_write ) failed
&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-7298&quot;&gt;&lt;del&gt;LU-7298&lt;/del&gt;&lt;/a&gt;, &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-7265&quot; title=&quot;replay-single test_70b timeout: NULL pointer dereference in __mutex_lock_slowpath &quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-7265&quot;&gt;&lt;del&gt;LU-7265&lt;/del&gt;&lt;/a&gt;, &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-6935&quot; title=&quot;replay-single test_70b FAIL: import is not in FULL state&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-6935&quot;&gt;&lt;del&gt;LU-6935&lt;/del&gt;&lt;/a&gt;, &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-6919&quot; title=&quot;replay-single test_70b: &amp;quot;Cannot send after transport endpoint shutdown&amp;quot; running dbench&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-6919&quot;&gt;&lt;del&gt;LU-6919&lt;/del&gt;&lt;/a&gt;, &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-6844&quot; title=&quot;replay-single test 70b failure: &amp;#39;rundbench load on * failed!&amp;#39;&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-6844&quot;&gt;&lt;del&gt;LU-6844&lt;/del&gt;&lt;/a&gt;&lt;/p&gt;</comment>
                    </comments>
                <issuelinks>
                            <issuelinktype id="10011">
                    <name>Related</name>
                                            <outwardlinks description="is related to ">
                                        <issuelink>
            <issuekey id="31373">LU-6963</issuekey>
        </issuelink>
                            </outwardlinks>
                                                                <inwardlinks description="is related to">
                                        <issuelink>
            <issuekey id="32077">LU-7145</issuekey>
        </issuelink>
            <issuelink>
            <issuekey id="31074">LU-6844</issuekey>
        </issuelink>
            <issuelink>
            <issuekey id="36029">LU-8004</issuekey>
        </issuelink>
                            </inwardlinks>
                                    </issuelinktype>
                    </issuelinks>
                <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|hzxwzj:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>9223372036854775807</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>