<?xml version="1.0" encoding="UTF-8"?>
<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 01:07:40 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92">
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
<language>en-us</language>
    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-503] replay-single test_70b: FAIL: post-failover df: 1</title>
                <link>https://jira.whamcloud.com/browse/LU-503</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;This issue was created by maloo for sarah &amp;lt;sarah@whamcloud.com&amp;gt;&lt;/p&gt;

&lt;p&gt;This issue relates to the following test suite run: &lt;a href=&quot;https://maloo.whamcloud.com/test_sets/a7e1f190-ada0-11e0-b33f-52540025f9af&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/a7e1f190-ada0-11e0-b33f-52540025f9af&lt;/a&gt;.&lt;/p&gt;

&lt;p&gt;Unfortunately I cannot reproduce it to fetch more logs&lt;/p&gt;</description>
                <environment></environment>
        <key id="11340">LU-503</key>
            <summary>replay-single test_70b: FAIL: post-failover df: 1</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="1" iconUrl="https://jira.whamcloud.com/images/icons/priorities/blocker.svg">Blocker</priority>
                        <status id="5" iconUrl="https://jira.whamcloud.com/images/icons/statuses/resolved.png" description="A resolution has been taken, and it is awaiting verification by reporter. From here issues are either reopened, or are closed.">Resolved</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="5">Cannot Reproduce</resolution>
                                        <assignee username="utopiabound">Nathaniel Clark</assignee>
                                    <reporter username="maloo">Maloo</reporter>
                        <labels>
                    </labels>
                <created>Thu, 14 Jul 2011 01:59:03 +0000</created>
                <updated>Mon, 29 Jun 2015 05:34:57 +0000</updated>
                            <resolved>Thu, 7 May 2015 06:49:23 +0000</resolved>
                                    <version>Lustre 2.1.0</version>
                    <version>Lustre 2.3.0</version>
                    <version>Lustre 2.1.1</version>
                    <version>Lustre 2.1.2</version>
                    <version>Lustre 2.1.3</version>
                    <version>Lustre 2.1.4</version>
                    <version>Lustre 1.8.9</version>
                                                        <due></due>
                            <votes>0</votes>
                                    <watches>6</watches>
                                                                            <comments>
                            <comment id="19564" author="yujian" created="Wed, 24 Aug 2011 04:05:52 +0000"  >&lt;p&gt;Lustre Tag: v2_1_0_0_RC0&lt;br/&gt;
Lustre Build: &lt;a href=&quot;http://newbuild.whamcloud.com/job/lustre-master/267/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://newbuild.whamcloud.com/job/lustre-master/267/&lt;/a&gt;&lt;br/&gt;
Distro/Arch: RHEL6/x86_64(server), SLES11/x86_64(client)&lt;/p&gt;

&lt;p&gt;replay-single test 70b failed as follows:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;&amp;lt;~snip~&amp;gt;
Failing mds1 on node client-15-ib
Stopping /mnt/mds1 (opts:)
affected facets: mds1
Failover mds1 to client-15-ib
09:13:25 (1314029605) waiting for client-15-ib network 900 secs ...
09:13:25 (1314029605) network interface is UP
Starting mds1: -o user_xattr,acl  /dev/sda5 /mnt/mds1
client-15-ib: debug=0x33f0404
client-15-ib: subsystem_debug=0xffb7e3ff
client-15-ib: debug_mb=48
Started lustre-MDT0000
client-2-ib: stat: cannot read file system information for `/mnt/lustre&apos;: Interrupted system call
client-5-ib: stat: cannot read file system information for `/mnt/lustre&apos;: Interrupted system call
 replay-single test_70b: @@@@@@ FAIL: post-failover df: 1 
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;Dmesg on the client node showed that:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;[ 3969.930998] Lustre: MGC192.168.4.15@o2ib: Connection restored to service MGS using nid 192.168.4.15@o2ib.
[ 3969.967639] LustreError: 11-0: an error occurred while communicating with 192.168.4.15@o2ib. The mds_connect operation failed with -11
[ 3969.967643] LustreError: Skipped 30 previous similar messages
[ 3974.940274] LustreError: 3946:0:(client.c:2573:ptlrpc_replay_interpret()) @@@ status 301, old was 0  req@ffff88031cac3000 x1377855983327289/t300647711259(300647711259) o-1-&amp;gt;lustre-MDT0000_UUID@192.168.4.15@o2ib:12/10 lens 552/544 e 0 to 0 dl 1314029642 ref 2 fl Interpret:RP/ffffffff/ffffffff rc 301/-1
[ 4183.930433] LustreError: 3946:0:(client.c:2518:ptlrpc_replay_interpret()) request replay timed out, restarting recovery
[ 4183.930821] LustreError: 167-0: This client was evicted by lustre-MDT0000; in progress operations using this service will fail.
[ 4185.527554] LustreError: 9122:0:(lmv_obd.c:1201:lmv_statfs()) can&apos;t stat MDS #0 (lustre-MDT0000-mdc-ffff88033e7d4400), error -4
[ 4185.527561] LustreError: 9122:0:(llite_lib.c:1431:ll_statfs_internal()) md_statfs fails: rc = -4
[ 4185.528223] LustreError: 9156:0:(client.c:1060:ptlrpc_import_delay_req()) @@@ IMP_INVALID  req@ffff8802b431bc00 x1377855984068278/t0(0) o-1-&amp;gt;lustre-MDT0000_UUID@192.168.4.15@o2ib:23/10 lens 360/1048 e 0 to 0 dl 0 ref 2 fl Rpc:/ffffffff/ffffffff rc 0/-1
[ 4185.528228] LustreError: 9156:0:(client.c:1060:ptlrpc_import_delay_req()) Skipped 3 previous similar messages
[ 4185.528240] LustreError: 9156:0:(file.c:158:ll_close_inode_openhandle()) inode 144115473691181069 mdc close failed: rc = -108
[ 4185.595589] Lustre: DEBUG MARKER: replay-single test_70b: @@@@@@ FAIL: post-failover df: 1
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;Maloo report: &lt;a href=&quot;https://maloo.whamcloud.com/test_sets/be1fd32a-cd38-11e0-8d02-52540025f9af&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/be1fd32a-cd38-11e0-8d02-52540025f9af&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="23650" author="sarah" created="Fri, 2 Dec 2011 17:27:07 +0000"  >&lt;p&gt;hit the similar issue when running replay-single test_52 on 1.8&amp;lt;-&amp;gt;2.2 interop testing. Here is the maloo link&lt;/p&gt;

&lt;p&gt;&lt;a href=&quot;https://maloo.whamcloud.com/test_sets/e7e3060e-1596-11e1-b189-52540025f9af&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/e7e3060e-1596-11e1-b189-52540025f9af&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="25342" author="green" created="Tue, 3 Jan 2012 08:17:06 +0000"  >&lt;p&gt;Only 1.8 client1 logs are available?&lt;br/&gt;
Having server logs would be great too.&lt;br/&gt;
It&apos;s possible that the issue is totally on 1.8 side as well.&lt;/p&gt;</comment>
                            <comment id="28489" author="yujian" created="Mon, 13 Feb 2012 08:00:03 +0000"  >&lt;p&gt;Lustre Tag: v2_1_1_0_RC2&lt;br/&gt;
Lustre Build: &lt;a href=&quot;http://build.whamcloud.com/job/lustre-b2_1/41/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://build.whamcloud.com/job/lustre-b2_1/41/&lt;/a&gt;&lt;br/&gt;
Distro/Arch: RHEL6/x86_64 (kernel version: 2.6.32-220.el6)&lt;br/&gt;
Network: TCP (1GigE)&lt;br/&gt;
FAILURE_MODE=HARD&lt;/p&gt;

&lt;p&gt;The replay-single test 44c failed as follows:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;&amp;lt;~snip~&amp;gt;
client-27vm1: stat: cannot read file system information for `/mnt/lustre&apos;: Interrupted system call
 replay-single test_44c: @@@@@@ FAIL: post-failover df: 1
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;The console log on client-27vm1 showed that:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;09:32:22:LustreError: 166-1: MGC10.10.4.164@tcp: Connection to service MGS via nid 10.10.4.164@tcp was lost; in progress operations using this service will fail.
09:32:57:LustreError: 11-0: an error occurred while communicating with 10.10.4.160@tcp. The obd_ping operation failed with -19
09:32:57:LustreError: Skipped 15 previous similar messages
09:32:57:LustreError: 167-0: This client was evicted by lustre-MDT0000; in progress operations using this service will fail.
09:32:57:LustreError: 6692:0:(lmv_obd.c:1201:lmv_statfs()) can&apos;t stat MDS #0 (lustre-MDT0000-mdc-ffff880050090800), error -4
09:32:57:LustreError: 6692:0:(llite_lib.c:1432:ll_statfs_internal()) md_statfs fails: rc = -4
09:32:57:Lustre: lustre-MDT0000-mdc-ffff880050090800: Connection restored to service lustre-MDT0000 using nid 10.10.4.160@tcp.
09:32:57:Lustre: Skipped 11 previous similar messages
09:32:57:Lustre: DEBUG MARKER: /usr/sbin/lctl mark  replay-single test_44c: @@@@@@ FAIL: post-failover df: 1 
09:32:57:Lustre: DEBUG MARKER: replay-single test_44c: @@@@@@ FAIL: post-failover df: 1
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;Maloo report: &lt;a href=&quot;https://maloo.whamcloud.com/test_sets/bbbed6ae-55b3-11e1-9aa8-5254004bbbd3&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/bbbed6ae-55b3-11e1-9aa8-5254004bbbd3&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="39889" author="yujian" created="Mon, 4 Jun 2012 00:33:43 +0000"  >&lt;p&gt;Lustre Tag: v2_1_2_RC2&lt;br/&gt;
Lustre Build: &lt;a href=&quot;http://build.whamcloud.com/job/lustre-b2_1/87/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://build.whamcloud.com/job/lustre-b2_1/87/&lt;/a&gt;&lt;br/&gt;
e2fsprogs Build: &lt;a href=&quot;http://build.whamcloud.com/job/e2fsprogs-master/314/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://build.whamcloud.com/job/e2fsprogs-master/314/&lt;/a&gt;&lt;br/&gt;
Distro/Arch: RHEL6.2/x86_64(server), SLES11SP1/x86_64(client)&lt;br/&gt;
Network: IB (in-kernel OFED)&lt;br/&gt;
ENABLE_QUOTA=yes&lt;/p&gt;

&lt;p&gt;replay-single test_70b failed with the same issue: &lt;a href=&quot;https://maloo.whamcloud.com/test_sets/ab9f7a52-adf7-11e1-b2f9-52540035b04c&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/ab9f7a52-adf7-11e1-b2f9-52540035b04c&lt;/a&gt;&lt;br/&gt;
replay-dual: &lt;a href=&quot;https://maloo.whamcloud.com/test_sets/b8a29928-ae10-11e1-ae0d-52540035b04c&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/b8a29928-ae10-11e1-ae0d-52540035b04c&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="40434" author="sarah" created="Tue, 12 Jun 2012 13:03:40 +0000"  >&lt;p&gt;another failure on master branch, subtest 52:&lt;a href=&quot;https://maloo.whamcloud.com/test_sets/51db2c58-b18c-11e1-bb61-52540035b04c&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/51db2c58-b18c-11e1-bb61-52540035b04c&lt;/a&gt; &lt;/p&gt;</comment>
                            <comment id="44083" author="yujian" created="Sun, 2 Sep 2012 21:33:44 +0000"  >&lt;p&gt;Another instance on b2_1 branch:&lt;br/&gt;
replay-dual test 10: &lt;a href=&quot;https://maloo.whamcloud.com/test_sets/1607ebb8-f452-11e1-b3b2-52540035b04c&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/1607ebb8-f452-11e1-b3b2-52540035b04c&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="44602" author="sarah" created="Tue, 11 Sep 2012 14:32:17 +0000"  >&lt;p&gt;Another instance on b2_3-tag2.2.94 during failover testing&lt;br/&gt;
&lt;a href=&quot;https://maloo.whamcloud.com/test_sets/e302b38e-f92d-11e1-a1b8-52540035b04c&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/e302b38e-f92d-11e1-a1b8-52540035b04c&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;client 1 console log:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;10:39:27:Lustre: DEBUG MARKER: == replay-single test 44c: race in target handle connect ============================================= 10:39:22 (1347039562)
10:39:27:Lustre: DEBUG MARKER: f=/mnt/lustre/fsa-$(hostname); mcreate $f; rm $f
10:39:38:Lustre: DEBUG MARKER: local REPLAY BARRIER on lustre-MDT0000
10:39:49:LustreError: 166-1: MGC10.10.4.166@tcp: Connection to MGS (at 10.10.4.166@tcp) was lost; in progress operations using this service will fail
10:40:51:Lustre: Evicted from MGS (at 10.10.4.170@tcp) after server handle changed from 0x75660b397d4087ff to 0xcb53557584749609
10:40:51:Lustre: Skipped 2 previous similar messages
10:40:51:Lustre: MGC10.10.4.166@tcp: Reactivating import
10:41:02:Lustre: lustre-MDT0000-mdc-ffff810058fc9800: Connection to lustre-MDT0000 (at 10.10.4.166@tcp) was lost; in progress operations using this service will wait for recovery to complete
10:41:02:Lustre: Skipped 9 previous similar messages
10:41:33:LustreError: 167-0: This client was evicted by lustre-MDT0000; in progress operations using this service will fail.
10:41:33:LustreError: 27916:0:(lmv_obd.c:1197:lmv_statfs()) can&apos;t stat MDS #0 (lustre-MDT0000-mdc-ffff810058fc9800), error -5
10:41:33:LustreError: 27916:0:(llite_lib.c:1546:ll_statfs_internal()) md_statfs fails: rc = -5
10:41:54:LustreError: 166-1: MGC10.10.4.166@tcp: Connection to MGS (at 10.10.4.170@tcp) was lost; in progress operations using this service will fail
10:42:56:Lustre: Evicted from MGS (at MGC10.10.4.166@tcp_0) after server handle changed from 0xcb53557584749609 to 0x75660b397d409240
10:42:56:Lustre: MGC10.10.4.166@tcp: Reactivating import
10:42:56:LustreError: 28275:0:(lmv_obd.c:1197:lmv_statfs()) can&apos;t stat MDS #0 (lustre-MDT0000-mdc-ffff810058fc9800), error -5
10:42:56:LustreError: 28275:0:(llite_lib.c:1546:ll_statfs_internal()) md_statfs fails: rc = -5
10:42:56:Lustre: DEBUG MARKER: /usr/sbin/lctl mark  replay-single test_44c: @@@@@@ FAIL: post-failover df: 1 
10:42:57:Lustre: DEBUG MARKER: replay-single test_44c: @@@@@@ FAIL: post-failover df: 1
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;client 1 dmesg:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;client-28vm1: stat: cannot read file system information for `/mnt/lustre&apos;: Input/output error
 replay-single test_44c: @@@@@@ FAIL: post-failover df: 1 
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;</comment>
                            <comment id="46469" author="yujian" created="Fri, 12 Oct 2012 09:50:17 +0000"  >&lt;p&gt;Lustre Tag: v2_3_0_RC2&lt;br/&gt;
Lustre Build: &lt;a href=&quot;http://build.whamcloud.com/job/lustre-b2_3/32&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://build.whamcloud.com/job/lustre-b2_3/32&lt;/a&gt;&lt;br/&gt;
Distro/Arch: RHEL6.3/x86_64(server), RHEL5.8/x86_64(client)&lt;br/&gt;
Test Group: failover&lt;/p&gt;

&lt;p&gt;replay-single test 44c also failed: &lt;a href=&quot;https://maloo.whamcloud.com/test_sets/63efb1d0-146e-11e2-af8d-52540035b04c&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/63efb1d0-146e-11e2-af8d-52540035b04c&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="49585" author="yujian" created="Fri, 21 Dec 2012 21:15:32 +0000"  >&lt;p&gt;Lustre Tag: v2_1_4_RC1&lt;br/&gt;
Lustre Build: &lt;a href=&quot;http://build.whamcloud.com/job/lustre-b2_1/159/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://build.whamcloud.com/job/lustre-b2_1/159/&lt;/a&gt;&lt;br/&gt;
Distro/Arch: RHEL6.3/x86_64&lt;br/&gt;
Test Group: failover&lt;/p&gt;

&lt;p&gt;replay-single test 44c still failed: &lt;a href=&quot;https://maloo.whamcloud.com/test_sets/5d18ad4c-4bb6-11e2-aa80-52540035b04c&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/5d18ad4c-4bb6-11e2-aa80-52540035b04c&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="50319" author="keith" created="Thu, 10 Jan 2013 22:45:22 +0000"  >&lt;p&gt;On Master: &lt;a href=&quot;https://maloo.whamcloud.com/test_sessions/02bc9462-5b97-11e2-b205-52540035b04c&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sessions/02bc9462-5b97-11e2-b205-52540035b04c&lt;/a&gt;&lt;br/&gt;
test_62 	&lt;/p&gt;

&lt;p&gt;    Error: &apos;post-failover df: 1&apos;&lt;br/&gt;
    Failure Rate: 2.00% of last 100 executions &lt;span class=&quot;error&quot;&gt;&amp;#91;all branches&amp;#93;&lt;/span&gt; &lt;/p&gt;

&lt;p&gt;Same exact error.&lt;/p&gt;
</comment>
                            <comment id="52427" author="yujian" created="Fri, 15 Feb 2013 00:37:49 +0000"  >&lt;p&gt;Lustre Tag: v1_8_9_WC1_RC1&lt;br/&gt;
Lustre Build: &lt;a href=&quot;http://build.whamcloud.com/job/lustre-b1_8/256&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://build.whamcloud.com/job/lustre-b1_8/256&lt;/a&gt;&lt;br/&gt;
Distro/Arch: RHEL5.9/x86_64(server)&lt;br/&gt;
Network: TCP (1GigE)&lt;br/&gt;
Test Group: failover&lt;/p&gt;

&lt;p&gt;The replay-single test_20b also failed with the same issue:&lt;br/&gt;
&lt;a href=&quot;https://maloo.whamcloud.com/test_sets/429815e2-76c8-11e2-bc2f-52540035b04c&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/429815e2-76c8-11e2-bc2f-52540035b04c&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="114465" author="adilger" created="Thu, 7 May 2015 06:49:23 +0000"  >&lt;p&gt;Haven&apos;t seen this in a couple of years.&lt;/p&gt;</comment>
                    </comments>
                <issuelinks>
                            <issuelinktype id="10011">
                    <name>Related</name>
                                            <outwardlinks description="is related to ">
                                        <issuelink>
            <issuekey id="12751">LU-951</issuekey>
        </issuelink>
                            </outwardlinks>
                                                        </issuelinktype>
                    </issuelinks>
                <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|hzv3uv:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>4153</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>