<?xml version="1.0" encoding="UTF-8"?>
<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 01:56:11 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary, append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92" >
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-5982] sanity-lfsck test_29c: (8) unexpected status, test_30: timeout</title>
                <link>https://jira.whamcloud.com/browse/LU-5982</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;This issue was created by maloo for Li Wei &amp;lt;liwei@whamcloud.com&amp;gt;&lt;/p&gt;

&lt;p&gt;This issue relates to the following test suite run: &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/5278cc92-7b3d-11e4-8c6d-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/5278cc92-7b3d-11e4-8c6d-5254006e85c2&lt;/a&gt;.&lt;/p&gt;

&lt;p&gt;The sub-test test_29c failed with the following error:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;(8) unexpected status
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;More detailed test output:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;== sanity-lfsck test 29c: Not verify nlink attr if hark links exceed linkEA limitation == 08:43:50 (1417625030)
#####
There are too much hard links to the object, and exceeds the
	echo object&apos;s linkEA limitation, as to NOT all the known name entries
will be recorded in the linkEA. Under such case, LFSCK should
skip the nlink verification for this object.
#####
rm: cannot remove `/mnt/lustre/.lustre/lost+found/MDT0001/[0x340000bd1:0xb:0x0]-P-0&apos;: Directory not empty
Inject failure stub on MDT0 to simulate the case that
foo&apos;s hard links exceed the object&apos;s linkEA limitation.
CMD: onyx-32vm3 /usr/sbin/lctl set_param fail_loc=0x1627
fail_loc=0x1627
/mnt/lustre/d29c.sanity-lfsck/d0/foo
/mnt/lustre/d29c.sanity-lfsck/d0/h1
Trigger namespace LFSCK to repair the nlink count
CMD: onyx-32vm3 /usr/sbin/lctl lfsck_start -M lustre-MDT0000 -t namespace -r -A
Started LFSCK on the device lustre-MDT0000: scrub namespace
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
pdsh@onyx-32vm6: onyx-32vm3: mcmd: xpoll (setting up stderr): Interrupted system call
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
Waiting 32 secs for update
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
Waiting 22 secs for update
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
Waiting 12 secs for update
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
Waiting 2 secs for update
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
		awk &apos;/^status/ { print \$2 }&apos;
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
Update not seen after 32s: wanted &apos;completed&apos; got &apos;&apos;
CMD: onyx-32vm3 /usr/sbin/lctl get_param -n mdd.lustre-MDT0000.lfsck_namespace
onyx-32vm3: error: get_param: mdd/lustre-MDT0000/lfsck_namespace: Found no match
 sanity-lfsck test_29c: @@@@@@ FAIL: (8) unexpected status 
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;
&lt;p&gt;Info required for matching: sanity-lfsck 29c&lt;/p&gt;</description>
                <environment></environment>
        <key id="27789">LU-5982</key>
            <summary>sanity-lfsck test_29c: (8) unexpected status, test_30: timeout</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="1" iconUrl="https://jira.whamcloud.com/images/icons/priorities/blocker.svg">Blocker</priority>
                        <status id="5" iconUrl="https://jira.whamcloud.com/images/icons/statuses/resolved.png" description="A resolution has been taken, and it is awaiting verification by reporter. From here issues are either reopened, or are closed.">Resolved</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="1">Fixed</resolution>
                                        <assignee username="yong.fan">nasf</assignee>
                                    <reporter username="maloo">Maloo</reporter>
                        <labels>
                    </labels>
                <created>Thu, 4 Dec 2014 02:55:38 +0000</created>
                <updated>Sat, 6 Dec 2014 23:04:17 +0000</updated>
                            <resolved>Sat, 6 Dec 2014 23:04:17 +0000</resolved>
                                    <version>Lustre 2.7.0</version>
                                    <fixVersion>Lustre 2.7.0</fixVersion>
                                        <due></due>
                            <votes>0</votes>
                                    <watches>7</watches>
                                                                            <comments>
                            <comment id="100635" author="jamesanunez" created="Thu, 4 Dec 2014 04:05:04 +0000"  >&lt;p&gt;I was testing a recent build of master, #2760, on the OpenSFS cluster and ran into the same test failure. On the MDS console in my testing, I see &lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;Message from syslogd@mds01 at Dec  3 16:38:19 ...
 kernel:LustreError: 29360:0:(osd_handler.c:1571:osd_object_write_unlock()) ASSERTION( obj-&amp;gt;oo_owner == env ) failed: 

Message from syslogd@mds01 at Dec  3 16:38:19 ...
 kernel:LustreError: 29360:0:(osd_handler.c:1571:osd_object_write_unlock()) LBUG
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;
&lt;p&gt;Then the MDS crashed. &lt;/p&gt;

&lt;p&gt;Looking at your test logs, it looks like the primary MDS crashed:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;09:03:40:Lustre: DEBUG MARKER: == sanity-lfsck test 29c: Not verify nlink attr if hark links exceed linkEA limitation == 08:43:50 (1417625030)
09:03:40:Lustre: DEBUG MARKER: /usr/sbin/lctl set_param fail_loc=0x1627
09:03:40:Lustre: *** cfs_fail_loc=1627, val=0***
09:03:40:Lustre: DEBUG MARKER: /usr/sbin/lctl lfsck_start -M lustre-MDT0000 -t namespace -r -A
09:03:40:LustreError: 28234:0:(osd_handler.c:1571:osd_object_write_unlock()) ASSERTION( obj-&amp;gt;oo_owner == env ) failed: 
09:03:40:LustreError: 28234:0:(osd_handler.c:1571:osd_object_write_unlock()) LBUG
09:03:40:Pid: 28234, comm: lfsck_namespace
09:03:40:
09:03:40:Call Trace:
09:03:40: [&amp;lt;ffffffffa048e895&amp;gt;] libcfs_debug_dumpstack+0x55/0x80 [libcfs]
09:03:40: [&amp;lt;ffffffffa048ee97&amp;gt;] lbug_with_loc+0x47/0xb0 [libcfs]
09:03:40: [&amp;lt;ffffffffa0d1cb66&amp;gt;] osd_object_write_unlock+0xd6/0x110 [osd_ldiskfs]
09:03:40: [&amp;lt;ffffffffa0e3fcd2&amp;gt;] dt_write_unlock+0x22/0xc0 [lfsck]
09:03:40: [&amp;lt;ffffffffa0e51dfc&amp;gt;] lfsck_namespace_insert_orphan+0xc9c/0x1100 [lfsck]
09:03:40: [&amp;lt;ffffffff8128d756&amp;gt;] ? vsnprintf+0x336/0x5e0
09:03:40: [&amp;lt;ffffffffa0e59d26&amp;gt;] lfsck_namespace_double_scan_dir+0x9d6/0xe40 [lfsck]
09:03:40: [&amp;lt;ffffffffa04a3ee9&amp;gt;] ? cfs_hash_bd_add_locked+0x29/0x90 [libcfs]
09:03:40: [&amp;lt;ffffffffa0e5a514&amp;gt;] lfsck_namespace_double_scan_one+0x384/0x1310 [lfsck]
09:03:40: [&amp;lt;ffffffffa05fe163&amp;gt;] ? lu_object_find_at+0xb3/0x100 [obdclass]
09:03:40: [&amp;lt;ffffffffa0e5c444&amp;gt;] lfsck_namespace_assistant_handler_p2+0xfa4/0x1140 [lfsck]
09:03:40: [&amp;lt;ffffffffa0e3e1bc&amp;gt;] lfsck_assistant_engine+0x148c/0x1dc0 [lfsck]
09:03:40: [&amp;lt;ffffffff81061d00&amp;gt;] ? default_wake_function+0x0/0x20
09:03:40: [&amp;lt;ffffffffa0e3cd30&amp;gt;] ? lfsck_assistant_engine+0x0/0x1dc0 [lfsck]
09:03:41: [&amp;lt;ffffffff8109abf6&amp;gt;] kthread+0x96/0xa0
09:03:41: [&amp;lt;ffffffff8100c20a&amp;gt;] child_rip+0xa/0x20
09:03:41: [&amp;lt;ffffffff8109ab60&amp;gt;] ? kthread+0x0/0xa0
09:03:41: [&amp;lt;ffffffff8100c200&amp;gt;] ? child_rip+0x0/0x20
09:03:41:
09:03:41:Kernel panic - not syncing: LBUG
09:03:41:Pid: 28234, comm: lfsck_namespace Not tainted 2.6.32-431.29.2.el6_lustre.g2856d31.x86_64 #1
09:03:41:Call Trace:
09:03:41: [&amp;lt;ffffffff81528fdc&amp;gt;] ? panic+0xa7/0x16f
09:03:41:Lustre: DEBUG MARKER: /usr/sbin/lctl get_param -n 		mdd.lustre-MDT0000.lfsck_namespace |
09:03:41:		awk &apos;/^status/ { print $2 }&apos;
09:03:41: [&amp;lt;ffffffffa048eeeb&amp;gt;] ? lbug_with_loc+0x9b/0xb0 [libcfs]
09:03:41: [&amp;lt;ffffffffa0d1cb66&amp;gt;] ? osd_object_write_unlock+0xd6/0x110 [osd_ldiskfs]
09:03:41: [&amp;lt;ffffffffa0e3fcd2&amp;gt;] ? dt_write_unlock+0x22/0xc0 [lfsck]
09:03:41: [&amp;lt;ffffffffa0e51dfc&amp;gt;] ? lfsck_namespace_insert_orphan+0xc9c/0x1100 [lfsck]
09:03:41: [&amp;lt;ffffffff8128d756&amp;gt;] ? vsnprintf+0x336/0x5e0
09:03:41: [&amp;lt;ffffffffa0e59d26&amp;gt;] ? lfsck_namespace_double_scan_dir+0x9d6/0xe40 [lfsck]
09:03:41: [&amp;lt;ffffffffa04a3ee9&amp;gt;] ? cfs_hash_bd_add_locked+0x29/0x90 [libcfs]
09:03:41: [&amp;lt;ffffffffa0e5a514&amp;gt;] ? lfsck_namespace_double_scan_one+0x384/0x1310 [lfsck]
09:03:41: [&amp;lt;ffffffffa05fe163&amp;gt;] ? lu_object_find_at+0xb3/0x100 [obdclass]
09:03:41: [&amp;lt;ffffffffa0e5c444&amp;gt;] ? lfsck_namespace_assistant_handler_p2+0xfa4/0x1140 [lfsck]
09:03:41: [&amp;lt;ffffffffa0e3e1bc&amp;gt;] ? lfsck_assistant_engine+0x148c/0x1dc0 [lfsck]
09:03:41: [&amp;lt;ffffffff81061d00&amp;gt;] ? default_wake_function+0x0/0x20
09:03:41: [&amp;lt;ffffffffa0e3cd30&amp;gt;] ? lfsck_assistant_engine+0x0/0x1dc0 [lfsck]
09:03:41: [&amp;lt;ffffffff8109abf6&amp;gt;] ? kthread+0x96/0xa0
09:03:41: [&amp;lt;ffffffff8100c20a&amp;gt;] ? child_rip+0xa/0x20
09:03:41: [&amp;lt;ffffffff8109ab60&amp;gt;] ? kthread+0x0/0xa0
09:03:41: [&amp;lt;ffffffff8100c200&amp;gt;] ? child_rip+0x0/0x20
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;</comment>
                            <comment id="100638" author="adilger" created="Thu, 4 Dec 2014 06:14:55 +0000"  >&lt;p&gt;Hit this recently in &lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/13c4439a-7b11-11e4-9132-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/13c4439a-7b11-11e4-9132-5254006e85c2&lt;/a&gt; as well. Looks like this is a new regression. &lt;/p&gt;</comment>
                            <comment id="100641" author="yujian" created="Thu, 4 Dec 2014 08:23:22 +0000"  >&lt;p&gt;More instances on master branch:&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/8015e966-7b72-11e4-813e-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/8015e966-7b72-11e4-813e-5254006e85c2&lt;/a&gt;&lt;br/&gt;
&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/c643440e-7bd8-11e4-813e-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/c643440e-7bd8-11e4-813e-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="100645" author="jhammond" created="Thu, 4 Dec 2014 13:27:24 +0000"  >&lt;p&gt;&lt;a href=&quot;https://testing.hpdd.intel.com/test_sets/4ceb1126-7b57-11e4-8c6d-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sets/4ceb1126-7b57-11e4-8c6d-5254006e85c2&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="100788" author="gerrit" created="Fri, 5 Dec 2014 01:16:07 +0000"  >&lt;p&gt;Fan Yong (fan.yong@intel.com) uploaded a new patch: &lt;a href=&quot;http://review.whamcloud.com/12943&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://review.whamcloud.com/12943&lt;/a&gt;&lt;br/&gt;
Subject: &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-5982&quot; title=&quot;sanity-lfsck test_29c: (8) unexpected status, test_30: timeout&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-5982&quot;&gt;&lt;del&gt;LU-5982&lt;/del&gt;&lt;/a&gt; lfsck: not unlock the object repeatedly&lt;br/&gt;
Project: fs/lustre-release&lt;br/&gt;
Branch: master&lt;br/&gt;
Current Patch Set: 1&lt;br/&gt;
Commit: 5d9c9240c598eb0bed054679440acb555f3bff1f&lt;/p&gt;</comment>
                            <comment id="100905" author="adilger" created="Sat, 6 Dec 2014 18:03:49 +0000"  >&lt;p&gt;This problem was added via &lt;a href=&quot;http://review.whamcloud.com/11536&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://review.whamcloud.com/11536&lt;/a&gt; &quot;&lt;a href=&quot;https://jira.whamcloud.com/browse/LU-5518&quot; title=&quot;LFSCK 3: recover client visible objects from the backend /lost+found directory&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-5518&quot;&gt;&lt;del&gt;LU-5518&lt;/del&gt;&lt;/a&gt; lfsck: recover orphans from backend lost+found&quot;&lt;/p&gt;</comment>
                            <comment id="100908" author="gerrit" created="Sat, 6 Dec 2014 21:41:13 +0000"  >&lt;p&gt;Andreas Dilger (andreas.dilger@intel.com) merged in patch &lt;a href=&quot;http://review.whamcloud.com/12943/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://review.whamcloud.com/12943/&lt;/a&gt;&lt;br/&gt;
Subject: &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-5982&quot; title=&quot;sanity-lfsck test_29c: (8) unexpected status, test_30: timeout&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-5982&quot;&gt;&lt;del&gt;LU-5982&lt;/del&gt;&lt;/a&gt; lfsck: not unlock the object repeatedly&lt;br/&gt;
Project: fs/lustre-release&lt;br/&gt;
Branch: master&lt;br/&gt;
Current Patch Set: &lt;br/&gt;
Commit: 8ee9c59a3688e5026ebb914190b63970135ce2e5&lt;/p&gt;</comment>
                            <comment id="100909" author="adilger" created="Sat, 6 Dec 2014 23:04:17 +0000"  >&lt;p&gt;Patch landed to master. &lt;/p&gt;</comment>
                    </comments>
                <issuelinks>
                            <issuelinktype id="10011">
                    <name>Related</name>
                                            <outwardlinks description="is related to ">
                                        <issuelink>
            <issuekey id="26085">LU-5518</issuekey>
        </issuelink>
                            </outwardlinks>
                                                        </issuelinktype>
                    </issuelinks>
                <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|hzx1vj:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>16695</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>