<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 01:18:29 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92" >
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-1648] MDS Crash</title>
                <link>https://jira.whamcloud.com/browse/LU-1648</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;Lustre hung this morning, we are running a e2fsck on the MDT at the moment.&lt;/p&gt;

&lt;p&gt;Mounting the mdt with ldiskfs we saw many large file like &apos;oi.XX.XX&apos;, what are these files?&lt;br/&gt;
Can you please help us debugging the current problem?&lt;/p&gt;

&lt;p&gt;&lt;span class=&quot;error&quot;&gt;&amp;#91;root@weisshorn01 mdt&amp;#93;&lt;/span&gt;# ls&lt;br/&gt;
capa_keys        OBJECTS   oi.16.15  oi.16.22  oi.16.3   oi.16.37  oi.16.44  oi.16.51  oi.16.59  oi.16.9&lt;br/&gt;
CATALOGS         oi.16.0   oi.16.16  oi.16.23  oi.16.30  oi.16.38  oi.16.45  oi.16.52  oi.16.6   PENDING&lt;br/&gt;
CONFIGS          oi.16.1   oi.16.17  oi.16.24  oi.16.31  oi.16.39  oi.16.46  oi.16.53  oi.16.60  ROOT&lt;br/&gt;
fld              oi.16.10  oi.16.18  oi.16.25  oi.16.32  oi.16.4   oi.16.47  oi.16.54  oi.16.61  seq_ctl&lt;br/&gt;
last_rcvd        oi.16.11  oi.16.19  oi.16.26  oi.16.33  oi.16.40  oi.16.48  oi.16.55  oi.16.62  seq_srv&lt;br/&gt;
lost+found       oi.16.12  oi.16.2   oi.16.27  oi.16.34  oi.16.41  oi.16.49  oi.16.56  oi.16.63&lt;br/&gt;
lov_objid        oi.16.13  oi.16.20  oi.16.28  oi.16.35  oi.16.42  oi.16.5   oi.16.57  oi.16.7&lt;br/&gt;
NIDTBL_VERSIONS  oi.16.14  oi.16.21  oi.16.29  oi.16.36  oi.16.43  oi.16.50  oi.16.58  oi.16.8&lt;br/&gt;
&lt;span class=&quot;error&quot;&gt;&amp;#91;root@weisshorn01 mdt&amp;#93;&lt;/span&gt;# ls -l&lt;br/&gt;
total 1957836&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root      144 May 15 14:43 capa_keys&lt;br/&gt;
&lt;del&gt;rwx&lt;/del&gt;-----   1 root root     2304 May 15 14:49 CATALOGS&lt;br/&gt;
drwxrwxrwx   2 root root     4096 Jul 19 17:07 CONFIGS&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root     8192 May 15 14:43 fld&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root   392832 May 15 14:43 last_rcvd&lt;br/&gt;
drwx------   2 root root    16384 May 15 14:43 lost+found&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root      576 May 15 14:43 lov_objid&lt;br/&gt;
drwxrwxrwx   2 root root     4096 May 15 14:43 NIDTBL_VERSIONS&lt;br/&gt;
drwxrwxrwx   2 root root   237568 Jul 20 08:56 OBJECTS&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 31494144 May 15 14:43 oi.16.0&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 26271744 May 15 14:43 oi.16.1&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 49315840 May 15 14:43 oi.16.10&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 29536256 May 15 14:43 oi.16.11&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 26890240 May 15 14:43 oi.16.12&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 20484096 May 15 14:43 oi.16.13&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 30490624 May 15 14:43 oi.16.14&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 33075200 May 15 14:43 oi.16.15&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 25034752 May 15 14:43 oi.16.16&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 43155456 May 15 14:43 oi.16.17&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 27435008 May 15 14:43 oi.16.18&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 21987328 May 15 14:43 oi.16.19&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 29138944 May 15 14:43 oi.16.2&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 21946368 May 15 14:43 oi.16.20&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 28278784 May 15 14:43 oi.16.21&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 28504064 May 15 14:43 oi.16.22&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 30584832 May 15 14:43 oi.16.23&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 27758592 May 15 14:43 oi.16.24&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 22654976 May 15 14:43 oi.16.25&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 24956928 May 15 14:43 oi.16.26&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 45015040 May 15 14:43 oi.16.27&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 27344896 May 15 14:43 oi.16.28&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 36724736 May 15 14:43 oi.16.29&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 23318528 May 15 14:43 oi.16.3&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 25739264 May 15 14:43 oi.16.30&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 26865664 May 15 14:43 oi.16.31&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 29147136 May 15 14:43 oi.16.32&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 28573696 May 15 14:43 oi.16.33&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 26796032 May 15 14:43 oi.16.34&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 30167040 May 15 14:43 oi.16.35&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 31641600 May 15 14:43 oi.16.36&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 21430272 May 15 14:43 oi.16.37&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 24567808 May 15 14:43 oi.16.38&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 29364224 May 15 14:43 oi.16.39&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 22032384 May 15 14:43 oi.16.4&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 41111552 May 15 14:43 oi.16.40&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 41889792 May 15 14:43 oi.16.41&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 34344960 May 15 14:43 oi.16.42&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 45531136 May 15 14:43 oi.16.43&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 34304000 May 15 14:43 oi.16.44&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 32129024 May 15 14:43 oi.16.45&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 30593024 May 15 14:43 oi.16.46&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 33566720 May 15 14:43 oi.16.47&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 31928320 May 15 14:43 oi.16.48&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 32591872 May 15 14:43 oi.16.49&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 29097984 May 15 14:43 oi.16.5&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 38350848 May 15 14:43 oi.16.50&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 24289280 May 15 14:43 oi.16.51&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 41656320 May 15 14:43 oi.16.52&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 35467264 May 15 14:43 oi.16.53&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 37556224 May 15 14:43 oi.16.54&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 32391168 May 15 14:43 oi.16.55&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 31694848 May 15 14:43 oi.16.56&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 35209216 May 15 14:43 oi.16.57&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 34750464 May 15 14:43 oi.16.58&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 33206272 May 15 14:43 oi.16.59&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 40476672 May 15 14:43 oi.16.6&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 26509312 May 15 14:43 oi.16.60&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 29929472 May 15 14:43 oi.16.61&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 34635776 May 15 14:43 oi.16.62&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 23273472 May 15 14:43 oi.16.63&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 34062336 May 15 14:43 oi.16.7&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 33783808 May 15 14:43 oi.16.8&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root 33796096 May 15 14:43 oi.16.9&lt;br/&gt;
drwxr-xr-x   2 root root  5906432 May 15 14:43 PENDING&lt;br/&gt;
drwxr-xr-x 856 root root    36864 Jan  1  1970 ROOT&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root       24 May 15 14:43 seq_ctl&lt;br/&gt;
&lt;del&gt;rw-r&lt;/del&gt;&lt;del&gt;r&lt;/del&gt;-   1 root root       24 May 15 14:43 seq_srv&lt;/p&gt;
</description>
                <environment>## MDS HW ## &lt;br/&gt;
---------------------------------------------------------------------------------------------------- &lt;br/&gt;
Linux XXXX.admin.cscs.ch 2.6.32-220.7.1.el6_lustre.g9c8f747.x86_64 &lt;br/&gt;
Architecture: x86_64 &lt;br/&gt;
CPU op-mode(s): 32-bit, 64-bit &lt;br/&gt;
Byte Order: Little Endian &lt;br/&gt;
CPU(s): 16 &lt;br/&gt;
Vendor ID: AuthenticAMD &lt;br/&gt;
CPU family: 16 &lt;br/&gt;
64Gb RAM &lt;br/&gt;
Interconnect IB 40Gb/s &lt;br/&gt;
--- &lt;br/&gt;
MDT LSI 5480 Pikes Peak &lt;br/&gt;
SSDs SLC &lt;br/&gt;
---------------------------------------------------------------------------------------------------- &lt;br/&gt;
&lt;br/&gt;
---------------------------------------------------------------------------------------------------- &lt;br/&gt;
## OSS HW ## &lt;br/&gt;
---------------------------------------------------------------------------------------------------- &lt;br/&gt;
Architecture: x86_64 &lt;br/&gt;
CPU op-mode(s): 32-bit, 64-bit &lt;br/&gt;
Byte Order: Little Endian &lt;br/&gt;
CPU(s): 32 &lt;br/&gt;
Vendor ID: GenuineIntel &lt;br/&gt;
CPU family: 6 &lt;br/&gt;
64Gb RAM &lt;br/&gt;
Interconnect IB 40Gb/s &lt;br/&gt;
--- &lt;br/&gt;
OSTs ---&amp;gt; LSI 7900 SATA Disks &lt;br/&gt;
---------------------------------------------------------------------------------------------------- &lt;br/&gt;
&lt;br/&gt;
---------------------------------------------------------------------------------------------------- &lt;br/&gt;
## Router nodes ## &lt;br/&gt;
---------------------------------------------------------------------------------------------------- &lt;br/&gt;
12 Cray XE6 Service nodes as router nodes - IB 40Gb/s &lt;br/&gt;
---------------------------------------------------------------------------------------------------- &lt;br/&gt;
&lt;br/&gt;
---------------------------------------------------------------------------------------------------- &lt;br/&gt;
## Clients ## &lt;br/&gt;
---------------------------------------------------------------------------------------------------- &lt;br/&gt;
~ 1500 Cray XE6 nodes - Lustre 1.8.6 &lt;br/&gt;
---------------------------------------------------------------------------------------------------- &lt;br/&gt;
&lt;br/&gt;
---------------------------------------------------------------------------------------------------- &lt;br/&gt;
## LUSTRE Config ## &lt;br/&gt;
---------------------------------------------------------------------------------------------------- &lt;br/&gt;
1 MDS + 1 fail over (MDT on SSD array) &lt;br/&gt;
12 OSSs - 6 OSTs per OSS (72 OSTs) &lt;br/&gt;
&lt;br/&gt;
Lustre Servers ---&amp;gt; 2.2.51.0 &lt;br/&gt;
Lustre Clients ---&amp;gt; 1.8.6 (~1500 nodes) / 2.2.51.0 (~20 nodes) &lt;br/&gt;
----------------------------------------------------------------------------------------------------</environment>
        <key id="15262">LU-1648</key>
            <summary>MDS Crash</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="2" iconUrl="https://jira.whamcloud.com/images/icons/priorities/critical.svg">Critical</priority>
                        <status id="5" iconUrl="https://jira.whamcloud.com/images/icons/statuses/resolved.png" description="A resolution has been taken, and it is awaiting verification by reporter. From here issues are either reopened, or are closed.">Resolved</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="1">Fixed</resolution>
                                        <assignee username="yong.fan">nasf</assignee>
                                    <reporter username="fverzell">Fabio Verzelloni</reporter>
                        <labels>
                    </labels>
                <created>Fri, 20 Jul 2012 04:54:36 +0000</created>
                <updated>Tue, 22 Apr 2014 13:32:44 +0000</updated>
                            <resolved>Tue, 21 Aug 2012 07:40:47 +0000</resolved>
                                    <version>Lustre 2.2.0</version>
                                    <fixVersion>Lustre 2.3.0</fixVersion>
                    <fixVersion>Lustre 2.1.4</fixVersion>
                                        <due></due>
                            <votes>0</votes>
                                    <watches>7</watches>
                                                                            <comments>
                            <comment id="42046" author="liang" created="Fri, 20 Jul 2012 05:19:29 +0000"  >&lt;p&gt;Please don&apos;t touch this files, so I&apos;m correct on &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-1642&quot; title=&quot;Clients get disconnected and reconnected during heavy IO immediately after the halt of a blade.&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-1642&quot;&gt;&lt;del&gt;LU-1642&lt;/del&gt;&lt;/a&gt;, it is because of OI leak (&lt;a href=&quot;https://jira.whamcloud.com/browse/LU-1512&quot; title=&quot;OI leaks&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-1512&quot;&gt;&lt;del&gt;LU-1512&lt;/del&gt;&lt;/a&gt;), these files are actually supposed to be big and growing forever, but growing speed should be slower and slower, not like now, they always grow at the same speed.&lt;/p&gt;

&lt;p&gt;Again, we need Fan Yong to comment on this, I have added him to CC list,  I believe he has some way to fix this, which might require you to run a tool to rebuild these files.&lt;/p&gt;</comment>
                            <comment id="42048" author="liang" created="Fri, 20 Jul 2012 05:32:54 +0000"  >&lt;p&gt;Btw, I think the crash is not about these files, could you please post console output or whatever information from MDS about the crash so we can investigate on it? Thanks&lt;/p&gt;
</comment>
                            <comment id="42049" author="fverzell" created="Fri, 20 Jul 2012 05:50:32 +0000"  >&lt;p&gt;&lt;span class=&quot;error&quot;&gt;&amp;#91;root@weisshorn01 ~&amp;#93;&lt;/span&gt;# e2fsck -fp /dev/mapper/mds&lt;br/&gt;
scratch-MDT0000: Deleted inode 11624457 has zero dtime.  FIXED.&lt;br/&gt;
scratch-MDT0000: Deleted inode 11624753 has zero dtime.  FIXED.&lt;br/&gt;
scratch-MDT0000: Deleted inode 11624777 has zero dtime.  FIXED.&lt;br/&gt;
scratch-MDT0000: Deleted inode 11624784 has zero dtime.  FIXED.&lt;br/&gt;
scratch-MDT0000: Deleted inode 11624869 has zero dtime.  FIXED.&lt;br/&gt;
scratch-MDT0000: Deleted inode 11624887 has zero dtime.  FIXED.&lt;br/&gt;
scratch-MDT0000: Deleted inode 11625059 has zero dtime.  FIXED.&lt;br/&gt;
scratch-MDT0000: Deleted inode 11625127 has zero dtime.  FIXED.&lt;br/&gt;
scratch-MDT0000: Deleted inode 11654190 has zero dtime.  FIXED.&lt;br/&gt;
scratch-MDT0000: Deleted inode 11654602 has zero dtime.  FIXED.&lt;br/&gt;
scratch-MDT0000: Deleted inode 11654713 has zero dtime.  FIXED.&lt;br/&gt;
scratch-MDT0000: Deleted inode 11654772 has zero dtime.  FIXED.&lt;br/&gt;
scratch-MDT0000: Deleted inode 11654866 has zero dtime.  FIXED.&lt;br/&gt;
scratch-MDT0000: Deleted inode 11655003 has zero dtime.  FIXED.&lt;br/&gt;
scratch-MDT0000: Deleted inode 11655022 has zero dtime.  FIXED.&lt;br/&gt;
scratch-MDT0000: Deleted inode 11655110 has zero dtime.  FIXED.&lt;br/&gt;
scratch-MDT0000: Deleted inode 11655125 has zero dtime.  FIXED.&lt;br/&gt;
scratch-MDT0000: Deleted inode 11655138 has zero dtime.  FIXED.&lt;br/&gt;
scratch-MDT0000: Deleted inode 11655142 has zero dtime.  FIXED.&lt;br/&gt;
scratch-MDT0000: Deleted inode 11655261 has zero dtime.  FIXED.&lt;br/&gt;
scratch-MDT0000: Deleted inode 11655323 has zero dtime.  FIXED.&lt;br/&gt;
scratch-MDT0000: Deleted inode 11655324 has zero dtime.  FIXED.&lt;br/&gt;
scratch-MDT0000: Deleted inode 11655337 has zero dtime.  FIXED.&lt;br/&gt;
scratch-MDT0000: Deleted inode 11655396 has zero dtime.  FIXED.&lt;br/&gt;
scratch-MDT0000: Deleted inode 15234080 has zero dtime.  FIXED.&lt;br/&gt;
scratch-MDT0000: Deleted inode 15255286 has zero dtime.  FIXED.&lt;br/&gt;
scratch-MDT0000: Deleted inode 15785601 has zero dtime.  FIXED.&lt;br/&gt;
scratch-MDT0000: Deleted inode 18362624 has zero dtime.  FIXED.&lt;br/&gt;
scratch-MDT0000: Symlink /ROOT/tast/RUN_nproma1_bcLAI/unit.21 (inode #12063425) is invalid.&lt;/p&gt;


&lt;p&gt;scratch-MDT0000: UNEXPECTED INCONSISTENCY; RUN fsck MANUALLY.&lt;br/&gt;
	(i.e., without -a or -p options)&lt;/p&gt;</comment>
                            <comment id="42050" author="yong.fan" created="Fri, 20 Jul 2012 07:33:04 +0000"  >&lt;p&gt;These oi.16.xx files are used for mapping global identifier (FID) to local identifier (ino# &amp;amp; gen) for ldiskfs-based backend filesystem. These files are used on server only, invisible to client.&lt;/p&gt;

&lt;p&gt;According to current design and implementation, the OI file size/space cannot be shrink. I am making patch in &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-1512&quot; title=&quot;OI leaks&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-1512&quot;&gt;&lt;del&gt;LU-1512&lt;/del&gt;&lt;/a&gt; to slow the size/space growing speed. On Lustre-2.3, you can remove those OI files by force, then OI scrub can rebuild them automatically. Usually, these new created OI files are smaller. But for the former release, no better solution for that.&lt;/p&gt;

&lt;p&gt;Anyway, OI file size/space issue should not hang the system. Have you seen some error message for &quot;-ENOSPC&quot; on MDS when the system hung?&lt;/p&gt;</comment>
                            <comment id="42056" author="liang" created="Fri, 20 Jul 2012 08:38:02 +0000"  >&lt;p&gt;many threads are stuck at &quot;start_this_handle&quot;:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;Jul 20 09:00:34 weisshorn02 kernel: Call Trace:
Jul 20 09:00:34 weisshorn02 kernel: [&amp;lt;ffffffff8127466d&amp;gt;] ? pointer+0xad/0xa60
Jul 20 09:00:34 weisshorn02 kernel: [&amp;lt;ffffffffa0142072&amp;gt;] start_this_handle+0x282/0x500 [jbd2]
Jul 20 09:00:34 weisshorn02 kernel: [&amp;lt;ffffffff812731ee&amp;gt;] ? number+0x2ee/0x320
Jul 20 09:00:34 weisshorn02 kernel: [&amp;lt;ffffffff81090a90&amp;gt;] ? autoremove_wake_function+0x0/0x40
Jul 20 09:00:34 weisshorn02 kernel: [&amp;lt;ffffffffa01424f0&amp;gt;] jbd2_journal_start+0xd0/0x110 [jbd2]
Jul 20 09:00:34 weisshorn02 kernel: [&amp;lt;ffffffffa0af8b08&amp;gt;] ldiskfs_journal_start_sb+0x58/0x90 [ldiskfs]
Jul 20 09:00:34 weisshorn02 kernel: [&amp;lt;ffffffffa05d0d41&amp;gt;] fsfilt_ldiskfs_start+0x91/0x480 [fsfilt_ldiskfs]
Jul 20 09:00:34 weisshorn02 kernel: [&amp;lt;ffffffffa063fdaa&amp;gt;] llog_origin_handle_cancel+0x3ea/0xa20 [ptlrpc]
Jul 20 09:00:34 weisshorn02 kernel: [&amp;lt;ffffffffa03c4903&amp;gt;] ? cfs_alloc+0x63/0x90 [libcfs]
Jul 20 09:00:34 weisshorn02 kernel: [&amp;lt;ffffffffa04d10df&amp;gt;] ? keys_fill+0x6f/0x1a0 [obdclass]
Jul 20 09:00:34 weisshorn02 kernel: [&amp;lt;ffffffffa060da87&amp;gt;] ldlm_cancel_handler+0x157/0x4a0 [ptlrpc]
Jul 20 09:00:34 weisshorn02 kernel: [&amp;lt;ffffffffa06363c1&amp;gt;] ptlrpc_server_handle_request+0x3c1/0xcb0 [ptlrpc]
Jul 20 09:00:34 weisshorn02 kernel: [&amp;lt;ffffffffa03c44ce&amp;gt;] ? cfs_timer_arm+0xe/0x10 [libcfs]
Jul 20 09:00:34 weisshorn02 kernel: [&amp;lt;ffffffffa03ceef9&amp;gt;] ? lc_watchdog_touch+0x79/0x110 [libcfs]
Jul 20 09:00:34 weisshorn02 kernel: [&amp;lt;ffffffffa0630462&amp;gt;] ? ptlrpc_wait_event+0xb2/0x2c0 [ptlrpc]
Jul 20 09:00:34 weisshorn02 kernel: [&amp;lt;ffffffff810519c3&amp;gt;] ? __wake_up+0x53/0x70
Jul 20 09:00:34 weisshorn02 kernel: [&amp;lt;ffffffffa06373cf&amp;gt;] ptlrpc_main+0x71f/0x1210 [ptlrpc]

&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;it looks like &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-81&quot; title=&quot;Some JBD2 journaling deadlock at BULL&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-81&quot;&gt;&lt;del&gt;LU-81&lt;/del&gt;&lt;/a&gt;, but the fix should be in 2.2 already, any comment?&lt;/p&gt;</comment>
                            <comment id="42237" author="yong.fan" created="Wed, 25 Jul 2012 09:25:18 +0000"  >&lt;p&gt;Yes, it is &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-81&quot; title=&quot;Some JBD2 journaling deadlock at BULL&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-81&quot;&gt;&lt;del&gt;LU-81&lt;/del&gt;&lt;/a&gt;. But &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-81&quot; title=&quot;Some JBD2 journaling deadlock at BULL&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-81&quot;&gt;&lt;del&gt;LU-81&lt;/del&gt;&lt;/a&gt; did not totally fix the deadlock of changelog adding vs changelog canceling. There are other potential deadlock cases caused by the race of starting journal handle and acquiring &quot;lgh_lock&quot;.&lt;/p&gt;

&lt;p&gt;The original patch of &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-81&quot; title=&quot;Some JBD2 journaling deadlock at BULL&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-81&quot;&gt;&lt;del&gt;LU-81&lt;/del&gt;&lt;/a&gt; made the process sequences as:&lt;br/&gt;
1) start journal handle&lt;br/&gt;
2) acquire &quot;lgh_lock&quot;&lt;/p&gt;

&lt;p&gt;But it ignored the case of journal handle restarting. Under such case, the caller may be blocked with holding &quot;lgh_lock&quot;. I have no idea to resolve such deadlock yet.&lt;/p&gt;


&lt;p&gt;Jul 20 09:06:37 weisshorn02 kernel: Call Trace:&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffa01432d4&amp;gt;&amp;#93;&lt;/span&gt; ? do_get_write_access+0x3b4/0x520 &lt;span class=&quot;error&quot;&gt;&amp;#91;jbd2&amp;#93;&lt;/span&gt;&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffa0142072&amp;gt;&amp;#93;&lt;/span&gt; start_this_handle+0x282/0x500 &lt;span class=&quot;error&quot;&gt;&amp;#91;jbd2&amp;#93;&lt;/span&gt;&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffff81090a90&amp;gt;&amp;#93;&lt;/span&gt; ? autoremove_wake_function+0x0/0x40&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffa01423c1&amp;gt;&amp;#93;&lt;/span&gt; jbd2_journal_restart+0xd1/0x130 &lt;span class=&quot;error&quot;&gt;&amp;#91;jbd2&amp;#93;&lt;/span&gt;&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffa0ace8ba&amp;gt;&amp;#93;&lt;/span&gt; ldiskfs_truncate_restart_trans+0x8a/0xa0 &lt;span class=&quot;error&quot;&gt;&amp;#91;ldiskfs&amp;#93;&lt;/span&gt;&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffa0ad3bfd&amp;gt;&amp;#93;&lt;/span&gt; ldiskfs_clear_blocks+0x9d/0x170 &lt;span class=&quot;error&quot;&gt;&amp;#91;ldiskfs&amp;#93;&lt;/span&gt;&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffa0ad3de4&amp;gt;&amp;#93;&lt;/span&gt; ldiskfs_free_data+0x114/0x170 &lt;span class=&quot;error&quot;&gt;&amp;#91;ldiskfs&amp;#93;&lt;/span&gt;&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffa0ad4023&amp;gt;&amp;#93;&lt;/span&gt; ldiskfs_free_branches+0x1e3/0x200 &lt;span class=&quot;error&quot;&gt;&amp;#91;ldiskfs&amp;#93;&lt;/span&gt;&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffa0ad3f16&amp;gt;&amp;#93;&lt;/span&gt; ldiskfs_free_branches+0xd6/0x200 &lt;span class=&quot;error&quot;&gt;&amp;#91;ldiskfs&amp;#93;&lt;/span&gt;&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffa0ad4629&amp;gt;&amp;#93;&lt;/span&gt; ldiskfs_truncate+0x5e9/0x660 &lt;span class=&quot;error&quot;&gt;&amp;#91;ldiskfs&amp;#93;&lt;/span&gt;&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffa0ac3f3b&amp;gt;&amp;#93;&lt;/span&gt; ? __ldiskfs_handle_dirty_metadata+0x7b/0x100 &lt;span class=&quot;error&quot;&gt;&amp;#91;ldiskfs&amp;#93;&lt;/span&gt;&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffa0acec7e&amp;gt;&amp;#93;&lt;/span&gt; ? ldiskfs_mark_iloc_dirty+0x36e/0x5d0 &lt;span class=&quot;error&quot;&gt;&amp;#91;ldiskfs&amp;#93;&lt;/span&gt;&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffa0ad0b23&amp;gt;&amp;#93;&lt;/span&gt; ? ldiskfs_mark_inode_dirty+0x83/0x1f0 &lt;span class=&quot;error&quot;&gt;&amp;#91;ldiskfs&amp;#93;&lt;/span&gt;&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffff814ee34e&amp;gt;&amp;#93;&lt;/span&gt; ? mutex_lock+0x1e/0x50&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffa0ad57c0&amp;gt;&amp;#93;&lt;/span&gt; ? ldiskfs_delete_inode+0x0/0x250 &lt;span class=&quot;error&quot;&gt;&amp;#91;ldiskfs&amp;#93;&lt;/span&gt;&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffa0ad59a0&amp;gt;&amp;#93;&lt;/span&gt; ldiskfs_delete_inode+0x1e0/0x250 &lt;span class=&quot;error&quot;&gt;&amp;#91;ldiskfs&amp;#93;&lt;/span&gt;&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffff811915fe&amp;gt;&amp;#93;&lt;/span&gt; generic_delete_inode+0xde/0x1d0&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffff81191755&amp;gt;&amp;#93;&lt;/span&gt; generic_drop_inode+0x65/0x80&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffff811905d2&amp;gt;&amp;#93;&lt;/span&gt; iput+0x62/0x70&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffa074e41e&amp;gt;&amp;#93;&lt;/span&gt; mds_obd_destroy+0x3ae/0x850 &lt;span class=&quot;error&quot;&gt;&amp;#91;mds&amp;#93;&lt;/span&gt;&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffff81177ee1&amp;gt;&amp;#93;&lt;/span&gt; ? __fput+0x1a1/0x210&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffa049eca5&amp;gt;&amp;#93;&lt;/span&gt; llog_lvfs_destroy+0x545/0xbb0 &lt;span class=&quot;error&quot;&gt;&amp;#91;obdclass&amp;#93;&lt;/span&gt;&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffa0141ed5&amp;gt;&amp;#93;&lt;/span&gt; ? start_this_handle+0xe5/0x500 &lt;span class=&quot;error&quot;&gt;&amp;#91;jbd2&amp;#93;&lt;/span&gt;&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffa0496acf&amp;gt;&amp;#93;&lt;/span&gt; llog_cancel_rec+0x31f/0x600 &lt;span class=&quot;error&quot;&gt;&amp;#91;obdclass&amp;#93;&lt;/span&gt;&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffa049aee9&amp;gt;&amp;#93;&lt;/span&gt; llog_cat_cancel_records+0x99/0x230 &lt;span class=&quot;error&quot;&gt;&amp;#91;obdclass&amp;#93;&lt;/span&gt;&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffa063fbee&amp;gt;&amp;#93;&lt;/span&gt; llog_origin_handle_cancel+0x22e/0xa20 &lt;span class=&quot;error&quot;&gt;&amp;#91;ptlrpc&amp;#93;&lt;/span&gt;&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffa03c4903&amp;gt;&amp;#93;&lt;/span&gt; ? cfs_alloc+0x63/0x90 &lt;span class=&quot;error&quot;&gt;&amp;#91;libcfs&amp;#93;&lt;/span&gt;&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffa04d10df&amp;gt;&amp;#93;&lt;/span&gt; ? keys_fill+0x6f/0x1a0 &lt;span class=&quot;error&quot;&gt;&amp;#91;obdclass&amp;#93;&lt;/span&gt;&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffa060da87&amp;gt;&amp;#93;&lt;/span&gt; ldlm_cancel_handler+0x157/0x4a0 &lt;span class=&quot;error&quot;&gt;&amp;#91;ptlrpc&amp;#93;&lt;/span&gt;&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffa06363c1&amp;gt;&amp;#93;&lt;/span&gt; ptlrpc_server_handle_request+0x3c1/0xcb0 &lt;span class=&quot;error&quot;&gt;&amp;#91;ptlrpc&amp;#93;&lt;/span&gt;&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffa03c44ce&amp;gt;&amp;#93;&lt;/span&gt; ? cfs_timer_arm+0xe/0x10 &lt;span class=&quot;error&quot;&gt;&amp;#91;libcfs&amp;#93;&lt;/span&gt;&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffa03ceef9&amp;gt;&amp;#93;&lt;/span&gt; ? lc_watchdog_touch+0x79/0x110 &lt;span class=&quot;error&quot;&gt;&amp;#91;libcfs&amp;#93;&lt;/span&gt;&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffa0630462&amp;gt;&amp;#93;&lt;/span&gt; ? ptlrpc_wait_event+0xb2/0x2c0 &lt;span class=&quot;error&quot;&gt;&amp;#91;ptlrpc&amp;#93;&lt;/span&gt;&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffff810519c3&amp;gt;&amp;#93;&lt;/span&gt; ? __wake_up+0x53/0x70&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffa06373cf&amp;gt;&amp;#93;&lt;/span&gt; ptlrpc_main+0x71f/0x1210 &lt;span class=&quot;error&quot;&gt;&amp;#91;ptlrpc&amp;#93;&lt;/span&gt;&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffa0636cb0&amp;gt;&amp;#93;&lt;/span&gt; ? ptlrpc_main+0x0/0x1210 &lt;span class=&quot;error&quot;&gt;&amp;#91;ptlrpc&amp;#93;&lt;/span&gt;&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffff8100c14a&amp;gt;&amp;#93;&lt;/span&gt; child_rip+0xa/0x20&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffa0636cb0&amp;gt;&amp;#93;&lt;/span&gt; ? ptlrpc_main+0x0/0x1210 &lt;span class=&quot;error&quot;&gt;&amp;#91;ptlrpc&amp;#93;&lt;/span&gt;&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffffa0636cb0&amp;gt;&amp;#93;&lt;/span&gt; ? ptlrpc_main+0x0/0x1210 &lt;span class=&quot;error&quot;&gt;&amp;#91;ptlrpc&amp;#93;&lt;/span&gt;&lt;br/&gt;
Jul 20 09:06:37 weisshorn02 kernel: &lt;span class=&quot;error&quot;&gt;&amp;#91;&amp;lt;ffffffff8100c140&amp;gt;&amp;#93;&lt;/span&gt; ? child_rip+0x0/0x20&lt;/p&gt;</comment>
                            <comment id="42245" author="yong.fan" created="Wed, 25 Jul 2012 11:52:06 +0000"  >&lt;p&gt;My current idea is to increase the credit for llog cancel to prevent journal restart during the transaction. It may not be a perfect solution, but it should be workable. This is the patch:&lt;/p&gt;

&lt;p&gt;&lt;a href=&quot;http://review.whamcloud.com/#change,3463&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://review.whamcloud.com/#change,3463&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="42916" author="yong.fan" created="Thu, 9 Aug 2012 01:42:22 +0000"  >&lt;p&gt;The patch has been landed to Lustre-2.3.&lt;/p&gt;

&lt;p&gt;Fabio, would you have a chance to verify it on your system? Thanks!&lt;/p&gt;</comment>
                            <comment id="48789" author="emoly.liu" created="Wed, 5 Dec 2012 02:36:50 +0000"  >&lt;p&gt;Port for b2_1 is at &lt;a href=&quot;http://review.whamcloud.com/4743&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://review.whamcloud.com/4743&lt;/a&gt;&lt;/p&gt;</comment>
                    </comments>
                <issuelinks>
                            <issuelinktype id="10011">
                    <name>Related</name>
                                                                <inwardlinks description="is related to">
                                        <issuelink>
            <issuekey id="23814">LU-4794</issuekey>
        </issuelink>
                            </inwardlinks>
                                    </issuelinktype>
                    </issuelinks>
                <attachments>
                            <attachment id="11706" name="cluster_20_jul.log" size="1593663" author="fverzell" created="Fri, 20 Jul 2012 07:02:18 +0000"/>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|hzv5z3:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>4502</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>