<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 02:12:12 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
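
As a minimal sketch (the per-issue XML view path 'si/jira.issueviews:issue-xml/KEY/KEY.xml' is JIRA's usual convention and is assumed here, not taken from this feed), such a restricted request could look like:

    import urllib.request

    # Hypothetical example: fetch this issue's XML view restricted to the
    # issue key and summary by repeating the 'field' parameter.
    url = ("https://jira.whamcloud.com/si/jira.issueviews:issue-xml/"
           "LU-7820/LU-7820.xml?field=key&field=summary")
    with urllib.request.urlopen(url) as resp:
        print(resp.read().decode("utf-8"))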
-->
<rss version="0.92" >
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>
    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-7820] jobs crash with llite_lib.c:2309:ll_prep_inode()) new_inode -fatal: rc -5</title>
                <link>https://jira.whamcloud.com/browse/LU-7820</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;The error occurs during soak testing of build &apos;20160224&apos; (b2_8 RC2) (see &lt;br/&gt;
&lt;a href=&quot;https://wiki.hpdd.intel.com/pages/viewpage.action?title=Soak+Testing+on+Lola&amp;amp;spaceKey=Releases#SoakTestingonLola-20150224&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://wiki.hpdd.intel.com/pages/viewpage.action?title=Soak+Testing+on+Lola&amp;amp;spaceKey=Releases#SoakTestingonLola-20150224&lt;/a&gt;). DNE is enabled.&lt;br/&gt;
MDSes had been formatted using &lt;em&gt;ldiskfs&lt;/em&gt;, OSTs using &lt;em&gt;zfs&lt;/em&gt;. MDSes are configured in active-active HA failover configuration.&lt;/p&gt;

&lt;p&gt;Application &lt;tt&gt;mdtest&lt;/tt&gt; (1 file per process) jobs crash with the following errors:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;  JOBID          ERROR MESSAGE
-- 445604 :  2016-02-25 15:08:35 : Process 1(lola-31.lola.whamcloud.com): FAILED in main, Unable to change to test directory: Input/output error
-- 445605 :  2016-02-25 15:07:42 : Process 3(lola-32.lola.whamcloud.com): FAILED in main, Unable to change to test directory: Input/output error
-- 445415 :  2016-02-25 11:27:11 : Process 3(lola-34.lola.whamcloud.com): FAILED in main, Unable to change to test directory: Input/output error
-- 445416 :  2016-02-25 11:28:45 : Process 3(lola-32.lola.whamcloud.com): FAILED in main, Unable to change to test directory: Input/output error
-- 445270 :  2016-02-25 08:05:01 : Process 4(lola-31.lola.whamcloud.com): FAILED in main, Unable to change to test directory: Input/output error
-- 445271 :  2016-02-25 08:04:34 : Process 1(lola-29.lola.whamcloud.com): FAILED in main, Unable to change to test directory: Input/output error
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;On MDS and client nodes the following Lustre errors can be correlated:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;---- Incident 25 15:08:35 ----
lola-11.log:Feb 25 15:08:35 lola-11 kernel: Lustre: soaked-MDT0006: Connection restored to 300cd577-7ec5-3892-b093-9d631f897cda (at 192.168.1.131@o2ib100)
lola-11.log:Feb 25 15:08:35 lola-11 kernel: Lustre: Skipped 254 previous similar messages
lola-31.log:Feb 25 15:08:35 lola-31 kernel: LustreError: 167-0: soaked-MDT0006-mdc-ffff88086597e800: This client was evicted by soaked-MDT0006; in progress operations using this service will fail.
lola-31.log:Feb 25 15:08:35 lola-31 kernel: LustreError: 120434:0:(llite_lib.c:2309:ll_prep_inode()) new_inode -fatal: rc -5
lola-31.log:Feb 25 15:08:35 lola-31 kernel: Lustre: soaked-MDT0006-mdc-ffff88086597e800: Connection restored to 192.168.1.111@o2ib10 (at 192.168.1.111@o2ib10)
---- Incident 25 15:07:42 ----
lola-32.log:Feb 25 15:07:42 lola-32 kernel: LustreError: 167-0: soaked-MDT0006-mdc-ffff88082f4c4000: This client was evicted by soaked-MDT0006; in progress operations using this service will fail.
lola-32.log:Feb 25 15:07:42 lola-32 kernel: LustreError: 133347:0:(llite_lib.c:2309:ll_prep_inode()) new_inode -fatal: rc -5
lola-32.log:Feb 25 15:07:42 lola-32 kernel: LustreError: 133347:0:(llite_lib.c:2309:ll_prep_inode()) Skipped 2 previous similar messages
lola-32.log:Feb 25 15:07:42 lola-32 kernel: Lustre: soaked-MDT0006-mdc-ffff88082f4c4000: Connection restored to 192.168.1.111@o2ib10 (at 192.168.1.111@o2ib10)
---- Incident 25 11:27:11 ----
lola-31.log:Feb 25 11:27:11 lola-31 kernel: LustreError: 105033:0:(llite_lib.c:2309:ll_prep_inode()) new_inode -fatal: rc -4
lola-34.log:Feb 25 11:27:11 lola-34 kernel: LustreError: 167-0: soaked-MDT0002-mdc-ffff88102fa38000: This client was evicted by soaked-MDT0002; in progress operations using this service will fail.
lola-34.log:Feb 25 11:27:11 lola-34 kernel: LustreError: 105947:0:(llite_lib.c:2309:ll_prep_inode()) new_inode -fatal: rc -5
lola-34.log:Feb 25 11:27:11 lola-34 kernel: Lustre: soaked-MDT0002-mdc-ffff88102fa38000: Connection restored to 192.168.1.109@o2ib10 (at 192.168.1.109@o2ib10)
---- Incident 25 11:28:45 ----
lola-32.log:Feb 25 11:28:45 lola-32 kernel: LustreError: 167-0: soaked-MDT0002-mdc-ffff88082f4c4000: This client was evicted by soaked-MDT0002; in progress operations using this service will fail.
lola-32.log:Feb 25 11:28:45 lola-32 kernel: LustreError: 117554:0:(llite_lib.c:2309:ll_prep_inode()) new_inode -fatal: rc -5
lola-32.log:Feb 25 11:28:45 lola-32 kernel: Lustre: soaked-MDT0002-mdc-ffff88082f4c4000: Connection restored to 192.168.1.109@o2ib10 (at 192.168.1.109@o2ib10)
lola-32.log:Feb 25 11:28:45 lola-32 kernel: LustreError: 117554:0:(llite_lib.c:2309:ll_prep_inode()) Skipped 2 previous similar messages
---- Incident 25 08:05:01 ----
lola-31.log:Feb 25 08:05:01 lola-31 kernel: LustreError: 167-0: soaked-MDT0002-mdc-ffff88086597e800: This client was evicted by soaked-MDT0002; in progress operations using this service will fail.
lola-31.log:Feb 25 08:05:01 lola-31 kernel: LustreError: 89849:0:(file.c:180:ll_close_inode_openhandle()) soaked-clilmv-ffff88086597e800: inode [0x28000bf82:0x69f4:0x0] mdc close failed: rc = -5
lola-31.log:Feb 25 08:05:01 lola-31 kernel: LustreError: 91182:0:(llite_lib.c:2309:ll_prep_inode()) new_inode -fatal: rc -5
lola-31.log:Feb 25 08:05:01 lola-31 kernel: Lustre: soaked-MDT0002-mdc-ffff88086597e800: Connection restored to 192.168.1.109@o2ib10 (at 192.168.1.109@o2ib10)
---- Incident 25 08:04:34 ----
lola-29.log:Feb 25 08:04:34 lola-29 kernel: LustreError: 167-0: soaked-MDT0002-mdc-ffff880871eec800: This client was evicted by soaked-MDT0002; in progress operations using this service will fail.
lola-29.log:Feb 25 08:04:34 lola-29 kernel: LustreError: 1037:0:(file.c:180:ll_close_inode_openhandle()) soaked-clilmv-ffff880871eec800: inode [0x28000bf82:0x66f3:0x0] mdc close failed: rc = -5
lola-29.log:Feb 25 08:04:34 lola-29 kernel: LustreError: 1043:0:(vvp_io.c:1519:vvp_io_init()) soaked: refresh file layout [0x28000a816:0x1c0e2:0x0] error -5.
lola-29.log:Feb 25 08:04:34 lola-29 kernel: Lustre: soaked-MDT0002-mdc-ffff880871eec800: Connection restored to 192.168.1.109@o2ib10 (at 192.168.1.109@o2ib10)
lola-29.log:Feb 25 08:04:34 lola-29 kernel: LustreError: 1037:0:(file.c:180:ll_close_inode_openhandle()) Skipped 3 previous similar messages
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;
&lt;p&gt;The errors happened after the following MDS failover operations:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;mds_failover     : 2016-02-25 14:52:36,099 - 2016-02-25 14:59:44,541     lola-11
mds_failover     : 2016-02-25 11:06:59,431 - 2016-02-25 11:16:18,956     lola-9
mds_failover     : 2016-02-25 07:45:03,939 - 2016-02-25 07:54:18,970     lola-9
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;
&lt;p&gt;Is the eviction an expected part of the workflow?&lt;/p&gt;</description>
                <environment>lola&lt;br/&gt;
build: &lt;a href=&quot;https://build.hpdd.intel.com/job/lustre-b2_8/8/&quot;&gt;https://build.hpdd.intel.com/job/lustre-b2_8/8/&lt;/a&gt;</environment>
        <key id="34995">LU-7820</key>
            <summary>jobs crash with llite_lib.c:2309:ll_prep_inode()) new_inode -fatal: rc -5</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="2" iconUrl="https://jira.whamcloud.com/images/icons/priorities/critical.svg">Critical</priority>
                        <status id="5" iconUrl="https://jira.whamcloud.com/images/icons/statuses/resolved.png" description="A resolution has been taken, and it is awaiting verification by reporter. From here issues are either reopened, or are closed.">Resolved</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="2">Won&apos;t Fix</resolution>
                                        <assignee username="wc-triage">WC Triage</assignee>
                                    <reporter username="heckes">Frank Heckes</reporter>
                        <labels>
                            <label>soak</label>
                    </labels>
                <created>Fri, 26 Feb 2016 10:45:01 +0000</created>
                <updated>Tue, 24 Jan 2017 22:41:28 +0000</updated>
                            <resolved>Tue, 24 Jan 2017 22:41:28 +0000</resolved>
                                    <version>Lustre 2.8.0</version>
                                                        <due></due>
                            <votes>0</votes>
                                    <watches>1</watches>
                                                                            <comments>
                            <comment id="182018" author="cliffw" created="Tue, 24 Jan 2017 22:41:28 +0000"  >&lt;p&gt;Old issue from 2.8&lt;/p&gt;</comment>
                    </comments>
                    <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                    <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                        </customfieldvalues>
                    </customfield>
                    <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|hzy2vz:</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                    <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>9223372036854775807</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                    <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                </customfields>
    </item>
</channel>
</rss>