<?xml version="1.0" encoding="UTF-8"?>
<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 01:49:28 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92" >
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-5208] sanity-lfsck test_18c failure: Expect 3 fixed on mds1, but got: 2</title>
                <link>https://jira.whamcloud.com/browse/LU-5208</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;Running sanity-lfsck with the stated environment, tests 18c, 18d, 18e, and 19a fail and test 19b hangs. Test results are at &lt;a href=&quot;https://maloo.whamcloud.com/test_sessions/5ad54b54-f5a5-11e3-b29e-52540035b04c&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sessions/5ad54b54-f5a5-11e3-b29e-52540035b04c&lt;/a&gt; .&lt;/p&gt;

&lt;p&gt;sanity-lfsck test 18c fails with the error:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;sanity-lfsck test_18c: @@@@@@ FAIL: (4) Expect 3 fixed on mds1, but got: 2
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;Right before this test fails, the output from /proc/fs/lustre/mdd/scratch-MDT0000/lfsck_layout on mds01, MDT0, is:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;name: lfsck_layout
magic: 0xb173ae14
version: 2
status: completed
flags:
param: all_targets,orphan
time_since_last_completed: 2912 seconds
time_since_latest_start: 2912 seconds
time_since_last_checkpoint: 2912 seconds
latest_start_position: 0
last_checkpoint_position: 25098
first_failure_position: 0
success_count: 1
repaired_dangling: 0
repaired_unmatched_pair: 0
repaired_multiple_referenced: 0
repaired_orphan: 2
repaired_inconsistent_owner: 0
repaired_others: 0
skipped: 0
failed_phase1: 0
failed_phase2: 0
checked_phase1: 8
checked_phase2: 2
run_time_phase1: 0 seconds
run_time_phase2: 0 seconds
average_speed_phase1: 8 items/sec
average_speed_phase2: 2 objs/sec
real-time_speed_phase1: N/A
real-time_speed_phase2: N/A
current_position: N/A
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;</description>
                <environment>Lustre 2.5.60 on the OpenSFS cluster, CentOS 6.5 with one server (mds01) with a MGS and MDS with two MDTs, another server (mds02) with MDS and two MDTs, four OSSs with two OSTs each and four clients.</environment>
        <key id="25177">LU-5208</key>
            <summary>sanity-lfsck test_18c failure: Expect 3 fixed on mds1, but got: 2</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="4" iconUrl="https://jira.whamcloud.com/images/icons/priorities/minor.svg">Minor</priority>
                        <status id="5" iconUrl="https://jira.whamcloud.com/images/icons/statuses/resolved.png" description="A resolution has been taken, and it is awaiting verification by reporter. From here issues are either reopened, or are closed.">Resolved</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="1">Fixed</resolution>
                                        <assignee username="yong.fan">nasf</assignee>
                                    <reporter username="jamesanunez">James Nunez</reporter>
                        <labels>
                            <label>lfsck</label>
                    </labels>
                <created>Mon, 16 Jun 2014 22:53:18 +0000</created>
                <updated>Mon, 25 Aug 2014 23:13:03 +0000</updated>
                            <resolved>Mon, 25 Aug 2014 23:13:03 +0000</resolved>
                                    <version>Lustre 2.6.0</version>
                                    <fixVersion>Lustre 2.7.0</fixVersion>
                                        <due></due>
                            <votes>0</votes>
                                    <watches>2</watches>
                                                                            <comments>
                            <comment id="88401" author="yong.fan" created="Tue, 8 Jul 2014 03:18:31 +0000"  >&lt;p&gt;Please refer to the comment in the &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-5209&quot; title=&quot;sanity-lfsck test_18d failure: Expect file2 size 4, but got 0&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-5209&quot;&gt;&lt;del&gt;LU-5209&lt;/del&gt;&lt;/a&gt;, we need the LFSCK log to analysis the LFSCK behaviour.&lt;/p&gt;</comment>
                            <comment id="89154" author="jamesanunez" created="Tue, 15 Jul 2014 21:53:54 +0000"  >&lt;p&gt;sanity-lfsck test logs for 2.6.0-RC1 are at: &lt;a href=&quot;https://testing.hpdd.intel.com/test_sessions/5e3c96b0-0c68-11e4-9892-5254006e85c2&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.hpdd.intel.com/test_sessions/5e3c96b0-0c68-11e4-9892-5254006e85c2&lt;/a&gt; . &lt;/p&gt;
</comment>
                            <comment id="90413" author="yong.fan" created="Wed, 30 Jul 2014 02:41:00 +0000"  >&lt;p&gt;Here is the patch:&lt;br/&gt;
&lt;a href=&quot;http://review.whamcloud.com/#/c/11275/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://review.whamcloud.com/#/c/11275/&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="90761" author="jamesanunez" created="Tue, 5 Aug 2014 00:48:06 +0000"  >&lt;p&gt;I tried patch 11275 and the test passes, but the output and the comments don&apos;t match. &lt;/p&gt;

&lt;p&gt;From the output of sanity-lfsck test 18c with this patch:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;Trigger layout LFSCK on all devices to find out orphan OST-object
Started LFSCK on the device scratch-MDT0000: scrub layout
There should be some stub under .lustre/lost+found/MDT0001/
ls: cannot access /lustre/scratch/.lustre/lost+found/MDT0001/*-N-0: No such file or directory
There should be some stub under .lustre/lost+found/MDT0000/
216172799310430210 -r-------- 1 root root 2097152 Aug  4 16:42 /lustre/scratch/.lustre/lost+found/MDT0000/[0x300000401:0x2:0x0]-N-0
216172799310430211 -r-------- 1 root root 2097152 Aug  4 16:42 /lustre/scratch/.lustre/lost+found/MDT0000/[0x300000401:0x3:0x0]-N-0
Resetting fail_loc on all nodes...done.
PASS 18c (7s)
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;So, the comment expects something to be in $mount/.lustre/lost+found/scratch-MDT0001, but there is no scratch-MDT0001 subdirectory under lost+found. Maybe with the change in this patch to using &quot;$LFS setstripe -c 1&quot;, we shouldn&apos;t expect anything there to be an MDT0001 subdirectory?&lt;/p&gt;

&lt;p&gt;I put some debug prints n the test and there is not scratch-MDT0001 subdirectory:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;Trigger layout LFSCK on all devices to find out orphan OST-object
Started LFSCK on the device scratch-MDT0000: scrub layout
ls -ail /lustre/scratch/.lustre/lost+found/
total 8
144115188109410307 dr-x------ 3 root root 4096 Aug  4 17:37 .
216172799310430209 drwx------ 3 root root 4096 Aug  4 17:38 MDT0000
ls -ail /lustre/scratch/.lustre/lost+found/MDT*
total 4104
216172799310430209 drwx------ 3 root root    4096 Aug  4 17:38 .
144115188109410307 dr-x------ 3 root root    4096 Aug  4 17:37 ..
216172799310430210 -r-------- 1 root root 2097152 Aug  4 17:38 [0x300000401:0x2:0x0]-N-0
216172799310430211 -r-------- 1 root root 2097152 Aug  4 17:38 [0x300000401:0x3:0x0]-N-0
There should be some stub under .lustre/lost+found/MDT0001/
ls: cannot access /lustre/scratch/.lustre/lost+found/MDT0001/*-N-0: No such file or directory
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;
</comment>
                            <comment id="90763" author="yong.fan" created="Tue, 5 Aug 2014 01:01:34 +0000"  >&lt;p&gt;It is the test scripts issue, the comment should be &quot;There should NOT be some stub under .lustre/lost+found/MDT0001/&quot;. I will update the patch.&lt;/p&gt;</comment>
                            <comment id="92362" author="yong.fan" created="Mon, 25 Aug 2014 23:13:03 +0000"  >&lt;p&gt;The patch has been landed to master.&lt;/p&gt;</comment>
                    </comments>
                <issuelinks>
                            <issuelinktype id="10011">
                    <name>Related</name>
                                                                <inwardlinks description="is related to">
                                        <issuelink>
            <issuekey id="25179">LU-5209</issuekey>
        </issuelink>
            <issuelink>
            <issuekey id="25182">LU-5210</issuekey>
        </issuelink>
            <issuelink>
            <issuekey id="25183">LU-5211</issuekey>
        </issuelink>
                            </inwardlinks>
                                    </issuelinktype>
                    </issuelinks>
                <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|hzwp6v:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>14536</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>