<?xml version="1.0" encoding="UTF-8"?>
<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 01:32:04 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92">
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-3228] fc18: sanity test_183: @@@@@@ FAIL: test_183 failed with 1 </title>
                <link>https://jira.whamcloud.com/browse/LU-3228</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;== sanity test 183: No crash or request leak in case of strange dispositions ========================= 20:23:20 (1366860200)&lt;br/&gt;
CMD: client-24vm3 /usr/sbin/lctl set_param fail_loc=0x148&lt;br/&gt;
fail_loc=0x148&lt;br/&gt;
ls: cannot open directory /mnt/lustre/d0.sanity/d183: No such file or directory&lt;br/&gt;
cat: /mnt/lustre/d0.sanity/d183/f.sanity.183: No such file or directory&lt;br/&gt;
CMD: client-24vm3 /usr/sbin/lctl set_param fail_loc=0&lt;br/&gt;
fail_loc=0&lt;br/&gt;
touch: cannot touch &#8216;/mnt/lustre/d0.sanity/d183/f.sanity.183&#8217;: No such file or directory&lt;br/&gt;
rm: cannot remove &#8216;/mnt/lustre/d0.sanity/d183&#8217;: Directory not empty&lt;br/&gt;
 sanity test_183: @@@@@@ FAIL: test_183 failed with 1 &lt;br/&gt;
  Trace dump:&lt;br/&gt;
  = /usr/lib64/lustre/tests/test-framework.sh:4024:error_noexit()&lt;br/&gt;
  = /usr/lib64/lustre/tests/test-framework.sh:4047:error()&lt;br/&gt;
  = /usr/lib64/lustre/tests/test-framework.sh:4301:run_one()&lt;br/&gt;
  = /usr/lib64/lustre/tests/test-framework.sh:4334:run_one_logged()&lt;br/&gt;
  = /usr/lib64/lustre/tests/test-framework.sh:4189:run_test()&lt;br/&gt;
  = /usr/lib64/lustre/tests/sanity.sh:9626:main()&lt;br/&gt;
Dumping lctl log to /logdir/test_logs/2013-04-24/lustre-reviews-el6-x86_64-vs-lustre-reviews-fc18-x86_64-&lt;del&gt;review&lt;/del&gt;-2_4_1_&lt;em&gt;15074&lt;/em&gt;_-70194495514420-185046/sanity.test_183.*.1366860201.log&lt;br/&gt;
CMD: client-24vm1,client-24vm2.lab.whamcloud.com,client-24vm3,client-24vm4 /usr/sbin/lctl dk &amp;gt; /logdir/test_logs/2013-04-24/lustre-reviews-el6-x86_64-vs-lustre-reviews-fc18-x86_64-&lt;del&gt;review&lt;/del&gt;-2_4_1_&lt;em&gt;15074&lt;/em&gt;_-70194495514420-185046/sanity.test_183.debug_log.\$(hostname -s).1366860201.log;&lt;br/&gt;
         dmesg &amp;gt; /logdir/test_logs/2013-04-24/lustre-reviews-el6-x86_64-vs-lustre-reviews-fc18-x86_64-&lt;del&gt;review&lt;/del&gt;-2_4_1_&lt;em&gt;15074&lt;/em&gt;_-70194495514420-185046/sanity.test_183.dmesg.\$(hostname -s).1366860201.log&lt;/p&gt;

&lt;p&gt;&lt;a href=&quot;https://maloo.whamcloud.com/test_sets/83b66856-ad8e-11e2-bbea-52540035b04c&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/83b66856-ad8e-11e2-bbea-52540035b04c&lt;/a&gt;&lt;/p&gt;</description>
                <environment></environment>
        <key id="18540">LU-3228</key>
            <summary>fc18: sanity test_183: @@@@@@ FAIL: test_183 failed with 1 </summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="1" iconUrl="https://jira.whamcloud.com/images/icons/priorities/blocker.svg">Blocker</priority>
                        <status id="5" iconUrl="https://jira.whamcloud.com/images/icons/statuses/resolved.png" description="A resolution has been taken, and it is awaiting verification by reporter. From here issues are either reopened, or are closed.">Resolved</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="1">Fixed</resolution>
                                        <assignee username="yujian">Jian Yu</assignee>
                                    <reporter username="mdiep">Minh Diep</reporter>
                        <labels>
                            <label>yuc2</label>
                    </labels>
                <created>Thu, 25 Apr 2013 16:28:02 +0000</created>
                <updated>Tue, 1 Jul 2014 01:05:56 +0000</updated>
                            <resolved>Sat, 25 Jan 2014 05:29:07 +0000</resolved>
                                    <version>Lustre 2.4.0</version>
                    <version>Lustre 2.4.1</version>
                                    <fixVersion>Lustre 2.4.0</fixVersion>
                                        <due></due>
                            <votes>0</votes>
                                    <watches>5</watches>
                                                                            <comments>
                            <comment id="57075" author="pjones" created="Thu, 25 Apr 2013 19:49:28 +0000"  >&lt;p&gt;Yu, Jian&lt;/p&gt;

&lt;p&gt;Could you please look into this one?&lt;/p&gt;

&lt;p&gt;Thanks&lt;/p&gt;

&lt;p&gt;Peter&lt;/p&gt;</comment>
                            <comment id="59063" author="yujian" created="Wed, 22 May 2013 15:01:43 +0000"  >&lt;p&gt;Lustre Branch: master (tag 2.4.50)&lt;br/&gt;
Lustre Build: &lt;a href=&quot;http://build.whamcloud.com/job/lustre-master/1502/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://build.whamcloud.com/job/lustre-master/1502/&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;The sanity test 183 was added for &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-2275&quot; title=&quot;Open request leak&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-2275&quot;&gt;&lt;del&gt;LU-2275&lt;/del&gt;&lt;/a&gt; in &lt;a href=&quot;http://review.whamcloud.com/4458&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://review.whamcloud.com/4458&lt;/a&gt;.&lt;/p&gt;

&lt;p&gt;It passed on RHEL6.4 client:&lt;br/&gt;
&lt;a href=&quot;https://maloo.whamcloud.com/test_sets/456be8de-c2db-11e2-b2eb-52540035b04c&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/456be8de-c2db-11e2-b2eb-52540035b04c&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;but failed on FC18 client:&lt;br/&gt;
&lt;a href=&quot;https://maloo.whamcloud.com/test_sets/a6a8e2d4-c2d4-11e2-b2eb-52540035b04c&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/a6a8e2d4-c2d4-11e2-b2eb-52540035b04c&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;After fail_loc was set to 0 on MDS, the touch operation failed on client node:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;fail_loc=0
touch: cannot touch &apos;/mnt/lustre/d0.sanity/d183/f.sanity.183&apos;: No such file or directory
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;Debug log on client node showed that:&lt;/p&gt;
&lt;div class=&quot;panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;panelContent&quot;&gt;
&lt;p&gt;00000001:00000001:0.0:1369222356.453337:0:8789:0:(debug.c:444:libcfs_debug_mark_buffer()) ***************************************************&lt;br/&gt;
00000001:02000400:0.0:1369222356.453339:0:8789:0:(debug.c:445:libcfs_debug_mark_buffer()) DEBUG MARKER: fail_loc=0&lt;br/&gt;
00000001:00000001:0.0:1369222356.453345:0:8789:0:(debug.c:446:libcfs_debug_mark_buffer()) ***************************************************&lt;br/&gt;
......&lt;br/&gt;
00000080:00000001:0.0:1369222356.699798:0:8803:0:(dcache.c:96:ll_dcompare()) Process entered&lt;br/&gt;
00000080:00002000:0.0:1369222356.699799:0:8803:0:(dcache.c:119:ll_dcompare()) found name f.sanity.183(ffff88007a14f540) flags 0xce refc 0&lt;br/&gt;
00000080:00000001:0.0:1369222356.699800:0:8803:0:(dcache.c:128:ll_dcompare()) Process leaving (rc=0 : 0 : 0)&lt;br/&gt;
00000080:00000001:0.0:1369222356.699801:0:8803:0:(dcache.c:669:ll_revalidate_nd()) Process entered&lt;br/&gt;
00000080:00200000:0.0:1369222356.699801:0:8803:0:(dcache.c:671:ll_revalidate_nd()) VFS Op:name=f.sanity.183,flags=769&lt;br/&gt;
&lt;font color=&quot;red&quot;&gt;00000080:00000001:0.0:1369222356.699802:0:8803:0:(dcache.c:684:ll_revalidate_nd()) Process leaving (rc=1 : 1 : 1)&lt;/font&gt;&lt;br/&gt;
00000080:00000001:0.0:1369222356.699803:0:8803:0:(file.c:2978:ll_inode_permission()) Process entered&lt;br/&gt;
00000080:00200000:0.0:1369222356.699803:0:8803:0:(file.c:3001:ll_inode_permission()) VFS Op:inode=144115205255725060/33554436(ffff88007b35c9f8), inode mode 41ed mask 3&lt;br/&gt;
00000080:00000001:0.0:1369222356.699804:0:8803:0:(file.c:3009:ll_inode_permission()) Process leaving (rc=0 : 0 : 0)&lt;br/&gt;
00000080:00000001:0.0:1369222356.699806:0:8803:0:(namei.c:622:ll_atomic_open()) Process entered&lt;br/&gt;
00000080:00200000:0.0:1369222356.699807:0:8803:0:(namei.c:627:ll_atomic_open()) VFS Op:name=f.sanity.183,dir=144115205255725060/33554436(ffff88007b35c9f8),file ffff88007adf5400,open_flags 8941,mode 81b6 opened 0&lt;br/&gt;
00000080:00000010:0.0:1369222356.699809:0:8803:0:(namei.c:629:ll_atomic_open()) kmalloced &apos;it&apos;: 72 at ffff88007a8bb420.&lt;br/&gt;
00000080:00000001:0.0:1369222356.699811:0:8803:0:(namei.c:496:ll_lookup_it()) Process entered&lt;br/&gt;
00000080:00200000:0.0:1369222356.699811:0:8803:0:(namei.c:503:ll_lookup_it()) VFS Op:name=f.sanity.183,dir=144115205255725060/33554436(ffff88007b35c9f8),intent=open|creat&lt;br/&gt;
......&lt;br/&gt;
&lt;font color=&quot;red&quot;&gt;00000080:00000001:0.0:1369222356.702127:0:8803:0:(namei.c:568:ll_lookup_it()) Process leaving via out (rc=0 : 0 : 0x0)&lt;/font&gt;&lt;br/&gt;
00000100:00000001:0.0:1369222356.702128:0:8803:0:(client.c:2271:__ptlrpc_req_finished()) Process entered&lt;br/&gt;
00000100:00000040:0.0:1369222356.702130:0:8803:0:(client.c:2283:__ptlrpc_req_finished()) @@@ refcount now 2  req@ffff88007b35b400 x1435733615182304/t4294967313(4294967313) o101-&amp;gt;lustre-MDT0000-mdc-ffff88007c342000@10.10.4.208@tcp:12/10 lens 568/600 e 0 to 0 dl 1369222363 ref 3 fl Complete:RP/4/0 rc 0/0&lt;br/&gt;
00000100:00000001:0.0:1369222356.702133:0:8803:0:(client.c:2290:__ptlrpc_req_finished()) Process leaving (rc=0 : 0 : 0)&lt;br/&gt;
00000080:00000001:0.0:1369222356.702135:0:8803:0:(dcache.c:289:ll_intent_release()) Process entered&lt;br/&gt;
......&lt;br/&gt;
00000080:00000001:0.0:1369222356.702158:0:8803:0:(dcache.c:304:ll_intent_release()) Process leaving&lt;br/&gt;
00000080:00000010:0.0:1369222356.702158:0:8803:0:(namei.c:688:ll_atomic_open()) kfreed &apos;it&apos;: 72 at ffff88007a8bb420.&lt;br/&gt;
&lt;font color=&quot;red&quot;&gt;00000080:00000001:0.0:1369222356.702168:0:8803:0:(namei.c:690:ll_atomic_open()) Process leaving (rc=1 : 1 : 1)&lt;/font&gt;&lt;/p&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;The ll_atomic_open() failed with 1, which was returned from finish_no_open(). I&apos;m still digging.&lt;/p&gt;</comment>
                            <comment id="63944" author="yujian" created="Fri, 9 Aug 2013 10:13:33 +0000"  >&lt;p&gt;Lustre Branch: b2_4&lt;br/&gt;
Lustre Build: &lt;a href=&quot;http://build.whamcloud.com/job/lustre-b2_4/27/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://build.whamcloud.com/job/lustre-b2_4/27/&lt;/a&gt;&lt;br/&gt;
Distro/Arch: RHEL6.4/x86_64 + FC18/x86_64 (Server + Client)&lt;/p&gt;

&lt;p&gt;The failure occurred regularly on Lustre b2_4 branch on FC18 client:&lt;br/&gt;
&lt;a href=&quot;https://maloo.whamcloud.com/test_sets/996b2278-fd79-11e2-9fdb-52540035b04c&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/996b2278-fd79-11e2-9fdb-52540035b04c&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="65699" author="yujian" created="Wed, 4 Sep 2013 06:36:35 +0000"  >&lt;p&gt;Lustre build: &lt;a href=&quot;http://build.whamcloud.com/job/lustre-b2_4/44/&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://build.whamcloud.com/job/lustre-b2_4/44/&lt;/a&gt; (2.4.1 RC1)&lt;br/&gt;
Distro/Arch: RHEL6.4/x86_64 + FC18/x86_64 (Server + Client)&lt;/p&gt;

&lt;p&gt;sanity test 183 hit the same failure:&lt;br/&gt;
&lt;a href=&quot;https://maloo.whamcloud.com/test_sets/0cbde1d0-14ee-11e3-ac48-52540035b04c&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://maloo.whamcloud.com/test_sets/0cbde1d0-14ee-11e3-ac48-52540035b04c&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="75609" author="green" created="Fri, 24 Jan 2014 22:48:01 +0000"  >&lt;p&gt;After failing like this, next unmount, that happens to be in test 223 crashes with reference count assertion.&lt;br/&gt;
This happens to be &lt;a href=&quot;https://jira.whamcloud.com/browse/LU-1480&quot; title=&quot;failure on replay-single test_74: ASSERTION( cfs_atomic_read(&amp;amp;d-&amp;gt;ld_ref) == 0 ) failed: Refcount is 1&quot; class=&quot;issue-link&quot; data-issue-key=&quot;LU-1480&quot;&gt;&lt;del&gt;LU-1480&lt;/del&gt;&lt;/a&gt;.&lt;/p&gt;

&lt;p&gt;This bug is also observed in mainline kernel.&lt;/p&gt;</comment>
                            <comment id="75624" author="green" created="Sat, 25 Jan 2014 02:20:32 +0000"  >&lt;p&gt;Apparently this bug was introduced by commit 784cd144103871bd421c139c09bfbf4d5d29ca08 coming from patch &lt;a href=&quot;http://review.whamcloud.com/4387&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://review.whamcloud.com/4387&lt;/a&gt; to support atomic_open.&lt;/p&gt;

&lt;p&gt;The problematic hunk is this:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;@@ -438,13 +443,21 @@ int ll_lookup_it_finish(struct ptlrpc_request *request,
                    Also see bug 7198. */
        }
 
-       *de = ll_splice_alias(inode, *de);
+       /* Only hash *de if it is unhashed (new dentry).
+        * Atoimc_open may passin hashed dentries for open.
+        */
+       if (d_unhashed(*de))
+               *de = ll_splice_alias(inode, *de);
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;When this hits (as in, dentry IS hashed), two things happen:&lt;br/&gt;
1. we leak inode (and don&apos;t assign it to dentry, obviously)&lt;br/&gt;
2. we return NULL from ll_lookup_it, so it uses passed in dentry, but it is negative, and leads to that no_open branch, where we pass in de, that&apos;s NULL (probably should pass in dentry at all times?).&lt;/p&gt;</comment>
                            <comment id="75628" author="green" created="Sat, 25 Jan 2014 04:59:02 +0000"  >&lt;p&gt;Apparently this was fixed by &lt;a href=&quot;http://review.whamcloud.com/8110&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://review.whamcloud.com/8110&lt;/a&gt;&lt;/p&gt;</comment>
                            <comment id="75629" author="green" created="Sat, 25 Jan 2014 05:29:07 +0000"  >&lt;p&gt;I verified that the patch in &lt;a href=&quot;http://review.whamcloud.com/8110&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://review.whamcloud.com/8110&lt;/a&gt; fixes the issue.&lt;/p&gt;</comment>
                    </comments>
                <issuelinks>
                            <issuelinktype id="10010">
                    <name>Duplicate</name>
                                            <outwardlinks description="duplicates">
                                        <issuelink>
            <issuekey id="19084">LU-3373</issuekey>
        </issuelink>
                            </outwardlinks>
                                                        </issuelinktype>
                            <issuelinktype id="10011">
                    <name>Related</name>
                                                                <inwardlinks description="is related to">
                                        <issuelink>
            <issuekey id="14742">LU-1480</issuekey>
        </issuelink>
                            </inwardlinks>
                                    </issuelinktype>
                    </issuelinks>
                <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|hzvp27:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>7889</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>