<?xml version="1.0" encoding="UTF-8"?>
<!-- 
RSS generated by JIRA (9.4.14#940014-sha1:734e6822bbf0d45eff9af51f82432957f73aa32c) at Sat Feb 10 02:53:22 UTC 2024

It is possible to restrict the fields that are returned in this document by specifying the 'field' parameter in your request.
For example, to request only the issue key and summary append 'field=key&field=summary' to the URL of your request.
-->
<rss version="0.92" >
<channel>
    <title>Whamcloud Community JIRA</title>
    <link>https://jira.whamcloud.com</link>
    <description>This file is an XML representation of an issue</description>
    <language>en-us</language>    <build-info>
        <version>9.4.14</version>
        <build-number>940014</build-number>
        <build-date>05-12-2023</build-date>
    </build-info>


<item>
            <title>[LU-12525] sanity-flr test 200 and others assertion in osc_page_delete</title>
                <link>https://jira.whamcloud.com/browse/LU-12525</link>
                <project id="10000" key="LU">Lustre</project>
                    <description>&lt;p&gt;There&apos;s a flurry of failures. In maloo it mostly fails in sanity-flr test 200, but I also saw it in racer at least once with a partially similar backtrace.&lt;/p&gt;

&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;Lustre: DEBUG MARKER: mkdir -p /mnt/lustre2
Lustre: DEBUG MARKER: mount -t lustre -o user_xattr,flock trevis-49vm6@tcp:/lustre /mnt/lustre2
Lustre: Mounted lustre-client
Lustre: DEBUG MARKER: mkdir -p /mnt/lustre3
Lustre: DEBUG MARKER: mount -t lustre -o user_xattr,flock trevis-49vm6@tcp:/lustre /mnt/lustre3
LustreError: 16418:0:(osc_cache.c:2553:osc_teardown_async_page()) extent ffff89076293af20@{[768 -&amp;gt; 1023/1023], [2|0|-|cache|wi|ffff890756aca8c0], [1703936|123|+|-|ffff890762821d40|256| (null)]} trunc at 848.
LustreError: 16418:0:(osc_cache.c:2553:osc_teardown_async_page()) ### extent: ffff89076293af20 ns: lustre-OST0001-osc-ffff890767ec5800 lock: ffff890762821d40/0x10751cafee0649b3 lrc: 2/0,0 mode: PW/PW res: [0x175d:0x0:0x0].0x0 rrc: 2 type: EXT [0-&amp;gt;18446744073709551615] (req 3473408-&amp;gt;3977215) flags: 0x20000020000 nid: local remote: 0x70d1a9e0ebb537b3 expref: -99 pid: 16413 timeout: 0 lvb_type: 1
LustreError: 16418:0:(osc_page.c:192:osc_page_delete()) page@ffff89075dd84a00[2 ffff890736741ab0 4 1 (null)]
LustreError: 16418:0:(osc_page.c:192:osc_page_delete()) vvp-page@ffff89075dd84a50(0:0) vm@ffffe55201ca5c80 1fffff0000083d 3:0 ffff89075dd84a00 1872 lru
LustreError: 16418:0:(osc_page.c:192:osc_page_delete()) lov-page@ffff89075dd84a90, comp index: 70001, gen: 8
LustreError: 16418:0:(osc_page.c:192:osc_page_delete()) osc-page@ffff89075dd84ac8 848: 1&amp;lt; 0x845fed 258 0 + - &amp;gt; 2&amp;lt; 3473408 0 4096 0x0 0x520 | (null) ffff89076b15c6c0 ffff890756aca8c0 &amp;gt; 3&amp;lt; 0 0 0 &amp;gt; 4&amp;lt; 0 0 8 120979456 - | - - + - &amp;gt; 5&amp;lt; - - + - | 0 - | 123 - -&amp;gt;
LustreError: 16418:0:(osc_page.c:192:osc_page_delete()) end page@ffff89075dd84a00
LustreError: 16418:0:(osc_page.c:192:osc_page_delete()) Trying to teardown failed: -16
LustreError: 16418:0:(osc_page.c:193:osc_page_delete()) ASSERTION( 0 ) failed:
LustreError: 16418:0:(osc_page.c:193:osc_page_delete()) LBUG
Pid: 16418, comm: cat 3.10.0-957.12.2.el7.x86_64 #1 SMP Tue May 14 21:24:32 UTC 2019
Call Trace:
[&amp;lt;ffffffffc05b78bc&amp;gt;] libcfs_call_trace+0x8c/0xc0 [libcfs]
[&amp;lt;ffffffffc05b796c&amp;gt;] lbug_with_loc+0x4c/0xa0 [libcfs]
[&amp;lt;ffffffffc0b8086f&amp;gt;] osc_page_delete+0x48f/0x500 [osc]
[&amp;lt;ffffffffc073cdf0&amp;gt;] cl_page_delete0+0x80/0x220 [obdclass]
[&amp;lt;ffffffffc073cfc3&amp;gt;] cl_page_delete+0x33/0x110 [obdclass]
[&amp;lt;ffffffffc0c8c98f&amp;gt;] ll_invalidatepage+0x7f/0x170 [lustre]
[&amp;lt;ffffffff8ffc6c8d&amp;gt;] do_invalidatepage_range+0x7d/0x90
[&amp;lt;ffffffff8ffc6d37&amp;gt;] truncate_inode_page+0x77/0x80
[&amp;lt;ffffffff8ffc6f6a&amp;gt;] truncate_inode_pages_range+0x1ea/0x700
[&amp;lt;ffffffff8ffc7495&amp;gt;] truncate_inode_pages+0x15/0x20
[&amp;lt;ffffffffc0ca06db&amp;gt;] vvp_prune+0x7b/0x2b0 [lustre]
[&amp;lt;ffffffffc073a638&amp;gt;] cl_object_prune+0x58/0x110 [obdclass]
[&amp;lt;ffffffffc0beecc9&amp;gt;] lov_conf_set+0x509/0xac0 [lov]
[&amp;lt;ffffffffc073a753&amp;gt;] cl_conf_set+0x63/0x120 [obdclass]
[&amp;lt;ffffffffc0c598f1&amp;gt;] ll_layout_conf+0x81/0x3f0 [lustre]
[&amp;lt;ffffffffc0c5a1ad&amp;gt;] ll_layout_refresh+0x54d/0x900 [lustre]
[&amp;lt;ffffffffc0c9fc97&amp;gt;] vvp_io_init+0x347/0x460 [lustre]
[&amp;lt;ffffffffc07426ab&amp;gt;] cl_io_init0.isra.15+0x8b/0x160 [obdclass]
[&amp;lt;ffffffffc0742843&amp;gt;] cl_io_init+0x43/0x80 [obdclass]
[&amp;lt;ffffffffc07428f7&amp;gt;] cl_io_rw_init+0x77/0x210 [obdclass]
[&amp;lt;ffffffffc0c502be&amp;gt;] ll_file_io_generic+0x17e/0xaf0 [lustre]
[&amp;lt;ffffffffc0c51a52&amp;gt;] ll_file_aio_read+0x3a2/0x470 [lustre]
[&amp;lt;ffffffffc0c51c20&amp;gt;] ll_file_read+0x100/0x1c0 [lustre]
[&amp;lt;ffffffff9004167f&amp;gt;] vfs_read+0x9f/0x170
[&amp;lt;ffffffff9004253f&amp;gt;] SyS_read+0x7f/0xf0
[&amp;lt;ffffffff90575ddb&amp;gt;] system_call_fastpath+0x22/0x27
[&amp;lt;ffffffffffffffff&amp;gt;] 0xffffffffffffffff
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;

&lt;p&gt;example maloo report: &lt;a href=&quot;https://testing.whamcloud.com/test_sets/4bc4f606-a1e3-11e9-8dbe-52540065bddc&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.whamcloud.com/test_sets/4bc4f606-a1e3-11e9-8dbe-52540065bddc&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;racer:&lt;/p&gt;
&lt;div class=&quot;preformatted panel&quot; style=&quot;border-width: 1px;&quot;&gt;&lt;div class=&quot;preformattedContent panelContent&quot;&gt;
&lt;pre&gt;LustreError: 12527:0:(osc_cache.c:2553:osc_teardown_async_page()) extent ffff8800a297e230@{[672 -&amp;gt; 1023/1023], [2|0|-|cache|wi|ffff8800a0bcee60], [1466368|352|+|-|ffff8800b81c0d80|1024| (null)]} trunc at 672.
LustreError: 12527:0:(osc_cache.c:2553:osc_teardown_async_page()) ### extent: ffff8800a297e230 ns: lustre-OST0001-osc-ffff88010a173800 lock: ffff8800b81c0d80/0xfcc9cb8b08e86ca9 lrc: 2/0,0 mode: PW/PW res: [0x1e9:0x0:0x0].0x0 rrc: 2 type: EXT [0-&amp;gt;18446744073709551615] (req 2752512-&amp;gt;18446744073709551615) flags: 0x20000000000 nid: local remote: 0x4b2ddd8a9f2ac3e expref: -99 pid: 12139 timeout: 0 lvb_type: 1
LustreError: 12527:0:(osc_page.c:192:osc_page_delete()) page@ffff8800a3ce2e00[2 ffff8800a231ac58 4 1 (null)]
LustreError: 12527:0:(osc_page.c:192:osc_page_delete()) vvp-page@ffff8800a3ce2e50(0:0) vm@ffffea00028cfb48 1fffff0000083d 3:0 ffff8800a3ce2e00 1696 lru
LustreError: 12527:0:(osc_page.c:192:osc_page_delete()) lov-page@ffff8800a3ce2e90, comp index: 10001, gen: 5
LustreError: 12527:0:(osc_page.c:192:osc_page_delete()) osc-page@ffff8800a3ce2ec8 672: 1&amp;lt; 0x845fed 258 0 + - &amp;gt; 2&amp;lt; 2752512 0 4096 0x0 0x520 | (null) ffff8800c8118798 ffff8800a0bcee60 &amp;gt; 3&amp;lt; 0 0 0 &amp;gt; 4&amp;lt; 0 0 8 424652800 - | - - + - &amp;gt; 5&amp;lt; - - + - | 0 - | 352 - -&amp;gt;
LustreError: 12527:0:(osc_page.c:192:osc_page_delete()) end page@ffff8800a3ce2e00
LustreError: 12527:0:(osc_page.c:192:osc_page_delete()) Trying to teardown failed: -16
LustreError: 12527:0:(osc_page.c:193:osc_page_delete()) ASSERTION( 0 ) failed:
LustreError: 12527:0:(osc_page.c:193:osc_page_delete()) LBUG
Pid: 12527, comm: touch 3.10.0-7.6-debug #5 SMP Tue Jun 11 21:26:20 EDT 2019
Call Trace:
[&amp;lt;ffffffffa017e8cc&amp;gt;] libcfs_call_trace+0x8c/0xc0 [libcfs]
[&amp;lt;ffffffffa017e97c&amp;gt;] lbug_with_loc+0x4c/0xa0 [libcfs]
[&amp;lt;ffffffffa08133d9&amp;gt;] osc_page_delete+0x489/0x4f0 [osc]
[&amp;lt;ffffffffa032dcad&amp;gt;] cl_page_delete0+0x7d/0x210 [obdclass]
[&amp;lt;ffffffffa032de73&amp;gt;] cl_page_delete+0x33/0x110 [obdclass]
[&amp;lt;ffffffffa0d6551f&amp;gt;] ll_invalidatepage+0x7f/0x170 [lustre]
[&amp;lt;ffffffff811bc961&amp;gt;] do_invalidatepage_range+0x71/0x80
[&amp;lt;ffffffff811bca07&amp;gt;] truncate_inode_page+0x77/0x80
[&amp;lt;ffffffff811bcc42&amp;gt;] truncate_inode_pages_range+0x1f2/0x770
[&amp;lt;ffffffff811bd1d5&amp;gt;] truncate_inode_pages+0x15/0x20
[&amp;lt;ffffffffa0d78abb&amp;gt;] vvp_prune+0x7b/0x2b0 [lustre]
[&amp;lt;ffffffffa032b595&amp;gt;] cl_object_prune+0x55/0x110 [obdclass]
[&amp;lt;ffffffffa08d5121&amp;gt;] lov_conf_set+0x501/0xa90 [lov]
[&amp;lt;ffffffffa032b6b0&amp;gt;] cl_conf_set+0x60/0x120 [obdclass]
[&amp;lt;ffffffffa0d6f0cb&amp;gt;] cl_file_inode_init+0x12b/0x390 [lustre]
[&amp;lt;ffffffffa0d44f35&amp;gt;] ll_update_inode+0x2e5/0x610 [lustre]
[&amp;lt;ffffffffa0d56363&amp;gt;] ll_iget+0x253/0x350 [lustre]
[&amp;lt;ffffffffa0d4955d&amp;gt;] ll_prep_inode+0x20d/0x9a0 [lustre]
[&amp;lt;ffffffffa0d5849b&amp;gt;] ll_lookup_it+0xc0b/0x1e90 [lustre]
[&amp;lt;ffffffffa0d5ad25&amp;gt;] ll_lookup_nd+0x85/0x190 [lustre]
[&amp;lt;ffffffff8124147d&amp;gt;] lookup_real+0x1d/0x50
[&amp;lt;ffffffff81241f62&amp;gt;] __lookup_hash+0x42/0x60
[&amp;lt;ffffffff817b31f4&amp;gt;] lookup_slow+0x42/0xa7
[&amp;lt;ffffffff8124677f&amp;gt;] link_path_walk+0x81f/0x8c0
[&amp;lt;ffffffff812478fe&amp;gt;] path_openat+0xae/0x650
[&amp;lt;ffffffff812492cd&amp;gt;] do_filp_open+0x4d/0xb0
[&amp;lt;ffffffff81235167&amp;gt;] do_sys_open+0x137/0x240
[&amp;lt;ffffffff8123528e&amp;gt;] SyS_open+0x1e/0x20
[&amp;lt;ffffffff817cae15&amp;gt;] system_call_fastpath+0x1c/0x21
[&amp;lt;ffffffffffffffff&amp;gt;] 0xffffffffffffffff
&lt;/pre&gt;
&lt;/div&gt;&lt;/div&gt;
&lt;p&gt;sample report: &lt;a href=&quot;http://testing.linuxhacker.ru:3333/lustre-reports/1051/testresults/racer-ldiskfs-DNE-centos7_x86_64-centos7_x86_64&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://testing.linuxhacker.ru:3333/lustre-reports/1051/testresults/racer-ldiskfs-DNE-centos7_x86_64-centos7_x86_64&lt;/a&gt; and in maloo: &lt;a href=&quot;https://testing.whamcloud.com/test_sets/58251f94-a11a-11e9-8dbe-52540065bddc&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://testing.whamcloud.com/test_sets/58251f94-a11a-11e9-8dbe-52540065bddc&lt;/a&gt;&lt;/p&gt;

&lt;p&gt;This seems to be a very recent crash scenario too, appearing just two days ago.&lt;/p&gt;

&lt;p&gt;Coincidentally we just landed this patch in that timeframe: &lt;a href=&quot;https://review.whamcloud.com/35111&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;https://review.whamcloud.com/35111&lt;/a&gt;&lt;br/&gt;
So I guess it&apos;s the prime suspect now.&lt;/p&gt;</description>
                <environment></environment>
        <key id="56296">LU-12525</key>
            <summary>sanity-flr test 200 and others assertion in osc_page_delete</summary>
                <type id="1" iconUrl="https://jira.whamcloud.com/secure/viewavatar?size=xsmall&amp;avatarId=11303&amp;avatarType=issuetype">Bug</type>
                                            <priority id="2" iconUrl="https://jira.whamcloud.com/images/icons/priorities/critical.svg">Critical</priority>
                        <status id="5" iconUrl="https://jira.whamcloud.com/images/icons/statuses/resolved.png" description="A resolution has been taken, and it is awaiting verification by reporter. From here issues are either reopened, or are closed.">Resolved</status>
                    <statusCategory id="3" key="done" colorName="success"/>
                                    <resolution id="5">Cannot Reproduce</resolution>
                                        <assignee username="wc-triage">WC Triage</assignee>
                                    <reporter username="green">Oleg Drokin</reporter>
                        <labels>
                    </labels>
                <created>Tue, 9 Jul 2019 01:31:15 +0000</created>
                <updated>Fri, 30 Aug 2019 23:54:02 +0000</updated>
                            <resolved>Fri, 30 Aug 2019 23:54:02 +0000</resolved>
                                    <version>Lustre 2.13.0</version>
                                                        <due></due>
                            <votes>1</votes>
                                    <watches>6</watches>
                                                                            <comments>
                            <comment id="250873" author="green" created="Tue, 9 Jul 2019 01:37:09 +0000"  >&lt;p&gt;in fact as a further proof - this was first seen in reviews for that patch: &lt;a href=&quot;http://testing.linuxhacker.ru:3333/lustre-reports/522/results.html&quot; class=&quot;external-link&quot; target=&quot;_blank&quot; rel=&quot;nofollow noopener&quot;&gt;http://testing.linuxhacker.ru:3333/lustre-reports/522/results.html&lt;/a&gt; (racer-zfs crash)&lt;/p&gt;</comment>
                            <comment id="250894" author="bzzz" created="Tue, 9 Jul 2019 13:12:05 +0000"  >&lt;p&gt;hitting this quite often with ldiskfs&lt;/p&gt;</comment>
                            <comment id="250912" author="green" created="Tue, 9 Jul 2019 17:26:59 +0000"  >&lt;p&gt;I am going to revert the patch causing this, not sure if Jinshan has any time to look into this, so Bobi, please take a look here as well, since that patch is also important and we do want to fix that use case, just without the crashing hopefully.&lt;/p&gt;</comment>
                            <comment id="250955" author="pfarrell" created="Wed, 10 Jul 2019 15:55:09 +0000"  >&lt;p&gt;I looked at this yesterday and couldn&apos;t figure it out, but I noticed that the crash is always because there are still busy pages when we do the purge step of conf_set.&#160; Looking in the logs, it seems like the sync that is supposed to happen before the invalidate call is not happening.&#160; (talking about vvp_prune here).&#160; There is a cl_sync_file_range, and looking in the debug logs for Oleg&apos;s instance, I cannot see it happening.&lt;/p&gt;

&lt;p&gt;But as to why - I&apos;m getting lost in FLR details.&#160; This does seem likely to be exposing an existing bug rather than strictly introducing one.&lt;/p&gt;</comment>
                            <comment id="253965" author="adilger" created="Fri, 30 Aug 2019 23:54:02 +0000"  >&lt;p&gt;Closing this since the offending patch was reverted.&lt;/p&gt;</comment>
                    </comments>
                <issuelinks>
                            <issuelinktype id="10011">
                    <name>Related</name>
                                                                <inwardlinks description="is related to">
                                        <issuelink>
            <issuekey id="55719">LU-12328</issuekey>
        </issuelink>
                            </inwardlinks>
                                    </issuelinktype>
                    </issuelinks>
                <attachments>
                    </attachments>
                <subtasks>
                    </subtasks>
                <customfields>
                                                                                                                                                                                            <customfield id="customfield_10890" key="com.atlassian.jira.plugins.jira-development-integration-plugin:devsummary">
                        <customfieldname>Development</customfieldname>
                        <customfieldvalues>
                            
                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        <customfield id="customfield_10390" key="com.pyxis.greenhopper.jira:gh-lexo-rank">
                        <customfieldname>Rank</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>1|i00jd3:</customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                <customfield id="customfield_10090" key="com.pyxis.greenhopper.jira:gh-global-rank">
                        <customfieldname>Rank (Obsolete)</customfieldname>
                        <customfieldvalues>
                            <customfieldvalue>9223372036854775807</customfieldvalue>
                        </customfieldvalues>
                    </customfield>
                                                                                            <customfield id="customfield_10060" key="com.atlassian.jira.plugin.system.customfieldtypes:select">
                        <customfieldname>Severity</customfieldname>
                        <customfieldvalues>
                                <customfieldvalue key="10022"><![CDATA[3]]></customfieldvalue>

                        </customfieldvalues>
                    </customfield>
                                                                                                                                                                                                                                                                                                                                                        </customfields>
    </item>
</channel>
</rss>