Merge "Try harder to avoid parser cache pollution"
diff --git a/includes/page/WikiPage.php b/includes/page/WikiPage.php
index f2f5781..0e23a88 100644
--- a/includes/page/WikiPage.php
+++ b/includes/page/WikiPage.php
@@ -24,6 +24,8 @@ use \MediaWiki\Logger\LoggerFactory;
 use \MediaWiki\MediaWikiServices;
 use Wikimedia\Rdbms\FakeResultWrapper;
 use Wikimedia\Rdbms\IDatabase;
+use Wikimedia\Rdbms\DBError;
+use Wikimedia\Rdbms\DBUnexpectedError;
 
 /**
  * Class representing a MediaWiki article and history.
@@ -576,36 +578,12 @@ class WikiPage implements Page, IDBAccessObject {
         * @return Revision|null
         */
        public function getOldestRevision() {
-
                // Try using the replica DB first, then try the master
-               $continue = 2;
-               $db = wfGetDB( DB_REPLICA );
-               $revSelectFields = Revision::selectFields();
-
-               $row = null;
-               while ( $continue ) {
-                       $row = $db->selectRow(
-                               [ 'revision' ],
-                               $revSelectFields,
-                               [
-                                       'rev_page' => $this->getId()
-                               ],
-                               __METHOD__,
-                               [
-                                       'ORDER BY' => 'rev_timestamp ASC',
-                                       'IGNORE INDEX' => 'rev_timestamp'
-                               ]
-                       );
-
-                       if ( $row ) {
-                               $continue = 0;
-                       } else {
-                               $db = wfGetDB( DB_MASTER );
-                               $continue--;
-                       }
+               $rev = $this->mTitle->getFirstRevision();
+               if ( !$rev ) {
+                       $rev = $this->mTitle->getFirstRevision( Title::GAID_FOR_UPDATE );
                }
-
-               return $row ? Revision::newFromRow( $row ) : null;
+               return $rev;
        }
 
        /**
@@ -1077,6 +1055,13 @@ class WikiPage implements Page, IDBAccessObject {
        ) {
                $useParserCache =
                        ( !$forceParse ) && $this->shouldCheckParserCache( $parserOptions, $oldid );
+
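+               // Bail out early rather than pollute the shared parser cache with output rendered from unsafe options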
+               if ( $useParserCache && !$parserOptions->isSafeToCache() ) {
+                       throw new InvalidArgumentException(
+                               'The supplied ParserOptions are not safe to cache. Fix the options or set $forceParse = true.'
+                       );
+               }
+
                wfDebug( __METHOD__ .
                        ': using parser cache: ' . ( $useParserCache ? 'yes' : 'no' ) . "\n" );
                if ( $parserOptions->getStubThreshold() ) {
@@ -1199,7 +1184,7 @@ class WikiPage implements Page, IDBAccessObject {
                );
 
                if ( $dbw->affectedRows() > 0 ) {
-                       $newid = $pageId ?: $dbw->insertId();
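+                       // Cast a caller-supplied page ID to int so mId is always an integer, matching insertId()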
+                       $newid = $pageId ? (int)$pageId : $dbw->insertId();
                        $this->mId = $newid;
                        $this->mTitle->resetArticleID( $newid );
 
@@ -2923,8 +2908,8 @@ class WikiPage implements Page, IDBAccessObject {
                $status->value = $logid;
 
                // Show log excerpt on 404 pages rather than just a link
-               $cache = ObjectCache::getMainStashInstance();
-               $key = wfMemcKey( 'page-recent-delete', md5( $logTitle->getPrefixedText() ) );
+               $cache = MediaWikiServices::getInstance()->getMainObjectStash();
+               $key = $cache->makeKey( 'page-recent-delete', md5( $logTitle->getPrefixedText() ) );
                $cache->set( $key, 1, $cache::TTL_DAY );
 
                return $status;
@@ -3274,6 +3259,9 @@ class WikiPage implements Page, IDBAccessObject {
 
                MediaWikiServices::getInstance()->getLinkCache()->invalidateTitle( $title );
 
+               // Invalidate caches of articles which include this page
+               DeferredUpdates::addUpdate( new HTMLCacheUpdate( $title, 'templatelinks' ) );
+
                if ( $title->getNamespace() == NS_CATEGORY ) {
                        // Load the Category object, which will schedule a job to create
                        // the category table row if necessary. Checking a replica DB is ok
@@ -3515,7 +3503,10 @@ class WikiPage implements Page, IDBAccessObject {
                        );
                        foreach ( $rows as $row ) {
                                $cat = Category::newFromRow( $row );
-                               $cat->refreshCounts();
+                               // T166757: do the update after this DB commit
+                               DeferredUpdates::addCallableUpdate( function () use ( $cat ) {
+                                       $cat->refreshCounts();
+                               } );
                        }
                }
        }