Fix old regression in HTMLCacheUpdate de-duplication
diff --git a/includes/jobqueue/jobs/HTMLCacheUpdateJob.php b/includes/jobqueue/jobs/HTMLCacheUpdateJob.php
index f09ba57..4c16d7f 100644
@@ -22,6 +22,8 @@
  * @ingroup Cache
  */
 
+use MediaWiki\MediaWikiServices;
+
 /**
  * Job to purge the cache for all pages that link to or use another page or file
  *
@@ -111,9 +113,13 @@ class HTMLCacheUpdateJob extends Job {
                // before the link jobs, so using the current timestamp instead of the root timestamp is
                // not expected to invalidate these cache entries too often.
                $touchTimestamp = wfTimestampNow();
+               // If page_touched is higher than this, then something else already bumped it after enqueue
+               $condTimestamp = isset( $this->params['rootJobTimestamp'] )
+                       ? $this->params['rootJobTimestamp']
+                       : $touchTimestamp;
 
                $dbw = wfGetDB( DB_MASTER );
-               $factory = wfGetLBFactory();
+               $factory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
                $ticket = $factory->getEmptyTransactionTicket( __METHOD__ );
                // Update page_touched (skipping pages already touched since the root job).
                // Check $wgUpdateRowsPerQuery for sanity; batch jobs are sized by that already.
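The hunk above also swaps the deprecated wfGetLBFactory() wrapper for the MediaWikiServices service locator; the factory and its empty transaction ticket are what let the job write page_touched in replication-friendly batches. A rough sketch of that pattern follows, assuming a hypothetical $pageIds array; the commitAndWaitForReplication() call is not visible in this hunk but is the usual companion of such a ticket.

    // Sketch only: batching writes against a transaction ticket (hypothetical data).
    $factory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
    $ticket = $factory->getEmptyTransactionTicket( __METHOD__ );
    foreach ( array_chunk( $pageIds, $wgUpdateRowsPerQuery ) as $batch ) {
        // ...one bounded UPDATE per batch, as in the next hunk...
        // then wait for replicas so a large purge does not build up lag
        $factory->commitAndWaitForReplication( __METHOD__, $ticket );
    }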
@@ -124,7 +130,7 @@ class HTMLCacheUpdateJob extends Job {
                                [ 'page_touched' => $dbw->timestamp( $touchTimestamp ) ],
                                [ 'page_id' => $batch,
                                       // don't invalidate pages that were already invalidated
-                                       "page_touched < " . $dbw->addQuotes( $dbw->timestamp( $touchTimestamp ) )
+                                       "page_touched < " . $dbw->addQuotes( $dbw->timestamp( $condTimestamp ) )
                                ],
                                __METHOD__
                        );
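The condition change above is the crux of the de-duplication fix: the WHERE clause now compares page_touched against the root job's enqueue time rather than the current time, so pages that were already invalidated after the purge was enqueued are skipped. A minimal illustration with made-up timestamps (none of these values come from the patch):

    // Hedged illustration only; timestamps and variable names are invented.
    $rootJobTimestamp = '20160301120000'; // when the root purge job was enqueued
    $pageTouched      = '20160301123000'; // page already re-touched by a later edit
    $touchTimestamp   = '20160301124500'; // "now", the value the job would write

    // Old behaviour: "page_touched < now" is almost always true, so the row gets
    // touched again. New behaviour: "page_touched < rootJobTimestamp" is false
    // here, so the duplicate invalidation is skipped.
    $needsTouch = ( $pageTouched < $rootJobTimestamp ); // false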
@@ -137,9 +143,13 @@ class HTMLCacheUpdateJob extends Job {
                        __METHOD__
                ) );
 
-               // Update CDN
-               $u = CdnCacheUpdate::newFromTitles( $titleArray );
-               $u->doUpdate();
+               // Update CDN; call purge() directly so as to not bother with secondary purges
+               $urls = [];
+               foreach ( $titleArray as $title ) {
+                       /** @var Title $title */
+                       $urls = array_merge( $urls, $title->getCdnUrls() );
+               }
+               CdnCacheUpdate::purge( $urls );
 
                // Update file cache
                if ( $wgUseFileCache ) {
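For a single page, the direct-purge path added above boils down to the sketch below; the title text is invented. Calling the static CdnCacheUpdate::purge() sends only the primary CDN purges, whereas the old newFromTitles()/doUpdate() route would also arrange the secondary (rebound) purges that this job does not need.

    // Sketch only: the equivalent purge for one hypothetical title.
    $title = Title::newFromText( 'Example_page' );
    if ( $title ) {
        // getCdnUrls() yields the URLs a CDN caches for the page (view URL and
        // related variants); purge() sends them to the configured CDN endpoints.
        CdnCacheUpdate::purge( $title->getCdnUrls() );
    }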
@@ -150,6 +160,12 @@ class HTMLCacheUpdateJob extends Job {
        }
 
        public function workItemCount() {
-               return isset( $this->params['pages'] ) ? count( $this->params['pages'] ) : 1;
+               if ( !empty( $this->params['recursive'] ) ) {
+                       return 0; // nothing actually purged
+               } elseif ( isset( $this->params['pages'] ) ) {
+                       return count( $this->params['pages'] );
+               }
+
+               return 1; // one title
        }
 }
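Finally, workItemCount() now reflects what the job actually purges: a recursive root job only fans out into per-page leaf jobs, so it reports zero work items, while leaf jobs report one item per page. An illustrative sketch of the three parameter shapes and the counts they yield (values are invented; the 'pages' map follows the job's page_id => [ namespace, dbkey ] convention):

    // Illustrative params only, not taken from real jobs.
    $rootParams  = [ 'recursive' => true, 'table' => 'templatelinks' ]; // counts as 0
    $leafParams  = [ 'pages' => [ 101 => [ 0, 'Foo' ], 102 => [ 1, 'Talk_Foo' ] ] ]; // counts as 2
    $plainParams = []; // single-title job, counts as 1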