* @ingroup Cache
*/
+use MediaWiki\MediaWikiServices;
+
/**
* Job to purge the cache for all pages that link to or use another page or file
*
// before the link jobs, so using the current timestamp instead of the root timestamp is
// not expected to invalidate these cache entries too often.
$touchTimestamp = wfTimestampNow();
+ // If page_touched is higher than this, then something else already bumped it after enqueue
+ $condTimestamp = isset( $this->params['rootJobTimestamp'] )
+ ? $this->params['rootJobTimestamp']
+ : $touchTimestamp;
$dbw = wfGetDB( DB_MASTER );
- $factory = wfGetLBFactory();
+ $factory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
$ticket = $factory->getEmptyTransactionTicket( __METHOD__ );
// Update page_touched (skipping pages already touched since the root job).
// Check $wgUpdateRowsPerQuery for sanity; batch jobs are sized by that already.
[ 'page_touched' => $dbw->timestamp( $touchTimestamp ) ],
[ 'page_id' => $batch,
// don't invalidate pages that were already invalidated
- "page_touched < " . $dbw->addQuotes( $dbw->timestamp( $touchTimestamp ) )
+ "page_touched < " . $dbw->addQuotes( $dbw->timestamp( $condTimestamp ) )
],
__METHOD__
);
__METHOD__
) );
- // Update CDN
- $u = CdnCacheUpdate::newFromTitles( $titleArray );
- $u->doUpdate();
+ // Update CDN; call purge() directly so as to not bother with secondary purges
+ $urls = [];
+ foreach ( $titleArray as $title ) {
+ /** @var Title $title */
+ $urls = array_merge( $urls, $title->getCdnUrls() );
+ }
+ CdnCacheUpdate::purge( $urls );
// Update file cache
if ( $wgUseFileCache ) {
}
public function workItemCount() {
+ // Number of de-duplicated work items this job represents for backoff/
+ // throttling accounting: recursive "root" jobs only enqueue leaf jobs and
+ // purge nothing themselves (0); leaf jobs count their page-ID batch;
+ // otherwise the job covers a single title (1).
- return isset( $this->params['pages'] ) ? count( $this->params['pages'] ) : 1;
+ if ( !empty( $this->params['recursive'] ) ) {
+ return 0; // nothing actually purged
+ } elseif ( isset( $this->params['pages'] ) ) {
+ return count( $this->params['pages'] );
+ }
+
+ return 1; // one title
}
}