* @ingroup JobQueue
*/
use MediaWiki\MediaWikiServices;
+use Wikimedia\Rdbms\DBReplicationWaitError;
/**
* Job to update link tables for pages
class RefreshLinksJob extends Job {
/** @var float Cache parser output when it takes this long to render */
const PARSE_THRESHOLD_SEC = 1.0;

/** @var int Lag safety margin when comparing root job times to last-refresh times */
const CLOCK_FUDGE = 10;

/** @var int How many seconds to wait for replica DBs to catch up */
const LAG_WAIT_TIMEOUT = 15;
function __construct( Title $title, array $params ) {
// When the base job branches, wait for the replica DBs to catch up to the master.
// From then on, we know that any template changes at the time the base job was
// enqueued will be reflected in backlink page parses when the leaf jobs run.
- if ( !isset( $params['range'] ) ) {
+ if ( !isset( $this->params['range'] ) ) {
try {
$lbFactory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
$lbFactory->waitForReplication( [
JobQueueGroup::singleton()->push( $jobs );
// Job to update link tables for a set of titles
} elseif ( isset( $this->params['pages'] ) ) {
- foreach ( $this->params['pages'] as $pageId => $nsAndKey ) {
+ foreach ( $this->params['pages'] as $nsAndKey ) {
list( $ns, $dbKey ) = $nsAndKey;
$this->runForTitle( Title::makeTitleSafe( $ns, $dbKey ) );
}
if ( $page->getTouched() >= $this->params['rootJobTimestamp'] || $opportunistic ) {
// Cache is suspected to be up-to-date. As long as the cache rev ID matches
// and it reflects the job's triggering change, then it is usable.
- $parserOutput = ParserCache::singleton()->getDirty( $page, $parserOptions );
+ $parserOutput = $services->getParserCache()->getDirty( $page, $parserOptions );
if ( !$parserOutput
|| $parserOutput->getCacheRevisionId() != $revision->getId()
|| $parserOutput->getCacheTime() < $skewedTimestamp
&& $parserOutput->isCacheable()
) {
$ctime = wfTimestamp( TS_MW, (int)$start ); // cache time
- ParserCache::singleton()->save(
+ $services->getParserCache()->save(
$parserOutput, $page, $parserOptions, $ctime, $revision->getId()
);
}
// This avoids snapshot-clearing errors in LinksUpdate::acquirePageLock().
$lbFactory->commitAndWaitForReplication( __METHOD__, $ticket );
- foreach ( $updates as $key => $update ) {
+ foreach ( $updates as $update ) {
// FIXME: This code probably shouldn't be here?
// Needed by things like Echo notifications which need
// to know which user caused the links update
InfoAction::invalidateCache( $title );
+ // Commit any writes here in case this method is called in a loop.
+ // In that case, the scoped lock will fail to be acquired.
+ $lbFactory->commitAndWaitForReplication( __METHOD__, $ticket );
+
return true;
}
}
/**
 * Report how many units of real work this job represents.
 *
 * A recursive (base) job only partitions the backlink range and enqueues
 * leaf jobs — it refreshes no pages itself, so it counts as 0. A batched
 * job counts one item per entry in the 'pages' map. Otherwise the job
 * covers exactly the single title it was constructed with.
 *
 * @return int Number of pages this job will actually refresh
 */
public function workItemCount() {
	if ( !empty( $this->params['recursive'] ) ) {
		// Base job: spawns leaf jobs, nothing actually refreshed here
		return 0;
	} elseif ( isset( $this->params['pages'] ) ) {
		// One work item per page in the batch
		return count( $this->params['pages'] );
	}

	// Single-title job
	return 1;
}
}