3 * Job to update link tables for pages
5 * This program is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License as published by
7 * the Free Software Foundation; either version 2 of the License, or
8 * (at your option) any later version.
10 * This program is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 * GNU General Public License for more details.
15 * You should have received a copy of the GNU General Public License along
16 * with this program; if not, write to the Free Software Foundation, Inc.,
17 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
18 * http://www.gnu.org/copyleft/gpl.html
23 use MediaWiki\MediaWikiServices
;
26 * Job to update link tables for pages
28 * This job comes in a few variants:
29 * - a) Recursive jobs to update links for backlink pages for a given title.
30 * These jobs have (recursive:true,table:<table>) set.
31 * - b) Jobs to update links for a set of pages (the job title is ignored).
32 * These jobs have (pages:(<page ID>:(<namespace>,<title>),...) set.
33 * - c) Jobs to update links for a single page (the job title)
34 * These jobs need no extra fields set.
class RefreshLinksJob extends Job {
	/** @var float Cache parser output when it takes this long to render */
	const PARSE_THRESHOLD_SEC = 1.0;

	/** @var int Lag safety margin when comparing root job times to last-refresh times */
	const CLOCK_FUDGE = 10;

	/** @var int How many seconds to wait for replica DBs to catch up */
	const LAG_WAIT_TIMEOUT = 15;
/**
 * @param Title $title Page the job is nominally about (ignored for 'pages' jobs)
 * @param array $params Job parameters; see the class documentation for the variants
 */
function __construct( Title $title, array $params ) {
	parent::__construct( 'refreshLinks', $title, $params );
	// Avoid the overhead of de-duplication when it would be pointless
	$this->removeDuplicates = (
		// Ranges rarely will line up
		!isset( $params['range'] ) &&
		// Multiple pages per job make matches unlikely
		!( isset( $params['pages'] ) && count( $params['pages'] ) != 1 )
	);
	// Default the cause fields so downstream logging can rely on their presence
	$this->params += [ 'causeAction' => 'unknown', 'causeAgent' => 'unknown' ];
}
/**
 * Make a job that goes to the higher-priority "refreshLinksPrioritized" queue.
 *
 * @param Title $title
 * @param array $params
 * @return RefreshLinksJob
 */
public static function newPrioritized( Title $title, array $params ) {
	$job = new self( $title, $params );
	// Route to the prioritized queue so it runs ahead of plain refreshLinks jobs
	$job->command = 'refreshLinksPrioritized';

	return $job;
}
/**
 * Make a job that goes to the low-priority "refreshLinksDynamic" queue.
 *
 * @param Title $title
 * @param array $params
 * @return RefreshLinksJob
 */
public static function newDynamic( Title $title, array $params ) {
	$job = new self( $title, $params );
	// Route to the dynamic queue, used for less urgent link refreshes
	$job->command = 'refreshLinksDynamic';

	return $job;
}
/**
 * Dispatch to the right variant: recursive partitioning, a 'pages' batch,
 * or a single-title update (see the class documentation).
 *
 * @return bool Success
 */
function run() {
	global $wgUpdateRowsPerJob;

	// Job to update all (or a range of) backlink pages for a page
	if ( !empty( $this->params['recursive'] ) ) {
		// When the base job branches, wait for the replica DBs to catch up to the master.
		// From then on, we know that any template changes at the time the base job was
		// enqueued will be reflected in backlink page parses when the leaf jobs run.
		if ( !isset( $this->params['range'] ) ) {
			$lbFactory = MediaWikiServices::getInstance()->getDBLoadBalancerFactory();
			if ( !$lbFactory->waitForReplication( [
				'timeout' => self::LAG_WAIT_TIMEOUT
			] ) ) { // only try so hard
				// NOTE(review): upstream proceeds after logging the failed wait rather
				// than aborting; leaf jobs may then parse slightly stale templates
				$stats = MediaWikiServices::getInstance()->getStatsdDataFactory();
				$stats->increment( 'refreshlinks.lag_wait_failed' );
			}
		}
		// Carry over information for de-duplication
		$extraParams = $this->getRootJobParams();
		$extraParams['triggeredRecursive'] = true;
		// Carry over cause information for logging
		$extraParams['causeAction'] = $this->params['causeAction'];
		$extraParams['causeAgent'] = $this->params['causeAgent'];
		// Convert this into no more than $wgUpdateRowsPerJob RefreshLinks per-title
		// jobs and possibly a recursive RefreshLinks job for the rest of the backlinks
		$jobs = BacklinkJobUtils::partitionBacklinkJob(
			$this,
			$wgUpdateRowsPerJob,
			1, // job-per-title
			[ 'params' => $extraParams ]
		);
		JobQueueGroup::singleton()->push( $jobs );
	// Job to update link tables for a set of titles
	} elseif ( isset( $this->params['pages'] ) ) {
		foreach ( $this->params['pages'] as $nsAndKey ) {
			list( $ns, $dbKey ) = $nsAndKey;
			$this->runForTitle( Title::makeTitleSafe( $ns, $dbKey ) );
		}
	// Job to update link tables for a given title
	} else {
		$this->runForTitle( $this->title );
	}

	return true;
}
/**
 * Update the link tables of one page to match its current (or specified
 * triggering) revision's parser output.
 *
 * @param Title $title
 * @return bool Success
 */
protected function runForTitle( Title $title ) {
	$services = MediaWikiServices::getInstance();
	$stats = $services->getStatsdDataFactory();
	$lbFactory = $services->getDBLoadBalancerFactory();
	$ticket = $lbFactory->getEmptyTransactionTicket( __METHOD__ );

	$page = WikiPage::factory( $title );
	$page->loadPageData( WikiPage::READ_LATEST );

	// Serialize links updates by page ID so they see each others' changes
	$dbw = $lbFactory->getMainLB()->getConnection( DB_MASTER );
	/** @noinspection PhpUnusedLocalVariableInspection */
	$scopedLock = LinksUpdate::acquirePageLock( $dbw, $page->getId(), 'job' );
	if ( $scopedLock === null ) {
		// Another job is already updating the page, likely for an older revision (T170596).
		$this->setLastError( 'LinksUpdate already running for this page, try again later.' );

		return false;
	}
	// Get the latest ID *after* acquirePageLock() flushed the transaction.
	// This is used to detect edits/moves after loadPageData() but before the scope lock.
	// This works around the chicken/egg problem of determining the scope lock key.
	$latest = $title->getLatestRevID( Title::GAID_FOR_UPDATE );

	if ( !empty( $this->params['triggeringRevisionId'] ) ) {
		// Fetch the specified revision; lockAndGetLatest() below detects if the page
		// was edited since and aborts in order to avoid corrupting the link tables
		$revision = Revision::newFromId(
			$this->params['triggeringRevisionId'],
			Revision::READ_LATEST
		);
	} else {
		// Fetch current revision; READ_LATEST reduces lockAndGetLatest() check failures
		$revision = Revision::newFromTitle( $title, false, Revision::READ_LATEST );
	}

	if ( !$revision ) {
		$stats->increment( 'refreshlinks.rev_not_found' );
		$this->setLastError( "Revision not found for {$title->getPrefixedDBkey()}" );

		return false; // just deleted?
	} elseif ( $revision->getId() != $latest || $revision->getPage() !== $page->getId() ) {
		// Do not clobber over newer updates with older ones. If all jobs where FIFO and
		// serialized, it would be OK to update links based on older revisions since it
		// would eventually get to the latest. Since that is not the case (by design),
		// only update the link tables to a state matching the current revision's output.
		$stats->increment( 'refreshlinks.rev_not_current' );
		$this->setLastError( "Revision {$revision->getId()} is not current" );

		return false;
	}

	$content = $revision->getContent( Revision::RAW );
	if ( !$content ) {
		// If there is no content, pretend the content is empty
		$content = $revision->getContentHandler()->makeEmptyContent();
	}

	$parserOutput = false;
	$parserOptions = $page->makeParserOptions( 'canonical' );
	// If page_touched changed after this root job, then it is likely that
	// any views of the pages already resulted in re-parses which are now in
	// cache. The cache can be reused to avoid expensive parsing in some cases.
	if ( isset( $this->params['rootJobTimestamp'] ) ) {
		$opportunistic = !empty( $this->params['isOpportunistic'] );

		$skewedTimestamp = $this->params['rootJobTimestamp'];
		if ( $opportunistic ) {
			// Neither clock skew nor DB snapshot/replica DB lag matter much for such
			// updates; focus on reusing the (often recently updated) cache
		} else {
			// For transclusion updates, the template changes must be reflected
			$skewedTimestamp = wfTimestamp( TS_MW,
				wfTimestamp( TS_UNIX, $skewedTimestamp ) + self::CLOCK_FUDGE
			);
		}

		if ( $page->getLinksTimestamp() > $skewedTimestamp ) {
			// Something already updated the backlinks since this job was made
			$stats->increment( 'refreshlinks.update_skipped' );

			return true;
		}

		if ( $page->getTouched() >= $this->params['rootJobTimestamp'] || $opportunistic ) {
			// Cache is suspected to be up-to-date. As long as the cache rev ID matches
			// and it reflects the job's triggering change, then it is usable.
			$parserOutput = $services->getParserCache()->getDirty( $page, $parserOptions );
			if ( !$parserOutput
				|| $parserOutput->getCacheRevisionId() != $revision->getId()
				|| $parserOutput->getCacheTime() < $skewedTimestamp
			) {
				$parserOutput = false; // too stale
			}
		}
	}

	// Fetch the current revision and parse it if necessary...
	if ( $parserOutput ) {
		$stats->increment( 'refreshlinks.parser_cached' );
	} else {
		$start = microtime( true );
		// Revision ID must be passed to the parser output to get revision variables correct
		$parserOutput = $content->getParserOutput(
			$title, $revision->getId(), $parserOptions, false );
		$elapsed = microtime( true ) - $start;
		// If it took a long time to render, then save this back to the cache to avoid
		// wasted CPU by other apaches or job runners. We don't want to always save to
		// cache as this can cause high cache I/O and LRU churn when a template changes.
		if ( $elapsed >= self::PARSE_THRESHOLD_SEC
			&& $page->shouldCheckParserCache( $parserOptions, $revision->getId() )
			&& $parserOutput->isCacheable()
		) {
			$ctime = wfTimestamp( TS_MW, (int)$start ); // cache time
			$services->getParserCache()->save(
				$parserOutput, $page, $parserOptions, $ctime, $revision->getId()
			);
		}
		$stats->increment( 'refreshlinks.parser_uncached' );
	}

	$updates = $content->getSecondaryDataUpdates(
		$title,
		null,
		!empty( $this->params['useRecursiveLinksUpdate'] ),
		$parserOutput
	);

	// For legacy hook handlers doing updates via LinksUpdateConstructed, make sure
	// any pending writes they made get flushed before the doUpdate() calls below.
	// This avoids snapshot-clearing errors in LinksUpdate::acquirePageLock().
	$lbFactory->commitAndWaitForReplication( __METHOD__, $ticket );

	foreach ( $updates as $update ) {
		// Carry over cause in case so the update can do extra logging
		$update->setCause( $this->params['causeAction'], $this->params['causeAgent'] );
		// FIXME: This code probably shouldn't be here?
		// Needed by things like Echo notifications which need
		// to know which user caused the links update
		if ( $update instanceof LinksUpdate ) {
			$update->setRevision( $revision );
			if ( !empty( $this->params['triggeringUser'] ) ) {
				$userInfo = $this->params['triggeringUser'];
				if ( $userInfo['userId'] ) {
					$user = User::newFromId( $userInfo['userId'] );
				} else {
					// Anonymous, use the username
					$user = User::newFromName( $userInfo['userName'], false );
				}
				$update->setTriggeringUser( $user );
			}
		}
	}

	foreach ( $updates as $update ) {
		$update->setTransactionTicket( $ticket );
		$update->doUpdate();
	}

	// Purge the action=info page cache, which reflects link table contents
	InfoAction::invalidateCache( $title );

	// Commit any writes here in case this method is called in a loop.
	// In that case, the scoped lock will fail to be acquired.
	$lbFactory->commitAndWaitForReplication( __METHOD__, $ticket );

	return true;
}
/**
 * Strip fields that would spoil de-duplication of otherwise-identical jobs.
 *
 * @return array Map of job fields usable for duplicate detection
 */
public function getDeduplicationInfo() {
	$info = parent::getDeduplicationInfo();
	// Cause fields are for logging only and should not defeat de-duplication
	unset( $info['causeAction'] );
	unset( $info['causeAgent'] );
	if ( is_array( $info['params'] ) ) {
		// For per-pages jobs, the job title is that of the template that changed
		// (or similar), so remove that since it ruins duplicate detection
		if ( isset( $info['params']['pages'] ) ) {
			unset( $info['namespace'] );
			unset( $info['title'] );
		}
	}

	return $info;
}
/**
 * @return int Number of pages this job actually refreshes (0 for pure
 *  partitioning jobs, the page count for 'pages' batches, else 1)
 */
public function workItemCount() {
	if ( !empty( $this->params['recursive'] ) ) {
		return 0; // nothing actually refreshed
	} elseif ( isset( $this->params['pages'] ) ) {
		return count( $this->params['pages'] );
	}

	return 1; // one title
}