* - a) Recursive jobs to purge caches for backlink pages for a given title.
*      These jobs have (recursive:true,table:<table>) set.
* - b) Jobs to purge caches for a set of titles (the job title is ignored).
- *      These jobs have (pages:(<page ID>:(<namespace>,<title>),...) set.
+ *      These jobs have (pages:(<page ID>:(<namespace>,<title>),...)) set.
*
* @ingroup JobQueue
*/
		$this->removeDuplicates = ( !isset( $params['range'] ) && !isset( $params['pages'] ) );
	}
+	/**
+	 * @param Title $title Title to purge backlink pages from
+	 * @param string $table Backlink table name
+	 * @return HTMLCacheUpdateJob
+	 */
+	public static function newForBacklinks( Title $title, $table ) {
+		return new self(
+			$title,
+			array(
+				'table' => $table,
+				'recursive' => true
+			) + Job::newRootJobParams( // "overall" refresh links job info
+				"htmlCacheUpdate:{$table}:{$title->getPrefixedText()}"
+			)
+		);
+	}
+
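Callers construct and enqueue this job rather than running it inline. A minimal
usage sketch, assuming the standard JobQueueGroup push API and a Title object for
the edited template ('templatelinks' is one example backlink table):

	$job = HTMLCacheUpdateJob::newForBacklinks( $title, 'templatelinks' );
	JobQueueGroup::singleton()->push( $job );

The root job parameters attached in newForBacklinks() let the queue deduplicate
overlapping recursive purges for the same table and title.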
	function run() {
		global $wgUpdateRowsPerJob, $wgUpdateRowsPerQuery;
	 * @param array $pages Map of (page ID => (namespace, DB key)) entries
	 */
	protected function invalidateTitles( array $pages ) {
-		global $wgUpdateRowsPerQuery, $wgUseFileCache, $wgUseSquid;
+		global $wgUpdateRowsPerQuery, $wgUseFileCache;
		// Get all page IDs in this query into an array
		$pageIds = array_keys( $pages );
		// Check $wgUpdateRowsPerQuery for sanity; batch jobs are sized by that already.
		foreach ( array_chunk( $pageIds, $wgUpdateRowsPerQuery ) as $batch ) {
			$dbw->commit( __METHOD__, 'flush' );
-			wfWaitForSlaves();
+			wfGetLBFactory()->waitForReplication();
			$dbw->update( 'page',
				array( 'page_touched' => $dbw->timestamp( $touchTimestamp ) ),
				array( 'page_id' => $batch ), // restrict the update to this chunk of IDs
				__METHOD__
			);
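The wfWaitForSlaves() call replaced above was a convenience wrapper that blocked
until replica databases caught up; LBFactory::waitForReplication() is its successor
and also takes an options array. A hedged sketch (option keys assumed from the
LBFactory API):

	// e.g. wait only for the current wiki's replicas, giving up after 5 seconds
	wfGetLBFactory()->waitForReplication( array( 'wiki' => wfWikiID(), 'timeout' => 5 ) );

Committing and then waiting once per chunk keeps replication lag bounded when a
template edit invalidates a large set of pages.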
-		// Update squid
-		if ( $wgUseSquid ) {
-			$u = SquidUpdate::newFromTitles( $titleArray );
-			$u->doUpdate();
-		}
+		// Update CDN
+		$u = CdnCacheUpdate::newFromTitles( $titleArray );
+		$u->doUpdate();
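CdnCacheUpdate is the renamed SquidUpdate class, and the explicit $wgUseSquid guard
is dropped here on the assumption that the purge is a no-op when no CDN servers are
configured. Besides the newFromTitles() form above, the class accepts a plain URL
list; a hedged sketch ($urls is a placeholder array of fully-qualified URLs):

	$u = new CdnCacheUpdate( $urls );
	$u->doUpdate();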
		// Update file cache
		if ( $wgUseFileCache ) {