*/
$wgUpdateRowsPerQuery = 100;
+/**
+ * Do not purge all the pages that use a page when it is edited
+ * if there are more than this many such pages. This is used to
+ * avoid invalidating a large portion of the squid/parser cache.
+ * Set to false (the default) to impose no limit and always purge.
+ *
+ * This setting should factor in any squid/parser cache expiry settings.
+ */
+$wgMaxBacklinksInvalidate = false;
+
/** @} */ # End job queue }
/************************************************************************//**
}
public function doUpdate() {
+ global $wgMaxBacklinksInvalidate;
+
wfProfileIn( __METHOD__ );
$job = new HTMLCacheUpdateJob(
);
$count = $this->mTitle->getBacklinkCache()->getNumLinks( $this->mTable, 200 );
- if ( $count >= 200 ) { // many backlinks
+ // Skip HTML cache invalidation entirely when the backlink count exceeds
+ // the configured threshold ($wgMaxBacklinksInvalidate; false = no limit).
+ // NOTE(review): $count comes from getNumLinks( ..., 200 ) above and so
+ // appears capped at 200 — a threshold >= 200 could never trigger this
+ // branch; confirm the intended configuration range.
+ if ( $wgMaxBacklinksInvalidate !== false && $count > $wgMaxBacklinksInvalidate ) {
+ wfDebug( "Skipped HTML cache invalidation of {$this->mTitle->getPrefixedText()}." );
+ } elseif ( $count >= 200 ) { // many backlinks
JobQueueGroup::singleton()->push( $job );
JobQueueGroup::singleton()->deduplicateRootJob( $job );
} else { // few backlinks ($count might be off even if 0)