From: Aaron Schulz
Date: Wed, 9 Apr 2014 00:26:37 +0000 (-0700)
Subject: Removed $wgMaxBacklinksInvalidate
X-Git-Tag: 1.31.0-rc.0~16261
X-Git-Url: http://git.cyclocoop.org/%24image?a=commitdiff_plain;h=12472046955cf7592cfbc7c378ff8a74160698ea;p=lhc%2Fweb%2Fwiklou.git

Removed $wgMaxBacklinksInvalidate

This was a quick hack, introduced in I4fadded0e, that was later made
redundant by $wgJobBackoffThrottling.

Change-Id: Iddc5f6cfd6554ad9b6e54b8ce1b4805af809c7c8
---

diff --git a/RELEASE-NOTES-1.23 b/RELEASE-NOTES-1.23
index 244cce7b4d..947a0d611e 100644
--- a/RELEASE-NOTES-1.23
+++ b/RELEASE-NOTES-1.23
@@ -56,6 +56,7 @@ production.
 * $wgLicenseTerms has been removed as it was unused.
 * $wgProfileOnly is now deprecated; set the log file in
   $wgDebugLogGroups['profileoutput'] to replace it.
+* $wgMaxBacklinksInvalidate was removed; use $wgJobBackoffThrottling instead
 
 === New features in 1.23 ===
 * ResourceLoader can utilize the Web Storage API to cache modules client-side.
diff --git a/includes/DefaultSettings.php b/includes/DefaultSettings.php
index 5b2360c6ee..8b8d75ce71 100644
--- a/includes/DefaultSettings.php
+++ b/includes/DefaultSettings.php
@@ -6940,15 +6940,6 @@ $wgUpdateRowsPerJob = 500;
  */
 $wgUpdateRowsPerQuery = 100;
 
-/**
- * Do not purge all the pages that use a page when it is edited
- * if there are more than this many such pages. This is used to
- * avoid invalidating a large portion of the squid/parser cache.
- *
- * This setting should factor in any squid/parser cache expiry settings.
- */
-$wgMaxBacklinksInvalidate = false;
-
 /** @} */ # End job queue
 
 /************************************************************************//**
diff --git a/includes/jobqueue/jobs/HTMLCacheUpdateJob.php b/includes/jobqueue/jobs/HTMLCacheUpdateJob.php
index a7c5dc03c5..4d1e72c91a 100644
--- a/includes/jobqueue/jobs/HTMLCacheUpdateJob.php
+++ b/includes/jobqueue/jobs/HTMLCacheUpdateJob.php
@@ -40,7 +40,7 @@ class HTMLCacheUpdateJob extends Job {
 	}
 
 	function run() {
-		global $wgUpdateRowsPerJob, $wgUpdateRowsPerQuery, $wgMaxBacklinksInvalidate;
+		global $wgUpdateRowsPerJob, $wgUpdateRowsPerQuery;
 
 		static $expected = array( 'recursive', 'pages' ); // new jobs have one of these
 
@@ -57,14 +57,6 @@ class HTMLCacheUpdateJob extends Job {
 
 		// Job to purge all (or a range of) backlink pages for a page
 		if ( !empty( $this->params['recursive'] ) ) {
-			// @TODO: try to use delayed jobs if possible?
-			if ( !isset( $this->params['range'] ) && $wgMaxBacklinksInvalidate !== false ) {
-				$numRows = $this->title->getBacklinkCache()->getNumLinks(
-					$this->params['table'], $wgMaxBacklinksInvalidate );
-				if ( $numRows > $wgMaxBacklinksInvalidate ) {
-					return true;
-				}
-			}
			// Convert this into no more than $wgUpdateRowsPerJob HTMLCacheUpdateJob per-title
			// jobs and possibly a recursive HTMLCacheUpdateJob job for the rest of the backlinks
			$jobs = BacklinkJobUtils::partitionBacklinkJob(
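
For reference, $wgJobBackoffThrottling (added in 1.23) maps job types to the
maximum number of job "work items" each job runner process will handle per
second. It covers the same purge-stampede concern as the removed setting, but
by rate-limiting runners rather than skipping invalidation of heavily-linked
pages outright. A minimal LocalSettings.php sketch follows; the
'htmlCacheUpdate' key is the job type registered for HTMLCacheUpdateJob, and
the numeric rate is an illustrative assumption, not a recommended value:

    <?php
    // LocalSettings.php (sketch, illustrative value): cap each job runner
    // at 30 htmlCacheUpdate work items (backlink page purges) per second.
    // Unlike the removed $wgMaxBacklinksInvalidate, pages with many
    // backlinks still get purged, just at a bounded rate.
    $wgJobBackoffThrottling['htmlCacheUpdate'] = 30;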