CACHE_ANYTHING => array( 'factory' => 'ObjectCache::newAnything' ),
CACHE_ACCEL => array( 'factory' => 'ObjectCache::newAccelerator' ),
- CACHE_MEMCACHED => array( 'factory' => 'ObjectCache::newMemcached', 'loggroup' => 'memcached' ),
+ CACHE_MEMCACHED => array( 'class' => 'MemcachedPhpBagOStuff', 'loggroup' => 'memcached' ),
'db-replicated' => array(
'class' => 'ReplicatedBagOStuff',
* @par Example:
* To set a generic maximum of 4 hits in 60 seconds:
* @code
- * $wgRateLimits = array( 4, 60 );
+ * $wgRateLimits = array( 4, 60 );
* @endcode
*
- * You could also limit per action and then type of users. See the inline
- * code for a template to use.
- *
- * This option set is experimental and likely to change.
+ * @par Example:
+ * You could also limit per action and then by type of user.
+ * @code
+ * $wgRateLimits = array(
+ * 'edit' => array(
+ * 'anon' => array( x, y ), // any and all anonymous edits (aggregate)
+ * 'user' => array( x, y ), // each logged-in user
+ * 'newbie' => array( x, y ), // each new autoconfirmed account; overrides 'user'
+ * 'ip' => array( x, y ), // each anon and recent account
+ * 'subnet' => array( x, y ), // ... within a /24 subnet in IPv4 or /64 in IPv6
+ * )
+ * )
+ * @endcode
*
- * @warning Requires memcached.
+ * @warning Requires that $wgMainCacheType is set to something persistent
*/
$wgRateLimits = array(
+ // Page edits
'edit' => array(
- 'anon' => null, // for any and all anonymous edits (aggregate)
- 'user' => null, // for each logged-in user
- 'newbie' => null, // for each recent (autoconfirmed) account; overrides 'user'
- 'ip' => null, // for each anon and recent account
- 'subnet' => null, // ... within a /24 subnet in IPv4 or /64 in IPv6
+ 'ip' => array( 8, 60 ),
+ 'newbie' => array( 8, 60 ),
),
+ // Page moves
+ 'move' => array(
+ 'newbie' => array( 2, 120 ),
+ 'user' => array( 8, 60 ),
+ ),
+ // File uploads
'upload' => array(
- 'user' => null,
- 'newbie' => null,
- 'ip' => null,
- 'subnet' => null,
+ 'ip' => array( 8, 60 ),
+ 'newbie' => array( 8, 60 ),
),
- 'move' => array(
- 'user' => null,
- 'newbie' => null,
- 'ip' => null,
- 'subnet' => null,
+ // Page rollbacks
+ 'rollback' => array(
+ 'user' => array( 10, 60 ),
+ 'newbie' => array( 5, 120 )
),
- 'mailpassword' => array( // triggering password resets emails
- 'anon' => null,
+ // Triggering password reset emails
+ 'mailpassword' => array(
+ 'ip' => array( 5, 3600 ),
),
- 'emailuser' => array( // emailing other users using MediaWiki
- 'user' => null,
+ // Emailing other users using MediaWiki
+ 'emailuser' => array(
+ 'ip' => array( 5, 86400 ),
+ 'newbie' => array( 5, 86400 ),
+ 'user' => array( 20, 86400 ),
),
- 'linkpurge' => array( // purges of link tables
- 'anon' => null,
- 'user' => null,
- 'newbie' => null,
- 'ip' => null,
- 'subnet' => null,
+ // Purging pages
+ 'purge' => array(
+ 'ip' => array( 30, 60 ),
+ 'user' => array( 30, 60 ),
),
- 'renderfile' => array( // files rendered via thumb.php or thumb_handler.php
- 'anon' => null,
- 'user' => null,
- 'newbie' => null,
- 'ip' => null,
- 'subnet' => null,
+ // Purges of link tables
+ 'linkpurge' => array(
+ 'ip' => array( 30, 60 ),
+ 'user' => array( 30, 60 ),
),
- 'renderfile-nonstandard' => array( // same as above but for non-standard thumbnails
- 'anon' => null,
- 'user' => null,
- 'newbie' => null,
- 'ip' => null,
- 'subnet' => null,
+ // Files rendered via thumb.php or thumb_handler.php
+ 'renderfile' => array(
+ 'ip' => array( 700, 30 ),
+ 'user' => array( 700, 30 ),
),
- 'stashedit' => array( // stashing edits into cache before save
- 'anon' => null,
- 'user' => null,
- 'newbie' => null,
- 'ip' => null,
- 'subnet' => null,
+ // Same as above but for non-standard thumbnails
+ 'renderfile-nonstandard' => array(
+ 'ip' => array( 70, 30 ),
+ 'user' => array( 70, 30 ),
),
- 'changetag' => array( // adding or removing change tags
- 'user' => null,
- 'newbie' => null,
+ // Stashing edits into cache before save
+ 'stashedit' => array(
+ 'ip' => array( 30, 60 ),
+ 'newbie' => array( 30, 60 ),
),
- 'purge' => array( // purging pages
- 'anon' => null,
- 'user' => null,
- 'newbie' => null,
- 'ip' => null,
- 'subnet' => null,
+ // Adding or removing change tags
+ 'changetag' => array(
+ 'ip' => array( 8, 60 ),
+ 'newbie' => array( 8, 60 ),
),
);
'ThumbnailRender' => 'ThumbnailRenderJob',
'recentChangesUpdate' => 'RecentChangesUpdateJob',
'refreshLinksPrioritized' => 'RefreshLinksJob', // for cascading protection
+ 'refreshLinksDynamic' => 'RefreshLinksJob', // for pages with dynamic content
'activityUpdateJob' => 'ActivityUpdateJob',
'enqueue' => 'EnqueueJob', // local queue for multi-DC setups
'null' => 'NullJob'
* @param string $reason Delete reason for deletion log
* @param bool $suppress Suppress all revisions and log the deletion in
* the suppression log instead of the deletion log
- * @param int $id Article ID
- * @param bool $commit Defaults to true, triggers transaction end
- * @param array &$error Array of errors to append to
+ * @param int $u1 Unused
+ * @param bool $u2 Unused
+ * @param array|string &$error Array of errors to append to
* @param User $user The deleting user
* @return bool True if successful
*/
public function doDeleteArticle(
- $reason, $suppress = false, $id = 0, $commit = true, &$error = '', User $user = null
+ $reason, $suppress = false, $u1 = null, $u2 = null, &$error = '', User $user = null
) {
- $status = $this->doDeleteArticleReal( $reason, $suppress, $id, $commit, $error, $user );
+ $status = $this->doDeleteArticleReal( $reason, $suppress, $u1, $u2, $error, $user );
return $status->isGood();
}
* @param string $reason Delete reason for deletion log
* @param bool $suppress Suppress all revisions and log the deletion in
* the suppression log instead of the deletion log
- * @param int $id Article ID
- * @param bool $commit Defaults to true, triggers transaction end
- * @param array &$error Array of errors to append to
+ * @param int $u1 Unused
+ * @param bool $u2 Unused
+ * @param array|string &$error Array of errors to append to
* @param User $user The deleting user
* @return Status Status object; if successful, $status->value is the log_id of the
* deletion log entry. If the page couldn't be deleted because it wasn't
* found, $status is a non-fatal 'cannotdelete' error
*/
public function doDeleteArticleReal(
- $reason, $suppress = false, $id = 0, $commit = true, &$error = '', User $user = null
+ $reason, $suppress = false, $u1 = null, $u2 = null, &$error = '', User $user = null
) {
global $wgUser, $wgContentHandlerUseDB;
}
$dbw = wfGetDB( DB_MASTER );
- $dbw->begin( __METHOD__ );
-
- if ( $id == 0 ) {
- $this->loadPageData( self::READ_LATEST );
- $id = $this->getID();
- // T98706: lock the page from various other updates but avoid using
- // WikiPage::READ_LOCKING as that will carry over the FOR UPDATE to
- // the revisions queries (which also JOIN on user). Only lock the page
- // row and CAS check on page_latest to see if the trx snapshot matches.
- $lockedLatest = $this->lock();
- if ( $id == 0 || $this->getLatest() != $lockedLatest ) {
- // Page not there or trx snapshot is stale
- $dbw->rollback( __METHOD__ );
- $status->error( 'cannotdelete',
- wfEscapeWikiText( $this->getTitle()->getPrefixedText() ) );
- return $status;
- }
+ $dbw->startAtomic( __METHOD__ );
+
+ $this->loadPageData( self::READ_LATEST );
+ $id = $this->getID();
+ // T98706: lock the page from various other updates but avoid using
+ // WikiPage::READ_LOCKING as that will carry over the FOR UPDATE to
+ // the revisions queries (which also JOIN on user). Only lock the page
+ // row and CAS check on page_latest to see if the trx snapshot matches.
+ $lockedLatest = $this->lock();
+ if ( $id == 0 || $this->getLatest() != $lockedLatest ) {
+ $dbw->endAtomic( __METHOD__ );
+ // Page not there or trx snapshot is stale
+ $status->error( 'cannotdelete',
+ wfEscapeWikiText( $this->getTitle()->getPrefixedText() ) );
+ return $status;
}
+ // At this point we are now committed to returning an OK
+ // status unless some DB query error or other exception comes up.
+ // This way callers don't have to call rollback() if $status is bad
+ // unless they actually try to catch exceptions (which is rare).
+
// we need to remember the old content so we can use it to generate all deletion updates.
$content = $this->getContent( Revision::RAW );
$row['ar_content_format'] = 'rev_content_format';
}
- $dbw->insertSelect( 'archive', array( 'page', 'revision' ),
+ // Copy all the page revisions into the archive table
+ $dbw->insertSelect(
+ 'archive',
+ array( 'page', 'revision' ),
$row,
array(
'page_id' => $id,
'page_id = rev_page'
- ), __METHOD__
+ ),
+ __METHOD__
);
// Now that it's safely backed up, delete it
$dbw->delete( 'page', array( 'page_id' => $id ), __METHOD__ );
- $ok = ( $dbw->affectedRows() > 0 ); // $id could be laggy
-
- if ( !$ok ) {
- $dbw->rollback( __METHOD__ );
- $status->error( 'cannotdelete',
- wfEscapeWikiText( $this->getTitle()->getPrefixedText() ) );
- return $status;
- }
if ( !$dbw->cascadingDeletes() ) {
$dbw->delete( 'revision', array( 'rev_page' => $id ), __METHOD__ );
$logEntry->publish( $logid );
} );
- if ( $commit ) {
- $dbw->commit( __METHOD__ );
- }
-
- // Show log excerpt on 404 pages rather than just a link
- $key = wfMemcKey( 'page-recent-delete', md5( $logTitle->getPrefixedText() ) );
- ObjectCache::getMainStashInstance()->set( $key, 1, 86400 );
+ $dbw->endAtomic( __METHOD__ );
$this->doDeleteUpdates( $id, $content );
Hooks::run( 'ArticleDeleteComplete',
array( &$this, &$user, $reason, $id, $content, $logEntry ) );
$status->value = $logid;
+
+ // Show log excerpt on 404 pages rather than just a link
+ $key = wfMemcKey( 'page-recent-delete', md5( $logTitle->getPrefixedText() ) );
+ ObjectCache::getMainStashInstance()->set( $key, 1, 86400 );
+
return $status;
}
return;
}
+ $params = array(
+ 'isOpportunistic' => true,
+ 'rootJobTimestamp' => $parserOutput->getCacheTime()
+ );
+
if ( $this->mTitle->areRestrictionsCascading() ) {
// If the page is cascade protecting, the links should really be up-to-date
- $params = array( 'prioritize' => true );
+ JobQueueGroup::singleton()->lazyPush(
+ RefreshLinksJob::newPrioritized( $this->mTitle, $params )
+ );
} elseif ( $parserOutput->hasDynamicContent() ) {
- // Assume the output contains time/random based magic words
- $params = array();
- } else {
- // If the inclusions are deterministic, the edit-triggered link jobs are enough
- return;
- }
-
- // Check if the last link refresh was before page_touched
- if ( $this->getLinksTimestamp() < $this->getTouched() ) {
- $params['isOpportunistic'] = true;
- $params['rootJobTimestamp'] = $parserOutput->getCacheTime();
- JobQueueGroup::singleton()->lazyPush( new RefreshLinksJob( $this->mTitle, $params ) );
+ // Assume the output contains "dynamic" time/random based magic words.
+ // Only update pages that expired due to dynamic content and NOT due to edits
+ // to referenced templates/files. When the cache expires due to dynamic content,
+ // page_touched is unchanged. We want to avoid triggering redundant jobs due to
+ // views of pages that were just purged via HTMLCacheUpdateJob. In that case, the
+ // template/file edit already triggered recursive RefreshLinksJob jobs.
+ if ( $this->getLinksTimestamp() > $this->getTouched() ) {
+ // If a page is uncacheable, do not keep spamming a job for it.
+ // Although it would be de-duplicated, it would still waste I/O.
+ $cache = ObjectCache::getLocalClusterInstance();
+ $key = $cache->makeKey( 'dynamic-linksupdate', 'last', $this->getId() );
+ if ( $cache->add( $key, time(), 60 ) ) {
+ JobQueueGroup::singleton()->lazyPush(
+ RefreshLinksJob::newDynamic( $this->mTitle, $params )
+ );
+ }
+ }
}
}