* @file
*/
+use \MediaWiki\Logger\LoggerFactory;
+
/**
* Class representing a MediaWiki article and history.
*
$this->mTitle = $title;
}
+ /**
+ * Makes sure that the mTitle object is cloned
+ * to the newly cloned WikiPage.
+ *
+ * Without this, a cloned WikiPage would share a single mutable Title
+ * instance with its source object, so changes to one would leak into
+ * the other.
+ */
+ public function __clone() {
+ $this->mTitle = clone $this->mTitle;
+ }
+
/**
* Create a WikiPage object of the appropriate class for the given title.
*
/**
* Loads page_touched and returns a value indicating if it should be used
- * @return bool True if not a redirect
+ * @return bool True if this page exists and is not a redirect
*/
public function checkTouched() {
if ( !$this->mDataLoaded ) {
$this->loadPageData();
}
- return !$this->mIsRedirect;
+ // A falsy mId means no page row exists, so page_touched cannot be used
+ return ( $this->mId && !$this->mIsRedirect );
}
/**
// Update the DB post-send if the page has not cached since now
$that = $this;
$latest = $this->getLatest();
- DeferredUpdates::addCallableUpdate( function() use ( $that, $retval, $latest ) {
- $that->insertRedirectEntry( $retval, $latest );
- } );
+ DeferredUpdates::addCallableUpdate(
+ function () use ( $that, $retval, $latest ) {
+ $that->insertRedirectEntry( $retval, $latest );
+ },
+ DeferredUpdates::POSTSEND,
+ wfGetDB( DB_MASTER )
+ );
return $retval;
}
*
* @since 1.19
* @param ParserOptions $parserOptions ParserOptions to use for the parse operation
- * @param null|int $oldid Revision ID to get the text from, passing null or 0 will
- * get the current revision (default value)
- *
- * @return ParserOutput|bool ParserOutput or false if the revision was not found
+ * @param null|int $oldid Revision ID to get the text from, passing null or 0 will
+ * get the current revision (default value)
+ * @param bool $forceParse Force reindexing, regardless of cache settings
+ * @return bool|ParserOutput ParserOutput or false if the revision was not found
*/
- public function getParserOutput( ParserOptions $parserOptions, $oldid = null ) {
-
- $useParserCache = $this->shouldCheckParserCache( $parserOptions, $oldid );
+ public function getParserOutput(
+ ParserOptions $parserOptions, $oldid = null, $forceParse = false
+ ) {
+ $useParserCache =
+ ( !$forceParse ) && $this->shouldCheckParserCache( $parserOptions, $oldid );
wfDebug( __METHOD__ .
': using parser cache: ' . ( $useParserCache ? 'yes' : 'no' ) . "\n" );
if ( $parserOptions->getStubThreshold() ) {
return false;
}
- $title = $this->mTitle;
- wfGetDB( DB_MASTER )->onTransactionIdle( function() use ( $title ) {
- // Invalidate the cache in auto-commit mode
- $title->invalidateCache();
- } );
-
+ $this->mTitle->invalidateCache();
// Send purge after above page_touched update was committed
DeferredUpdates::addUpdate(
- new CdnCacheUpdate( $title->getCdnUrls() ),
+ new CdnCacheUpdate( $this->mTitle->getCdnUrls() ),
DeferredUpdates::PRESEND
);
* @param IDatabase $dbw
* @param int|null $pageId Custom page ID that will be used for the insert statement
*
- * @return bool|int The newly created page_id key; false if the title already existed
+ * @return bool|int The newly created page_id key; false if the row was not
+ * inserted, e.g. because the title already existed or because the specified
+ * page ID is already in use.
*/
public function insertOn( $dbw, $pageId = null ) {
$pageIdForInsert = $pageId ?: $dbw->nextSequenceValue( 'page_page_id_seq' );
$revisionId = $revision->insertOn( $dbw );
// Update page_latest and friends to reflect the new revision
if ( !$this->updateRevisionOn( $dbw, $revision, null, $meta['oldIsRedirect'] ) ) {
- $dbw->rollback( __METHOD__ ); // sanity; this should never happen
throw new MWException( "Failed to update page row to use new revision." );
}
}
// Do secondary updates once the main changes have been committed...
- $that = $this;
- $dbw->onTransactionIdle(
- function () use (
- $dbw, &$that, $revision, &$user, $content, $summary, &$flags,
- $changed, $meta, &$status
- ) {
- // Do per-page updates in a transaction
- $dbw->setFlag( DBO_TRX );
- // Update links tables, site stats, etc.
- $that->doEditUpdates(
- $revision,
- $user,
- [
- 'changed' => $changed,
- 'oldcountable' => $meta['oldCountable'],
- 'oldrevision' => $meta['oldRevision']
- ]
- );
- // Trigger post-save hook
- $params = [ &$that, &$user, $content, $summary, $flags & EDIT_MINOR,
- null, null, &$flags, $revision, &$status, $meta['baseRevId'] ];
- ContentHandler::runLegacyHooks( 'ArticleSaveComplete', $params );
- Hooks::run( 'PageContentSaveComplete', $params );
- }
+ DeferredUpdates::addUpdate(
+ new AtomicSectionUpdate(
+ $dbw,
+ __METHOD__,
+ function () use (
+ $revision, &$user, $content, $summary, &$flags,
+ $changed, $meta, &$status
+ ) {
+ // Update links tables, site stats, etc.
+ $this->doEditUpdates(
+ $revision,
+ $user,
+ [
+ 'changed' => $changed,
+ 'oldcountable' => $meta['oldCountable'],
+ 'oldrevision' => $meta['oldRevision']
+ ]
+ );
+ // Trigger post-save hook
+ $params = [ &$this, &$user, $content, $summary, $flags & EDIT_MINOR,
+ null, null, &$flags, $revision, &$status, $meta['baseRevId'] ];
+ ContentHandler::runLegacyHooks( 'ArticleSaveComplete', $params );
+ Hooks::run( 'PageContentSaveComplete', $params );
+ }
+ ),
+ DeferredUpdates::PRESEND
);
return $status;
$revisionId = $revision->insertOn( $dbw );
// Update the page record with revision data
if ( !$this->updateRevisionOn( $dbw, $revision, 0 ) ) {
- $dbw->rollback( __METHOD__ ); // sanity; this should never happen
throw new MWException( "Failed to update page row to use new revision." );
}
$status->value['revision'] = $revision;
// Do secondary updates once the main changes have been committed...
- $that = $this;
- $dbw->onTransactionIdle(
- function () use (
- &$that, $dbw, $revision, &$user, $content, $summary, &$flags, $meta, &$status
- ) {
- // Do per-page updates in a transaction
- $dbw->setFlag( DBO_TRX );
- // Update links, etc.
- $that->doEditUpdates( $revision, $user, [ 'created' => true ] );
- // Trigger post-create hook
- $params = [ &$that, &$user, $content, $summary,
- $flags & EDIT_MINOR, null, null, &$flags, $revision ];
- ContentHandler::runLegacyHooks( 'ArticleInsertComplete', $params );
- Hooks::run( 'PageContentInsertComplete', $params );
- // Trigger post-save hook
- $params = array_merge( $params, [ &$status, $meta['baseRevId'] ] );
- ContentHandler::runLegacyHooks( 'ArticleSaveComplete', $params );
- Hooks::run( 'PageContentSaveComplete', $params );
+ DeferredUpdates::addUpdate(
+ new AtomicSectionUpdate(
+ $dbw,
+ __METHOD__,
+ function () use (
+ $revision, &$user, $content, $summary, &$flags, $meta, &$status
+ ) {
+ // Update links, etc.
+ $this->doEditUpdates( $revision, $user, [ 'created' => true ] );
+ // Trigger post-create hook
+ $params = [ &$this, &$user, $content, $summary,
+ $flags & EDIT_MINOR, null, null, &$flags, $revision ];
+ ContentHandler::runLegacyHooks( 'ArticleInsertComplete', $params );
+ Hooks::run( 'PageContentInsertComplete', $params );
+ // Trigger post-save hook
+ $params = array_merge( $params, [ &$status, $meta['baseRevId'] ] );
+ ContentHandler::runLegacyHooks( 'ArticleSaveComplete', $params );
+ Hooks::run( 'PageContentSaveComplete', $params );
- }
+ }
+ ),
+ DeferredUpdates::PRESEND
);
return $status;
}
if ( $this->mPreparedEdit
- && $this->mPreparedEdit->newContent
+ && isset( $this->mPreparedEdit->newContent )
&& $this->mPreparedEdit->newContent->equals( $content )
&& $this->mPreparedEdit->revid == $revid
&& $this->mPreparedEdit->format == $serialFormat
}
}
);
+ } else {
+ // Try to avoid a second parse if {{REVISIONID}} is used
+ $edit->popts->setSpeculativeRevIdCallback( function () {
+ return 1 + (int)wfGetDB( DB_MASTER )->selectField(
+ 'revision',
+ 'MAX(rev_id)',
+ [],
+ __METHOD__
+ );
+ } );
}
$edit->output = $edit->pstContent
? $edit->pstContent->getParserOutput( $this->mTitle, $revid, $edit->popts )
];
$content = $revision->getContent();
+ $logger = LoggerFactory::getInstance( 'SaveParse' );
+
// See if the parser output before $revision was inserted is still valid
$editInfo = false;
if ( !$this->mPreparedEdit ) {
- wfDebug( __METHOD__ . ": No prepared edit...\n" );
+ $logger->debug( __METHOD__ . ": No prepared edit...\n" );
} elseif ( $this->mPreparedEdit->output->getFlag( 'vary-revision' ) ) {
- wfDebug( __METHOD__ . ": Prepared edit has vary-revision...\n" );
+ $logger->info( __METHOD__ . ": Prepared edit has vary-revision...\n" );
+ } elseif ( $this->mPreparedEdit->output->getFlag( 'vary-revision-id' )
+ && $this->mPreparedEdit->output->getSpeculativeRevIdUsed() !== $revision->getId()
+ ) {
+ $logger->info( __METHOD__ . ": Prepared edit has vary-revision-id with wrong ID...\n" );
} elseif ( $this->mPreparedEdit->output->getFlag( 'vary-user' ) && !$options['changed'] ) {
- wfDebug( __METHOD__ . ": Prepared edit has vary-user and is null...\n" );
+ $logger->info( __METHOD__ . ": Prepared edit has vary-user and is null...\n" );
} else {
wfDebug( __METHOD__ . ": Using prepared edit...\n" );
$editInfo = $this->mPreparedEdit;
return $status;
}
+ // Given the lock above, we can be confident in the title and page ID values
+ $namespace = $this->getTitle()->getNamespace();
+ $dbKey = $this->getTitle()->getDBkey();
+
// At this point we are now committed to returning an OK
// status unless some DB query error or other exception comes up.
// This way callers don't have to call rollback() if $status is bad
$bitfield = 'rev_deleted';
}
- /**
- * For now, shunt the revision data into the archive table.
- * Text is *not* removed from the text table; bulk storage
- * is left intact to avoid breaking block-compression or
- * immutable storage schemes.
- *
- * For backwards compatibility, note that some older archive
- * table entries will have ar_text and ar_flags fields still.
- *
- * In the future, we may keep revisions and mark them with
- * the rev_deleted field, which is reserved for this purpose.
- */
-
- $row = [
- 'ar_namespace' => 'page_namespace',
- 'ar_title' => 'page_title',
- 'ar_comment' => 'rev_comment',
- 'ar_user' => 'rev_user',
- 'ar_user_text' => 'rev_user_text',
- 'ar_timestamp' => 'rev_timestamp',
- 'ar_minor_edit' => 'rev_minor_edit',
- 'ar_rev_id' => 'rev_id',
- 'ar_parent_id' => 'rev_parent_id',
- 'ar_text_id' => 'rev_text_id',
- 'ar_text' => '\'\'', // Be explicit to appease
- 'ar_flags' => '\'\'', // MySQL's "strict mode"...
- 'ar_len' => 'rev_len',
- 'ar_page_id' => 'page_id',
- 'ar_deleted' => $bitfield,
- 'ar_sha1' => 'rev_sha1',
- ];
+ // For now, shunt the revision data into the archive table.
+ // Text is *not* removed from the text table; bulk storage
+ // is left intact to avoid breaking block-compression or
+ // immutable storage schemes.
+ // In the future, we may keep revisions and mark them with
+ // the rev_deleted field, which is reserved for this purpose.
- if ( $wgContentHandlerUseDB ) {
- $row['ar_content_model'] = 'rev_content_model';
- $row['ar_content_format'] = 'rev_content_format';
+ // Get all of the page revisions
+ $res = $dbw->select(
+ 'revision',
+ Revision::selectFields(),
+ [ 'rev_page' => $id ],
+ __METHOD__,
+ 'FOR UPDATE'
+ );
+ // Build their equivalent archive rows
+ $rowsInsert = [];
+ foreach ( $res as $row ) {
+ $rowInsert = [
+ 'ar_namespace' => $namespace,
+ 'ar_title' => $dbKey,
+ 'ar_comment' => $row->rev_comment,
+ 'ar_user' => $row->rev_user,
+ 'ar_user_text' => $row->rev_user_text,
+ 'ar_timestamp' => $row->rev_timestamp,
+ 'ar_minor_edit' => $row->rev_minor_edit,
+ 'ar_rev_id' => $row->rev_id,
+ 'ar_parent_id' => $row->rev_parent_id,
+ 'ar_text_id' => $row->rev_text_id,
+ 'ar_text' => '',
+ 'ar_flags' => '',
+ 'ar_len' => $row->rev_len,
+ 'ar_page_id' => $id,
+ 'ar_deleted' => $bitfield,
+ 'ar_sha1' => $row->rev_sha1,
+ ];
+ if ( $wgContentHandlerUseDB ) {
+ $rowInsert['ar_content_model'] = $row->rev_content_model;
+ $rowInsert['ar_content_format'] = $row->rev_content_format;
+ }
+ $rowsInsert[] = $rowInsert;
}
+ // Copy them into the archive table
+ $dbw->insert( 'archive', $rowsInsert, __METHOD__ );
+ // Save this so we can pass it to the ArticleDeleteComplete hook.
+ $archivedRevisionCount = $dbw->affectedRows();
- // Copy all the page revisions into the archive table
- $dbw->insertSelect(
- 'archive',
- [ 'page', 'revision' ],
- $row,
- [
- 'page_id' => $id,
- 'page_id = rev_page'
- ],
- __METHOD__
- );
+ // Clone the title and WikiPage, so we have the information we need when
+ // we log and run the ArticleDeleteComplete hook.
+ $logTitle = clone $this->mTitle;
+ $wikiPageBeforeDelete = clone $this;
// Now that it's safely backed up, delete it
$dbw->delete( 'page', [ 'page_id' => $id ], __METHOD__ );
$dbw->delete( 'revision', [ 'rev_page' => $id ], __METHOD__ );
}
- // Clone the title, so we have the information we need when we log
- $logTitle = clone $this->mTitle;
-
// Log the deletion, if the page was suppressed, put it in the suppression log instead
$logtype = $suppress ? 'suppress' : 'delete';
$this->doDeleteUpdates( $id, $content );
- Hooks::run( 'ArticleDeleteComplete',
- [ &$this, &$user, $reason, $id, $content, $logEntry ] );
+ Hooks::run( 'ArticleDeleteComplete', [
+ &$wikiPageBeforeDelete,
+ &$user,
+ $reason,
+ $id,
+ $content,
+ $logEntry,
+ $archivedRevisionCount
+ ] );
$status->value = $logid;
// Show log excerpt on 404 pages rather than just a link
$title->touchLinks();
$title->purgeSquid();
$title->deleteTitleProtection();
+
+ if ( $title->getNamespace() == NS_CATEGORY ) {
+ // Load the Category object, which will schedule a job to create
+ // the category table row if necessary. Checking a slave is ok
+ // here, in the worst case it'll run an unnecessary recount job on
+ // a category that probably doesn't have many members.
+ Category::newFromTitle( $title )->getID();
+ }
}
/**
$cat = Category::newFromName( $catName );
Hooks::run( 'CategoryAfterPageRemoved', [ $cat, $this, $id ] );
}
+
+ // Refresh counts on categories that should be empty now, to
+ // trigger possible deletion. Check master for the most
+ // up-to-date cat_pages.
+ if ( count( $deleted ) ) {
+ $rows = $dbw->select(
+ 'category',
+ [ 'cat_id', 'cat_title', 'cat_pages', 'cat_subcats', 'cat_files' ],
+ [ 'cat_title' => $deleted, 'cat_pages <= 0' ],
+ $method
+ );
+ foreach ( $rows as $row ) {
+ $cat = Category::newFromRow( $row );
+ $cat->refreshCounts();
+ }
+ }
}
);
}