setting in the relevant section of $wgLBFactoryConf.
* User::newSystemUser() may be used to simplify the creation of passwordless
"system" users for logged actions from scripts and extensions.
+* Extensions can now return detailed error information via the API when
+ preventing user actions using 'getUserPermissionsErrors' and similar hooks
+ by using ApiMessage instances instead of strings for the $result value.
==== External libraries ====
'getUserPermissionsErrors': Add a permissions error when permissions errors are
checked for. Use instead of userCan for most cases. Return false if the user
can't do it, and populate $result with the reason in the form of
-array( messagename, param1, param2, ... ). For consistency, error messages
+array( messagename, param1, param2, ... ) or a MessageSpecifier instance (you
+might want to use ApiMessage to provide machine-readable details for the API).
+For consistency, error messages
should be plain text with no special coloring, bolding, etc. to show that
they're errors; presenting them properly to the user as errors is done by the
caller.
called only if expensive checks are enabled. Add a permissions error when
permissions errors are checked for. Return false if the user can't do it, and
populate $result with the reason in the form of array( messagename, param1,
-param2, ... ). For consistency, error messages should be plain text with no
+param2, ... ) or a MessageSpecifier instance (you might want to use ApiMessage
+to provide machine-readable details for the API). For consistency, error
+messages should be plain text with no
special coloring, bolding, etc. to show that they're errors; presenting them
properly to the user as errors is done by the caller.
$title: Title object being checked against
$out = array();
foreach ( $args as $errors ) {
foreach ( $errors as $params ) {
+ $originalParams = $params;
+ if ( $params[0] instanceof MessageSpecifier ) {
+ $msg = $params[0];
+ $params = array_merge( array( $msg->getKey() ), $msg->getParams() );
+ }
# @todo FIXME: Sometimes get nested arrays for $params,
# which leads to E_NOTICEs
$spec = implode( "\t", $params );
- $out[$spec] = $params;
+ $out[$spec] = $originalParams;
}
}
return array_values( $out );
if ( $page == '' ) {
return Status::newFatal( 'import-noarticle' );
}
- $link = Title::newFromText( "$interwiki:Special:Export/$page" );
- if ( is_null( $link ) || !$link->isExternal() ) {
+
+		# Look up the first interwiki prefix, and let the foreign site handle
+		# any subsequent interwiki prefixes in the page name.
+ $firstIwPrefix = strtok( $interwiki, ':' );
+ $firstIw = Interwiki::fetch( $firstIwPrefix );
+ if ( !$firstIw ) {
return Status::newFatal( 'importbadinterwiki' );
- } else {
- $params = array();
- if ( $history ) {
- $params['history'] = 1;
- }
- if ( $templates ) {
- $params['templates'] = 1;
- }
- if ( $pageLinkDepth ) {
- $params['pagelink-depth'] = $pageLinkDepth;
- }
- $url = $link->getFullURL( $params );
- # For interwikis, use POST to avoid redirects.
- return ImportStreamSource::newFromURL( $url, "POST" );
}
+
+ $additionalIwPrefixes = strtok( '' );
+ if ( $additionalIwPrefixes ) {
+ $additionalIwPrefixes .= ':';
+ }
+ # Have to do a DB-key replacement ourselves; otherwise spaces get
+ # URL-encoded to +, which is wrong in this case. Similar to logic in
+ # Title::getLocalURL
+ $link = $firstIw->getURL( strtr( "${additionalIwPrefixes}Special:Export/$page",
+ ' ', '_' ) );
+
+ $params = array();
+ if ( $history ) {
+ $params['history'] = 1;
+ }
+ if ( $templates ) {
+ $params['templates'] = 1;
+ }
+ if ( $pageLinkDepth ) {
+ $params['pagelink-depth'] = $pageLinkDepth;
+ }
+
+ $url = wfAppendQuery( $link, $params );
+ # For interwikis, use POST to avoid redirects.
+ return ImportStreamSource::newFromURL( $url, "POST" );
}
}
* - secure : does cheap and expensive checks, using the master as needed
* @param array $ignoreErrors Array of Strings Set this to a list of message keys
* whose corresponding errors may be ignored.
- * @return array Array of arguments to wfMessage to explain permissions problems.
+ * @return array Array of arrays of the arguments to wfMessage to explain permissions problems.
*/
public function getUserPermissionsErrors(
$action, $user, $rigor = 'secure', $ignoreErrors = array()
// Remove the errors being ignored.
foreach ( $errors as $index => $error ) {
- $error_key = is_array( $error ) ? $error[0] : $error;
+ $errKey = is_array( $error ) ? $error[0] : $error;
- if ( in_array( $error_key, $ignoreErrors ) ) {
+ if ( in_array( $errKey, $ignoreErrors ) ) {
+ unset( $errors[$index] );
+ }
+ if ( $errKey instanceof MessageSpecifier && in_array( $errKey->getKey(), $ignoreErrors ) ) {
unset( $errors[$index] );
}
}
} elseif ( $result !== '' && is_string( $result ) ) {
// A string representing a message-id
$errors[] = array( $result );
+ } elseif ( $result instanceof MessageSpecifier ) {
+ // A message specifier representing an error
+ $errors[] = array( $result );
} elseif ( $result === false ) {
// a generic "We don't want them to do that"
$errors[] = array( 'badaccess-group0' );
*
* @since 1.23
* @param Status $status
+ * @param array|null &$extraData Set if extra data from IApiMessage is available (since 1.27)
* @return array Array of code and error string
* @throws MWException
*/
- public function getErrorFromStatus( $status ) {
+ public function getErrorFromStatus( $status, &$extraData = null ) {
if ( $status->isGood() ) {
throw new MWException( 'Successful status passed to ApiBase::dieStatus' );
}
// error messages.
if ( $errors[0] instanceof Message ) {
$msg = $errors[0];
- $code = $msg->getKey();
+ if ( $msg instanceof IApiMessage ) {
+ $extraData = $msg->getApiData();
+ $code = $msg->getApiCode();
+ } else {
+ $code = $msg->getKey();
+ }
} else {
$code = array_shift( $errors[0] );
$msg = wfMessage( $code, $errors[0] );
* @throws UsageException always
*/
public function dieStatus( $status ) {
- list( $code, $msg ) = $this->getErrorFromStatus( $status );
- $this->dieUsage( $msg, $code );
+ $extraData = null;
+ list( $code, $msg ) = $this->getErrorFromStatus( $status, $extraData );
+ $this->dieUsage( $msg, $code, 0, $extraData );
}
// @codingStandardsIgnoreStart Allow long lines. Cannot split these.
$error = array( $error );
}
$parsed = $this->parseMsg( $error );
- $this->dieUsage( $parsed['info'], $parsed['code'] );
+ $extraData = isset( $parsed['data'] ) ? $parsed['data'] : null;
+ $this->dieUsage( $parsed['info'], $parsed['code'], 0, $extraData );
}
/**
$key = array_shift( $error );
}
+ if ( $key instanceof IApiMessage ) {
+ return array(
+ 'code' => $key->getApiCode(),
+ 'info' => $key->inLanguage( 'en' )->useDatabase( false )->text(),
+ 'data' => $key->getApiData()
+ );
+ }
+
if ( isset( self::$messageMap[$key] ) ) {
return array(
'code' => wfMsgReplaceArgs( self::$messageMap[$key]['code'], $error ),
return array(
'action=help'
=> 'apihelp-help-example-main',
+ 'action=help&modules=query&submodules=1'
+ => 'apihelp-help-example-submodules',
'action=help&recursivesubmodules=1'
=> 'apihelp-help-example-recursive',
'action=help&modules=help'
$data['invalidparameter'] = $parameter;
$parsed = $this->parseMsg( $error );
+ if ( isset( $parsed['data'] ) ) {
+ $data = array_merge( $data, $parsed['data'] );
+ }
+
$this->dieUsage( $parsed['info'], $parsed['code'], 0, $data );
}
"apihelp-help-param-wrap": "Wrap the output in a standard API response structure.",
"apihelp-help-param-toc": "Include a table of contents in the HTML output.",
"apihelp-help-example-main": "Help for the main module.",
+ "apihelp-help-example-submodules": "Help for <kbd>action=query</kbd> and all its submodules.",
"apihelp-help-example-recursive": "All help in one page.",
"apihelp-help-example-help": "Help for the help module itself.",
"apihelp-help-example-query": "Help for two query submodules.",
"apihelp-help-param-wrap": "{{doc-apihelp-param|help|wrap}}",
"apihelp-help-param-toc": "{{doc-apihelp-param|help|toc}}",
"apihelp-help-example-main": "{{doc-apihelp-example|help}}",
+ "apihelp-help-example-submodules": "{{doc-apihelp-example|help}}",
"apihelp-help-example-recursive": "{{doc-apihelp-example|help}}",
"apihelp-help-example-help": "{{doc-apihelp-example|help}}",
"apihelp-help-example-query": "{{doc-apihelp-example|help}}",
wfGetLBFactory()->commitMasterChanges();
} catch ( Exception $e ) {
// We don't want exceptions thrown during deferred updates to
-		// be reported to the user since the output is already sent.
-		// Instead we just log them.
+		// be reported to the user since the output is already sent.
if ( !$e instanceof ErrorPageError ) {
MWExceptionHandler::logException( $e );
}
+ // Make sure incomplete transactions are not committed and end any
+ // open atomic sections so that other DB updates have a chance to run
+ wfGetLBFactory()->rollbackMasterChanges();
}
}
/**
* Purge the file object/metadata cache
*/
- function invalidateCache() {
+ public function invalidateCache() {
$key = $this->getCacheKey();
if ( !$key ) {
return;
* @param null|User $user
* @return bool
*/
- function recordUpload2( $oldver, $comment, $pageText, $props = false, $timestamp = false,
- $user = null
+ function recordUpload2(
+ $oldver, $comment, $pageText, $props = false, $timestamp = false, $user = null
) {
-
if ( is_null( $user ) ) {
global $wgUser;
$user = $wgUser;
}
$dbw = $this->repo->getMasterDB();
- $dbw->begin( __METHOD__ );
-
- if ( !$props ) {
- $props = $this->repo->getFileProps( $this->getVirtualUrl() );
- }
# Imports or such might force a certain timestamp; otherwise we generate
# it and can fudge it slightly to keep (name,timestamp) unique on re-upload.
$allowTimeKludge = false;
}
+ $props = $props ?: $this->repo->getFileProps( $this->getVirtualUrl() );
$props['description'] = $comment;
$props['user'] = $user->getId();
$props['user_text'] = $user->getName();
# Fail now if the file isn't there
if ( !$this->fileExists ) {
wfDebug( __METHOD__ . ": File " . $this->getRel() . " went missing!\n" );
- $dbw->rollback( __METHOD__ );
return false;
}
- $reupload = false;
+ $dbw->startAtomic( __METHOD__ );
# Test to see if the row exists using INSERT IGNORE
# This avoids race conditions by locking the row until the commit, and also
__METHOD__,
'IGNORE'
);
- if ( $dbw->affectedRows() == 0 ) {
+
+ $reupload = ( $dbw->affectedRows() == 0 );
+ if ( $reupload ) {
if ( $allowTimeKludge ) {
# Use LOCK IN SHARE MODE to ignore any transaction snapshotting
- $ltimestamp = $dbw->selectField( 'image', 'img_timestamp',
+ $ltimestamp = $dbw->selectField(
+ 'image',
+ 'img_timestamp',
array( 'img_name' => $this->getName() ),
__METHOD__,
- array( 'LOCK IN SHARE MODE' ) );
+ array( 'LOCK IN SHARE MODE' )
+ );
$lUnixtime = $ltimestamp ? wfTimestamp( TS_UNIX, $ltimestamp ) : false;
# Avoid a timestamp that is not newer than the last version
# TODO: the image/oldimage tables should be like page/revision with an ID field
# version of the file was broken. Allow registration of the new
# version to continue anyway, because that's better than having
# an image that's not fixable by user operations.
-
- $reupload = true;
# Collision, this is an update of a file
# Insert previous contents into oldimage
$dbw->insertSelect( 'oldimage', 'image',
# Update the current image row
$dbw->update( 'image',
- array( /* SET */
+ array(
'img_size' => $this->size,
'img_width' => intval( $this->width ),
'img_height' => intval( $this->height ),
array( 'img_name' => $this->getName() ),
__METHOD__
);
- } else {
- # This is a new file, so update the image count
- DeferredUpdates::addUpdate( SiteStatsUpdate::factory( array( 'images' => 1 ) ) );
}
$descTitle = $this->getTitle();
$wikiPage = new WikiFilePage( $descTitle );
$wikiPage->setFile( $this );
- # Add the log entry
- $action = $reupload ? 'overwrite' : 'upload';
-
- $logEntry = new ManualLogEntry( 'upload', $action );
+ // Add the log entry...
+ $logEntry = new ManualLogEntry( 'upload', $reupload ? 'overwrite' : 'upload' );
$logEntry->setPerformer( $user );
$logEntry->setComment( $comment );
$logEntry->setTarget( $descTitle );
-
		// Allow people using the API to associate log entries with the upload.
		// The log has a timestamp, but it is sometimes different from the upload timestamp.
$logEntry->setParameters(
// now and wait until the page exists.
$logId = $logEntry->insert();
- $exists = $descTitle->exists();
- if ( $exists ) {
- // Page exists, do RC entry now (otherwise we wait for later).
+ if ( $descTitle->exists() ) {
+ // Page exists, do RC entry now (otherwise we wait for later)
$logEntry->publish( $logId );
- }
-
- if ( $exists ) {
- # Create a null revision
- $latest = $descTitle->getLatestRevID();
// Use own context to get the action text in content language
$formatter = LogFormatter::newFromEntry( $logEntry );
$formatter->setContext( RequestContext::newExtraneousContext( $descTitle ) );
false,
$user
);
- if ( !is_null( $nullRevision ) ) {
+ if ( $nullRevision ) {
$nullRevision->insertOn( $dbw );
-
- Hooks::run( 'NewRevisionFromEditComplete', array( $wikiPage, $nullRevision, $latest, $user ) );
+ Hooks::run(
+ 'NewRevisionFromEditComplete',
+ array( $wikiPage, $nullRevision, $nullRevision->getParentId(), $user )
+ );
$wikiPage->updateRevisionOn( $dbw, $nullRevision );
}
- }
-
- # Commit the transaction now, in case something goes wrong later
- # The most important thing is that files don't get lost, especially archives
- # NOTE: once we have support for nested transactions, the commit may be moved
- # to after $wikiPage->doEdit has been called.
- $dbw->commit( __METHOD__ );
-
- # Update memcache after the commit
- $this->invalidateCache();
- if ( $exists ) {
- # Invalidate the cache for the description page
- $descTitle->invalidateCache();
- $descTitle->purgeSquid();
+ $newPageContent = null;
} else {
- # New file; create the description page.
- # There's already a log entry, so don't make a second RC entry
- # Squid and file cache for the description page are purged by doEditContent.
- $content = ContentHandler::makeContent( $pageText, $descTitle );
- $status = $wikiPage->doEditContent(
- $content,
- $comment,
- EDIT_NEW | EDIT_SUPPRESS_RC,
- false,
- $user
- );
-
- // Now that the page exists, make an RC entry.
- // This relies on the resetArticleID() call in WikiPage::insertOn(),
- // which is triggered on $descTitle by doEditContent() above.
- $logEntry->publish( $logId );
- if ( isset( $status->value['revision'] ) ) {
- $dbw->update( 'logging',
- array( 'log_page' => $status->value['revision']->getPage() ),
- array( 'log_id' => $logId ),
- __METHOD__
- );
- }
+ // Make the description page and RC log entry post-commit
+ $newPageContent = ContentHandler::makeContent( $pageText, $descTitle );
}
+ # Defer purges, page creation, and link updates in case they error out.
+ # The most important thing is that files and the DB registry stay synced.
+ $dbw->endAtomic( __METHOD__ );
+
# Do some cache purges after final commit so that:
# a) Changes are more likely to be seen post-purge
# b) They won't cause rollback of the log publish/update above
$that = $this;
- $dbw->onTransactionIdle( function () use ( $that, $reupload, $descTitle ) {
+ $dbw->onTransactionIdle( function () use (
+ $that, $reupload, $wikiPage, $newPageContent, $comment, $user, $logEntry, $logId
+ ) {
+ # Update memcache after the commit
+ $that->invalidateCache();
+
+ if ( $newPageContent ) {
+ # New file page; create the description page.
+ # There's already a log entry, so don't make a second RC entry
+ # Squid and file cache for the description page are purged by doEditContent.
+ $status = $wikiPage->doEditContent(
+ $newPageContent,
+ $comment,
+ EDIT_NEW | EDIT_SUPPRESS_RC,
+ false,
+ $user
+ );
+
+ // Now that the page exists, make an RC entry.
+ // This relies on the resetArticleID() call in WikiPage::insertOn(),
+ // which is triggered on $descTitle by doEditContent() above.
+ $logEntry->publish( $logId );
+ if ( isset( $status->value['revision'] ) ) {
+ /** @var $rev Revision */
+ $rev = $status->value['revision'];
+ $that->getRepo()->getMasterDB()->update(
+ 'logging',
+ array( 'log_page' => $rev->getPage() ),
+ array( 'log_id' => $logId ),
+ __METHOD__
+ );
+ }
+ } else {
+ # Existing file page: invalidate description page cache
+ $wikiPage->getTitle()->invalidateCache();
+ $wikiPage->getTitle()->purgeSquid();
+ }
+
# Run hook for other updates (typically more cache purging)
- Hooks::run( 'FileUpload', array( $that, $reupload, $descTitle->exists() ) );
+ Hooks::run( 'FileUpload', array( $that, $reupload, !$newPageContent ) );
if ( $reupload ) {
# Delete old thumbnails
}
} );
+ if ( !$reupload ) {
+ # This is a new file, so update the image count
+ DeferredUpdates::addUpdate( SiteStatsUpdate::factory( array( 'images' => 1 ) ) );
+ }
+
# Invalidate cache for all pages using this file
DeferredUpdates::addUpdate( new HTMLCacheUpdate( $this->getTitle(), 'imagelinks' ) );
$form = new HTMLForm( $fields, $this->getContext() );
$form->setAction( $this->getPageTitle( 'create' )->getLocalURL() );
$form->setWrapperLegendMsg( 'tags-create-heading' );
- $form->setHeaderText( $this->msg( 'tags-create-explanation' )->plain() );
+ $form->setHeaderText( $this->msg( 'tags-create-explanation' )->parseAsBlock() );
$form->setSubmitCallback( array( $this, 'processCreateTagForm' ) );
$form->setSubmitTextMsg( 'tags-create-submit' );
$form->show();
"htmlform-cloner-required": "At least one value is required.",
"htmlform-title-badnamespace": "[[:$1]] is not in the \"{{ns:$2}}\" namespace.",
"htmlform-title-not-creatable": "\"$1\" is not a creatable page title",
- "htmlform-title-not-exists": "[[:$1]] does not exist.",
+ "htmlform-title-not-exists": "$1 does not exist.",
"htmlform-user-not-exists": "<strong>$1</strong> does not exist.",
"htmlform-user-not-valid": "<strong>$1</strong> isn't a valid username.",
"rawmessage": "$1",
$dbr->selectSQLText(
'image',
array( 'name' => 'img_name' ),
- array( 'img_name' => $imgIN )
+ $imgIN ? array( 'img_name' => $imgIN ) : '1=0'
),
$dbr->selectSQLText(
'oldimage',
array( 'name' => 'oi_archive_name' ),
- $dbr->makeList( $oiWheres, LIST_OR )
+ $oiWheres ? $dbr->makeList( $oiWheres, LIST_OR ) : '1=0'
)
),
true // UNION ALL (performance)
} else {
$file = $repo->newFile( $name );
}
- $this->output( $name . "\n" . $file->getUrl() . "\n\n" );
+ $this->output( $name . "\n" . $file->getCanonicalUrl() . "\n\n" );
}
}
}
*/
ForeignStructuredUpload.prototype.getText = function () {
return (
+ '== {{int:filedesc}} ==\n' +
'{{' +
this.getTemplateName() +
'\n|description=' +
'\n|author=' +
this.getUser() +
'\n}}\n\n' +
+ '== {{int:license-header}} ==\n' +
this.getLicense() +
'\n\n' +
this.getCategories()
$el.attr( 'href', textify( arg ) );
}
}
- return appendWithoutParsing( $el, contents );
+ return appendWithoutParsing( $el.empty(), contents );
},
/**
assert.equal( logSpy.callCount, 2, 'mw.log.warn calls' );
} );
- QUnit.test( 'Integration', 4, function ( assert ) {
- var expected, logSpy;
+ QUnit.test( 'Integration', 5, function ( assert ) {
+ var expected, logSpy, msg;
expected = '<b><a title="Bold" href="/wiki/Bold">Bold</a>!</b>';
mw.messages.set( 'integration-test', '<b>[[Bold]]!</b>' );
expected,
'jQuery plugin $.fn.msg() works correctly'
);
+
+ mw.messages.set( 'integration-test-extlink', '[$1 Link]' );
+ msg = mw.message(
+ 'integration-test-extlink',
+ $( '<a>' ).attr( 'href', 'http://example.com/' )
+ );
+ msg.parse(); // Not a no-op
+ assert.equal(
+ msg.parse(),
+ '<a href="http://example.com/">Link</a>',
+ 'Calling .parse() multiple times does not duplicate link contents'
+ );
} );
}( mediaWiki, jQuery ) );