As was pointed out, it's probably better anyway that a vandal noindex a page, so that nothing turns up in searches if a search engine happens to crawl it at that exact moment, than that the vandal replace the page with "ARTICLE SUBJECT IS A POOPY-HEAD" and have *that* turn up in searches instead. :) At any rate, this should solve the issue.
=== Configuration changes in 1.14 ===
-* (bug 8068) $wgAllowUserRobotsControl (false by default) can enable new
- __INDEX__ and __NOINDEX__ magic words, which allow user control of search en-
- gine indexing on a per-article basis.
+* $wgExemptFromUserRobotsControl is an array of namespaces to be exempt from
+ the effect of the new __INDEX__/__NOINDEX__ magic words. (Default: null,
+ which exempts all content namespaces.)
=== New features in 1.14 ===
-None yet
+* (bug 8068) New __INDEX__ and __NOINDEX__ magic words allow user control of
+ search engine indexing on a per-article basis.
=== Bug fixes in 1.14 ===
$wgArticleRobotPolicies = array();
/**
- * Set to true to allow the __INDEX__ and __NOINDEX__ magic words to function.
- * These will allow users to control whether any page they can edit is indexed
- * by search engines.
+ * An array of namespace keys in which the __INDEX__/__NOINDEX__ magic words
+ * will not function, so users can't decide whether pages in those namespaces
+ * are indexed by search engines. If set to null, defaults to $wgContentNamespaces.
+ * Example:
+ * $wgExemptFromUserRobotsControl = array( NS_MAIN, NS_TALK, NS_PROJECT );
*/
-$wgAllowUserRobotsControl = false;
+$wgExemptFromUserRobotsControl = null;
/**
* Specifies the minimal length of a user password. If set to
* @param ParserOutput object &$parserOutput
*/
public function addParserOutputNoText( &$parserOutput ) {
- global $wgAllowUserRobotsControl;
+ global $wgTitle, $wgExemptFromUserRobotsControl, $wgContentNamespaces;
$this->mLanguageLinks += $parserOutput->getLanguageLinks();
$this->addCategoryLinks( $parserOutput->getCategories() );
$this->mNewSectionLink = $parserOutput->getNewSection();
- if( $wgAllowUserRobotsControl ) {
- # FIXME: This probably overrides $wgArticleRobotPolicies, is that wise?
+
+ if( is_null( $wgExemptFromUserRobotsControl ) ) {
+ $bannedNamespaces = $wgContentNamespaces;
+ } else {
+ $bannedNamespaces = $wgExemptFromUserRobotsControl;
+ }
+ if( !in_array( $wgTitle->getNamespace(), $bannedNamespaces ) ) {
+ # FIXME (bug 14900): This overrides $wgArticleRobotPolicies, and it
+ # shouldn't
$this->setIndexPolicy( $parserOutput->getIndexPolicy() );
}
+
$this->addKeywords( $parserOutput );
$this->mParseWarnings = $parserOutput->getWarnings();
if ( $parserOutput->getCacheTime() == -1 ) {