3 * Helper class for the index.php entry point.
5 * This program is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License as published by
7 * the Free Software Foundation; either version 2 of the License, or
8 * (at your option) any later version.
10 * This program is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 * GNU General Public License for more details.
15 * You should have received a copy of the GNU General Public License along
16 * with this program; if not, write to the Free Software Foundation, Inc.,
17 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
18 * http://www.gnu.org/copyleft/gpl.html
23 use MediaWiki\Logger\LoggerFactory
;
24 use Psr\Log\LoggerInterface
;
25 use MediaWiki\MediaWikiServices
;
26 use Wikimedia\Rdbms\ILBFactory
;
27 use Wikimedia\Rdbms\ChronologyProtector
;
28 use Wikimedia\Rdbms\DBConnectionError
;
29 use Liuggio\StatsdClient\Sender\SocketSender
;
32 * The MediaWiki class is the helper class for the index.php entry point.
46 * @var string Cache what action this request is
// NOTE(review): this extraction is garbled — statements are split across lines,
// original line numbers are fused into the content, and some lines (e.g. 52, 54,
// 56-57) are missing entirely. Code below is preserved byte-for-byte.
51 * @param IContextSource|null $context
// Constructor: binds the helper to a request context and caches its config.
53 public function __construct( IContextSource
$context = null ) {
// Falls back to the global main RequestContext when no context was supplied
// (the guard condition on the missing line 54 is not visible here — TODO confirm).
55 $context = RequestContext
::getMain();
58 $this->context
= $context;
// Site configuration is taken from the context, not read globally.
59 $this->config
= $context->getConfig();
// NOTE(review): garbled extraction — interior lines are missing (fused numbering
// skips, e.g. 73, 79, 82, 88, 92, 94, 96-98); code preserved byte-for-byte.
63 * Parse the request to get the Title object
65 * @throws MalformedTitleException If a title has been provided by the user, but is invalid.
66 * @return Title Title object to be $wgTitle
// Derives the Title from request parameters: search, curid, title, oldid/diff,
// with the main page as the final fallback.
68 private function parseTitle() {
69 $request = $this->context
->getRequest();
70 $curid = $request->getInt( 'curid' );
71 $title = $request->getVal( 'title' );
72 $action = $request->getVal( 'action' );
74 if ( $request->getCheck( 'search' ) ) {
75 // Compatibility with old search URLs which didn't use Special:Search
76 // Just check for presence here, so blank requests still
77 // show the search page when using ugly URLs (T10054).
78 $ret = SpecialPage
::getTitleFor( 'Search' );
// Branch below resolves curid to a Title (its elseif header is on a missing line).
80 // URLs like this are generated by RC, because rc_title isn't always accurate
81 $ret = Title
::newFromID( $curid );
83 $ret = Title
::newFromURL( $title );
84 // Alias NS_MEDIA page URLs to NS_FILE...we only use NS_MEDIA
85 // in wikitext links to tell Parser to make a direct file link
86 if ( !is_null( $ret ) && $ret->getNamespace() == NS_MEDIA
) {
87 $ret = Title
::makeTitle( NS_FILE
, $ret->getDBkey() );
89 $contLang = MediaWikiServices
::getInstance()->getContentLanguage();
90 // Check variant links so that interwiki links don't have to worry
91 // about the possible different language variants
// Only consult variants for nonexistent pages (getArticleID() == 0) — see condition below.
93 $contLang->hasVariants() && !is_null( $ret ) && $ret->getArticleID() == 0
95 $contLang->findVariantLink( $title, $ret );
99 // If title is not provided, always allow oldid and diff to set the title.
100 // If title is provided, allow oldid and diff to override the title, unless
101 // we are talking about a special page which might use these parameters for
103 if ( $ret === null ||
!$ret->isSpecialPage() ) {
104 // We can have urls with just ?diff=,?oldid= or even just ?diff=
105 $oldid = $request->getInt( 'oldid' );
// Short ternary: diff only consulted when oldid is 0/absent.
106 $oldid = $oldid ?
: $request->getInt( 'diff' );
107 // Allow oldid to override a changed or missing title
109 $rev = Revision
::newFromId( $oldid );
110 $ret = $rev ?
$rev->getTitle() : $ret;
114 // Use the main page as default title if nothing else has been provided
// Extra guard clauses: an explicit 'delete' action or a 'curid' parameter
// must not silently fall through to the main page.
116 && strval( $title ) === ''
117 && !$request->getCheck( 'curid' )
118 && $action !== 'delete'
120 $ret = Title
::newMainPage();
123 if ( $ret === null ||
( $ret->getDBkey() == '' && !$ret->isExternal() ) ) {
124 // If we get here, we definitely don't have a valid title; throw an exception.
125 // Try to get detailed invalid title exception first, fall back to MalformedTitleException.
126 Title
::newFromTextThrow( $title );
127 throw new MalformedTitleException( 'badtitletext', $title );
// NOTE(review): garbled extraction — some lines are missing; code preserved byte-for-byte.
134 * Get the Title object that we'll be acting on, as specified in the WebRequest
// Lazily parses and caches the title on the context; on a malformed title
// the context title is set to Special:Badtitle instead of propagating the error.
137 public function getTitle() {
138 if ( !$this->context
->hasTitle() ) {
140 $this->context
->setTitle( $this->parseTitle() );
141 } catch ( MalformedTitleException
$ex ) {
142 $this->context
->setTitle( SpecialPage
::getTitleFor( 'Badtitle' ) );
145 return $this->context
->getTitle();
// NOTE(review): garbled extraction; code preserved byte-for-byte.
149 * Returns the name of the action that will be executed.
151 * @return string Action
// Memoizes the resolved action name in $this->action; Action::getActionName()
// does the actual resolution from the context.
153 public function getAction() {
154 if ( $this->action
=== null ) {
155 $this->action
= Action
::getActionName( $this->context
);
158 return $this->action
;
// NOTE(review): garbled extraction — many interior lines (if/try openers and
// closing braces) are missing; code preserved byte-for-byte.
162 * Performs the request.
165 * - local interwiki redirects
170 * @throws MWException|PermissionsError|BadTitleError|HttpError
// Main dispatch: validates the title, checks read permission, handles
// interwiki and normalisation redirects, then runs a special page or an
// article action.
173 private function performRequest() {
176 $request = $this->context
->getRequest();
// $requestTitle keeps the pre-redirect title for later use (performAction).
177 $requestTitle = $title = $this->context
->getTitle();
178 $output = $this->context
->getOutput();
179 $user = $this->context
->getUser();
181 if ( $request->getVal( 'printable' ) === 'yes' ) {
182 $output->setPrintable();
185 $unused = null; // To pass it by reference
186 Hooks
::run( 'BeforeInitialize', [ &$title, &$unused, &$output, &$user, $request, $this ] );
188 // Invalid titles. T23776: The interwikis must redirect even if the page name is empty.
189 if ( is_null( $title ) ||
( $title->getDBkey() == '' && !$title->isExternal() )
190 ||
$title->isSpecial( 'Badtitle' )
// Reset to Special:Badtitle before erroring so skins can't leak the input title.
192 $this->context
->setTitle( SpecialPage
::getTitleFor( 'Badtitle' ) );
195 } catch ( MalformedTitleException
$ex ) {
196 throw new BadTitleError( $ex );
198 throw new BadTitleError();
201 // Check user's permissions to read this page.
202 // We have to check here to catch special pages etc.
203 // We will check again in Article::view().
204 $permErrors = $title->isSpecial( 'RunJobs' )
205 ?
[] // relies on HMAC key signature alone
206 : $title->getUserPermissionsErrors( 'read', $user );
207 if ( count( $permErrors ) ) {
208 // T34276: allowing the skin to generate output with $wgTitle or
209 // $this->context->title set to the input title would allow anonymous users to
210 // determine whether a page exists, potentially leaking private data. In fact, the
211 // curid and oldid request parameters would allow page titles to be enumerated even
212 // when they are not guessable. So we reset the title to Special:Badtitle before the
213 // permissions error is displayed.
215 // The skin mostly uses $this->context->getTitle() these days, but some extensions
216 // still use $wgTitle.
217 $badTitle = SpecialPage
::getTitleFor( 'Badtitle' );
218 $this->context
->setTitle( $badTitle );
// Presumably inside a closure/global scope for $wgTitle — the enclosing
// lines are missing from this extraction; TODO confirm.
219 $wgTitle = $badTitle;
221 throw new PermissionsError( 'read', $permErrors );
224 // Interwiki redirects
225 if ( $title->isExternal() ) {
226 $rdfrom = $request->getVal( 'rdfrom' );
228 $url = $title->getFullURL( [ 'rdfrom' => $rdfrom ] );
230 $query = $request->getValues();
231 unset( $query['title'] );
232 $url = $title->getFullURL( $query );
234 // Check for a redirect loop
235 if ( !preg_match( '/^' . preg_quote( $this->config
->get( 'Server' ), '/' ) . '/', $url )
238 // 301 so google et al report the target as the actual url.
239 $output->redirect( $url, 301 );
241 $this->context
->setTitle( SpecialPage
::getTitleFor( 'Badtitle' ) );
244 } catch ( MalformedTitleException
$ex ) {
245 throw new BadTitleError( $ex );
247 throw new BadTitleError();
249 // Handle any other redirects.
250 // Redirect loops, titleless URL, $wgUsePathInfo URLs, and URLs with a variant
251 } elseif ( !$this->tryNormaliseRedirect( $title ) ) {
252 // Prevent information leak via Special:MyPage et al (T109724)
253 $spFactory = MediaWikiServices
::getInstance()->getSpecialPageFactory();
254 if ( $title->isSpecialPage() ) {
255 $specialPage = $spFactory->getPage( $title->getDBkey() );
256 if ( $specialPage instanceof RedirectSpecialPage
) {
257 $specialPage->setContext( $this->context
);
258 if ( $this->config
->get( 'HideIdentifiableRedirects' )
259 && $specialPage->personallyIdentifiableTarget()
261 list( , $subpage ) = $spFactory->resolveAlias( $title->getDBkey() );
262 $target = $specialPage->getRedirect( $subpage );
263 // Target can also be true. We let that case fall through to normal processing.
264 if ( $target instanceof Title
) {
265 if ( $target->isExternal() ) {
266 // Handle interwiki redirects
267 $target = SpecialPage
::getTitleFor(
269 'force/' . $target->getPrefixedDBkey()
273 $query = $specialPage->getRedirectQuery( $subpage ) ?
: [];
// Swap in a derivative request carrying the redirect target's query.
274 $request = new DerivativeRequest( $this->context
->getRequest(), $query );
275 $request->setRequestURL( $this->context
->getRequest()->getRequestURL() );
276 $this->context
->setRequest( $request );
277 // Do not varnish cache these. May vary even for anons
278 $this->context
->getOutput()->lowerCdnMaxage( 0 );
279 $this->context
->setTitle( $target );
281 // Reset action type cache. (Special pages have only view)
282 $this->action
= null;
284 $output->addJsConfigVars( [
285 'wgInternalRedirectTargetUrl' => $target->getFullURL( $query ),
287 $output->addModules( 'mediawiki.action.view.redirect' );
293 // Special pages ($title may have changed since if statement above)
294 if ( $title->isSpecialPage() ) {
295 // Actions that need to be made when we have a special pages
296 $spFactory->executePath( $title, $this->context
);
298 // ...otherwise treat it as an article view. The article
299 // may still be a wikipage redirect to another article or URL.
300 $article = $this->initializeArticle();
301 if ( is_object( $article ) ) {
302 $this->performAction( $article, $requestTitle );
303 } elseif ( is_string( $article ) ) {
304 $output->redirect( $article );
306 throw new MWException( "Shouldn't happen: MediaWiki::initializeArticle()"
307 . " returned neither an object nor a URL" );
// NOTE(review): garbled extraction — interior lines missing; code preserved byte-for-byte.
314 * Handle redirects for uncanonical title requests.
319 * - $wgUsePathInfo URLs.
320 * - URLs with a variant.
321 * - Other non-standard URLs (as long as they have no extra query parameters).
324 * - Normalise title values:
325 * /wiki/Foo%20Bar -> /wiki/Foo_Bar
326 * - Normalise empty title:
327 * /wiki/ -> /wiki/Main
328 * /w/index.php?title= -> /wiki/Main
329 * - Don't redirect anything with query parameters other than 'title' or 'action=view'.
331 * @param Title $title
332 * @return bool True if a redirect was set.
335 private function tryNormaliseRedirect( Title
$title ) {
336 $request = $this->context
->getRequest();
337 $output = $this->context
->getOutput();
// Bail out for anything non-canonicalisable: non-view actions, POSTs,
// already-canonical titles, extra query params, or a hook veto.
339 if ( $request->getVal( 'action', 'view' ) != 'view'
340 ||
$request->wasPosted()
341 ||
( $request->getCheck( 'title' )
342 && $title->getPrefixedDBkey() == $request->getVal( 'title' ) )
343 ||
count( $request->getValueNames( [ 'action', 'title' ] ) )
344 ||
!Hooks
::run( 'TestCanonicalRedirect', [ $request, $title, $output ] )
349 if ( $this->config
->get( 'MainPageIsDomainRoot' ) && $request->getRequestURL() === '/' ) {
// Canonicalise special-page aliases to their primary name.
353 if ( $title->isSpecialPage() ) {
354 list( $name, $subpage ) = MediaWikiServices
::getInstance()->getSpecialPageFactory()->
355 resolveAlias( $title->getDBkey() );
357 $title = SpecialPage
::getTitleFor( $name, $subpage );
360 // Redirect to canonical url, make it a 301 to allow caching
361 $targetUrl = wfExpandUrl( $title->getFullURL(), PROTO_CURRENT
);
// Self-redirect means misconfiguration: fail loudly with a diagnostic 500.
362 if ( $targetUrl == $request->getFullRequestURL() ) {
363 $message = "Redirect loop detected!\n\n" .
364 "This means the wiki got confused about what page was " .
365 "requested; this sometimes happens when moving a wiki " .
366 "to a new server or changing the server configuration.\n\n";
368 if ( $this->config
->get( 'UsePathInfo' ) ) {
369 $message .= "The wiki is trying to interpret the page " .
370 "title from the URL path portion (PATH_INFO), which " .
371 "sometimes fails depending on the web server. Try " .
372 "setting \"\$wgUsePathInfo = false;\" in your " .
373 "LocalSettings.php, or check that \$wgArticlePath " .
376 $message .= "Your web server was detected as possibly not " .
377 "supporting URL path components (PATH_INFO) correctly; " .
378 "check your LocalSettings.php for a customized " .
379 "\$wgArticlePath setting and/or toggle \$wgUsePathInfo " .
382 throw new HttpError( 500, $message );
384 $output->setCdnMaxage( 1200 );
385 $output->redirect( $targetUrl, '301' );
// NOTE(review): garbled extraction — interior lines missing; code preserved byte-for-byte.
390 * Initialize the main Article object for "standard" actions (view, etc)
391 * Create an Article object for the page, following redirects if needed.
393 * @return Article|string An Article, or a string to redirect to another URL
395 private function initializeArticle() {
396 $title = $this->context
->getTitle();
397 if ( $this->context
->canUseWikiPage() ) {
398 // Try to use request context wiki page, as there
399 // is already data from db saved in per process
400 // cache there from this->getAction() call.
401 $page = $this->context
->getWikiPage();
403 // This case should not happen, but just in case.
404 // @TODO: remove this or use an exception
405 $page = WikiPage
::factory( $title );
406 $this->context
->setWikiPage( $page );
407 wfWarn( "RequestContext::canUseWikiPage() returned false" );
410 // Make GUI wrapper for the WikiPage
411 $article = Article
::newFromWikiPage( $page, $this->context
);
413 // Skip some unnecessary code if the content model doesn't support redirects
414 if ( !ContentHandler
::getForTitle( $title )->supportsRedirects() ) {
418 $request = $this->context
->getRequest();
420 // Namespace might change when using redirects
421 // Check for redirects ...
422 $action = $request->getVal( 'action', 'view' );
423 $file = ( $page instanceof WikiFilePage
) ?
$page->getFile() : null;
424 if ( ( $action == 'view' ||
$action == 'render' ) // ... for actions that show content
425 && !$request->getVal( 'oldid' ) // ... and are not old revisions
426 && !$request->getVal( 'diff' ) // ... and not when showing diff
427 && $request->getVal( 'redirect' ) != 'no' // ... unless explicitly told not to
428 // ... and the article is not a non-redirect image page with associated file
429 && !( is_object( $file ) && $file->exists() && !$file->getRedirected() )
431 // Give extensions a change to ignore/handle redirects as needed
432 $ignoreRedirect = $target = false;
434 Hooks
::run( 'InitializeArticleMaybeRedirect',
435 [ &$title, &$request, &$ignoreRedirect, &$target, &$article ] );
436 $page = $article->getPage(); // reflect any hook changes
438 // Follow redirects only for... redirects.
439 // If $target is set, then a hook wanted to redirect.
440 if ( !$ignoreRedirect && ( $target ||
$page->isRedirect() ) ) {
441 // Is the target already set by an extension?
442 $target = $target ?
: $page->followRedirect();
// A string target is a hard (URL) redirect, honoured unless disabled by config.
443 if ( is_string( $target ) && !$this->config
->get( 'DisableHardRedirects' ) ) {
444 // we'll need to redirect
447 if ( is_object( $target ) ) {
448 // Rewrite environment to redirected article
449 $rpage = WikiPage
::factory( $target );
450 $rpage->loadPageData();
451 if ( $rpage->exists() ||
( is_object( $file ) && !$file->isLocal() ) ) {
452 $rarticle = Article
::newFromWikiPage( $rpage, $this->context
);
453 $rarticle->setRedirectedFrom( $title );
455 $article = $rarticle;
456 $this->context
->setTitle( $target );
457 $this->context
->setWikiPage( $article->getPage() );
461 // Article may have been changed by hook
462 $this->context
->setTitle( $article->getTitle() );
463 $this->context
->setWikiPage( $article->getPage() );
// NOTE(review): garbled extraction — interior lines missing; code preserved byte-for-byte.
471 * Perform one of the "standard" actions
474 * @param Title $requestTitle The original title, before any redirects were applied
// Resolves the Action object for the request and executes it; falls through
// to a 404 "nosuchaction" page when the action is unknown.
476 private function performAction( Page
$page, Title
$requestTitle ) {
477 $request = $this->context
->getRequest();
478 $output = $this->context
->getOutput();
479 $title = $this->context
->getTitle();
480 $user = $this->context
->getUser();
// A hook returning false aborts the standard action handling entirely.
482 if ( !Hooks
::run( 'MediaWikiPerformAction',
483 [ $output, $page, $title, $user, $request, $this ] )
488 $act = $this->getAction();
489 $action = Action
::factory( $act, $page, $this->context
);
491 if ( $action instanceof Action
) {
492 // Narrow DB query expectations for this HTTP request
493 $trxLimits = $this->config
->get( 'TrxProfilerLimits' );
494 $trxProfiler = Profiler
::instance()->getTransactionProfiler();
// POSTs to non-writing actions get the stricter non-write expectations.
495 if ( $request->wasPosted() && !$action->doesWrites() ) {
496 $trxProfiler->setExpectations( $trxLimits['POST-nonwrite'], __METHOD__
);
497 $request->markAsSafeRequest();
500 # Let CDN cache things if we can purge them.
501 if ( $this->config
->get( 'UseCdn' ) &&
503 // Use PROTO_INTERNAL because that's what getCdnUrls() uses
504 wfExpandUrl( $request->getRequestURL(), PROTO_INTERNAL
),
505 $requestTitle->getCdnUrls()
508 $output->setCdnMaxage( $this->config
->get( 'CdnMaxAge' ) );
515 // If we've not found out which action it is by now, it's unknown
516 $output->setStatusCode( 404 );
517 $output->showErrorPage( 'nosuchaction', 'nosuchactiontext' );
// NOTE(review): garbled extraction — the try opener and main() call lines are
// missing from this view; code preserved byte-for-byte.
521 * Run the current MediaWiki instance; index.php just calls this
// Top-level request driver: runs the request, converts exceptions into
// user-visible error handling, and always performs post-output shutdown.
523 public function run() {
525 $this->setDBProfilingAgent();
528 } catch ( ErrorPageError
$e ) {
529 // T64091: while exceptions are convenient to bubble up GUI errors,
530 // they are not internal application faults. As with normal requests, this
531 // should commit, print the output, do deferred updates, jobs, and profiling.
532 $this->doPreOutputCommit();
533 $e->report(); // display the GUI error
535 } catch ( Exception
$e ) {
536 $context = $this->context
;
537 $action = $context->getRequest()->getVal( 'action', 'view' );
// During DB outages, try serving a (possibly stale) file-cached copy of
// view/history pages rather than an error page.
539 $e instanceof DBConnectionError
&&
540 $context->hasTitle() &&
541 $context->getTitle()->canExist() &&
542 in_array( $action, [ 'view', 'history' ], true ) &&
543 HTMLFileCache
::useFileCache( $this->context
, HTMLFileCache
::MODE_OUTAGE
)
545 // Try to use any (even stale) file during outages...
546 $cache = new HTMLFileCache( $context->getTitle(), $action );
547 if ( $cache->isCached() ) {
548 $cache->loadFromFileCache( $context, HTMLFileCache
::MODE_OUTAGE
);
549 print MWExceptionRenderer
::getHTML( $e );
554 MWExceptionHandler
::handleException( $e );
555 } catch ( Error
$e ) {
556 // Type errors and such: at least handle it now and clean up the LBFactory state
557 MWExceptionHandler
::handleException( $e );
560 $this->doPostOutputShutdown( 'normal' );
// NOTE(review): garbled extraction; code preserved byte-for-byte.
// Tags DB connections with a (truncated) user name so individual requests can
// be identified in SHOW PROCESSLIST output.
563 private function setDBProfilingAgent() {
564 $services = MediaWikiServices
::getInstance();
565 // Add a comment for easy SHOW PROCESSLIST interpretation
566 $name = $this->context
->getUser()->getName();
// mb_* is used so multi-byte names are truncated on character boundaries.
567 $services->getDBLoadBalancerFactory()->setAgentName(
568 mb_strlen( $name ) > 15 ?
mb_substr( $name, 0, 15 ) . '...' : $name
// NOTE(review): garbled extraction; code preserved byte-for-byte.
573 * @see MediaWiki::preOutputCommit()
574 * @param callable|null $postCommitWork [default: null]
// Thin instance wrapper around the static preOutputCommit() using this
// object's own context.
577 public function doPreOutputCommit( callable
$postCommitWork = null ) {
578 self
::preOutputCommit( $this->context
, $postCommitWork );
// NOTE(review): garbled extraction — interior lines missing; code preserved byte-for-byte.
582 * This function commits all DB and session changes as needed *before* the
583 * client can receive a response (in case DB commit fails) and thus also before
584 * the response can trigger a subsequent related request by the client
586 * If there is a significant amount of content to flush, it can be done in $postCommitWork
588 * @param IContextSource $context
589 * @param callable|null $postCommitWork [default: null]
592 public static function preOutputCommit(
593 IContextSource
$context, callable
$postCommitWork = null
595 $config = $context->getConfig();
596 $request = $context->getRequest();
597 $output = $context->getOutput();
598 $lbFactory = MediaWikiServices
::getInstance()->getDBLoadBalancerFactory();
600 // Try to make sure that all RDBMs, session, and other storage updates complete
601 ignore_user_abort( true );
603 // Commit all RDBMs changes from the main transaction round
604 $lbFactory->commitMasterChanges(
606 // Abort if any transaction was too big
607 [ 'maxWriteDuration' => $config->get( 'MaxUserDBWriteDuration' ) ]
609 wfDebug( __METHOD__
. ': primary transaction round committed' );
611 // Run updates that need to block the client or affect output (this is the last chance)
612 DeferredUpdates
::doUpdates( 'run', DeferredUpdates
::PRESEND
);
613 wfDebug( __METHOD__
. ': pre-send deferred updates completed' );
614 // Persist the session to avoid race conditions on subsequent requests by the client
615 $request->getSession()->save(); // T214471
616 wfDebug( __METHOD__
. ': session changes committed' );
618 // Figure out whether to wait for DB replication now or to use some method that assures
619 // that subsequent requests by the client will use the DB replication positions written
620 // during the shutdown() call below; the later requires working around replication lag
621 // of the store containing DB replication positions (e.g. dynomite, mcrouter).
622 list( $flags, $strategy ) = self
::getChronProtStrategy( $lbFactory, $output );
623 // Record ChronologyProtector positions for DBs affected in this request at this point
// $cpIndex/$cpClientId are out-params filled by shutdown(); their declarations
// appear to be on missing lines — TODO confirm.
626 $lbFactory->shutdown( $flags, $postCommitWork, $cpIndex, $cpClientId );
627 wfDebug( __METHOD__
. ': LBFactory shutdown completed' );
// Headers/cookies are only possible if output isn't disabled and nothing was sent yet.
629 $allowHeaders = !( $output->isDisabled() ||
headers_sent() );
630 if ( $cpIndex > 0 ) {
631 if ( $allowHeaders ) {
633 $expires = $now + ChronologyProtector
::POSITION_COOKIE_TTL
;
634 $options = [ 'prefix' => '' ];
635 $value = $lbFactory::makeCookieValueFromCPIndex( $cpIndex, $now, $cpClientId );
636 $request->response()->setCookie( 'cpPosIndex', $value, $expires, $options );
// 'cookie+url' strategy: also append the position index to the redirect URL
// for cross-domain cases where the cookie may not be sent.
639 if ( $strategy === 'cookie+url' ) {
640 if ( $output->getRedirect() ) { // sanity
641 $safeUrl = $lbFactory->appendShutdownCPIndexAsQuery(
642 $output->getRedirect(),
645 $output->redirect( $safeUrl );
647 $e = new LogicException( "No redirect; cannot append cpPosIndex parameter." );
648 MWExceptionHandler
::logException( $e );
653 if ( $allowHeaders ) {
654 // Set a cookie to tell all CDN edge nodes to "stick" the user to the DC that
655 // handles this POST request (e.g. the "master" data center). Also have the user
656 // briefly bypass CDN so ChronologyProtector works for cacheable URLs.
657 if ( $request->wasPosted() && $lbFactory->hasOrMadeRecentMasterChanges() ) {
658 $expires = time() +
$config->get( 'DataCenterUpdateStickTTL' );
659 $options = [ 'prefix' => '' ];
660 $request->response()->setCookie( 'UseDC', 'master', $expires, $options );
661 $request->response()->setCookie( 'UseCDNCache', 'false', $expires, $options );
664 // Avoid letting a few seconds of replica DB lag cause a month of stale data.
665 // This logic is also intimately related to the value of $wgCdnReboundPurgeDelay.
666 if ( $lbFactory->laggedReplicaUsed() ) {
667 $maxAge = $config->get( 'CdnMaxageLagged' );
668 $output->lowerCdnMaxage( $maxAge );
669 $request->response()->header( "X-Database-Lagged: true" );
670 wfDebugLog( 'replication',
671 "Lagged DB used; CDN cache TTL limited to $maxAge seconds" );
674 // Avoid long-term cache pollution due to message cache rebuild timeouts (T133069)
675 if ( MessageCache
::singleton()->isDisabled() ) {
676 $maxAge = $config->get( 'CdnMaxageSubstitute' );
677 $output->lowerCdnMaxage( $maxAge );
678 $request->response()->header( "X-Response-Substitute: true" );
// NOTE(review): garbled extraction; code preserved byte-for-byte.
684 * @param ILBFactory $lbFactory
685 * @param OutputPage $output
// Chooses how ChronologyProtector DB positions are propagated to the client:
// synchronous update ('cookie+sync') by default, asynchronous for redirects
// ('cookie' same-domain, 'cookie+url' cross-domain).
688 private static function getChronProtStrategy( ILBFactory
$lbFactory, OutputPage
$output ) {
689 // Should the client return, their request should observe the new ChronologyProtector
690 // DB positions. This request might be on a foreign wiki domain, so synchronously update
691 // the DB positions in all datacenters to be safe. If this output is not a redirect,
692 // then OutputPage::output() will be relatively slow, meaning that running it in
693 // $postCommitWork should help mask the latency of those updates.
694 $flags = $lbFactory::SHUTDOWN_CHRONPROT_SYNC
;
695 $strategy = 'cookie+sync';
697 $allowHeaders = !( $output->isDisabled() ||
headers_sent() );
// INF: consider master changes made at any time in this request, not just recently.
698 if ( $output->getRedirect() && $lbFactory->hasOrMadeRecentMasterChanges( INF
) ) {
699 // OutputPage::output() will be fast, so $postCommitWork is useless for masking
700 // the latency of synchronously updating the DB positions in all datacenters.
701 // Try to make use of the time the client spends following redirects instead.
702 $domainDistance = self
::getUrlDomainDistance( $output->getRedirect() );
703 if ( $domainDistance === 'local' && $allowHeaders ) {
704 $flags = $lbFactory::SHUTDOWN_CHRONPROT_ASYNC
;
705 $strategy = 'cookie'; // use same-domain cookie and keep the URL uncluttered
706 } elseif ( $domainDistance === 'remote' ) {
707 $flags = $lbFactory::SHUTDOWN_CHRONPROT_ASYNC
;
708 $strategy = 'cookie+url'; // cross-domain cookie might not work
712 return [ $flags, $strategy ];
// NOTE(review): garbled extraction — the final 'external' return line is
// missing from this view; code preserved byte-for-byte.
717 * @return string Either "local", "remote" if in the farm, "external" otherwise
// Classifies a URL relative to the wiki farm via WikiMap lookup.
719 private static function getUrlDomainDistance( $url ) {
720 $clusterWiki = WikiMap
::getWikiFromUrl( $url );
721 if ( WikiMap
::isCurrentWikiId( $clusterWiki ) ) {
722 return 'local'; // the current wiki
724 if ( $clusterWiki !== false ) {
725 return 'remote'; // another wiki in this cluster/farm
// NOTE(review): garbled extraction — interior lines missing; code preserved byte-for-byte.
732 * This function does work that can be done *after* the
733 * user gets the HTTP response so they don't block on it
735 * This manages deferred updates, job insertion,
736 * final commit, and the logging of profiling data
738 * @param string $mode Use 'fast' to always skip job running
741 public function doPostOutputShutdown( $mode = 'normal' ) {
742 // Record backend request timing
743 $timing = $this->context
->getTiming();
744 $timing->mark( 'requestShutdown' );
746 // Perform the last synchronous operations...
748 // Show visible profiling data if enabled (which cannot be post-send)
749 Profiler
::instance()->logDataPageOutputOnly();
750 } catch ( Exception
$e ) {
751 // An error may already have been shown in run(), so just log it to be safe
752 MWExceptionHandler
::logException( $e );
755 // Disable WebResponse setters for post-send processing (T191537).
756 WebResponse
::disableForPostSend();
758 $blocksHttpClient = true;
759 // Defer everything else if possible...
// Closure captures $blocksHttpClient by reference so the flag set below
// (when a post-send mechanism exists) is observed at call time.
760 $callback = function () use ( $mode, &$blocksHttpClient ) {
762 $this->restInPeace( $mode, $blocksHttpClient );
763 } catch ( Exception
$e ) {
764 // If this is post-send, then displaying errors can cause broken HTML
765 MWExceptionHandler
::rollbackMasterChangesAndLog( $e );
// Prefer HHVM's register_postsend_function, then PHP-FPM's
// fastcgi_finish_request; otherwise the callback blocks the client.
769 if ( function_exists( 'register_postsend_function' ) ) {
770 // https://github.com/facebook/hhvm/issues/1230
771 register_postsend_function( $callback );
772 /** @noinspection PhpUnusedLocalVariableInspection */
773 $blocksHttpClient = false;
775 if ( function_exists( 'fastcgi_finish_request' ) ) {
776 fastcgi_finish_request();
777 /** @noinspection PhpUnusedLocalVariableInspection */
778 $blocksHttpClient = false;
780 // Either all DB and deferred updates should happen or none.
781 // The latter should not be cancelled due to client disconnect.
782 ignore_user_abort( true );
// NOTE(review): garbled extraction — many interior lines (conditions, braces,
// wfEntryPointCheck-era setup) are missing; code preserved byte-for-byte.
// Core request pipeline: AJAX dispatch, title/action resolution, transaction
// profiling expectations, HTTPS redirect, file-cache fast path, then
// performRequest() and pre-output commit.
789 private function main() {
792 $output = $this->context
->getOutput();
793 $request = $this->context
->getRequest();
795 // Send Ajax requests to the Ajax dispatcher.
796 if ( $request->getVal( 'action' ) === 'ajax' ) {
797 // Set a dummy title, because $wgTitle == null might break things
798 $title = Title
::makeTitle( NS_SPECIAL
, 'Badtitle/performing an AJAX call in '
801 $this->context
->setTitle( $title );
804 $dispatcher = new AjaxDispatcher( $this->config
);
805 $dispatcher->performAction( $this->context
->getUser() );
810 // Get title from request parameters,
811 // is set on the fly by parseTitle the first time.
812 $title = $this->getTitle();
813 $action = $this->getAction();
816 // Set DB query expectations for this HTTP request
817 $trxLimits = $this->config
->get( 'TrxProfilerLimits' );
818 $trxProfiler = Profiler
::instance()->getTransactionProfiler();
819 $trxProfiler->setLogger( LoggerFactory
::getInstance( 'DBPerformance' ) );
// Safe (GET/HEAD-like) requests get the looser 'GET' expectations.
820 if ( $request->hasSafeMethod() ) {
821 $trxProfiler->setExpectations( $trxLimits['GET'], __METHOD__
);
823 $trxProfiler->setExpectations( $trxLimits['POST'], __METHOD__
);
826 // If the user has forceHTTPS set to true, or if the user
827 // is in a group requiring HTTPS, or if they have the HTTPS
828 // preference set, redirect them to HTTPS.
829 // Note: Do this after $wgTitle is setup, otherwise the hooks run from
830 // isLoggedIn() will do all sorts of weird stuff.
832 $request->getProtocol() == 'http' &&
833 // switch to HTTPS only when supported by the server
834 preg_match( '#^https://#', wfExpandUrl( $request->getRequestURL(), PROTO_HTTPS
) ) &&
836 $request->getSession()->shouldForceHTTPS() ||
837 // Check the cookie manually, for paranoia
838 $request->getCookie( 'forceHTTPS', '' ) ||
839 // check for prefixed version that was used for a time in older MW versions
840 $request->getCookie( 'forceHTTPS' ) ||
841 // Avoid checking the user and groups unless it's enabled.
843 $this->context
->getUser()->isLoggedIn()
844 && $this->context
->getUser()->requiresHTTPS()
848 $oldUrl = $request->getFullRequestURL();
849 $redirUrl = preg_replace( '#^http://#', 'https://', $oldUrl );
851 // ATTENTION: This hook is likely to be removed soon due to overall design of the system.
852 if ( Hooks
::run( 'BeforeHttpsRedirect', [ $this->context
, &$redirUrl ] ) ) {
853 if ( $request->wasPosted() ) {
854 // This is weird and we'd hope it almost never happens. This
855 // means that a POST came in via HTTP and policy requires us
856 // redirecting to HTTPS. It's likely such a request is going
857 // to fail due to post data being lost, but let's try anyway
858 // and just log the instance.
860 // @todo FIXME: See if we could issue a 307 or 308 here, need
861 // to see how clients (automated & browser) behave when we do
862 wfDebugLog( 'RedirectedPosts', "Redirected from HTTP to HTTPS: $oldUrl" );
864 // Setup dummy Title, otherwise OutputPage::redirect will fail
865 $title = Title
::newFromText( 'REDIR', NS_MAIN
);
866 $this->context
->setTitle( $title );
867 // Since we only do this redir to change proto, always send a vary header
868 $output->addVaryHeader( 'X-Forwarded-Proto' );
869 $output->redirect( $redirUrl );
// File-cache fast path: serve a pre-rendered page without hitting the parser.
876 if ( $title->canExist() && HTMLFileCache
::useFileCache( $this->context
) ) {
877 // Try low-level file cache hit
878 $cache = new HTMLFileCache( $title, $action );
879 if ( $cache->isCacheGood( /* Assume up to date */ ) ) {
880 // Check incoming headers to see if client has this cached
881 $timestamp = $cache->cacheTimestamp();
882 if ( !$output->checkLastModified( $timestamp ) ) {
883 $cache->loadFromFileCache( $this->context
);
885 // Do any stats increment/watchlist stuff, assuming user is viewing the
886 // latest revision (which should always be the case for file cache)
887 $this->context
->getWikiPage()->doViewUpdates( $this->context
->getUser() );
888 // Tell OutputPage that output is taken care of
895 // Actually do the work of the request and build up any output
896 $this->performRequest();
898 // GUI-ify and stash the page output in MediaWiki::doPreOutputCommit() while
899 // ChronologyProtector synchronizes DB positions or replicas across all datacenters.
// $buffer memoizes the rendered output so it is produced at most once
// (its declaration appears to be on a missing line — TODO confirm).
901 $outputWork = function () use ( $output, &$buffer ) {
902 if ( $buffer === null ) {
903 $buffer = $output->output( true );
909 // Now commit any transactions, so that unreported errors after
910 // output() don't roll back the whole DB transaction and so that
911 // we avoid having both success and error text in the response
912 $this->doPreOutputCommit( $outputWork );
914 // Now send the actual output
// NOTE(review): garbled extraction — interior lines missing; code preserved byte-for-byte.
919 * Ends this task peacefully
920 * @param string $mode Use 'fast' to always skip job running
921 * @param bool $blocksHttpClient Whether this blocks an HTTP response to a client
// Post-response cleanup: commit transactions, loosen profiler expectations,
// run/enqueue deferred updates, optionally trigger jobs, log profiling data,
// and shut the load balancer factory down.
923 public function restInPeace( $mode = 'fast', $blocksHttpClient = true ) {
924 $lbFactory = MediaWikiServices
::getInstance()->getDBLoadBalancerFactory();
925 // Assure deferred updates are not in the main transaction
926 $lbFactory->commitMasterChanges( __METHOD__
);
928 // Loosen DB query expectations since the HTTP client is unblocked
929 $trxProfiler = Profiler
::instance()->getTransactionProfiler();
930 $trxProfiler->redefineExpectations(
931 $this->context
->getRequest()->hasSafeMethod()
932 ?
$this->config
->get( 'TrxProfilerLimits' )['PostSend-GET']
933 : $this->config
->get( 'TrxProfilerLimits' )['PostSend-POST'],
937 // Do any deferred jobs; preferring to run them now if a client will not wait on them
// If the client is still blocked, enqueue instead of running inline.
938 DeferredUpdates
::doUpdates( $blocksHttpClient ?
'enqueue' : 'run' );
940 // Now that everything specific to this request is done,
941 // try to occasionally run jobs (if enabled) from the queues
942 if ( $mode === 'normal' ) {
943 $this->triggerJobs();
946 // Log profiling data, e.g. in the database or UDP
947 wfLogProfilingData();
949 // Commit and close up!
950 $lbFactory->commitMasterChanges( __METHOD__
);
// ChronologyProtector positions were already recorded pre-output, hence NO_CHRONPROT.
951 $lbFactory->shutdown( $lbFactory::SHUTDOWN_NO_CHRONPROT
);
953 wfDebug( "Request ended normally\n" );
957 * Send out any buffered statsd data according to sampling rules
959 * @param IBufferingStatsdDataFactory $stats
960 * @param Config $config
961 * @throws ConfigException
// Flush buffered statsd metrics to the configured StatsdServer ("host" or
// "host:port"; port defaults to 8125) using a sampling statsd client.
// Any exception is logged and swallowed so that metrics delivery problems
// never break the request itself.
964 public static function emitBufferedStatsdData(
965 IBufferingStatsdDataFactory
$stats, Config
$config
// Only do work when a server is configured and there is buffered data.
967 if ( $config->get( 'StatsdServer' ) && $stats->hasData() ) {
// Split "host:port"; a missing port falls back to the statsd default 8125.
969 $statsdServer = explode( ':', $config->get( 'StatsdServer' ), 2 );
970 $statsdHost = $statsdServer[0];
971 $statsdPort = $statsdServer[1] ??
8125;
972 $statsdSender = new SocketSender( $statsdHost, $statsdPort );
973 $statsdClient = new SamplingStatsdClient( $statsdSender, true, false );
974 $statsdClient->setSamplingRates( $config->get( 'StatsdSamplingRates' ) );
975 $statsdClient->send( $stats->getData() );
977 $stats->clearData(); // empty buffer for the next round
978 } catch ( Exception
$ex ) {
// Log-and-continue: metrics are best-effort.
979 MWExceptionHandler
::logException( $ex );
985 * Potentially open a socket and send an HTTP request back to the server
986 * to run a specified number of jobs. This registers a callback to cleanup
987 * the socket once it's done.
// Probabilistically run some queued jobs at the end of this request.
// $wgJobRunRate < 1 is treated as a probability of running at all;
// values >= 1 are truncated to the number of jobs to attempt. Prefers the
// async Special:RunJobs HTTP trigger, falling back to in-process execution.
989 public function triggerJobs() {
990 $jobRunRate = $this->config
->get( 'JobRunRate' );
// Never trigger jobs from the job-runner entry point itself.
991 if ( $this->getTitle()->isSpecial( 'RunJobs' ) ) {
992 return; // recursion guard
993 } elseif ( $jobRunRate <= 0 ||
wfReadOnly() ) {
// Fractional rate: run jobs on only ~$jobRunRate of requests.
997 if ( $jobRunRate < 1 ) {
998 $max = mt_getrandmax();
999 if ( mt_rand( 0, $max ) > $max * $jobRunRate ) {
1000 return; // the higher the job run rate, the less likely we return here
1004 $n = intval( $jobRunRate );
1007 $logger = LoggerFactory
::getInstance( 'runJobs' );
1010 if ( $this->config
->get( 'RunJobsAsync' ) ) {
1011 // Send an HTTP request to the job RPC entry point if possible
1012 $invokedWithSuccess = $this->triggerAsyncJobs( $n, $logger );
1013 if ( !$invokedWithSuccess ) {
1014 // Fall back to blocking on running the job(s)
1015 $logger->warning( "Jobs switched to blocking; Special:RunJobs disabled" );
1016 $this->triggerSyncJobs( $n, $logger );
1019 $this->triggerSyncJobs( $n, $logger );
1021 } catch ( JobQueueError
$e ) {
1022 // Do not make the site unavailable (T88312)
1023 MWExceptionHandler
::logException( $e );
1028 * @param int $n Number of jobs to try to run
1029 * @param LoggerInterface $runJobsLogger
// Run up to $n queued jobs synchronously in this process, silencing the
// transaction profiler for the duration (job DB activity would otherwise
// trip the request's query expectations) and restoring it afterwards.
1031 private function triggerSyncJobs( $n, LoggerInterface
$runJobsLogger ) {
1032 $trxProfiler = Profiler
::instance()->getTransactionProfiler();
1033 $old = $trxProfiler->setSilenced( true );
1035 $runner = new JobRunner( $runJobsLogger );
1036 $runner->run( [ 'maxJobs' => $n ] );
1038 $trxProfiler->setSilenced( $old );
1043 * @param int $n Number of jobs to try to run
1044 * @param LoggerInterface $runJobsLogger
1045 * @return bool Success
// Try to kick off $n jobs by POSTing to the signed Special:RunJobs RPC
// endpoint on this wiki's own canonical server over a raw socket, without
// waiting for the jobs to finish. Returns true only if the endpoint
// accepted the request (HTTP 202). Failures are logged, never thrown.
1047 private function triggerAsyncJobs( $n, LoggerInterface
$runJobsLogger ) {
1048 // Do not send request if there are probably no jobs
1049 $group = JobQueueGroup
::singleton();
1050 if ( !$group->queuesHaveJobs( JobQueueGroup
::TYPE_DEFAULT
) ) {
// Build the RPC query; the HMAC signature covers these parameters and
// expires 5 seconds from now, so a captured URL cannot be replayed later.
1054 $query = [ 'title' => 'Special:RunJobs',
1055 'tasks' => 'jobs', 'maxjobs' => $n, 'sigexpiry' => time() +
5 ];
1056 $query['signature'] = SpecialRunJobs
::getQuerySignature(
1057 $query, $this->config
->get( 'SecretKey' ) );
1059 $errno = $errstr = null;
1060 $info = wfParseUrl( $this->config
->get( 'CanonicalServer' ) );
1061 $host = $info ?
$info['host'] : null;
// For https servers, connect through PHP's TLS stream transport.
1063 if ( isset( $info['scheme'] ) && $info['scheme'] == 'https' ) {
1064 $host = "tls://" . $host;
1067 if ( isset( $info['port'] ) ) {
1068 $port = $info['port'];
// Suppress fsockopen() warnings; connection failure is handled below
// via $errno/$errstr rather than PHP warnings.
1071 Wikimedia\
suppressWarnings();
1072 $sock = $host ?
fsockopen(
1077 // If it takes more than 100ms to connect to ourselves there is a problem...
1080 Wikimedia\restoreWarnings
();
1082 $invokedWithSuccess = true;
1084 $special = MediaWikiServices
::getInstance()->getSpecialPageFactory()->
1085 getPage( 'RunJobs' );
1086 $url = $special->getPageTitle()->getCanonicalURL( $query );
// Minimal hand-rolled HTTP/1.1 request; Content-Length 0 since all
// parameters travel in the query string.
1088 "POST $url HTTP/1.1\r\n" .
1089 "Host: {$info['host']}\r\n" .
1090 "Connection: Close\r\n" .
1091 "Content-Length: 0\r\n\r\n"
1094 $runJobsLogger->info( "Running $n job(s) via '$url'" );
1095 // Send a cron API request to be performed in the background.
1096 // Give up if this takes too long to send (which should be rare).
1097 stream_set_timeout( $sock, 2 );
1098 $bytes = fwrite( $sock, $req );
1099 if ( $bytes !== strlen( $req ) ) {
1100 $invokedWithSuccess = false;
1101 $runJobsLogger->error( "Failed to start cron API (socket write error)" );
1103 // Do not wait for the response (the script should handle client aborts).
1104 // Make sure that we don't close before that script reaches ignore_user_abort().
1105 $start = microtime( true );
1106 $status = fgets( $sock );
1107 $sec = microtime( true ) - $start;
// Only read the status line; Special:RunJobs replies 202 Accepted before
// doing the actual work.
1108 if ( !preg_match( '#^HTTP/\d\.\d 202 #', $status ) ) {
1109 $invokedWithSuccess = false;
1110 $runJobsLogger->error( "Failed to start cron API: received '$status' ($sec)" );
1115 $invokedWithSuccess = false;
1116 $runJobsLogger->error( "Failed to start cron API (socket error $errno): $errstr" );
1119 return $invokedWithSuccess;