3 * Helper class for the index.php entry point.
5 * This program is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License as published by
7 * the Free Software Foundation; either version 2 of the License, or
8 * (at your option) any later version.
10 * This program is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 * GNU General Public License for more details.
15 * You should have received a copy of the GNU General Public License along
16 * with this program; if not, write to the Free Software Foundation, Inc.,
17 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
18 * http://www.gnu.org/copyleft/gpl.html
23 use MediaWiki\Logger\LoggerFactory
;
24 use Psr\Log\LoggerInterface
;
25 use MediaWiki\MediaWikiServices
;
26 use Wikimedia\Rdbms\ILBFactory
;
27 use Wikimedia\Rdbms\ChronologyProtector
;
28 use Wikimedia\Rdbms\DBConnectionError
;
29 use Liuggio\StatsdClient\Sender\SocketSender
;
32 * The MediaWiki class is the helper class for the index.php entry point.
46 * @var string Cache what action this request is
51 * @param IContextSource|null $context
// Constructor: binds a request context to this instance and caches its
// Config object for later use.
// NOTE(review): this copy is truncated (gaps in embedded line numbering) —
// the conditional that presumably guards the RequestContext::getMain()
// fallback when $context is null is not visible here; confirm upstream.
53 public function __construct( IContextSource
$context = null ) {
// Fall back to the global main request context (guard not visible here).
55 $context = RequestContext
::getMain();
58 $this->context
= $context;
// Cache the context's Config so later code can use $this->config directly.
59 $this->config
= $context->getConfig();
63 * Parse the request to get the Title object
// Derives the Title to act on from request parameters, in priority order:
// 'search' (old-style search URLs) → 'curid' (page ID) → 'title' (URL text),
// with NS_MEDIA aliased to NS_FILE, language-variant resolution, oldid/diff
// override, and a main-page fallback. Throws if no valid title results.
// NOTE(review): several interior lines are missing from this copy (gaps in
// the embedded numbering, e.g. the elseif/else chain and some closing
// braces); code kept byte-identical, comments only added.
65 * @throws MalformedTitleException If a title has been provided by the user, but is invalid.
66 * @return Title Title object to be $wgTitle
68 private function parseTitle() {
69 $request = $this->context
->getRequest();
70 $curid = $request->getInt( 'curid' );
71 $title = $request->getVal( 'title' );
72 $action = $request->getVal( 'action' );
74 if ( $request->getCheck( 'search' ) ) {
75 // Compatibility with old search URLs which didn't use Special:Search
76 // Just check for presence here, so blank requests still
77 // show the search page when using ugly URLs (T10054).
78 $ret = SpecialPage
::getTitleFor( 'Search' );
// Branch for ?curid=N requests (branch keyword not visible in this copy).
80 // URLs like this are generated by RC, because rc_title isn't always accurate
81 $ret = Title
::newFromID( $curid );
// Default branch: build the title from the raw ?title= URL text.
83 $ret = Title
::newFromURL( $title );
84 // Alias NS_MEDIA page URLs to NS_FILE...we only use NS_MEDIA
85 // in wikitext links to tell Parser to make a direct file link
86 if ( !is_null( $ret ) && $ret->getNamespace() == NS_MEDIA
) {
87 $ret = Title
::makeTitle( NS_FILE
, $ret->getDBkey() );
89 $contLang = MediaWikiServices
::getInstance()->getContentLanguage();
90 // Check variant links so that interwiki links don't have to worry
91 // about the possible different language variants
// Only rewrite to a variant link when the page doesn't already exist
// (getArticleID() == 0); the enclosing if-keyword is missing in this copy.
93 $contLang->hasVariants() && !is_null( $ret ) && $ret->getArticleID() == 0
95 $contLang->findVariantLink( $title, $ret );
99 // If title is not provided, always allow oldid and diff to set the title.
100 // If title is provided, allow oldid and diff to override the title, unless
101 // we are talking about a special page which might use these parameters for
103 if ( $ret === null ||
!$ret->isSpecialPage() ) {
104 // We can have urls with just ?diff=,?oldid= or even just ?diff=
105 $oldid = $request->getInt( 'oldid' );
// Elvis operator: fall back to ?diff= when ?oldid= is 0/absent.
106 $oldid = $oldid ?
: $request->getInt( 'diff' );
107 // Allow oldid to override a changed or missing title
109 $rev = Revision
::newFromId( $oldid );
110 $ret = $rev ?
$rev->getTitle() : $ret;
114 // Use the main page as default title if nothing else has been provided
// NOTE(review): the first condition of this && chain is missing here —
// presumably a null-check on $ret; confirm against upstream.
116 && strval( $title ) === ''
117 && !$request->getCheck( 'curid' )
118 && $action !== 'delete'
120 $ret = Title
::newMainPage();
123 if ( $ret === null ||
( $ret->getDBkey() == '' && !$ret->isExternal() ) ) {
124 // If we get here, we definitely don't have a valid title; throw an exception.
125 // Try to get detailed invalid title exception first, fall back to MalformedTitleException.
126 Title
::newFromTextThrow( $title );
127 throw new MalformedTitleException( 'badtitletext', $title );
134 * Get the Title object that we'll be acting on, as specified in the WebRequest
// Lazily parses the title from the request the first time, storing it on the
// context; a malformed title is replaced by Special:Badtitle so rendering can
// continue. NOTE(review): the try-keyword matching this catch is not visible
// in this truncated copy.
137 public function getTitle() {
138 if ( !$this->context
->hasTitle() ) {
140 $this->context
->setTitle( $this->parseTitle() );
141 } catch ( MalformedTitleException
$ex ) {
// Fall back to Special:Badtitle rather than propagating the exception here.
142 $this->context
->setTitle( SpecialPage
::getTitleFor( 'Badtitle' ) );
145 return $this->context
->getTitle();
149 * Returns the name of the action that will be executed.
// Memoizes the resolved action name in $this->action; subsequent calls
// return the cached value without re-querying Action::getActionName().
151 * @return string Action
153 public function getAction() {
154 if ( $this->action
=== null ) {
155 $this->action
= Action
::getActionName( $this->context
);
158 return $this->action
;
162 * Performs the request.
// Main request dispatcher: runs the BeforeInitialize hook, rejects invalid
// titles, enforces read permission (resetting to Special:Badtitle to avoid
// leaking page existence, T34276), handles interwiki redirects and
// special-page redirect aliases (T109724), then either executes a special
// page or hands off to initializeArticle()/performAction().
// NOTE(review): this copy is truncated — several closing braces, try
// keywords, and condition fragments are missing (gaps in embedded
// numbering); code kept byte-identical, comments only added.
165 * - local interwiki redirects
170 * @throws MWException|PermissionsError|BadTitleError|HttpError
173 private function performRequest() {
176 $request = $this->context
->getRequest();
177 $requestTitle = $title = $this->context
->getTitle();
178 $output = $this->context
->getOutput();
179 $user = $this->context
->getUser();
181 if ( $request->getVal( 'printable' ) === 'yes' ) {
182 $output->setPrintable();
185 $unused = null; // To pass it by reference
186 Hooks
::run( 'BeforeInitialize', [ &$title, &$unused, &$output, &$user, $request, $this ] );
188 // Invalid titles. T23776: The interwikis must redirect even if the page name is empty.
189 if ( is_null( $title ) ||
( $title->getDBkey() == '' && !$title->isExternal() )
190 ||
$title->isSpecial( 'Badtitle' )
192 $this->context
->setTitle( SpecialPage
::getTitleFor( 'Badtitle' ) );
// NOTE(review): the matching try-keyword for this catch is not visible here.
195 } catch ( MalformedTitleException
$ex ) {
196 throw new BadTitleError( $ex );
198 throw new BadTitleError();
201 // Check user's permissions to read this page.
202 // We have to check here to catch special pages etc.
203 // We will check again in Article::view().
204 $permErrors = $title->isSpecial( 'RunJobs' )
205 ?
[] // relies on HMAC key signature alone
206 : $title->getUserPermissionsErrors( 'read', $user );
207 if ( count( $permErrors ) ) {
208 // T34276: allowing the skin to generate output with $wgTitle or
209 // $this->context->title set to the input title would allow anonymous users to
210 // determine whether a page exists, potentially leaking private data. In fact, the
211 // curid and oldid request parameters would allow page titles to be enumerated even
212 // when they are not guessable. So we reset the title to Special:Badtitle before the
213 // permissions error is displayed.
215 // The skin mostly uses $this->context->getTitle() these days, but some extensions
216 // still use $wgTitle.
217 $badTitle = SpecialPage
::getTitleFor( 'Badtitle' );
218 $this->context
->setTitle( $badTitle );
// Keep the legacy global in sync for extensions that still read $wgTitle.
219 $wgTitle = $badTitle;
221 throw new PermissionsError( 'read', $permErrors );
224 // Interwiki redirects
225 if ( $title->isExternal() ) {
226 $rdfrom = $request->getVal( 'rdfrom' );
// Propagate the rdfrom chain when present; otherwise forward all query
// parameters except 'title' (branch keywords not visible in this copy).
228 $url = $title->getFullURL( [ 'rdfrom' => $rdfrom ] );
230 $query = $request->getValues();
231 unset( $query['title'] );
232 $url = $title->getFullURL( $query );
234 // Check for a redirect loop
235 if ( !preg_match( '/^' . preg_quote( $this->config
->get( 'Server' ), '/' ) . '/', $url )
238 // 301 so google et al report the target as the actual url.
239 $output->redirect( $url, 301 );
241 $this->context
->setTitle( SpecialPage
::getTitleFor( 'Badtitle' ) );
244 } catch ( MalformedTitleException
$ex ) {
245 throw new BadTitleError( $ex );
247 throw new BadTitleError();
249 // Handle any other redirects.
250 // Redirect loops, titleless URL, $wgUsePathInfo URLs, and URLs with a variant
251 } elseif ( !$this->tryNormaliseRedirect( $title ) ) {
252 // Prevent information leak via Special:MyPage et al (T109724)
253 $spFactory = MediaWikiServices
::getInstance()->getSpecialPageFactory();
254 if ( $title->isSpecialPage() ) {
255 $specialPage = $spFactory->getPage( $title->getDBkey() );
256 if ( $specialPage instanceof RedirectSpecialPage
) {
257 $specialPage->setContext( $this->context
);
258 if ( $this->config
->get( 'HideIdentifiableRedirects' )
259 && $specialPage->personallyIdentifiableTarget()
261 list( , $subpage ) = $spFactory->resolveAlias( $title->getDBkey() );
262 $target = $specialPage->getRedirect( $subpage );
263 // target can also be true. We let that case fall through to normal processing.
264 if ( $target instanceof Title
) {
265 $query = $specialPage->getRedirectQuery( $subpage ) ?
: [];
// Swap in a derivative request pointing at the redirect target while
// keeping the original request URL for logging/caching purposes.
266 $request = new DerivativeRequest( $this->context
->getRequest(), $query );
267 $request->setRequestURL( $this->context
->getRequest()->getRequestURL() );
268 $this->context
->setRequest( $request );
269 // Do not varnish cache these. May vary even for anons
270 $this->context
->getOutput()->lowerCdnMaxage( 0 );
271 $this->context
->setTitle( $target );
273 // Reset action type cache. (Special pages have only view)
274 $this->action
= null;
276 $output->addJsConfigVars( [
277 'wgInternalRedirectTargetUrl' => $target->getFullURL( $query ),
279 $output->addModules( 'mediawiki.action.view.redirect' );
285 // Special pages ($title may have changed since if statement above)
286 if ( $title->isSpecialPage() ) {
287 // Actions that need to be made when we have a special pages
288 $spFactory->executePath( $title, $this->context
);
290 // ...otherwise treat it as an article view. The article
291 // may still be a wikipage redirect to another article or URL.
292 $article = $this->initializeArticle();
293 if ( is_object( $article ) ) {
294 $this->performAction( $article, $requestTitle );
295 } elseif ( is_string( $article ) ) {
296 $output->redirect( $article );
298 throw new MWException( "Shouldn't happen: MediaWiki::initializeArticle()"
299 . " returned neither an object nor a URL" );
306 * Handle redirects for uncanonical title requests.
// Issues a 301 to the canonical URL for view requests whose URL is not in
// canonical form; bails out (returning early) for POSTs, non-view actions,
// already-canonical titles, extra query parameters, or a vetoing
// TestCanonicalRedirect hook. Detects redirect loops and reports them as a
// 500 with PATH_INFO troubleshooting advice.
// NOTE(review): this copy is truncated — return statements, some closing
// braces, and string-concatenation tails are missing (gaps in embedded
// numbering); code kept byte-identical, comments only added.
311 * - $wgUsePathInfo URLs.
312 * - URLs with a variant.
313 * - Other non-standard URLs (as long as they have no extra query parameters).
316 * - Normalise title values:
317 * /wiki/Foo%20Bar -> /wiki/Foo_Bar
318 * - Normalise empty title:
319 * /wiki/ -> /wiki/Main
320 * /w/index.php?title= -> /wiki/Main
321 * - Don't redirect anything with query parameters other than 'title' or 'action=view'.
323 * @param Title $title
324 * @return bool True if a redirect was set.
327 private function tryNormaliseRedirect( Title
$title ) {
328 $request = $this->context
->getRequest();
329 $output = $this->context
->getOutput();
// Guard clause: any of these conditions means "do not redirect"
// (the early-return body is not visible in this copy).
331 if ( $request->getVal( 'action', 'view' ) != 'view'
332 ||
$request->wasPosted()
333 ||
( $request->getCheck( 'title' )
334 && $title->getPrefixedDBkey() == $request->getVal( 'title' ) )
335 ||
count( $request->getValueNames( [ 'action', 'title' ] ) )
336 ||
!Hooks
::run( 'TestCanonicalRedirect', [ $request, $title, $output ] )
341 if ( $title->isSpecialPage() ) {
// Rebuild the special-page title from its canonical (localized) alias.
342 list( $name, $subpage ) = MediaWikiServices
::getInstance()->getSpecialPageFactory()->
343 resolveAlias( $title->getDBkey() );
345 $title = SpecialPage
::getTitleFor( $name, $subpage );
348 // Redirect to canonical url, make it a 301 to allow caching
349 $targetUrl = wfExpandUrl( $title->getFullURL(), PROTO_CURRENT
);
// Canonical URL equals the incoming URL: a loop — fail loudly with advice.
350 if ( $targetUrl == $request->getFullRequestURL() ) {
351 $message = "Redirect loop detected!\n\n" .
352 "This means the wiki got confused about what page was " .
353 "requested; this sometimes happens when moving a wiki " .
354 "to a new server or changing the server configuration.\n\n";
356 if ( $this->config
->get( 'UsePathInfo' ) ) {
357 $message .= "The wiki is trying to interpret the page " .
358 "title from the URL path portion (PATH_INFO), which " .
359 "sometimes fails depending on the web server. Try " .
360 "setting \"\$wgUsePathInfo = false;\" in your " .
361 "LocalSettings.php, or check that \$wgArticlePath " .
364 $message .= "Your web server was detected as possibly not " .
365 "supporting URL path components (PATH_INFO) correctly; " .
366 "check your LocalSettings.php for a customized " .
367 "\$wgArticlePath setting and/or toggle \$wgUsePathInfo " .
370 throw new HttpError( 500, $message );
372 $output->setCdnMaxage( 1200 );
373 $output->redirect( $targetUrl, '301' );
378 * Initialize the main Article object for "standard" actions (view, etc)
// Builds the Article (GUI wrapper around the WikiPage) for this request and,
// for content-showing actions, follows wiki redirects: a hook may override
// or veto the redirect target, string targets become hard HTTP redirects
// (unless $wgDisableHardRedirects), and Title targets rewrite the context's
// title/wikipage to the redirected article.
// NOTE(review): this copy is truncated — several if-keywords, return
// statements, and closing braces are missing (gaps in embedded numbering);
// code kept byte-identical, comments only added.
379 * Create an Article object for the page, following redirects if needed.
381 * @return Article|string An Article, or a string to redirect to another URL
383 private function initializeArticle() {
384 $title = $this->context
->getTitle();
385 if ( $this->context
->canUseWikiPage() ) {
386 // Try to use request context wiki page, as there
387 // is already data from db saved in per process
388 // cache there from this->getAction() call.
389 $page = $this->context
->getWikiPage();
391 // This case should not happen, but just in case.
392 // @TODO: remove this or use an exception
393 $page = WikiPage
::factory( $title );
394 $this->context
->setWikiPage( $page );
395 wfWarn( "RequestContext::canUseWikiPage() returned false" );
398 // Make GUI wrapper for the WikiPage
399 $article = Article
::newFromWikiPage( $page, $this->context
);
401 // Skip some unnecessary code if the content model doesn't support redirects
402 if ( !ContentHandler
::getForTitle( $title )->supportsRedirects() ) {
406 $request = $this->context
->getRequest();
408 // Namespace might change when using redirects
409 // Check for redirects ...
410 $action = $request->getVal( 'action', 'view' );
411 $file = ( $page instanceof WikiFilePage
) ?
$page->getFile() : null;
412 if ( ( $action == 'view' ||
$action == 'render' ) // ... for actions that show content
413 && !$request->getVal( 'oldid' ) // ... and are not old revisions
414 && !$request->getVal( 'diff' ) // ... and not when showing diff
415 && $request->getVal( 'redirect' ) != 'no' // ... unless explicitly told not to
416 // ... and the article is not a non-redirect image page with associated file
417 && !( is_object( $file ) && $file->exists() && !$file->getRedirected() )
419 // Give extensions a change to ignore/handle redirects as needed
420 $ignoreRedirect = $target = false;
422 Hooks
::run( 'InitializeArticleMaybeRedirect',
423 [ &$title, &$request, &$ignoreRedirect, &$target, &$article ] );
424 $page = $article->getPage(); // reflect any hook changes
426 // Follow redirects only for... redirects.
427 // If $target is set, then a hook wanted to redirect.
428 if ( !$ignoreRedirect && ( $target ||
$page->isRedirect() ) ) {
429 // Is the target already set by an extension?
430 $target = $target ?
: $page->followRedirect();
431 if ( is_string( $target ) && !$this->config
->get( 'DisableHardRedirects' ) ) {
432 // we'll need to redirect
435 if ( is_object( $target ) ) {
436 // Rewrite environment to redirected article
437 $rpage = WikiPage
::factory( $target );
438 $rpage->loadPageData();
// Only switch when the target exists locally, or this is a foreign file
// (which has no local page but should still show the redirect).
439 if ( $rpage->exists() ||
( is_object( $file ) && !$file->isLocal() ) ) {
440 $rarticle = Article
::newFromWikiPage( $rpage, $this->context
);
441 $rarticle->setRedirectedFrom( $title );
443 $article = $rarticle;
444 $this->context
->setTitle( $target );
445 $this->context
->setWikiPage( $article->getPage() );
449 // Article may have been changed by hook
450 $this->context
->setTitle( $article->getTitle() );
451 $this->context
->setWikiPage( $article->getPage() );
459 * Perform one of the "standard" actions
// Resolves and executes the Action for this request; along the way it
// tightens transaction-profiler expectations for non-writing POSTs, marks
// such requests CDN-safe, and (when $wgUseCdn allows) sets the CDN max-age.
// Unknown actions produce a 404 'nosuchaction' error page.
// NOTE(review): this copy is truncated — the early-return after the
// MediaWikiPerformAction hook veto, the $action->show() call, and several
// closing braces are missing (gaps in embedded numbering).
462 * @param Title $requestTitle The original title, before any redirects were applied
464 private function performAction( Page
$page, Title
$requestTitle ) {
465 $request = $this->context
->getRequest();
466 $output = $this->context
->getOutput();
467 $title = $this->context
->getTitle();
468 $user = $this->context
->getUser();
// Hook may veto the whole action (veto handling not visible in this copy).
470 if ( !Hooks
::run( 'MediaWikiPerformAction',
471 [ $output, $page, $title, $user, $request, $this ] )
476 $act = $this->getAction();
477 $action = Action
::factory( $act, $page, $this->context
);
479 if ( $action instanceof Action
) {
480 // Narrow DB query expectations for this HTTP request
481 $trxLimits = $this->config
->get( 'TrxProfilerLimits' );
482 $trxProfiler = Profiler
::instance()->getTransactionProfiler();
483 if ( $request->wasPosted() && !$action->doesWrites() ) {
484 $trxProfiler->setExpectations( $trxLimits['POST-nonwrite'], __METHOD__
);
485 $request->markAsSafeRequest();
488 # Let CDN cache things if we can purge them.
489 if ( $this->config
->get( 'UseCdn' ) &&
491 // Use PROTO_INTERNAL because that's what getCdnUrls() uses
492 wfExpandUrl( $request->getRequestURL(), PROTO_INTERNAL
),
493 $requestTitle->getCdnUrls()
496 $output->setCdnMaxage( $this->config
->get( 'CdnMaxAge' ) );
503 // If we've not found out which action it is by now, it's unknown
504 $output->setStatusCode( 404 );
505 $output->showErrorPage( 'nosuchaction', 'nosuchactiontext' );
509 * Run the current MediaWiki instance; index.php just calls this
// Top-level request driver with layered error handling: ErrorPageError is
// treated as a normal GUI outcome (commit + report), generic Exceptions fall
// back to a stale HTMLFileCache copy during DB outages before being handed
// to MWExceptionHandler, and PHP Errors are handled to clean up LBFactory
// state. Always ends with the post-output shutdown.
// NOTE(review): this copy is truncated — the try-keyword, the main() call,
// the if-keyword before the DBConnectionError condition chain, and several
// closing braces are missing (gaps in embedded numbering).
511 public function run() {
513 $this->setDBProfilingAgent();
516 } catch ( ErrorPageError
$e ) {
517 // T64091: while exceptions are convenient to bubble up GUI errors,
518 // they are not internal application faults. As with normal requests, this
519 // should commit, print the output, do deferred updates, jobs, and profiling.
520 $this->doPreOutputCommit();
521 $e->report(); // display the GUI error
523 } catch ( Exception
$e ) {
524 $context = $this->context
;
525 $action = $context->getRequest()->getVal( 'action', 'view' );
// Outage fallback: only for DB connection failures on existing titles being
// viewed, and only when the file cache is usable in outage mode.
527 $e instanceof DBConnectionError
&&
528 $context->hasTitle() &&
529 $context->getTitle()->canExist() &&
530 in_array( $action, [ 'view', 'history' ], true ) &&
531 HTMLFileCache
::useFileCache( $this->context
, HTMLFileCache
::MODE_OUTAGE
)
533 // Try to use any (even stale) file during outages...
534 $cache = new HTMLFileCache( $context->getTitle(), $action );
535 if ( $cache->isCached() ) {
536 $cache->loadFromFileCache( $context, HTMLFileCache
::MODE_OUTAGE
);
537 print MWExceptionRenderer
::getHTML( $e );
542 MWExceptionHandler
::handleException( $e );
543 } catch ( Error
$e ) {
544 // Type errors and such: at least handle it now and clean up the LBFactory state
545 MWExceptionHandler
::handleException( $e );
548 $this->doPostOutputShutdown( 'normal' );
// Tags DB connections with the (truncated to 15 chars) requesting user's
// name so SHOW PROCESSLIST output is attributable. Uses mb_* so multibyte
// names are not cut mid-character. NOTE(review): the closing of the
// setAgentName() call and of the method are not visible in this copy.
551 private function setDBProfilingAgent() {
552 $services = MediaWikiServices
::getInstance();
553 // Add a comment for easy SHOW PROCESSLIST interpretation
554 $name = $this->context
->getUser()->getName();
555 $services->getDBLoadBalancerFactory()->setAgentName(
556 mb_strlen( $name ) > 15 ?
mb_substr( $name, 0, 15 ) . '...' : $name
561 * @see MediaWiki::preOutputCommit()
// Thin instance wrapper: delegates to the static preOutputCommit() with
// this instance's context. NOTE(review): the method's closing brace is not
// visible in this truncated copy.
562 * @param callable|null $postCommitWork [default: null]
565 public function doPreOutputCommit( callable
$postCommitWork = null ) {
566 self
::preOutputCommit( $this->context
, $postCommitWork );
570 * This function commits all DB and session changes as needed *before* the
// Pre-response commit pipeline: commits the master transaction round, runs
// PRESEND deferred updates, saves the session, shuts down the LBFactory with
// the ChronologyProtector strategy chosen by getChronProtStrategy(), then
// emits cpPosIndex/UseDC/UseCDNCache cookies and lowers CDN max-age for
// lagged replicas or a disabled MessageCache.
// NOTE(review): this copy is truncated — __METHOD__ argument lines,
// $now initialisation, the cpPosIndex URL-append arguments, and several
// closing braces are missing (gaps in embedded numbering).
571 * client can receive a response (in case DB commit fails) and thus also before
572 * the response can trigger a subsequent related request by the client
574 * If there is a significant amount of content to flush, it can be done in $postCommitWork
576 * @param IContextSource $context
577 * @param callable|null $postCommitWork [default: null]
580 public static function preOutputCommit(
581 IContextSource
$context, callable
$postCommitWork = null
583 $config = $context->getConfig();
584 $request = $context->getRequest();
585 $output = $context->getOutput();
586 $lbFactory = MediaWikiServices
::getInstance()->getDBLoadBalancerFactory();
588 // Try to make sure that all RDBMs, session, and other storage updates complete
589 ignore_user_abort( true );
591 // Commit all RDBMs changes from the main transaction round
592 $lbFactory->commitMasterChanges(
594 // Abort if any transaction was too big
595 [ 'maxWriteDuration' => $config->get( 'MaxUserDBWriteDuration' ) ]
597 wfDebug( __METHOD__
. ': primary transaction round committed' );
599 // Run updates that need to block the client or affect output (this is the last chance)
600 DeferredUpdates
::doUpdates( 'run', DeferredUpdates
::PRESEND
);
601 wfDebug( __METHOD__
. ': pre-send deferred updates completed' );
602 // Persist the session to avoid race conditions on subsequent requests by the client
603 $request->getSession()->save(); // T214471
604 wfDebug( __METHOD__
. ': session changes committed' );
606 // Figure out whether to wait for DB replication now or to use some method that assures
607 // that subsequent requests by the client will use the DB replication positions written
608 // during the shutdown() call below; the later requires working around replication lag
609 // of the store containing DB replication positions (e.g. dynomite, mcrouter).
610 list( $flags, $strategy ) = self
::getChronProtStrategy( $lbFactory, $output );
611 // Record ChronologyProtector positions for DBs affected in this request at this point
// $cpIndex/$cpClientId are out-parameters filled by shutdown(); their
// initialisation is not visible in this copy.
614 $lbFactory->shutdown( $flags, $postCommitWork, $cpIndex, $cpClientId );
615 wfDebug( __METHOD__
. ': LBFactory shutdown completed' );
617 $allowHeaders = !( $output->isDisabled() ||
headers_sent() );
618 if ( $cpIndex > 0 ) {
619 if ( $allowHeaders ) {
// NOTE(review): the assignment initialising $now is missing in this copy.
621 $expires = $now + ChronologyProtector
::POSITION_COOKIE_TTL
;
622 $options = [ 'prefix' => '' ];
623 $value = $lbFactory::makeCookieValueFromCPIndex( $cpIndex, $now, $cpClientId );
624 $request->response()->setCookie( 'cpPosIndex', $value, $expires, $options );
627 if ( $strategy === 'cookie+url' ) {
628 if ( $output->getRedirect() ) { // sanity
629 $safeUrl = $lbFactory->appendShutdownCPIndexAsQuery(
630 $output->getRedirect(),
633 $output->redirect( $safeUrl );
635 $e = new LogicException( "No redirect; cannot append cpPosIndex parameter." );
636 MWExceptionHandler
::logException( $e );
641 if ( $allowHeaders ) {
642 // Set a cookie to tell all CDN edge nodes to "stick" the user to the DC that
643 // handles this POST request (e.g. the "master" data center). Also have the user
644 // briefly bypass CDN so ChronologyProtector works for cacheable URLs.
645 if ( $request->wasPosted() && $lbFactory->hasOrMadeRecentMasterChanges() ) {
646 $expires = time() +
$config->get( 'DataCenterUpdateStickTTL' );
647 $options = [ 'prefix' => '' ];
648 $request->response()->setCookie( 'UseDC', 'master', $expires, $options );
649 $request->response()->setCookie( 'UseCDNCache', 'false', $expires, $options );
652 // Avoid letting a few seconds of replica DB lag cause a month of stale data.
653 // This logic is also intimately related to the value of $wgCdnReboundPurgeDelay.
654 if ( $lbFactory->laggedReplicaUsed() ) {
655 $maxAge = $config->get( 'CdnMaxageLagged' );
656 $output->lowerCdnMaxage( $maxAge );
657 $request->response()->header( "X-Database-Lagged: true" );
658 wfDebugLog( 'replication',
659 "Lagged DB used; CDN cache TTL limited to $maxAge seconds" );
662 // Avoid long-term cache pollution due to message cache rebuild timeouts (T133069)
663 if ( MessageCache
::singleton()->isDisabled() ) {
664 $maxAge = $config->get( 'CdnMaxageSubstitute' );
665 $output->lowerCdnMaxage( $maxAge );
666 $request->response()->header( "X-Response-Substitute: true" );
672 * @param ILBFactory $lbFactory
// Chooses how ChronologyProtector DB positions are propagated to the
// client's next request: defaults to synchronous shutdown + cookie
// ('cookie+sync'); for redirects after recent master writes it downgrades to
// asynchronous shutdown, using only a cookie for same-domain ('local')
// targets or cookie plus URL parameter for in-farm 'remote' targets.
// Returns [ $flags, $strategy ].
// NOTE(review): closing braces are missing in this truncated copy.
673 * @param OutputPage $output
676 private static function getChronProtStrategy( ILBFactory
$lbFactory, OutputPage
$output ) {
677 // Should the client return, their request should observe the new ChronologyProtector
678 // DB positions. This request might be on a foreign wiki domain, so synchronously update
679 // the DB positions in all datacenters to be safe. If this output is not a redirect,
680 // then OutputPage::output() will be relatively slow, meaning that running it in
681 // $postCommitWork should help mask the latency of those updates.
682 $flags = $lbFactory::SHUTDOWN_CHRONPROT_SYNC
;
683 $strategy = 'cookie+sync';
685 $allowHeaders = !( $output->isDisabled() ||
headers_sent() );
686 if ( $output->getRedirect() && $lbFactory->hasOrMadeRecentMasterChanges( INF
) ) {
687 // OutputPage::output() will be fast, so $postCommitWork is useless for masking
688 // the latency of synchronously updating the DB positions in all datacenters.
689 // Try to make use of the time the client spends following redirects instead.
690 $domainDistance = self
::getUrlDomainDistance( $output->getRedirect() );
691 if ( $domainDistance === 'local' && $allowHeaders ) {
692 $flags = $lbFactory::SHUTDOWN_CHRONPROT_ASYNC
;
693 $strategy = 'cookie'; // use same-domain cookie and keep the URL uncluttered
694 } elseif ( $domainDistance === 'remote' ) {
695 $flags = $lbFactory::SHUTDOWN_CHRONPROT_ASYNC
;
696 $strategy = 'cookie+url'; // cross-domain cookie might not work
700 return [ $flags, $strategy ];
705 * @return string Either "local", "remote" if in the farm, "external" otherwise
// Classifies a URL relative to the wiki farm via WikiMap: current wiki →
// 'local'; another farm wiki → 'remote'. NOTE(review): the final
// return 'external' branch is not visible in this truncated copy.
707 private static function getUrlDomainDistance( $url ) {
708 $clusterWiki = WikiMap
::getWikiFromUrl( $url );
709 if ( WikiMap
::isCurrentWikiId( $clusterWiki ) ) {
710 return 'local'; // the current wiki
712 if ( $clusterWiki !== false ) {
713 return 'remote'; // another wiki in this cluster/farm
720 * This function does work that can be done *after* the
// Post-response shutdown: marks request timing, flushes visible profiling
// data, disables WebResponse setters (T191537), and defers restInPeace() via
// register_postsend_function (HHVM) or fastcgi_finish_request when
// available — clearing $blocksHttpClient in those cases — otherwise runs it
// inline with ignore_user_abort(true) so updates are all-or-nothing.
// NOTE(review): truncated copy — try keywords, elseif/else keywords, the
// final inline $callback() invocation, and closing braces are missing.
721 * user gets the HTTP response so they don't block on it
723 * This manages deferred updates, job insertion,
724 * final commit, and the logging of profiling data
726 * @param string $mode Use 'fast' to always skip job running
729 public function doPostOutputShutdown( $mode = 'normal' ) {
730 // Record backend request timing
731 $timing = $this->context
->getTiming();
732 $timing->mark( 'requestShutdown' );
734 // Perform the last synchronous operations...
736 // Show visible profiling data if enabled (which cannot be post-send)
737 Profiler
::instance()->logDataPageOutputOnly();
738 } catch ( Exception
$e ) {
739 // An error may already have been shown in run(), so just log it to be safe
740 MWExceptionHandler
::rollbackMasterChangesAndLog( $e );
743 // Disable WebResponse setters for post-send processing (T191537).
744 WebResponse
::disableForPostSend();
746 $blocksHttpClient = true;
747 // Defer everything else if possible...
748 $callback = function () use ( $mode, &$blocksHttpClient ) {
750 $this->restInPeace( $mode, $blocksHttpClient );
751 } catch ( Exception
$e ) {
752 // If this is post-send, then displaying errors can cause broken HTML
753 MWExceptionHandler
::rollbackMasterChangesAndLog( $e );
757 if ( function_exists( 'register_postsend_function' ) ) {
758 // https://github.com/facebook/hhvm/issues/1230
759 register_postsend_function( $callback );
760 /** @noinspection PhpUnusedLocalVariableInspection */
761 $blocksHttpClient = false;
763 if ( function_exists( 'fastcgi_finish_request' ) ) {
764 fastcgi_finish_request();
765 /** @noinspection PhpUnusedLocalVariableInspection */
766 $blocksHttpClient = false;
768 // Either all DB and deferred updates should happen or none.
769 // The latter should not be cancelled due to client disconnect.
770 ignore_user_abort( true );
// Core per-request flow: dispatches action=ajax to AjaxDispatcher, resolves
// title/action, configures the transaction profiler, enforces HTTP→HTTPS
// redirects (session flag, forceHTTPS cookies, or user preference), serves
// from HTMLFileCache when possible, then runs performRequest() and stages
// output via doPreOutputCommit() so ChronologyProtector can sync first.
// NOTE(review): this truncated copy is missing many lines — early returns
// after the AJAX and file-cache paths, if-keywords before several condition
// chains, the $buffer initialisation, the final output flush, and closing
// braces; code kept byte-identical, comments only added.
777 private function main() {
780 $output = $this->context
->getOutput();
781 $request = $this->context
->getRequest();
783 // Send Ajax requests to the Ajax dispatcher.
784 if ( $request->getVal( 'action' ) === 'ajax' ) {
785 // Set a dummy title, because $wgTitle == null might break things
786 $title = Title
::makeTitle( NS_SPECIAL
, 'Badtitle/performing an AJAX call in '
789 $this->context
->setTitle( $title );
792 $dispatcher = new AjaxDispatcher( $this->config
);
793 $dispatcher->performAction( $this->context
->getUser() );
798 // Get title from request parameters,
799 // is set on the fly by parseTitle the first time.
800 $title = $this->getTitle();
801 $action = $this->getAction();
804 // Set DB query expectations for this HTTP request
805 $trxLimits = $this->config
->get( 'TrxProfilerLimits' );
806 $trxProfiler = Profiler
::instance()->getTransactionProfiler();
807 $trxProfiler->setLogger( LoggerFactory
::getInstance( 'DBPerformance' ) );
808 if ( $request->hasSafeMethod() ) {
809 $trxProfiler->setExpectations( $trxLimits['GET'], __METHOD__
);
811 $trxProfiler->setExpectations( $trxLimits['POST'], __METHOD__
);
814 // If the user has forceHTTPS set to true, or if the user
815 // is in a group requiring HTTPS, or if they have the HTTPS
816 // preference set, redirect them to HTTPS.
817 // Note: Do this after $wgTitle is setup, otherwise the hooks run from
818 // isLoggedIn() will do all sorts of weird stuff.
// HTTPS-redirect condition chain (the enclosing if-keyword is missing in
// this copy): only for plain-HTTP requests where HTTPS is supported, and any
// of the force triggers below holds.
820 $request->getProtocol() == 'http' &&
821 // switch to HTTPS only when supported by the server
822 preg_match( '#^https://#', wfExpandUrl( $request->getRequestURL(), PROTO_HTTPS
) ) &&
824 $request->getSession()->shouldForceHTTPS() ||
825 // Check the cookie manually, for paranoia
826 $request->getCookie( 'forceHTTPS', '' ) ||
827 // check for prefixed version that was used for a time in older MW versions
828 $request->getCookie( 'forceHTTPS' ) ||
829 // Avoid checking the user and groups unless it's enabled.
831 $this->context
->getUser()->isLoggedIn()
832 && $this->context
->getUser()->requiresHTTPS()
836 $oldUrl = $request->getFullRequestURL();
837 $redirUrl = preg_replace( '#^http://#', 'https://', $oldUrl );
839 // ATTENTION: This hook is likely to be removed soon due to overall design of the system.
840 if ( Hooks
::run( 'BeforeHttpsRedirect', [ $this->context
, &$redirUrl ] ) ) {
841 if ( $request->wasPosted() ) {
842 // This is weird and we'd hope it almost never happens. This
843 // means that a POST came in via HTTP and policy requires us
844 // redirecting to HTTPS. It's likely such a request is going
845 // to fail due to post data being lost, but let's try anyway
846 // and just log the instance.
848 // @todo FIXME: See if we could issue a 307 or 308 here, need
849 // to see how clients (automated & browser) behave when we do
850 wfDebugLog( 'RedirectedPosts', "Redirected from HTTP to HTTPS: $oldUrl" );
852 // Setup dummy Title, otherwise OutputPage::redirect will fail
853 $title = Title
::newFromText( 'REDIR', NS_MAIN
);
854 $this->context
->setTitle( $title );
855 // Since we only do this redir to change proto, always send a vary header
856 $output->addVaryHeader( 'X-Forwarded-Proto' );
857 $output->redirect( $redirUrl );
864 if ( $title->canExist() && HTMLFileCache
::useFileCache( $this->context
) ) {
865 // Try low-level file cache hit
866 $cache = new HTMLFileCache( $title, $action );
867 if ( $cache->isCacheGood( /* Assume up to date */ ) ) {
868 // Check incoming headers to see if client has this cached
869 $timestamp = $cache->cacheTimestamp();
870 if ( !$output->checkLastModified( $timestamp ) ) {
871 $cache->loadFromFileCache( $this->context
);
873 // Do any stats increment/watchlist stuff, assuming user is viewing the
874 // latest revision (which should always be the case for file cache)
875 $this->context
->getWikiPage()->doViewUpdates( $this->context
->getUser() );
876 // Tell OutputPage that output is taken care of
883 // Actually do the work of the request and build up any output
884 $this->performRequest();
886 // GUI-ify and stash the page output in MediaWiki::doPreOutputCommit() while
887 // ChronologyProtector synchronizes DB positions or replicas across all datacenters.
// NOTE(review): the initialisation of $buffer is missing in this copy.
889 $outputWork = function () use ( $output, &$buffer ) {
890 if ( $buffer === null ) {
891 $buffer = $output->output( true );
897 // Now commit any transactions, so that unreported errors after
898 // output() don't roll back the whole DB transaction and so that
899 // we avoid having both success and error text in the response
900 $this->doPreOutputCommit( $outputWork );
902 // Now send the actual output
907 * Ends this task peacefully
// End-of-request cleanup: commits any leftover master changes, loosens
// transaction-profiler expectations to the PostSend limits, runs remaining
// deferred updates ('enqueue' while still blocking a client, 'run'
// otherwise), opportunistically triggers queued jobs in 'normal' mode, logs
// profiling data, then commits and shuts down the LBFactory without
// ChronologyProtector bookkeeping.
// NOTE(review): truncated copy — the closing of redefineExpectations() and
// the method's closing brace are not visible.
908 * @param string $mode Use 'fast' to always skip job running
909 * @param bool $blocksHttpClient Whether this blocks an HTTP response to a client
911 public function restInPeace( $mode = 'fast', $blocksHttpClient = true ) {
912 $lbFactory = MediaWikiServices
::getInstance()->getDBLoadBalancerFactory();
913 // Assure deferred updates are not in the main transaction
914 $lbFactory->commitMasterChanges( __METHOD__
);
916 // Loosen DB query expectations since the HTTP client is unblocked
917 $trxProfiler = Profiler
::instance()->getTransactionProfiler();
918 $trxProfiler->redefineExpectations(
919 $this->context
->getRequest()->hasSafeMethod()
920 ?
$this->config
->get( 'TrxProfilerLimits' )['PostSend-GET']
921 : $this->config
->get( 'TrxProfilerLimits' )['PostSend-POST'],
925 // Do any deferred jobs; preferring to run them now if a client will not wait on them
926 DeferredUpdates
::doUpdates( $blocksHttpClient ?
'enqueue' : 'run' );
928 // Now that everything specific to this request is done,
929 // try to occasionally run jobs (if enabled) from the queues
930 if ( $mode === 'normal' ) {
931 $this->triggerJobs();
934 // Log profiling data, e.g. in the database or UDP
935 wfLogProfilingData();
937 // Commit and close up!
938 $lbFactory->commitMasterChanges( __METHOD__
);
939 $lbFactory->shutdown( $lbFactory::SHUTDOWN_NO_CHRONPROT
);
941 wfDebug( "Request ended normally\n" );
/**
 * Send out any buffered statsd data according to sampling rules
 *
 * @param IBufferingStatsdDataFactory $stats
 * @param Config $config
 * @throws ConfigException If a required config key is missing
 */
public static function emitBufferedStatsdData(
	IBufferingStatsdDataFactory $stats, Config $config
) {
	// Only do the (potentially failing) socket work when a server is
	// configured and there is actually something buffered to send.
	if ( $config->get( 'StatsdServer' ) && $stats->hasData() ) {
		try {
			// "host" or "host:port"; default statsd port is 8125
			$statsdServer = explode( ':', $config->get( 'StatsdServer' ), 2 );
			$statsdHost = $statsdServer[0];
			$statsdPort = $statsdServer[1] ?? 8125;
			$statsdSender = new SocketSender( $statsdHost, $statsdPort );
			$statsdClient = new SamplingStatsdClient( $statsdSender, true, false );
			$statsdClient->setSamplingRates( $config->get( 'StatsdSamplingRates' ) );
			$statsdClient->send( $stats->getData() );

			$stats->clearData(); // empty buffer for the next round
		} catch ( Exception $ex ) {
			// Sending stats is best-effort; never let it break the request
			MWExceptionHandler::logException( $ex );
		}
	}
}
/**
 * Potentially open a socket and sent an HTTP request back to the server
 * to run a specified number of jobs. This registers a callback to cleanup
 * the socket once it's done.
 */
public function triggerJobs() {
	$jobRunRate = $this->config->get( 'JobRunRate' );
	if ( $this->getTitle()->isSpecial( 'RunJobs' ) ) {
		return; // recursion guard
	} elseif ( $jobRunRate <= 0 || wfReadOnly() ) {
		return;
	}

	if ( $jobRunRate < 1 ) {
		// Fractional rate: run a single job with probability $jobRunRate
		$max = mt_getrandmax();
		if ( mt_rand( 0, $max ) > $max * $jobRunRate ) {
			return; // the higher the job run rate, the less likely we return here
		}
		$n = 1;
	} else {
		$n = intval( $jobRunRate );
	}

	$logger = LoggerFactory::getInstance( 'runJobs' );

	try {
		if ( $this->config->get( 'RunJobsAsync' ) ) {
			// Send an HTTP request to the job RPC entry point if possible
			$invokedWithSuccess = $this->triggerAsyncJobs( $n, $logger );
			if ( !$invokedWithSuccess ) {
				// Fall back to blocking on running the job(s)
				$logger->warning( "Jobs switched to blocking; Special:RunJobs disabled" );
				$this->triggerSyncJobs( $n, $logger );
			}
		} else {
			$this->triggerSyncJobs( $n, $logger );
		}
	} catch ( JobQueueError $e ) {
		// Do not make the site unavailable (T88312)
		MWExceptionHandler::logException( $e );
	}
}
/**
 * Run jobs in-process, silencing transaction profiler warnings while doing so.
 *
 * @param int $n Number of jobs to try to run
 * @param LoggerInterface $runJobsLogger
 */
private function triggerSyncJobs( $n, LoggerInterface $runJobsLogger ) {
	$trxProfiler = Profiler::instance()->getTransactionProfiler();
	$old = $trxProfiler->setSilenced( true );
	try {
		$runner = new JobRunner( $runJobsLogger );
		$runner->run( [ 'maxJobs' => $n ] );
	} finally {
		// Always restore the previous silenced state, even if a job threw
		$trxProfiler->setSilenced( $old );
	}
}
/**
 * Fire off a non-blocking HTTP POST to Special:RunJobs on this wiki so the
 * jobs run in a separate request instead of delaying this one.
 *
 * @param int $n Number of jobs to try to run
 * @param LoggerInterface $runJobsLogger
 * @return bool Success
 */
private function triggerAsyncJobs( $n, LoggerInterface $runJobsLogger ) {
	// Do not send request if there are probably no jobs
	$group = JobQueueGroup::singleton();
	if ( !$group->queuesHaveJobs( JobQueueGroup::TYPE_DEFAULT ) ) {
		return true;
	}

	// Build a signed query so Special:RunJobs accepts the request;
	// the signature expires shortly to limit replay.
	$query = [ 'title' => 'Special:RunJobs',
		'tasks' => 'jobs', 'maxjobs' => $n, 'sigexpiry' => time() + 5 ];
	$query['signature'] = SpecialRunJobs::getQuerySignature(
		$query, $this->config->get( 'SecretKey' ) );

	$errno = $errstr = null;
	$info = wfParseUrl( $this->config->get( 'CanonicalServer' ) );
	$host = $info ? $info['host'] : null;
	$port = 80;
	if ( isset( $info['scheme'] ) && $info['scheme'] == 'https' ) {
		$host = "tls://" . $host;
		$port = 443;
	}
	if ( isset( $info['port'] ) ) {
		$port = $info['port'];
	}

	Wikimedia\suppressWarnings();
	$sock = $host ? fsockopen(
		$host,
		$port,
		$errno,
		$errstr,
		// If it takes more than 100ms to connect to ourselves there is a problem...
		0.100
	) : false;
	Wikimedia\restoreWarnings();

	$invokedWithSuccess = true;
	if ( $sock ) {
		$special = MediaWikiServices::getInstance()->getSpecialPageFactory()->
			getPage( 'RunJobs' );
		$url = $special->getPageTitle()->getCanonicalURL( $query );
		$req = (
			"POST $url HTTP/1.1\r\n" .
			"Host: {$info['host']}\r\n" .
			"Connection: Close\r\n" .
			"Content-Length: 0\r\n\r\n"
		);

		$runJobsLogger->info( "Running $n job(s) via '$url'" );
		// Send a cron API request to be performed in the background.
		// Give up if this takes too long to send (which should be rare).
		stream_set_timeout( $sock, 2 );
		$bytes = fwrite( $sock, $req );
		if ( $bytes !== strlen( $req ) ) {
			$invokedWithSuccess = false;
			$runJobsLogger->error( "Failed to start cron API (socket write error)" );
		} else {
			// Do not wait for the response (the script should handle client aborts).
			// Make sure that we don't close before that script reaches ignore_user_abort().
			$start = microtime( true );
			$status = fgets( $sock );
			$sec = microtime( true ) - $start;
			if ( !preg_match( '#^HTTP/\d\.\d 202 #', $status ) ) {
				$invokedWithSuccess = false;
				$runJobsLogger->error( "Failed to start cron API: received '$status' ($sec)" );
			}
		}
		fclose( $sock );
	} else {
		$invokedWithSuccess = false;
		$runJobsLogger->error( "Failed to start cron API (socket error $errno): $errstr" );
	}

	return $invokedWithSuccess;
}