X-Git-Url: http://git.cyclocoop.org/?a=blobdiff_plain;f=index.php;h=1d229b0252ef7bdab005c804b9e8aabaf2b7ef55;hb=f44391e2af4a686e1d49b38c98107feef5436f43;hp=5500eb5537a406b07c6e551864bf3d17c61ae4f6;hpb=da73d9192f1512ea7b79a0ca4797926f22b21e76;p=lhc%2Fweb%2Fwiklou.git

diff --git a/index.php b/index.php
index 5500eb5537..1d229b0252 100644
--- a/index.php
+++ b/index.php
@@ -1,202 +1,106 @@
-	$GLOBALS overwrite vulnerability');
-}
-
-# Valid web server entry point, enable includes.
-# Please don't move this line to includes/Defines.php. This line essentially defines
-# a valid entry point. If you put it in includes/Defines.php, then any script that includes
-# it becomes an entry point, thereby defeating its purpose.
-define( 'MEDIAWIKI', true );
-require_once( './includes/Defines.php' );
-
-if( !file_exists( 'LocalSettings.php' ) ) {
-	$IP = "." ;
-	require_once( 'includes/DefaultSettings.php' ); # used for printing the version
-?>
-
-
-
-	MediaWiki <?php echo $wgVersion ?>
-
-
-
-
-	The MediaWiki logo
-
-
-	MediaWiki
-
-
-		config/LocalSettings.php to the parent directory." );
-	} else {
-		echo( "Please setup the wiki first." );
-	}
-	?>
-
-
-
-$action = $wgRequest->getVal( 'action', 'view' );
-$title = $wgRequest->getVal( 'title' );
-
-if ($wgRequest->getVal( 'printable' ) == 'yes') {
-	$wgOut->setPrintable();
+$maxLag = $wgRequest->getVal( 'maxlag' );
+if( !is_null( $maxLag ) && !$mediaWiki->checkMaxLag( $maxLag ) ) {
+	exit;
 }
 
-if ( '' == $title && 'delete' != $action ) {
-	$wgTitle = Title::newFromText( wfMsgForContent( 'mainpage' ) );
-} elseif ( $curid = $wgRequest->getInt( 'curid' ) ) {
-	# URLs like this are generated by RC, because rc_title isn't always accurate
-	$wgTitle = Title::newFromID( $curid );
-} else {
-	$wgTitle = Title::newFromURL( $title );
-	/* check variant links so that interwiki links don't have to worry about
-	   the possible different language variants
-	*/
-	if( count($wgContLang->getVariants()) > 1 && !is_null($wgTitle) && $wgTitle->getArticleID() == 0 )
-		$wgContLang->findVariantLink( $title, $wgTitle );
+# Set title from request parameters
+$wgTitle = $mediaWiki->checkInitialQueries( $wgRequest );
 
-}
 wfProfileOut( 'main-misc-setup' );
 
-# Debug statement for user levels
-// print_r($wgUser);
-
-$search = $wgRequest->getText( 'search' );
-if( !is_null( $search ) && $search !== '' ) {
-	// Compatibility with old search URLs which didn't use Special:Search
-	// Do this above the read whitelist check for security...
-	$wgTitle = Title::makeTitle( NS_SPECIAL, 'Search' );
-}
+$action = $wgRequest->getVal( 'action' );
 
-# If the user is not logged in, the Namespace:title of the article must be in
-# the Read array in order for the user to see it. (We have to check here to
-# catch special pages etc. We check again in Article::view())
-if ( !is_null( $wgTitle ) && !$wgTitle->userCanRead() ) {
-	$wgOut->loginToUse();
-	$wgOut->output();
+# Send Ajax requests to the Ajax dispatcher.
+if( $wgUseAjax && $action == 'ajax' ) {
+	$dispatcher = new AjaxDispatcher();
+	$dispatcher->performAction();
+	$mediaWiki->restInPeace();
 	exit;
 }
 
-wfProfileIn( 'main-action' );
-
-require_once( "includes/Wiki.php" ) ;
-$mediaWiki = new MediaWiki() ;
-
-if( !$wgDisableInternalSearch && !is_null( $search ) && $search !== '' ) {
-	require_once( 'includes/SpecialSearch.php' );
-	$wgTitle = Title::makeTitle( NS_SPECIAL, 'Search' );
-	wfSpecialSearch();
-} else if( !$wgTitle or $wgTitle->getDBkey() == '' ) {
-	$wgTitle = Title::newFromText( wfMsgForContent( 'badtitle' ) );
-	$wgOut->errorpage( 'badtitle', 'badtitletext' );
-} else if ( $wgTitle->getInterwiki() != '' ) {
-	if( $rdfrom = $wgRequest->getVal( 'rdfrom' ) ) {
-		$url = $wgTitle->getFullURL( 'rdfrom=' . urlencode( $rdfrom ) );
-	} else {
-		$url = $wgTitle->getFullURL();
-	}
-	# Check for a redirect loop
-	if ( !preg_match( '/^' . preg_quote( $wgServer, '/' ) .
-		'/', $url ) && $wgTitle->isLocal() ) {
-		$wgOut->redirect( $url );
-	} else {
-		$wgTitle = Title::newFromText( wfMsgForContent( 'badtitle' ) );
-		$wgOut->errorpage( 'badtitle', 'badtitletext' );
-	}
-} else if ( ( $action == 'view' ) &&
-	(!isset( $_GET['title'] ) || $wgTitle->getPrefixedDBKey() != $_GET['title'] ) &&
-	!count( array_diff( array_keys( $_GET ), array( 'action', 'title' ) ) ) )
-{
-	/* redirect to canonical url, make it a 301 to allow caching */
-	$wgOut->setSquidMaxage( 1200 );
-	$wgOut->redirect( $wgTitle->getFullURL(), '301');
-} else if ( $mediaWiki->initializeSpecialCases( $wgTitle ) ) {
-	# Do nothing, everything was already done by $mediaWiki
-
-} else {
-
-
-	$wgArticle =& $mediaWiki->initializeArticle( $wgTitle, $wgRequest, $action );
-
-	if( in_array( $action, $wgDisabledActions ) ) {
-		$wgOut->errorpage( 'nosuchaction', 'nosuchactiontext' );
-	} else {
-		$mediaWiki->setVal( "SquidMaxage", $wgSquidMaxage );
-		$mediaWiki->setVal( "EnableDublinCoreRdf", $wgEnableDublinCoreRdf );
-		$mediaWiki->setVal( "EnableCreativeCommonsRdf", $wgEnableCreativeCommonsRdf );
-		$mediaWiki->setVal( "CommandLineMode", $wgCommandLineMode );
-		$mediaWiki->setVal( "UseExternalEditor", $wgUseExternalEditor );
-		$mediaWiki->performAction( $action, $wgOut, $wgArticle, $wgTitle, $wgUser, $wgRequest );
+if( $wgUseFileCache && $wgTitle !== null ) {
+	wfProfileIn( 'main-try-filecache' );
+	// Raw pages should handle cache control on their own,
+	// even when using file cache. This reduces hits from clients.
+	if( $action != 'raw' && HTMLFileCache::useFileCache() ) {
+		/* Try low-level file cache hit */
+		$cache = new HTMLFileCache( $wgTitle, $action );
+		if( $cache->isFileCacheGood( /* Assume up to date */ ) ) {
+			/* Check incoming headers to see if client has this cached */
+			if( !$wgOut->checkLastModified( $cache->fileCacheTime() ) ) {
+				$cache->loadFromFileCache();
+			}
+			# Do any stats increment/watchlist stuff
+			$wgArticle = MediaWiki::articleFromTitle( $wgTitle );
+			$wgArticle->viewUpdates();
+			# Tell $wgOut that output is taken care of
+			$wgOut->disable();
+			wfProfileOut( 'main-try-filecache' );
+			$mediaWiki->finalCleanup( $wgOut );
+			$mediaWiki->restInPeace();
+			exit;
+		}
 	}
-
-
-}
-wfProfileOut( 'main-action' );
-
-# Deferred updates aren't really deferred anymore. It's important to report errors to the
-# user, and that means doing this before OutputPage::output(). Note that for page saves,
-# the client will wait until the script exits anyway before following the redirect.
-wfProfileIn( 'main-updates' );
-foreach( $wgDeferredUpdateList as $up ) {
-	$up->doUpdate();
-}
-wfProfileOut( 'main-updates' );
-
-wfProfileIn( 'main-cleanup' );
-$wgLoadBalancer->saveMasterPos();
-
-# Now commit any transactions, so that unreported errors after output() don't roll back the whole thing
-$wgLoadBalancer->commitAll();
-
-$wgOut->output();
-
-foreach( $wgPostCommitUpdateList as $up ) {
-	$up->doUpdate();
+	wfProfileOut( 'main-try-filecache' );
 }
-wfProfileOut( 'main-cleanup' );
-
-wfProfileClose();
-logProfilingData();
-$wgLoadBalancer->closeAll();
-wfDebug( "Request ended normally\n" );
-?>
+# Setting global variables in mediaWiki
+$mediaWiki->setVal( 'DisableHardRedirects', $wgDisableHardRedirects );
+$mediaWiki->setVal( 'EnableCreativeCommonsRdf', $wgEnableCreativeCommonsRdf );
+$mediaWiki->setVal( 'EnableDublinCoreRdf', $wgEnableDublinCoreRdf );
+$mediaWiki->setVal( 'Server', $wgServer );
+$mediaWiki->setVal( 'SquidMaxage', $wgSquidMaxage );
+$mediaWiki->setVal( 'UseExternalEditor', $wgUseExternalEditor );
+$mediaWiki->setVal( 'UsePathInfo', $wgUsePathInfo );
+
+$mediaWiki->performRequestForTitle( $wgTitle, $wgArticle, $wgOut, $wgUser, $wgRequest );
+$mediaWiki->finalCleanup( $wgOut );
+$mediaWiki->restInPeace();