Make DjVu metadata be stored as a serialized PHP array.
diff --git a/includes/Wiki.php b/includes/Wiki.php
index d78d7cb..fbafba8 100644
--- a/includes/Wiki.php
+++ b/includes/Wiki.php
@@ -587,6 +587,9 @@ class MediaWiki {
                // Actually do the work of the request and build up any output
                $this->performRequest();
 
+               // Either all DB and deferred updates should happen or none.
+               // The latter should not be cancelled due to client disconnect.
+               ignore_user_abort( true );
                // Now commit any transactions, so that unreported errors after
                // output() don't roll back the whole DB transaction
                wfGetLBFactory()->commitMasterChanges();
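
The hunk above makes the commit step atomic with respect to client disconnects: once the request's real work is done, PHP must not be stopped between the database commit and the deferred updates. A minimal sketch of the pattern, with $commit and $runDeferred as hypothetical stand-ins for the real MediaWiki calls:

    // Run the commit and the deferred updates as a unit; a disconnect
    // between them would leave the DB committed but the updates skipped.
    function finishRequest( $commit, $runDeferred ) {
        ignore_user_abort( true ); // disconnects no longer stop the script
        call_user_func( $commit );      // e.g. wfGetLBFactory()->commitMasterChanges()
        call_user_func( $runDeferred ); // e.g. DeferredUpdates::doUpdates()
    }
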
@@ -621,10 +624,12 @@ class MediaWiki {
         * the socket once it's done.
         */
        protected function triggerJobs() {
-               global $wgJobRunRate, $wgServer, $wgScriptPath, $wgScriptExtension, $wgEnableAPI;
+               global $wgJobRunRate, $wgServer, $wgRunJobsAsync;
 
                if ( $wgJobRunRate <= 0 || wfReadOnly() ) {
                        return;
+               } elseif ( $this->getTitle()->isSpecial( 'RunJobs' ) ) {
+                       return; // recursion guard
                }
 
                $section = new ProfileSection( __METHOD__ );
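
The new guard matters because the asynchronous path below works by POSTing to Special:RunJobs: if that request re-entered triggerJobs() itself, every job run would spawn another. A hypothetical condensation of the entry checks, with plain flags standing in for wfReadOnly() and the title test:

    // Decide whether this request should trigger background jobs at all.
    function shouldTriggerJobs( $jobRunRate, $readOnly, $isSpecialRunJobs ) {
        if ( $jobRunRate <= 0 || $readOnly ) {
            return false; // triggering disabled or wiki in read-only mode
        } elseif ( $isSpecialRunJobs ) {
            return false; // recursion guard: this request *is* the job runner
        }
        return true;
    }
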
@@ -639,16 +644,21 @@ class MediaWiki {
                        $n = intval( $wgJobRunRate );
                }
 
-               $query = array( 'action' => 'runjobs',
-                       'tasks' => 'jobs', 'maxjobs' => $n, 'sigexpiry' => time() + 5 );
-               $query['signature'] = ApiRunJobs::getQuerySignature( $query );
-
-               if ( !$wgEnableAPI ) {
-                       // Fall back to running the job here while the user waits
-                       ApiRunJobs::executeJobs( $n );
+               if ( !$wgRunJobsAsync ) {
+                       // If running jobs asynchronously has been disabled, run the job here
+                       // while the user waits
+                       SpecialRunJobs::executeJobs( $n );
                        return;
                }
 
+               if ( !JobQueueGroup::singleton()->queuesHaveJobs( JobQueueGroup::TYPE_DEFAULT ) ) {
+                       return; // do not send request if there are probably no jobs
+               }
+
+               $query = array( 'title' => 'Special:RunJobs',
+                       'tasks' => 'jobs', 'maxjobs' => $n, 'sigexpiry' => time() + 5 );
+               $query['signature'] = SpecialRunJobs::getQuerySignature( $query );
+
                $errno = $errstr = null;
                $info = wfParseUrl( $wgServer );
                wfSuppressWarnings();
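
The query built above is self-authenticating: Special:RunJobs should honour only requests whose parameters carry a valid signature that has not outlived the 'sigexpiry' timestamp. A hedged sketch of how such a signature is typically computed (the real SpecialRunJobs::getQuerySignature may differ in detail; $secretKey stands in for a server-side secret such as $wgSecretKey):

    // HMAC over the canonicalised parameters, keyed with a secret only the
    // wiki knows, so outsiders cannot mint runnable Special:RunJobs URLs
    // and a captured URL stops validating once 'sigexpiry' has passed.
    function getQuerySignatureSketch( array $query, $secretKey ) {
        ksort( $query ); // canonical parameter order
        return hash_hmac( 'sha1', wfArrayToCgi( $query ), $secretKey );
    }
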
@@ -656,17 +666,20 @@ class MediaWiki {
                        $info['host'],
                        isset( $info['port'] ) ? $info['port'] : 80,
                        $errno,
-                       $errstr
+                       $errstr,
+                       // If it takes more than 100ms to connect to ourselves,
+                       // there is a problem elsewhere.
+                       0.1
                );
                wfRestoreWarnings();
                if ( !$sock ) {
                        wfDebugLog( 'runJobs', "Failed to start cron API (socket error $errno): $errstr\n" );
                        // Fall back to running the job here while the user waits
-                       ApiRunJobs::executeJobs( $n );
+                       SpecialRunJobs::executeJobs( $n );
                        return;
                }
 
-               $url = wfAppendQuery( "{$wgScriptPath}/api{$wgScriptExtension}", $query );
+               $url = wfAppendQuery( wfScript( 'index' ), $query );
                $req = "POST $url HTTP/1.1\r\nHost: {$info['host']}\r\nConnection: Close\r\n\r\n";
 
                wfDebugLog( 'runJobs', "Running $n job(s) via '$url'\n" );
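
From here the request is fire-and-forget: the signed POST is written to the socket and the method returns without waiting for the body, on the assumption that Special:RunJobs, like the commit path patched at the top of this diff, ignores client aborts once started. A simplified sketch of the tail end (the real method presumably also reads the HTTP status line before closing, which falls outside this excerpt):

    fwrite( $sock, $req ); // send the signed POST built above
    fclose( $sock );       // do not block on the response; the jobs keep
                           // running server-side after we disconnect
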