From 0f1224fd921aab15a81f8a3cede22012557b4d65 Mon Sep 17 00:00:00 2001 From: "Mark A. Hershberger" Date: Fri, 15 Jan 2010 05:56:57 +0000 Subject: [PATCH] follow-up r60811 clean up code, write some tests for the existing uses of HttpFunctions. Still need to test background download, proxy tests, etc. --- includes/HttpFunctions.php | 969 +++++++++++++++++++++---------------- tests/HttpTest.php | 185 +++++++ 2 files changed, 742 insertions(+), 412 deletions(-) create mode 100644 tests/HttpTest.php diff --git a/includes/HttpFunctions.php b/includes/HttpFunctions.php index adc027612f..35874e85a0 100644 --- a/includes/HttpFunctions.php +++ b/includes/HttpFunctions.php @@ -8,29 +8,23 @@ * @ingroup HTTP */ class Http { - // Syncronous download (in a single request) - const SYNC_DOWNLOAD = 1; - - // Asynchronous download ( background process with multiple requests ) - const ASYNC_DOWNLOAD = 2; - /** - * Get the contents of a file by HTTP + * Perform an HTTP request * @param $method string HTTP method. Usually GET/POST * @param $url string Full URL to act on - * @param $timeout int Seconds to timeout. 'default' falls to $wgHTTPTimeout - * @param $curlOptions array Optional array of extra params to pass - * to curl_setopt() + * @param $opts options to pass to HttpRequest object + * @returns mixed (bool)false on failure or a string on success */ public static function request( $method, $url, $opts = array() ) { - $opts['method'] = ( strtoupper( $method ) == 'GET' || strtoupper( $method ) == 'POST' ) - ? strtoupper( $method ) : null; - $req = HttpRequest::newRequest( $url, $opts ); + $opts['method'] = strtoupper( $method ); + if ( !array_key_exists( 'timeout', $opts ) ) { + $opts['timeout'] = 'default'; + } + $req = HttpRequest::factory( $url, $opts ); $status = $req->doRequest(); - if( $status->isOK() ) { - return $status->value; + if ( $status->isOK() ) { + return $req->getContent(); } else { - wfDebug( 'http error: ' . $status->getWikiText() ); return false; } } @@ -39,10 +33,8 @@ class Http { * Simple wrapper for Http::request( 'GET' ) * @see Http::request() */ - public static function get( $url, $timeout = false, $opts = array() ) { - global $wgSyncHTTPTimeout; - if( $timeout ) - $opts['timeout'] = $timeout; + public static function get( $url, $timeout = 'default', $opts = array() ) { + $opts['timeout'] = $timeout; return Http::request( 'GET', $url, $opts ); } @@ -54,24 +46,90 @@ class Http { return Http::request( 'POST', $url, $opts ); } - public static function doDownload( $url, $target_file_path, $dl_mode = self::SYNC_DOWNLOAD, - $redirectCount = 0 ) - { + /** + * Check if the URL can be served by localhost + * @param $url string Full url to check + * @return bool + */ + public static function isLocalURL( $url ) { + global $wgCommandLineMode, $wgConf; + if ( $wgCommandLineMode ) { + return false; + } + + // Extract host part + $matches = array(); + if ( preg_match( '!^http://([\w.-]+)[/:].*$!', $url, $matches ) ) { + $host = $matches[1]; + // Split up dotwise + $domainParts = explode( '.', $host ); + // Check if this domain or any superdomain is listed in $wgConf as a local virtual host + $domainParts = array_reverse( $domainParts ); + for ( $i = 0; $i < count( $domainParts ); $i++ ) { + $domainPart = $domainParts[$i]; + if ( $i == 0 ) { + $domain = $domainPart; + } else { + $domain = $domainPart . '.' . $domain; + } + if ( $wgConf->isLocalVHost( $domain ) ) { + return true; + } + } + } + return false; + } + + /** + * A standard user-agent we can use for external requests. 
+ * @returns string + */ + public static function userAgent() { + global $wgVersion; + return "MediaWiki/$wgVersion"; + } + + /** + * Checks that the given URI is a valid one + * @param $uri Mixed: URI to check for validity + * @returns bool + */ + public static function isValidURI( $uri ) { + return preg_match( + '/(ftp|http|https):\/\/(\w+:{0,1}\w*@)?(\S+)(:[0-9]+)?(\/|\/([\w#!:.?+=&%@!\-\/]))?/', + $uri, + $matches + ); + } + /** + * Fetch a URL, write the result to a file. + * @param $url string url to fetch + * @param $targetFilePath string full path (including filename) to write the file to + * @param $async bool whether the download should be asynchronous (defaults to false) + * @param $redirectCount int used internally to keep track of the number of redirects + * + * @returns Status -- for async requests this will contain the request key + */ + public static function doDownload( $url, $targetFilePath, $async = false, $redirectCount = 0 ) { global $wgPhpCli, $wgMaxUploadSize, $wgMaxRedirects; + // do a quick HEAD check to ensure the file size is not > $wgMaxUploadSize - $headRequest = HttpRequest::newRequest( $url, array( 'headers_only' => true ) ); + $headRequest = HttpRequest::factory( $url, array( 'headersOnly' => true ) ); $headResponse = $headRequest->doRequest(); - if( !$headResponse->isOK() ) { + if ( !$headResponse->isOK() ) { return $headResponse; } $head = $headResponse->value; // check for redirects: - if( isset( $head['Location'] ) && strrpos( $head[0], '302' ) !== false ) { - if( $redirectCount < $wgMaxRedirects ) { - if( self::isValidURI( $head['Location'] ) ) { - return self::doDownload( $head['Location'], $target_file_path, - $dl_mode, $redirectCount++ ); + if ( $redirectCount < 0 ) { + $redirectCount = 0; + } + if ( isset( $head['Location'] ) && strrpos( $head[0], '302' ) !== false ) { + if ( $redirectCount < $wgMaxRedirects ) { + if ( self::isValidURI( $head['Location'] ) ) { + return self::doDownload( $head['Location'], $targetFilePath, + $async, $redirectCount + 1 ); } else { return Status::newFatal( 'upload-proto-error' ); } @@ -80,332 +138,355 @@ class Http { } } // we did not get a 200 ok response: - if( strrpos( $head[0], '200 OK' ) === false ) { + if ( strrpos( $head[0], '200 OK' ) === false ) { return Status::newFatal( 'upload-http-error', htmlspecialchars( $head[0] ) ); } - $content_length = ( isset( $head['Content-Length'] ) ) ? $head['Content-Length'] : null; - if( $content_length ) { - if( $content_length > $wgMaxUploadSize ) { - return Status::newFatal( 'requested file length ' . $content_length . - ' is greater than $wgMaxUploadSize: ' . $wgMaxUploadSize ); + $contentLength = isset( $head['Content-Length'] ) ? $head['Content-Length'] : null; + if ( $contentLength ) { + if ( $contentLength > $wgMaxUploadSize ) { + return Status::newFatal( 'requested file length ' . $contentLength . + ' is greater than $wgMaxUploadSize: ' . $wgMaxUploadSize ); } } // check if we can find phpCliPath (for doing a background shell request to // php to do the download: - if( $wgPhpCli && wfShellExecEnabled() && $dl_mode == self::ASYNC_DOWNLOAD ) { + if ( $async && $wgPhpCli && wfShellExecEnabled() ) { wfDebug( __METHOD__ . "\nASYNC_DOWNLOAD\n" ); - //setup session and shell call: - return self::initBackgroundDownload( $url, $target_file_path, $content_length ); + // setup session and shell call: + return self::startBackgroundRequest( $url, $targetFilePath, $contentLength ); } else { wfDebug( __METHOD__ . 
"\nSYNC_DOWNLOAD\n" ); // SYNC_DOWNLOAD download as much as we can in the time we have to execute $opts['method'] = 'GET'; - $opts['target_file_path'] = $target_file_path; - $req = HttpRequest::newRequest( $url, $opts ); + $opts['targetFilePath'] = $mTargetFilePath; + $req = HttpRequest::factory( $url, $opts ); return $req->doRequest(); } } /** - * a non blocking request (generally an exit point in the application) - * should write to a file location and give updates + * Start backgrounded (i.e. non blocking) request. The + * backgrounded request will provide updates to the user's session + * data. + * @param $url string the URL to download + * @param $targetFilePath string the destination for the downloaded file + * @param $contentLength int (optional) the length of the download from the HTTP header * + * @returns Status */ - private static function initBackgroundDownload( $url, $target_file_path, - $content_length = null ) - { + private static function startBackgroundRequest( $url, $targetFilePath, $contentLength = null ) { global $IP, $wgPhpCli, $wgServer; $status = Status::newGood(); - // generate a session id with all the details for the download (pid, target_file_path ) - $upload_session_key = self::getUploadSessionKey(); - $session_id = session_id(); + // generate a session id with all the details for the download (pid, targetFilePath ) + $requestKey = self::createRequestKey(); + $sessionID = session_id(); // store the url and target path: - $_SESSION['wsDownload'][$upload_session_key]['url'] = $url; - $_SESSION['wsDownload'][$upload_session_key]['target_file_path'] = $target_file_path; + $_SESSION['wsBgRequest'][$requestKey]['url'] = $url; + $_SESSION['wsBgRequest'][$requestKey]['targetFilePath'] = $targetFilePath; // since we request from the cmd line we lose the original host name pass in the session: - $_SESSION['wsDownload'][$upload_session_key]['orgServer'] = $wgServer; + $_SESSION['wsBgRequest'][$requestKey]['orgServer'] = $wgServer; - if( $content_length ) - $_SESSION['wsDownload'][$upload_session_key]['content_length'] = $content_length; + if ( $contentLength ) { + $_SESSION['wsBgRequest'][$requestKey]['contentLength'] = $contentLength; + } // set initial loaded bytes: - $_SESSION['wsDownload'][$upload_session_key]['loaded'] = 0; + $_SESSION['wsBgRequest'][$requestKey]['loaded'] = 0; // run the background download request: - $cmd = $wgPhpCli . ' ' . $IP . "/maintenance/http_session_download.php " . - "--sid {$session_id} --usk {$upload_session_key} --wiki " . wfWikiId(); + $cmd = $wgPhpCli . ' ' . $IP . "/maintenance/httpSessionDownload.php " . + "--sid {$sessionID} --usk {$requestKey} --wiki " . wfWikiId(); $pid = wfShellBackgroundExec( $cmd ); // the pid is not of much use since we won't be visiting this same apache any-time soon. - if( !$pid ) - return Status::newFatal( 'could not run background shell exec' ); + if ( !$pid ) + return Status::newFatal( 'http-could-not-background' ); - // update the status value with the $upload_session_key (for the user to - // check on the status of the upload) - $status->value = $upload_session_key; + // update the status value with the $requestKey (for the user to + // check on the status of the download) + $status->value = $requestKey; // return good status return $status; } - static function getUploadSessionKey() { - $key = mt_rand( 0, 0x7fffffff ); - $_SESSION['wsUploadData'][$key] = array(); + /** + * Returns a unique, random string that can be used as an request key and + * preloads it into the session data. 
+ * + * @returns string + */ + static function createRequestKey() { + if ( !array_key_exists( 'wsBgRequest', $_SESSION ) ) { + $_SESSION['wsBgRequest'] = array(); + } + + $key = uniqid( 'bgrequest', true ); + + // This is probably over-defensive. + while ( array_key_exists( $key, $_SESSION['wsBgRequest'] ) ) { + $key = uniqid( 'bgrequest', true ); + } + $_SESSION['wsBgRequest'][$key] = array(); + return $key; } /** - * used to run a session based download. Is initiated via the shell. + * Recover the necessary session and request information + * @param $sessionID string + * @param $requestKey string the HTTP request key * - * @param $session_id String: the session id to grab download details from - * @param $upload_session_key String: the key of the given upload session - * (a given client could have started a few http uploads at once) + * @returns array request information */ - public static function doSessionIdDownload( $session_id, $upload_session_key ) { - global $wgUser, $wgEnableWriteAPI, $wgAsyncHTTPTimeout, $wgServer, - $wgSessionsInMemcached, $wgSessionHandler, $wgSessionStarted; - wfDebug( __METHOD__ . "\n\n doSessionIdDownload :\n\n" ); + private static function recoverSession( $sessionID, $requestKey ) { + global $wgUser, $wgServer, $wgSessionsInMemcached; + // set session to the provided key: - session_id( $session_id ); - //fire up mediaWiki session system: + session_id( $sessionID ); + // fire up mediaWiki session system: wfSetupSession(); // start the session - if( session_start() === false ) { + if ( session_start() === false ) { wfDebug( __METHOD__ . ' could not start session' ); } // get all the vars we need from session_id - if( !isset( $_SESSION[ 'wsDownload' ][$upload_session_key] ) ) { - wfDebug( __METHOD__ . ' Error:could not find upload session'); + if ( !isset( $_SESSION[ 'wsBgRequest' ][ $requestKey ] ) ) { + wfDebug( __METHOD__ . ' Error:could not find upload session' ); exit(); } // setup the global user from the session key we just inherited $wgUser = User::newFromSession(); // grab the session data to setup the request: - $sd =& $_SESSION['wsDownload'][$upload_session_key]; + $sd =& $_SESSION['wsBgRequest'][$requestKey]; // update the wgServer var ( since cmd line thinks we are localhost // when we are really orgServer) - if( isset( $sd['orgServer'] ) && $sd['orgServer'] ) { + if ( isset( $sd['orgServer'] ) && $sd['orgServer'] ) { $wgServer = $sd['orgServer']; } // close down the session so we can other http queries can get session // updates: (if not $wgSessionsInMemcached) - if( !$wgSessionsInMemcached ) + if ( !$wgSessionsInMemcached ) { session_write_close(); + } - $req = HttpRequest::newRequest( $sd['url'], array( - 'target_file_path' => $sd['target_file_path'], - 'upload_session_key'=> $upload_session_key, - 'timeout' => $wgAsyncHTTPTimeout, - 'do_close_session_update' => true - ) ); - // run the actual request .. (this can take some time) - wfDebug( __METHOD__ . 'do Session Download :: ' . $sd['url'] . ' tf: ' . - $sd['target_file_path'] . "\n\n"); - $status = $req->doRequest(); - //wfDebug("done with req status is: ". $status->isOK(). ' '.$status->getWikiText(). "\n"); + return $sd; + } - // start up the session again: - if( session_start() === false ) { - wfDebug( __METHOD__ . ' ERROR:: Could not start session'); + /** + * Update the session with the finished information. 
+ * @param $sessionID string + * @param $requestKey string the HTTP request key + */ + private static function updateSession( $sessionID, $requestKey, $status ) { + + if ( session_start() === false ) { + wfDebug( __METHOD__ . ' ERROR:: Could not start session' ); } - // grab the updated session data pointer - $sd =& $_SESSION['wsDownload'][$upload_session_key]; - // if error update status: - if( !$status->isOK() ) { + + $sd =& $_SESSION['wsBgRequest'][$requestKey]; + if ( !$status->isOK() ) { $sd['apiUploadResult'] = FormatJson::encode( array( 'error' => $status->getWikiText() ) ); + } else { + $sd['apiUploadResult'] = FormatJson::encode( $status->value ); } - // if status okay process upload using fauxReq to api: - if( $status->isOK() ){ - // setup the FauxRequest + + session_write_close(); + } + + /** + * Process the downloaded file by handing it to the upload API via a FauxRequest + * @param $sd array + * @param $status Status + * @param $requestKey string the HTTP request key + * + * @returns Status + */ + private static function doFauxRequest( $sd, $status, $requestKey ) { + global $wgEnableWriteAPI; + + if ( $status->isOK() ) { $fauxReqData = $sd['mParams']; // Fix boolean parameters - foreach( $fauxReqData as $k => $v ) { - if( $v === false ) + foreach ( $fauxReqData as $k => $v ) { + if ( $v === false ) unset( $fauxReqData[$k] ); } $fauxReqData['action'] = 'upload'; $fauxReqData['format'] = 'json'; - $fauxReqData['internalhttpsession'] = $upload_session_key; + $fauxReqData['internalhttpsession'] = $requestKey; + // evil but no other clean way about it: - $faxReq = new FauxRequest( $fauxReqData, true ); - $processor = new ApiMain( $faxReq, $wgEnableWriteAPI ); + $fauxReq = new FauxRequest( $fauxReqData, true ); + $processor = new ApiMain( $fauxReq, $wgEnableWriteAPI ); - //init the mUpload var for the $processor + // init the mUpload var for the $processor $processor->execute(); $processor->getResult()->cleanUpUTF8(); $printer = $processor->createPrinterByName( 'json' ); $printer->initPrinter( false ); ob_start(); $printer->execute(); - $apiUploadResult = ob_get_clean(); // the status updates runner will grab the result form the session: - $sd['apiUploadResult'] = $apiUploadResult; + $status->value = ob_get_clean(); } - // close the session: - session_write_close(); + return $status; } /** - * Check if the URL can be served by localhost - * @param $url string Full url to check - * @return bool + * Run a session based download. + * + * @param $sessionID string: the session id with the download details + * @param $requestKey string: the key of the given upload session + * (a given client could have started a few http uploads at once) */ - public static function isLocalURL( $url ) { - global $wgCommandLineMode, $wgConf; - if ( $wgCommandLineMode ) { - return false; - } + public static function doSessionIdDownload( $sessionID, $requestKey ) { + global $wgAsyncHTTPTimeout; - // Extract host part - $matches = array(); - if ( preg_match( '!^http://([\w.-]+)[/:].*$!', $url, $matches ) ) { - $host = $matches[1]; - // Split up dotwise - $domainParts = explode( '.', $host ); - // Check if this domain or any superdomain is listed in $wgConf as a local virtual host - $domainParts = array_reverse( $domainParts ); - for ( $i = 0; $i < count( $domainParts ); $i++ ) { - $domainPart = $domainParts[$i]; - if ( $i == 0 ) { - $domain = $domainPart; - } else { - $domain = $domainPart . '.' . $domain; - } - if ( $wgConf->isLocalVHost( $domain ) ) { - return true; - } - } - } - return false; - } + wfDebug( __METHOD__ . 
"\n\n doSessionIdDownload :\n\n" ); + $sd = self::recoverSession( $sessionID ); + $req = HttpRequest::factory( $sd['url'], + array( + 'targetFilePath' => $sd['targetFilePath'], + 'requestKey' => $requestKey, + 'timeout' => $wgAsyncHTTPTimeout, + ) ); - /** - * Return a standard user-agent we can use for external requests. - */ - public static function userAgent() { - global $wgVersion; - return "MediaWiki/$wgVersion"; - } + // run the actual request .. (this can take some time) + wfDebug( __METHOD__ . 'do Session Download :: ' . $sd['url'] . ' tf: ' . + $sd['targetFilePath'] . "\n\n" ); + $status = $req->doRequest(); - /** - * Checks that the given URI is a valid one - * @param $uri Mixed: URI to check for validity - */ - public static function isValidURI( $uri ){ - return preg_match( - '/(ftp|http|https):\/\/(\w+:{0,1}\w*@)?(\S+)(:[0-9]+)?(\/|\/([\w#!:.?+=&%@!\-\/]))?/', - $uri, - $matches - ); + self::updateSession( $sessionID, $requestKey, + self::handleFauxResponse( $sd, $status ) ); } } +/** + * This wrapper class will call out to curl (if available) or fallback + * to regular PHP if necessary for handling internal HTTP requests. + */ class HttpRequest { - var $target_file_path; - var $upload_session_key; - function __construct( $url, $opt ){ + private $targetFilePath; + private $requestKey; + protected $content; + protected $timeout = 'default'; + protected $headersOnly = null; + protected $postdata = null; + protected $proxy = null; + protected $no_proxy = false; + protected $sslVerifyHost = true; + protected $caInfo = null; + protected $method = "GET"; + protected $url; + public $status; + + /** + * @param $url string url to use + * @param $options array (optional) extra params to pass + * Possible keys for the array: + * method + * timeout + * targetFilePath + * requestKey + * headersOnly + * postdata + * proxy + * no_proxy + * sslVerifyHost + * caInfo + */ + function __construct( $url = null, $opt ) { + global $wgHTTPTimeout; - global $wgSyncHTTPTimeout; $this->url = $url; - // set the timeout to default sync timeout (unless the timeout option is provided) - $this->timeout = ( isset( $opt['timeout'] ) ) ? $opt['timeout'] : $wgSyncHTTPTimeout; - //check special key default - if($this->timeout == 'default'){ - $opts['timeout'] = $wgSyncHTTPTimeout; - } - $this->method = ( isset( $opt['method'] ) ) ? $opt['method'] : 'GET'; - $this->target_file_path = ( isset( $opt['target_file_path'] ) ) - ? $opt['target_file_path'] : false; - $this->upload_session_key = ( isset( $opt['upload_session_key'] ) ) - ? $opt['upload_session_key'] : false; - $this->headers_only = ( isset( $opt['headers_only'] ) ) ? $opt['headers_only'] : false; - $this->do_close_session_update = isset( $opt['do_close_session_update'] ); - $this->postData = isset( $opt['postdata'] ) ? $opt['postdata'] : ''; + if ( !ini_get( 'allow_url_fopen' ) ) { + $this->status = Status::newFatal( 'allow_url_fopen needs to be enabled for http copy to work' ); + } elseif ( !Http::isValidURI( $this->url ) ) { + $this->status = Status::newFatal( 'bad-url' ); + } else { + $this->status = Status::newGood( 100 ); // continue + } - $this->proxy = isset( $opt['proxy'] )? $opt['proxy'] : ''; + if ( array_key_exists( 'timeout', $opt ) && $opt['timeout'] != 'default' ) { + $this->timeout = $opt['timeout']; + } else { + $this->timeout = $wgHTTPTimeout; + } - $this->ssl_verifyhost = (isset( $opt['ssl_verifyhost'] ))? 
$opt['ssl_verifyhost']: false; + $members = array( "targetFilePath", "requestKey", "headersOnly", "postdata", + "proxy", "no_proxy", "sslVerifyHost", "caInfo", "method" ); + foreach ( $members as $o ) { + if ( array_key_exists( $o, $opt ) ) { + $this->$o = $opt[$o]; + } + } - $this->cainfo = (isset( $opt['cainfo'] ))? $op['cainfo']: false; + if ( is_array( $this->postdata ) ) { + $this->postdata = wfArrayToCGI( $this->postdata ); + } + } + /** + * For backwards compatibility, we provide a __toString method so + * that any code that expects a string result from Http::Get() + * will see the content of the request. + */ + function __toString() { + return $this->content; } - public static function newRequest($url, $opt){ - # select the handler (use curl if available) - if ( function_exists( 'curl_init' ) ) { - return new curlHttpRequest($url, $opt); + /** + * Generate a new request object + * @see HttpRequest::__construct + */ + public static function factory( $url, $opt ) { + global $wgForceHTTPEngine; + + if ( function_exists( 'curl_init' ) && $wgForceHTTPEngine == "curl" ) { + return new CurlHttpRequest( $url, $opt ); } else { - return new phpHttpRequest($url, $opt); + return new PhpHttpRequest( $url, $opt ); } } - /** - * Get the contents of a file by HTTP - * @param $url string Full URL to act on - * @param $Opt associative array Optional array of options: - * 'method' => 'GET', 'POST' etc. - * 'target_file_path' => if curl should output to a target file - * 'adapter' => 'curl', 'soket' - */ - public function doRequest() { - # Make sure we have a valid url - if( !Http::isValidURI( $this->url ) ) - return Status::newFatal('bad-url'); - //do the actual request: - return $this->doReq(); + public function getContent() { + return $this->content; } -} -class curlHttpRequest extends HttpRequest { - public function doReq(){ - global $wgHTTPProxy, $wgTitle; - $status = Status::newGood(); - $c = curl_init( $this->url ); - - // only do proxy setup if ( not suppressed $this->proxy === false ) - if( $this->proxy !== false ){ - if( $this->proxy ){ - curl_setopt( $c, CURLOPT_PROXY, $this->proxy ); - } else if ( Http::isLocalURL( $this->url ) ) { - curl_setopt( $c, CURLOPT_PROXY, 'localhost:80' ); - } else if ( $wgHTTPProxy ) { - curl_setopt( $c, CURLOPT_PROXY, $wgHTTPProxy ); + public function handleOutput() { + // if we wrote to a target file close up or return error + if ( $this->targetFilePath ) { + $this->writer->close(); + if ( !$this->writer->status->isOK() ) { + $this->status = $this->writer->status; + return $this->status; } } + } - curl_setopt( $c, CURLOPT_TIMEOUT, $this->timeout ); - curl_setopt( $c, CURLOPT_USERAGENT, Http::userAgent() ); + public function doRequest() { + global $wgTitle; - if( $this->ssl_verifyhost ) - curl_setopt( $c, CURLOPT_SSL_VERIFYHOST, $this->ssl_verifyhost); + if ( !$this->status->isOK() ) { + return $this->status; + } - if( $this->cainfo ) - curl_setopt( $c, CURLOPT_CAINFO, $this->cainfo); + $this->initRequest(); - if ( $this->headers_only ) { - curl_setopt( $c, CURLOPT_NOBODY, true ); - curl_setopt( $c, CURLOPT_HEADER, true ); - } elseif ( $this->method == 'POST' ) { - curl_setopt( $c, CURLOPT_POST, true ); - curl_setopt( $c, CURLOPT_POSTFIELDS, $this->postData ); - // Suppress 'Expect: 100-continue' header, as some servers - // will reject it with a 417 and Curl won't auto retry - // with HTTP 1.0 fallback - curl_setopt( $c, CURLOPT_HTTPHEADER, array( 'Expect:' ) ); - } else { - curl_setopt( $c, CURLOPT_CUSTOMREQUEST, $this->method ); + if ( !$this->no_proxy ) { + 
$this->proxySetup(); } # Set the referer to $wgTitle, even in command-line mode @@ -414,268 +495,332 @@ class curlHttpRequest extends HttpRequest { # $_SERVER['REQUEST_URI'] gives a less reliable indication of the # referring page. if ( is_object( $wgTitle ) ) { - curl_setopt( $c, CURLOPT_REFERER, $wgTitle->getFullURL() ); + $this->setReferrer( $wgTitle->getFullURL() ); } - // set the write back function (if we are writing to a file) - if( $this->target_file_path ) { - $cwrite = new simpleFileWriter( $this->target_file_path, - $this->upload_session_key, - $this->do_close_session_update - ); - if( !$cwrite->status->isOK() ) { - wfDebug( __METHOD__ . "ERROR in setting up simpleFileWriter\n" ); - $status = $cwrite->status; - return $status; - } - curl_setopt( $c, CURLOPT_WRITEFUNCTION, array( $cwrite, 'callbackWriteBody' ) ); + $this->setupOutputHandler(); + + if ( $this->status->isOK() ) { + $this->spinTheWheel(); } - // start output grabber: - if( !$this->target_file_path ) - ob_start(); + if ( !$this->status->isOK() ) { + return $this->status; } - //run the actual curl_exec: - try { - if ( false === curl_exec( $c ) ) { - $error_txt ='Error sending request: #' . curl_errno( $c ) .' '. curl_error( $c ); - wfDebug( __METHOD__ . $error_txt . "\n" ); - $status = Status::newFatal( $error_txt ); + $this->handleOutput(); + + $this->finish(); + return $this->status; + } + + public function setupOutputHandler() { + if ( $this->targetFilePath ) { + $this->writer = new SimpleFileWriter( $this->targetFilePath, + $this->requestKey ); + if ( !$this->writer->status->isOK() ) { + wfDebug( __METHOD__ . "ERROR in setting up SimpleFileWriter\n" ); + $this->status = $this->writer->status; + return $this->status; } - } catch ( Exception $e ) { - // do something with curl exec error? + $this->setCallback( array( $this, 'readAndSave' ) ); + } else { + $this->setCallback( array( $this, 'readOnly' ) ); + } + } +} + +/** + * HttpRequest implemented using internal curl compiled into PHP + */ +class CurlHttpRequest extends HttpRequest { + private $c; + + public function initRequest() { + $this->c = curl_init( $this->url ); + } + + public function setReferrer( $url ) { + curl_setopt( $this->c, CURLOPT_REFERER, $url ); + } + + public function proxySetup() { + global $wgHTTPProxy; + + if ( is_string( $this->proxy ) ) { + curl_setopt( $this->c, CURLOPT_PROXY, $this->proxy ); + } else if ( Http::isLocalURL( $this->url ) ) { /* Not sure this makes any sense. 
*/ + curl_setopt( $this->c, CURLOPT_PROXY, 'localhost:80' ); + } else if ( $wgHTTPProxy ) { + curl_setopt( $this->c, CURLOPT_PROXY, $wgHTTPProxy ); } - // if direct request output the results to the stats value: - if( !$this->target_file_path && $status->isOK() ) { - $status->value = ob_get_contents(); - ob_end_clean(); + } + + public function setCallback( $cb ) { + curl_setopt( $this->c, CURLOPT_WRITEFUNCTION, $cb ); + } + + public function spinTheWheel() { + curl_setopt( $this->c, CURLOPT_TIMEOUT, $this->timeout ); + curl_setopt( $this->c, CURLOPT_USERAGENT, Http::userAgent() ); + curl_setopt( $this->c, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_1_0 ); + + if ( $this->sslVerifyHost ) { + curl_setopt( $this->c, CURLOPT_SSL_VERIFYHOST, $this->sslVerifyHost ); } - // if we wrote to a target file close up or return error - if( $this->target_file_path ) { - $cwrite->close(); - if( !$cwrite->status->isOK() ) { - return $cwrite->status; - } + + if ( $this->caInfo ) { + curl_setopt( $this->c, CURLOPT_CAINFO, $this->caInfo ); } - if ( $this->headers_only ) { - $headers = explode( "\n", $status->value ); - $headerArray = array(); - foreach ( $headers as $header ) { - if ( !strlen( trim( $header ) ) ) - continue; - $headerParts = explode( ':', $header, 2 ); - if ( count( $headerParts ) == 1 ) { - $headerArray[] = trim( $header ); - } else { - list( $key, $val ) = $headerParts; - $headerArray[trim( $key )] = trim( $val ); - } - } - $status->value = $headerArray; + if ( $this->headersOnly ) { + curl_setopt( $this->c, CURLOPT_NOBODY, true ); + curl_setopt( $this->c, CURLOPT_HEADER, true ); + } elseif ( $this->method == 'POST' ) { + curl_setopt( $this->c, CURLOPT_POST, true ); + curl_setopt( $this->c, CURLOPT_POSTFIELDS, $this->postdata ); + // Suppress 'Expect: 100-continue' header, as some servers + // will reject it with a 417 and Curl won't auto retry + // with HTTP 1.0 fallback + curl_setopt( $this->c, CURLOPT_HTTPHEADER, array( 'Expect:' ) ); } else { - # Don't return the text of error messages, return false on error - $retcode = curl_getinfo( $c, CURLINFO_HTTP_CODE ); - if ( $retcode != 200 ) { - wfDebug( __METHOD__ . ": HTTP return code $retcode\n" ); - $status = Status::newFatal( "HTTP return code $retcode\n" ); + curl_setopt( $this->c, CURLOPT_CUSTOMREQUEST, $this->method ); + } + + try { + if ( false === curl_exec( $this->c ) ) { + $error_txt = 'Error sending request: #' . curl_errno( $this->c ) . ' ' . curl_error( $this->c ); + wfDebug( __METHOD__ . $error_txt . "\n" ); + $this->status->fatal( $error_txt ); /* i18n? */ } - # Don't return truncated output - $errno = curl_errno( $c ); + } catch ( Exception $e ) { + $errno = curl_errno( $this->c ); if ( $errno != CURLE_OK ) { - $errstr = curl_error( $c ); + $errstr = curl_error( $this->c ); wfDebug( __METHOD__ . ": CURL error code $errno: $errstr\n" ); - $status = Status::newFatal( " CURL error code $errno: $errstr\n" ); + $this->status->fatal( "CURL error code $errno: $errstr\n" ); /* i18n? 
*/ } } + } - curl_close( $c ); - // return the result obj - return $status; + public function readOnly( $curlH, $content ) { + $this->content .= $content; + return strlen( $content ); } -} -class phpHttpRequest extends HttpRequest { - public function doReq() { - global $wgTitle, $wgHTTPProxy; - # Check for php.ini allow_url_fopen - if( !ini_get( 'allow_url_fopen' ) ) { - return Status::newFatal( 'allow_url_fopen needs to be enabled for http copy to work' ); - } - // start with good status: - $status = Status::newGood(); + public function readAndSave( $curlH, $content ) { + return $this->writer->write( $content ); + } - if ( $this->headers_only ) { - $status->value = get_headers( $this->url, 1 ); - return $status; + public function getCode() { + # Don't return truncated output + $code = curl_getinfo( $this->c, CURLINFO_HTTP_CODE ); + if ( $code < 400 ) { + $this->status->setResult( true, $code ); + } else { + $this->status->setResult( false, $code ); } + } - // setup the headers - $headers = array( "User-Agent: " . Http::userAgent() ); - if ( is_object( $wgTitle ) ) { - $headers[] = "Referer: ". $wgTitle->getFullURL(); - } + public function finish() { + curl_close( $this->c ); + } + +} + +class PhpHttpRequest extends HttpRequest { + private $reqHeaders; + private $callback; + private $fh; - if( strcasecmp( $this->method, 'post' ) == 0 ) { + public function initRequest() { + $this->reqHeaders[] = "User-Agent: " . Http::userAgent(); + $this->reqHeaders[] = "Accept: */*"; + if ( $this->method == 'POST' ) { // Required for HTTP 1.0 POSTs - $headers[] = "Content-Length: 0"; + $this->reqHeaders[] = "Content-Length: " . strlen( $this->postdata ); + $this->reqHeaders[] = "Content-type: application/x-www-form-urlencoded"; } + } - $httpContextOptions = array( - 'method' => $this->method, - 'header' => implode( "\r\n", $headers ), - 'timeout' => $this->timeout - ); + public function proxySetup() { + global $wgHTTPProxy; - // Proxy setup: - if( $this->proxy ){ - $httpContextOptions['proxy'] = 'tcp://' . $this->proxy; - }else if ( Http::isLocalURL( $this->url ) ) { - $httpContextOptions['proxy'] = 'tcp://localhost:80'; + if ( $this->proxy ) { + $this->proxy = 'tcp://' . $this->proxy; + } elseif ( Http::isLocalURL( $this->url ) ) { + $this->proxy = 'tcp://localhost:80'; } elseif ( $wgHTTPProxy ) { - $httpContextOptions['proxy'] = 'tcp://' . $wgHTTPProxy ; + $this->proxy = 'tcp://' . $wgHTTPProxy ; } + } - $fcontext = stream_context_create ( - array( - 'http' => $httpContextOptions - ) - ); + public function setReferrer( $url ) { + $this->reqHeaders[] = "Referer: $url"; + } - $fh = fopen( $this->url, "r", false, $fcontext); + public function setCallback( $cb ) { + $this->callback = $cb; + } - // set the write back function (if we are writing to a file) - if( $this->target_file_path ) { - $cwrite = new simpleFileWriter( $this->target_file_path, - $this->upload_session_key, $this->do_close_session_update ); - if( !$cwrite->status->isOK() ) { - wfDebug( __METHOD__ . 
"ERROR in setting up simpleFileWriter\n" ); - $status = $cwrite->status; - return $status; - } + public function readOnly( $contents ) { + if ( $this->headersOnly ) { + return false; + } + $this->content .= $contents; - // Read $fh into the simpleFileWriter (grab in 64K chunks since - // it's likely a ~large~ media file) - while ( !feof( $fh ) ) { - $contents = fread( $fh, 65536 ); - $cwrite->callbackWriteBody( $fh, $contents ); - } - $cwrite->close(); - // check for simpleFileWriter error: - if( !$cwrite->status->isOK() ) { - return $cwrite->status; - } + return strlen( $contents ); + } + + public function readAndSave( $contents ) { + if ( $this->headersOnly ) { + return false; + } + return $this->writer->write( $content ); + } + + public function finish() { + fclose( $this->fh ); + } + + public function spinTheWheel() { + $opts = array(); + if ( $this->proxy && !$this->no_proxy ) { + $opts['proxy'] = $this->proxy; + $opts['request_fulluri'] = true; + } + + $opts['method'] = $this->method; + $opts['timeout'] = $this->timeout; + $opts['header'] = implode( "\r\n", $this->reqHeaders ); + if ( version_compare( "5.3.0", phpversion(), ">" ) ) { + $opts['protocol_version'] = "1.0"; } else { - // read $fh into status->value - $status->value = @stream_get_contents( $fh ); + $opts['protocol_version'] = "1.1"; } - //close the url file wrapper - fclose( $fh ); - // check for "false" - if( $status->value === false ) { - $status->error( 'file_get_contents-failed' ); + if ( $this->postdata ) { + $opts['content'] = $this->postdata; } - return $status; - } + $context = stream_context_create( array( 'http' => $opts ) ); + $this->fh = fopen( $this->url, "r", false, $context ); + $result = stream_get_meta_data( $this->fh ); + + if ( $result['timed_out'] ) { + $this->status->error( __CLASS__ . 
'::timed-out-in-headers' ); + } + + $this->headers = $result['wrapper_data']; + + $end = false; + $size = 8192; + while ( !$end ) { + $contents = fread( $this->fh, $size ); + $size = call_user_func( $this->callback, $contents ); + $end = ( $size == 0 ) || feof( $this->fh ); + } + } } /** * SimpleFileWriter with session id updates */ -class simpleFileWriter { - var $target_file_path; - var $status = null; - var $session_id = null; - var $session_update_interval = 0; // how often to update the session while downloading - - function simpleFileWriter( $target_file_path, $upload_session_key, - $do_close_session_update = false ) - { - $this->target_file_path = $target_file_path; - $this->upload_session_key = $upload_session_key; +class SimpleFileWriter { + private $targetFilePath = null; + private $status = null; + private $sessionId = null; + private $sessionUpdateInterval = 0; // how often to update the session while downloading + private $currentFileSize = 0; + private $requestKey = null; + private $prevTime = 0; + private $fp = null; + + /** + * @param $targetFilePath string the path to write the file out to + * @param $requestKey string the request to update + */ + function __construct( $targetFilePath, $requestKey ) { + $this->targetFilePath = $targetFilePath; + $this->requestKey = $requestKey; $this->status = Status::newGood(); - $this->do_close_session_update = $do_close_session_update; // open the file: - $this->fp = fopen( $this->target_file_path, 'w' ); - if( $this->fp === false ) { + $this->fp = fopen( $this->targetFilePath, 'w' ); + if ( $this->fp === false ) { $this->status = Status::newFatal( 'HTTP::could-not-open-file-for-writing' ); } // true start time $this->prevTime = time(); } - public function callbackWriteBody( $ch, $data_packet ) { + public function write( $dataPacket ) { global $wgMaxUploadSize, $wgLang; + if ( !$this->status->isOK() ) { + return false; + } + // write out the content - if( fwrite( $this->fp, $data_packet ) === false ) { - wfDebug( __METHOD__ ." ::could-not-write-to-file\n" ); + if ( fwrite( $this->fp, $dataPacket ) === false ) { + wfDebug( __METHOD__ . " ::could-not-write-to-file\n" ); $this->status = Status::newFatal( 'HTTP::could-not-write-to-file' ); - return 0; + return false; } // check file size: clearstatcache(); - $this->current_fsize = filesize( $this->target_file_path ); - - if( $this->current_fsize > $wgMaxUploadSize ) { - wfDebug( __METHOD__ . " ::http download too large\n" ); - $this->status = Status::newFatal( 'HTTP::file-has-grown-beyond-upload-limit-killing: ' . - 'downloaded more than ' . - $wgLang->formatSize( $wgMaxUploadSize ) . ' ' ); - return 0; - } - // if more than session_update_interval second have passed update_session_progress - if( $this->do_close_session_update && $this->upload_session_key && - ( ( time() - $this->prevTime ) > $this->session_update_interval ) ) { - $this->prevTime = time(); - $session_status = $this->update_session_progress(); - if( !$session_status->isOK() ) { - $this->status = $session_status; - wfDebug( __METHOD__ . ' update session failed or was canceled'); - return 0; - } + $this->currentFileSize = filesize( $this->targetFilePath ); + + if ( $this->currentFileSize > $wgMaxUploadSize ) { + wfDebug( __METHOD__ . " ::http-download-too-large\n" ); + $this->status = Status::newFatal( 'HTTP::file-has-grown-beyond-upload-limit-killing: ' . /* i18n? */ + 'downloaded more than ' . + $wgLang->formatSize( $wgMaxUploadSize ) . 
' ' ); + return false; + } + // if more than sessionUpdateInterval seconds have passed, call updateProgress + if ( $this->requestKey && + ( ( time() - $this->prevTime ) > $this->sessionUpdateInterval ) ) { + $this->prevTime = time(); + $session_status = $this->updateProgress(); + if ( !$session_status->isOK() ) { + $this->status = $session_status; + wfDebug( __METHOD__ . ' update session failed or was canceled' ); + return false; + } } - return strlen( $data_packet ); + return strlen( $dataPacket ); } - public function update_session_progress() { + public function updateProgress() { global $wgSessionsInMemcached; - $status = Status::newGood(); + // start the session (if necessary) - if( !$wgSessionsInMemcached ) { + if ( !$wgSessionsInMemcached ) { wfSuppressWarnings(); - if( session_start() === false ) { + if ( session_start() === false ) { wfDebug( __METHOD__ . ' could not start session' ); exit( 0 ); } wfRestoreWarnings(); } - $sd =& $_SESSION['wsDownload'][ $this->upload_session_key ]; + $sd =& $_SESSION['wsBgRequest'][ $this->requestKey ]; // check if the user canceled the request: - if( isset( $sd['user_cancel'] ) && $sd['user_cancel'] == true ) { - //@@todo kill the download + if ( isset( $sd['userCancel'] ) && $sd['userCancel'] ) { + // @@todo kill the download return Status::newFatal( 'user-canceled-request' ); } // update the progress bytes download so far: - $sd['loaded'] = $this->current_fsize; + $sd['loaded'] = $this->currentFileSize; // close down the session so we can other http queries can get session updates: - if( !$wgSessionsInMemcached ) + if ( !$wgSessionsInMemcached ) session_write_close(); - return $status; + return Status::newGood(); } public function close() { - // do a final session update: - if( $this->do_close_session_update ) { - $this->update_session_progress(); - } + $this->updateProgress(); + // close up the file handle: - if( false === fclose( $this->fp ) ) { + if ( false === fclose( $this->fp ) ) { $this->status = Status::newFatal( 'HTTP::could-not-close-file' ); } } diff --git a/tests/HttpTest.php b/tests/HttpTest.php new file mode 100644 index 0000000000..f79bbeb22b --- /dev/null +++ b/tests/HttpTest.php @@ -0,0 +1,185 @@ + "review=test" ); + + function setup() { + if ( is_array( self::$content ) ) { + return; + } + $content = tempnam( sys_get_temp_dir(), "" ); + $headers = tempnam( sys_get_temp_dir(), "" ); + if ( !$content || !$headers ) { + die( "Couldn't create temp file!" ); + } + + /* Maybe use wget instead of curl here ... just to use a different codebase? 
*/ + foreach ( $this->test_geturl as $u ) { + system( "curl -0 -s -D $headers '$u' -o $content" ); + self::$content["GET $u"] = file_get_contents( $content ); + self::$headers["GET $u"] = file_get_contents( $headers ); + } + foreach ( $this->test_requesturl as $u ) { + system( "curl -0 -s -X POST -H 'Content-Length: 0' -D $headers '$u' -o $content" ); + self::$content["POST $u"] = file_get_contents( $content ); + self::$headers["POST $u"] = file_get_contents( $headers ); + } + foreach ( $this->test_posturl as $u => $postdata ) { + system( "curl -0 -s -X POST -d '$postdata' -D $headers '$u' -o $content" ); + self::$content["POST $u => $postdata"] = file_get_contents( $content ); + self::$headers["POST $u => $postdata"] = file_get_contents( $headers ); + } + unlink( $content ); + unlink( $headers ); + } + + /* ./phase3/includes/Import.php:1108: $data = Http::request( $method, $url ); */ + /* ./includes/Import.php:1124: $link = Title::newFromText( "$interwiki:Special:Export/$page" ); */ + /* ./includes/Import.php:1134: return ImportStreamSource::newFromURL( $url, "POST" ); */ + function runHTTPRequests() { + global $wgForceHTTPEngine; + + /* no postdata here because the only request I could find in code so far didn't have any */ + foreach ( $this->test_requesturl as $u ) { + $r = Http::request( "POST", $u ); + $this->assertEquals( self::$content["POST $u"], $r, "POST $u with $wgForceHTTPEngine" ); + } + } + + function testRequestPHP() { + global $wgForceHTTPEngine; + + $wgForceHTTPEngine = "php"; + self::runHTTPRequests(); + } + + function testRequestCurl() { + global $wgForceHTTPEngine; + + $wgForceHTTPEngine = "curl"; + self::runHTTPRequests(); + } + + /* ./extensions/SpamBlacklist/SpamBlacklist_body.php:164: $httpText = Http::get( $fileName ); */ + /* ./extensions/ApiSVGProxy/ApiSVGProxy.body.php:44: $contents = Http::get( $file->getFullUrl() ); */ + /* ./extensions/BookInformation/drivers/IsbnDb.php:24: if( ( $xml = Http::get( $uri ) ) !== false ) { */ + /* ./extensions/BookInformation/drivers/Amazon.php:23: if( ( $xml = Http::get( $uri ) ) !== false ) { */ + /* ./extensions/TitleBlacklist/TitleBlacklist.list.php:217: $result = Http::get( $url ); */ + /* ./extensions/TSPoll/TSPoll.php:68: $get_server = Http::get( 'http://toolserver.org/~jan/poll/dev/main.php?page=wiki_output&id='.$id ); */ + /* ./extensions/TSPoll/TSPoll.php:70: $get_server = Http::get( 'http://toolserver.org/~jan/poll/main.php?page=wiki_output&id='.$id ); */ + /* ./extensions/DoubleWiki/DoubleWiki.php:56: $translation = Http::get( $url.$sep.'action=render' ); */ + /* ./extensions/ExternalPages/ExternalPages_body.php:177: $serializedText = Http::get( $this->mPageURL ); */ + /* ./extensions/Translate/utils/TranslationHelpers.php:143: $suggestions = Http::get( $url, $timeout ); */ + /* ./extensions/Translate/SpecialImportTranslations.php:169: $filedata = Http::get( $url ); ; */ + /* ./extensions/Translate/TranslateEditAddons.php:338: $suggestions = Http::get( $url, $timeout ); */ + /* ./extensions/SecurePoll/includes/user/Auth.php:283: $value = Http::get( $url, 20, $curlParams ); */ + /* ./extensions/DumpHTML/dumpHTML.inc:778: $contents = Http::get( $url ); */ + /* ./extensions/DumpHTML/dumpHTML.inc:1298: $contents = Http::get( $sourceUrl ); */ + /* ./extensions/DumpHTML/dumpHTML.inc:1373: $contents = Http::get( $sourceUrl ); */ + /* ./phase3/maintenance/rebuildInterwiki.inc:101: $intermap = Http::get( 'http://meta.wikimedia.org/w/index.php?title=Interwiki_map&action=raw', 30 ); */ + /* 
./phase3/maintenance/findhooks.php:98: $allhookdata = Http::get( 'http://www.mediawiki.org/w/api.php?action=query&list=categorymembers&cmtitle=Category:MediaWiki_hooks&cmlimit=500&format=php' ); */ + /* ./phase3/maintenance/findhooks.php:109: $oldhookdata = Http::get( 'http://www.mediawiki.org/w/api.php?action=query&list=categorymembers&cmtitle=Category:Removed_hooks&cmlimit=500&format=php' ); */ + /* ./phase3/maintenance/dumpInterwiki.inc:95: $intermap = Http::get( 'http://meta.wikimedia.org/w/index.php?title=Interwiki_map&action=raw', 30 ); */ + /* ./phase3/includes/parser/Parser.php:3204: $text = Http::get($url); */ + /* ./phase3/includes/filerepo/ForeignAPIRepo.php:131: $data = Http::get( $url ); */ + /* ./phase3/includes/filerepo/ForeignAPIRepo.php:205: $thumb = Http::get( $foreignUrl ); */ + /* ./phase3/includes/filerepo/File.php:1105: $res = Http::get( $renderUrl ); */ + /* ./phase3/includes/GlobalFunctions.php:2760: * @deprecated Use Http::get() instead */ + /* ./phase3/includes/GlobalFunctions.php:2764: return Http::get( $url ); */ + /* ./phase3/includes/ExternalStoreHttp.php:18: $ret = Http::get( $url ); */ + /* ./phase3/includes/Import.php:357: $data = Http::get( $src ); */ + /* ./extensions/ExternalData/ED_Utils.php:291: return Http::get( $url, 'default', array(CURLOPT_SSL_VERIFYPEER => false) ); */ + /* ./extensions/ExternalData/ED_Utils.php:293: return Http::get( $url ); */ + /* ./extensions/ExternalData/ED_Utils.php:306: $page = Http::get( $url, 'default', array(CURLOPT_SSL_VERIFYPEER => false) ); */ + /* ./extensions/ExternalData/ED_Utils.php:308: $page = Http::get( $url ); */ + /* ./extensions/CodeReview/backend/Subversion.php:320: $blob = Http::get( $target, $this->mTimeout ); */ + /* ./extensions/AmazonPlus/AmazonPlus.php:214: $this->response = Http::get( $urlstr ); */ + /* ./extensions/StaticWiki/StaticWiki.php:24: $text = Http::get( $url ) ; */ + /* ./extensions/StaticWiki/StaticWiki.php:64: $history = Http::get ( $wgStaticWikiExternalSite . "index.php?title=" . urlencode ( $url_title ) . 
"&action=history" ) ; */ + /* ./extensions/Configure/scripts/findSettings.php:126: $cont = Http::get( "http://www.mediawiki.org/w/index.php?title={$page}&action=raw" ); */ + /* ./extensions/TorBlock/TorBlock.class.php:148: $data = Http::get( $url ); */ + /* ./extensions/HoneypotIntegration/HoneypotIntegration.class.php:60: $data = Http::get( $wgHoneypotURLSource, 'default', */ + /* ./extensions/SemanticForms/includes/SF_Utils.inc:378: $page_contents = Http::get($url); */ + /* ./extensions/LocalisationUpdate/LocalisationUpdate.class.php:172: $basefilecontents = Http::get( $basefile ); */ + /* ./extensions/APC/SpecialAPC.php:245: $rss = Http::get( 'http://pecl.php.net/feeds/pkg_apc.rss' ); */ + /* ./extensions/Interlanguage/Interlanguage.php:56: $a = Http::get( $url ); */ + /* ./extensions/MWSearch/MWSearch_body.php:492: $data = Http::get( $searchUrl, $wgLuceneSearchTimeout, $httpOpts); */ + function runHTTPGets() { + global $wgForceHTTPEngine; + + foreach ( $this->test_geturl as $u ) { + $r = Http::get( $u ); + $this->assertEquals( self::$content["GET $u"], $r, "Get $u with $wgForceHTTPEngine" ); + } + } + + function testGetPHP() { + global $wgForceHTTPEngine; + + $wgForceHTTPEngine = "php"; + self::runHTTPGets(); + } + + function testGetCurl() { + global $wgForceHTTPEngine; + + $wgForceHTTPEngine = "curl"; + self::runHTTPGets(); + } + + /* ./phase3/maintenance/parserTests.inc:1618: return Http::post( $url, array( 'postdata' => wfArrayToCGI( $data ) ) ); */ + function runHTTPPosts() { + global $wgForceHTTPEngine; + + foreach ( $this->test_posturl as $u => $postdata ) { + $r = Http::post( $u, array( "postdata" => $postdata ) ); + $this->assertEquals( self::$content["POST $u => $postdata"], $r, "POST $u (postdata=$postdata) with $wgForceHTTPEngine" ); + } + } + + function testPostPHP() { + global $wgForceHTTPEngine; + + $wgForceHTTPEngine = "php"; + self::runHTTPPosts(); + } + + function testPostCurl() { + global $wgForceHTTPEngine; + + $wgForceHTTPEngine = "curl"; + self::runHTTPPosts(); + } + + function testDoDownload() { + } + + function testStartBackgroundDownload() { + } + + function testGetUploadSessionKey() { + } + + function testDoSessionIdDownload() { + } + + function testIsLocalURL() { + } + + /* ./extensions/DonationInterface/payflowpro_gateway/payflowpro_gateway.body.php:559: $user_agent = Http::userAgent(); */ + function testUserAgent() { + } + + function testIsValidURI() { + } +} -- 2.20.1