*/
class Http {
- const SYNC_DOWNLOAD = 1; // syncronys upload (in a single request)
- const ASYNC_DOWNLOAD = 2; // asynchronous upload we should spawn out another process and monitor progress if possible)
+ const SYNC_DOWNLOAD = 1; // synchronous download (in a single request)
+ const ASYNC_DOWNLOAD = 2; // asynchronous download
var $body = '';
- public static function request( $method, $url, $opts = array() ){
- $opts['method'] = ( strtoupper( $method ) == 'GET' || strtoupper( $method ) == 'POST' ) ? strtoupper( $method ) : null;
- $req = new HttpRequest( $url, $opts );
+ public static function request( $method, $url, $opts = array() ) {
+ $opts['method'] = ( strtoupper( $method ) == 'GET' || strtoupper( $method ) == 'POST' )
+ ? strtoupper( $method ) : null;
+ $req = HttpRequest::newRequest( $url, $opts );
$status = $req->doRequest();
- if( $status->isOK() ){
+ if( $status->isOK() ) {
return $status->value;
} else {
wfDebug( 'http error: ' . $status->getWikiText() );
return Http::request( 'POST', $url, $opts );
}
- public static function doDownload( $url, $target_file_path, $dl_mode = self::SYNC_DOWNLOAD, $redirectCount = 0 ){
+ public static function doDownload( $url, $target_file_path, $dl_mode = self::SYNC_DOWNLOAD,
+ $redirectCount = 0 )
+ {
global $wgPhpCli, $wgMaxUploadSize, $wgMaxRedirects;
// do a quick check to HEAD to insure the file size is not > $wgMaxUploadSize
- $headRequest = new HttpRequest( $url, array( 'headers_only' => true ) );
+ $headRequest = HttpRequest::newRequest( $url, array( 'headers_only' => true ) );
$headResponse = $headRequest->doRequest();
- if( !$headResponse->isOK() ){
+ if( !$headResponse->isOK() ) {
return $headResponse;
}
$head = $headResponse->value;
// check for redirects:
- if( isset( $head['Location'] ) && strrpos( $head[0], '302' ) !== false ){
- if( $redirectCount < $wgMaxRedirects ){
- if( UploadFromUrl::isValidURI( $head['Location'] ) ){
- return self::doDownload( $head['Location'], $target_file_path, $dl_mode, $redirectCount++ );
+ if( isset( $head['Location'] ) && strrpos( $head[0], '302' ) !== false ) {
+ if( $redirectCount < $wgMaxRedirects ) {
+ if( self::isValidURI( $head['Location'] ) ) {
+ // Must pass $redirectCount + 1 here: post-increment ($redirectCount++)
+ // evaluates to the OLD value, so the counter would never advance and
+ // a redirect loop would recurse past $wgMaxRedirects forever.
+ return self::doDownload( $head['Location'], $target_file_path,
+ $dl_mode, $redirectCount + 1 );
} else {
return Status::newFatal( 'upload-proto-error' );
}
}
}
// we did not get a 200 ok response:
- if( strrpos( $head[0], '200 OK' ) === false ){
+ if( strrpos( $head[0], '200 OK' ) === false ) {
return Status::newFatal( 'upload-http-error', htmlspecialchars( $head[0] ) );
}
$content_length = ( isset( $head['Content-Length'] ) ) ? $head['Content-Length'] : null;
- if( $content_length ){
- if( $content_length > $wgMaxUploadSize ){
- return Status::newFatal( 'requested file length ' . $content_length . ' is greater than $wgMaxUploadSize: ' . $wgMaxUploadSize );
+ if( $content_length ) {
+ if( $content_length > $wgMaxUploadSize ) {
+ return Status::newFatal( 'requested file length ' . $content_length .
+ ' is greater than $wgMaxUploadSize: ' . $wgMaxUploadSize );
}
}
- // check if we can find phpCliPath (for doing a background shell request to php to do the download:
- if( $wgPhpCli && wfShellExecEnabled() && $dl_mode == self::ASYNC_DOWNLOAD ){
- wfDebug( __METHOD__ . "\ASYNC_DOWNLOAD\n" );
+ // check if we can find phpCliPath (for doing a background shell request to
+ // php to do the download)
+ if( $wgPhpCli && wfShellExecEnabled() && $dl_mode == self::ASYNC_DOWNLOAD ) {
+ wfDebug( __METHOD__ . "\nASYNC_DOWNLOAD\n" );
//setup session and shell call:
return self::initBackgroundDownload( $url, $target_file_path, $content_length );
} else {
// SYNC_DOWNLOAD download as much as we can in the time we have to execute
$opts['method'] = 'GET';
$opts['target_file_path'] = $target_file_path;
- $req = new HttpRequest( $url, $opts );
+ $req = HttpRequest::newRequest( $url, $opts );
return $req->doRequest();
}
}
* should write to a file location and give updates
*
*/
- private static function initBackgroundDownload( $url, $target_file_path, $content_length = null ){
+ private static function initBackgroundDownload( $url, $target_file_path,
+ $content_length = null )
+ {
global $wgMaxUploadSize, $IP, $wgPhpCli, $wgServer;
$status = Status::newGood();
$_SESSION['wsDownload'][$upload_session_key]['loaded'] = 0;
// run the background download request:
- $cmd = $wgPhpCli . ' ' . $IP . "/maintenance/http_session_download.php --sid {$session_id} --usk {$upload_session_key}";
+ $cmd = $wgPhpCli . ' ' . $IP . "/maintenance/http_session_download.php " .
+ "--sid {$session_id} --usk {$upload_session_key} --wiki " . wfWikiId();
$pid = wfShellBackgroundExec( $cmd );
// the pid is not of much use since we won't be visiting this same apache any-time soon.
if( !$pid )
return Status::newFatal( 'could not run background shell exec' );
- // update the status value with the $upload_session_key (for the user to check on the status of the upload)
+ // update the status value with the $upload_session_key (for the user to
+ // check on the status of the upload)
$status->value = $upload_session_key;
// return good status
return $status;
}
- static function getUploadSessionKey(){
+ static function getUploadSessionKey() {
$key = mt_rand( 0, 0x7fffffff );
$_SESSION['wsUploadData'][$key] = array();
return $key;
* @param $upload_session_key String: the key of the given upload session
* (a given client could have started a few http uploads at once)
*/
- public static function doSessionIdDownload( $session_id, $upload_session_key ){
+ public static function doSessionIdDownload( $session_id, $upload_session_key ) {
global $wgUser, $wgEnableWriteAPI, $wgAsyncHTTPTimeout, $wgServer,
$wgSessionsInMemcached, $wgSessionHandler, $wgSessionStarted;
wfDebug( __METHOD__ . "\n\n doSessionIdDownload :\n\n" );
wfSetupSession();
// start the session
- if( session_start() === false ){
+ if( session_start() === false ) {
wfDebug( __METHOD__ . ' could not start session' );
}
// get all the vars we need from session_id
- if( !isset( $_SESSION[ 'wsDownload' ][$upload_session_key] ) ){
+ if( !isset( $_SESSION[ 'wsDownload' ][$upload_session_key] ) ) {
wfDebug( __METHOD__ . ' Error:could not find upload session');
exit();
}
// grab the session data to setup the request:
$sd =& $_SESSION['wsDownload'][$upload_session_key];
- // update the wgServer var ( since cmd line thinks we are localhost when we are really orgServer)
- if( isset( $sd['orgServer'] ) && $sd['orgServer'] ){
+ // update the wgServer var ( since cmd line thinks we are localhost
+ // when we are really orgServer)
+ if( isset( $sd['orgServer'] ) && $sd['orgServer'] ) {
$wgServer = $sd['orgServer'];
}
- // close down the session so we can other http queries can get session updates: (if not $wgSessionsInMemcached)
+ // close down the session so that other http queries can get session
+ // updates (if not $wgSessionsInMemcached)
if( !$wgSessionsInMemcached )
session_write_close();
- $req = new HttpRequest( $sd['url'], array(
+ $req = HttpRequest::newRequest( $sd['url'], array(
'target_file_path' => $sd['target_file_path'],
'upload_session_key'=> $upload_session_key,
'timeout' => $wgAsyncHTTPTimeout,
'do_close_session_update' => true
) );
// run the actual request .. (this can take some time)
- wfDebug( __METHOD__ . 'do Session Download :: ' . $sd['url'] . ' tf: ' . $sd['target_file_path'] . "\n\n");
+ wfDebug( __METHOD__ . 'do Session Download :: ' . $sd['url'] . ' tf: ' .
+ $sd['target_file_path'] . "\n\n");
$status = $req->doRequest();
//wfDebug("done with req status is: ". $status->isOK(). ' '.$status->getWikiText(). "\n");
// start up the session again:
- if( session_start() === false ){
+ if( session_start() === false ) {
wfDebug( __METHOD__ . ' ERROR:: Could not start session');
}
// grab the updated session data pointer
$sd =& $_SESSION['wsDownload'][$upload_session_key];
// if error update status:
- if( !$status->isOK() ){
- $sd['apiUploadResult'] = ApiFormatJson::getJsonEncode(
+ if( !$status->isOK() ) {
+ $sd['apiUploadResult'] = FormatJson::encode(
array( 'error' => $status->getWikiText() )
);
}
global $wgVersion;
return "MediaWiki/$wgVersion";
}
+
+ /**
+ * Checks that the given URI is a valid one
+ * @param $uri Mixed: URI to check for validity
+ */
+ public static function isValidURI( $uri ){
+ return preg_match(
+ '/(ftp|http|https):\/\/(\w+:{0,1}\w*@)?(\S+)(:[0-9]+)?(\/|\/([\w#!:.?+=&%@!\-\/]))?/',
+ $uri,
+ $matches
+ );
+ }
}
class HttpRequest {
var $target_file_path;
var $upload_session_key;
- var $supportedCurlOpts = array(
- 'CURLOPT_SSL_VERIFYHOST',
- 'CURLOPT_CAINFO',
- 'CURLOPT_COOKIE',
- 'CURLOPT_FOLLOWLOCATION',
- 'CURLOPT_FAILONERROR'
- );
function __construct( $url, $opt ){
+
global $wgSyncHTTPTimeout;
- // double check that it's a valid url:
$this->url = $url;
-
// set the timeout to default sync timeout (unless the timeout option is provided)
$this->timeout = ( isset( $opt['timeout'] ) ) ? $opt['timeout'] : $wgSyncHTTPTimeout;
//check special key default
}
$this->method = ( isset( $opt['method'] ) ) ? $opt['method'] : 'GET';
- $this->target_file_path = ( isset( $opt['target_file_path'] ) ) ? $opt['target_file_path'] : false;
- $this->upload_session_key = ( isset( $opt['upload_session_key'] ) ) ? $opt['upload_session_key'] : false;
+ $this->target_file_path = ( isset( $opt['target_file_path'] ) )
+ ? $opt['target_file_path'] : false;
+ $this->upload_session_key = ( isset( $opt['upload_session_key'] ) )
+ ? $opt['upload_session_key'] : false;
$this->headers_only = ( isset( $opt['headers_only'] ) ) ? $opt['headers_only'] : false;
$this->do_close_session_update = isset( $opt['do_close_session_update'] );
$this->postData = isset( $opt['postdata'] ) ? $opt['postdata'] : '';
- $this->curlOpt = array();
- //check for some curl options:
- foreach($this->supportedCurlOpts as $curlOpt){
- if(isset($opt[ $curlOpt ])){
- $this->curlOpt[$curlOpt] = $opt[ $curlOpt ];
- }
+ $this->ssl_verifyhost = (isset( $opt['ssl_verifyhost'] ))? $opt['ssl_verifyhost']: false;
+
+ // was $op['cainfo'] — $op is undefined; the options array is $opt, so the
+ // cainfo option was silently dropped (and raised an E_NOTICE).
+ $this->cainfo = (isset( $opt['cainfo'] ))? $opt['cainfo']: false;
+
+ }
+
+ public static function newRequest($url, $opt){
+ # select the handler (use curl if available)
+ if ( function_exists( 'curl_init' ) ) {
+ return new curlHttpRequest($url, $opt);
+ } else {
+ return new phpHttpRequest($url, $opt);
}
}
*/
public function doRequest() {
# Make sure we have a valid url
- if( !UploadFromUrl::isValidURI( $this->url ) )
+ if( !Http::isValidURI( $this->url ) )
return Status::newFatal('bad-url');
-
- # Use curl if available
- if ( function_exists( 'curl_init' ) ) {
- return $this->doCurlReq();
- } else {
- return $this->doPhpReq();
- }
+ //do the actual request:
+ return $this->doReq();
}
-
- private function doCurlReq(){
+}
+class curlHttpRequest extends HttpRequest {
+ public function doReq(){
global $wgHTTPProxy, $wgTitle;
$status = Status::newGood();
// proxy setup:
if ( Http::isLocalURL( $this->url ) ) {
curl_setopt( $c, CURLOPT_PROXY, 'localhost:80' );
- } else if ( $wgHTTPProxy ) {
+ } elseif ( $wgHTTPProxy ) {
curl_setopt( $c, CURLOPT_PROXY, $wgHTTPProxy );
}
curl_setopt( $c, CURLOPT_TIMEOUT, $this->timeout );
curl_setopt( $c, CURLOPT_USERAGENT, Http::userAgent() );
- //set any curl specific opts:
- foreach($this->curlOpt as $optKey => $optVal){
- curl_setopt($c, constant( $optKey ), $optVal);
- }
+ if($this->ssl_verifyhost)
+ curl_setopt( $c, CURLOPT_SSL_VERIFYHOST, $this->ssl_verifyhost);
+
+ if($this->cainfo)
+ curl_setopt( $c, CURLOPT_CAINFO, $this->cainfo);
if ( $this->headers_only ) {
curl_setopt( $c, CURLOPT_NOBODY, true );
}
// set the write back function (if we are writing to a file)
- if( $this->target_file_path ){
+ if( $this->target_file_path ) {
$cwrite = new simpleFileWriter( $this->target_file_path,
$this->upload_session_key,
$this->do_close_session_update
);
- if( !$cwrite->status->isOK() ){
+ if( !$cwrite->status->isOK() ) {
wfDebug( __METHOD__ . "ERROR in setting up simpleFileWriter\n" );
$status = $cwrite->status;
return $status;
// do something with curl exec error?
}
// if direct request output the results to the stats value:
- if( !$this->target_file_path && $status->isOK() ){
+ if( !$this->target_file_path && $status->isOK() ) {
$status->value = ob_get_contents();
ob_end_clean();
}
// if we wrote to a target file close up or return error
- if( $this->target_file_path ){
+ if( $this->target_file_path ) {
$cwrite->close();
- if( !$cwrite->status->isOK() ){
+ if( !$cwrite->status->isOK() ) {
return $cwrite->status;
}
}
}
curl_close( $c );
-
// return the result obj
return $status;
}
-
- public function doPhpReq(){
+}
+class phpHttpRequest extends HttpRequest {
+ public function doReq() {
global $wgTitle, $wgHTTPProxy;
# Check for php.ini allow_url_fopen
- if( !ini_get( 'allow_url_fopen' ) ){
+ if( !ini_get( 'allow_url_fopen' ) ) {
return Status::newFatal( 'allow_url_fopen needs to be enabled for http copy to work' );
}
$fh = fopen( $this->url, "r", false, $fcontext);
// set the write back function (if we are writing to a file)
- if( $this->target_file_path ){
- $cwrite = new simpleFileWriter( $this->target_file_path, $this->upload_session_key, $this->do_close_session_update );
- if( !$cwrite->status->isOK() ){
+ if( $this->target_file_path ) {
+ $cwrite = new simpleFileWriter( $this->target_file_path,
+ $this->upload_session_key, $this->do_close_session_update );
+ if( !$cwrite->status->isOK() ) {
wfDebug( __METHOD__ . "ERROR in setting up simpleFileWriter\n" );
$status = $cwrite->status;
return $status;
}
- // read $fh into the simpleFileWriter (grab in 64K chunks since its likely a ~large~ media file)
+ // read $fh into the simpleFileWriter (grab in 64K chunks since
+ // it's likely a ~large~ media file)
while ( !feof( $fh ) ) {
$contents = fread( $fh, 65536 );
$cwrite->callbackWriteBody( $fh, $contents );
}
$cwrite->close();
// check for simpleFileWriter error:
- if( !$cwrite->status->isOK() ){
+ if( !$cwrite->status->isOK() ) {
return $cwrite->status;
}
} else {
fclose( $fh );
// check for "false"
- if( $status->value === false ){
+ if( $status->value === false ) {
$status->error( 'file_get_contents-failed' );
}
return $status;
var $session_id = null;
var $session_update_interval = 0; // how often to update the session while downloading
- function simpleFileWriter( $target_file_path, $upload_session_key, $do_close_session_update = false ){
+ function simpleFileWriter( $target_file_path, $upload_session_key,
+ $do_close_session_update = false )
+ {
$this->target_file_path = $target_file_path;
$this->upload_session_key = $upload_session_key;
$this->status = Status::newGood();
$this->do_close_session_update = $do_close_session_update;
// open the file:
$this->fp = fopen( $this->target_file_path, 'w' );
- if( $this->fp === false ){
+ if( $this->fp === false ) {
$this->status = Status::newFatal( 'HTTP::could-not-open-file-for-writing' );
}
// true start time
$this->prevTime = time();
}
- public function callbackWriteBody( $ch, $data_packet ){
+ public function callbackWriteBody( $ch, $data_packet ) {
global $wgMaxUploadSize, $wgLang;
// write out the content
- if( fwrite( $this->fp, $data_packet ) === false ){
+ if( fwrite( $this->fp, $data_packet ) === false ) {
wfDebug( __METHOD__ ." ::could-not-write-to-file\n" );
$this->status = Status::newFatal( 'HTTP::could-not-write-to-file' );
return 0;
clearstatcache();
$this->current_fsize = filesize( $this->target_file_path );
- if( $this->current_fsize > $wgMaxUploadSize ){
+ if( $this->current_fsize > $wgMaxUploadSize ) {
wfDebug( __METHOD__ . " ::http download too large\n" );
- $this->status = Status::newFatal( 'HTTP::file-has-grown-beyond-upload-limit-killing: downloaded more than ' .
+ $this->status = Status::newFatal( 'HTTP::file-has-grown-beyond-upload-limit-killing: ' .
+ 'downloaded more than ' .
$wgLang->formatSize( $wgMaxUploadSize ) . ' ' );
return 0;
}
( ( time() - $this->prevTime ) > $this->session_update_interval ) ) {
$this->prevTime = time();
$session_status = $this->update_session_progress();
- if( !$session_status->isOK() ){
+ if( !$session_status->isOK() ) {
$this->status = $session_status;
wfDebug( __METHOD__ . ' update session failed or was canceled');
return 0;
return strlen( $data_packet );
}
- public function update_session_progress(){
+ public function update_session_progress() {
global $wgSessionsInMemcached;
$status = Status::newGood();
// start the session (if necessary)
- if( !$wgSessionsInMemcached ){
+ if( !$wgSessionsInMemcached ) {
wfSuppressWarnings();
- if( session_start() === false ){
+ if( session_start() === false ) {
wfDebug( __METHOD__ . ' could not start session' );
exit( 0 );
}
}
$sd =& $_SESSION['wsDownload'][ $this->upload_session_key ];
// check if the user canceled the request:
- if( isset( $sd['user_cancel'] ) && $sd['user_cancel'] == true ){
+ if( isset( $sd['user_cancel'] ) && $sd['user_cancel'] == true ) {
//@@todo kill the download
return Status::newFatal( 'user-canceled-request' );
}
return $status;
}
- public function close(){
+ public function close() {
// do a final session update:
- if( $this->do_close_session_update ){
+ if( $this->do_close_session_update ) {
$this->update_session_progress();
}
// close up the file handle:
- if( false === fclose( $this->fp ) ){
+ if( false === fclose( $this->fp ) ) {
$this->status = Status::newFatal( 'HTTP::could-not-close-file' );
}
}