X-Git-Url: http://git.cyclocoop.org/?a=blobdiff_plain;f=maintenance%2Fbackup.inc;h=30bd0d88f8fc24e7a8f864cbedb9e79475267c97;hb=fc46b81e3f1ebdf9f93a6539a56a54b0c417ef7b;hp=d40636a6f3ad1c910e7a646c6a3ab37661b7dff2;hpb=e174a4ddfb96feee0a8305c60aade97c0ee30d3c;p=lhc%2Fweb%2Fwiklou.git

diff --git a/maintenance/backup.inc b/maintenance/backup.inc
index d40636a6f3..30bd0d88f8 100644
--- a/maintenance/backup.inc
+++ b/maintenance/backup.inc
@@ -18,16 +18,22 @@
  * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
  * http://www.gnu.org/copyleft/gpl.html
  *
- * @package MediaWiki
- * @subpackage SpecialPage
+ * @file
+ * @ingroup Dump Maintenance
  */
 
+/**
+ * @ingroup Dump Maintenance
+ */
 class DumpDBZip2Output extends DumpPipeOutput {
 	function DumpDBZip2Output( $file ) {
 		parent::DumpPipeOutput( "dbzip2", $file );
 	}
 }
 
+/**
+ * @ingroup Dump Maintenance
+ */
 class BackupDumper {
 	var $reportingInterval = 100;
 	var $reporting = true;
@@ -41,6 +47,7 @@ class BackupDumper {
 	var $endId = 0;
 	var $sink = null; // Output filters
 	var $stubText = false; // include rev_text_id instead of text; for 2-pass dump
+	var $dumpUploads = false;
 
 	function BackupDumper( $args ) {
 		$this->stderr = fopen( "php://stderr", "wt" );
@@ -98,8 +105,9 @@ class BackupDumper {
 		$sink = null;
 		$sinks = array();
 		foreach( $args as $arg ) {
+			$matches = array();
 			if( preg_match( '/^--(.+?)(?:=(.+?)(?::(.+?))?)?$/', $arg, $matches ) ) {
-				@list( $full, $opt, $val, $param ) = $matches;
+				@list( /* $full */ , $opt, $val, $param ) = $matches;
 				switch( $opt ) {
 				case "plugin":
 					$this->loadPlugin( $val, $param );
@@ -167,28 +175,38 @@ class BackupDumper {
 		// extension point for subclasses to add options
 	}
 
-	function dump( $history, $text = MW_EXPORT_TEXT ) {
+	function dump( $history, $text = WikiExporter::TEXT ) {
 		# Notice messages will foul up your XML output even if they're
 		# relatively harmless.
-		ini_set( 'display_errors', false );
+		if( ini_get( 'display_errors' ) )
+			ini_set( 'display_errors', 'stderr' );
 
 		$this->initProgress( $history );
 
-		$db =& $this->backupDb();
-		$exporter = new WikiExporter( $db, $history, MW_EXPORT_STREAM, $text );
+		$db = $this->backupDb();
+		$exporter = new WikiExporter( $db, $history, WikiExporter::STREAM, $text );
+		$exporter->dumpUploads = $this->dumpUploads;
 
 		$wrapper = new ExportProgressFilter( $this->sink, $this );
 		$exporter->setOutputSink( $wrapper );
 
 		if( !$this->skipHeader )
 			$exporter->openStream();
-
-		if( is_null( $this->pages ) ) {
+		# Log item dumps: all or by range
+		if( $history & WikiExporter::LOGS ) {
+			if( $this->startId || $this->endId ) {
+				$exporter->logsByRange( $this->startId, $this->endId );
+			} else {
+				$exporter->allLogs();
+			}
+		# Page dumps: all or by page ID range
+		} else if( is_null( $this->pages ) ) {
 			if( $this->startId || $this->endId ) {
 				$exporter->pagesByRange( $this->startId, $this->endId );
 			} else {
 				$exporter->allPages();
 			}
+		# Dump of specific pages
 		} else {
 			$exporter->pagesByName( $this->pages );
 		}
@@ -203,27 +221,37 @@ class BackupDumper {
 	 * Initialise starting time and maximum revision count.
 	 * We'll make ETA calculations based an progress, assuming relatively
 	 * constant per-revision rate.
-	 * @param int $history MW_EXPORT_CURRENT or MW_EXPORT_FULL
+	 * @param int $history WikiExporter::CURRENT or WikiExporter::FULL
 	 */
-	function initProgress( $history = MW_EXPORT_FULL ) {
-		$table = ($history == MW_EXPORT_CURRENT) ? 'page' : 'revision';
-		$field = ($history == MW_EXPORT_CURRENT) ? 'page_id' : 'rev_id';
+	function initProgress( $history = WikiExporter::FULL ) {
+		$table = ($history == WikiExporter::CURRENT) ? 'page' : 'revision';
+		$field = ($history == WikiExporter::CURRENT) ? 'page_id' : 'rev_id';
 
-		$dbr =& wfGetDB( DB_SLAVE );
+		$dbr = wfGetDB( DB_SLAVE );
 		$this->maxCount = $dbr->selectField( $table, "MAX($field)", '', 'BackupDumper::dump' );
 		$this->startTime = wfTime();
 	}
 
-	function &backupDb() {
-		global $wgDBadminuser, $wgDBadminpassword;
-		global $wgDBname, $wgDebugDumpSql;
-		$flags = ($wgDebugDumpSql ? DBO_DEBUG : 0) | DBO_DEFAULT; // god-damn hack
-		$db = new Database( $this->backupServer(), $wgDBadminuser, $wgDBadminpassword, $wgDBname, false, $flags );
-		$timeout = 3600 * 24;
-		$db->query( "SET net_read_timeout=$timeout" );
-		$db->query( "SET net_write_timeout=$timeout" );
+	/**
+	 * @fixme the --server parameter is currently not respected, as it doesn't seem
+	 * terribly easy to ask the load balancer for a particular connection by name.
+	 */
+	function backupDb() {
+		$this->lb = wfGetLBFactory()->newMainLB();
+		$db = $this->lb->getConnection( DB_SLAVE, 'backup' );
+
+		// Discourage the server from disconnecting us if it takes a long time
+		// to read out the big ol' batch query.
+		$db->setTimeout( 3600 * 24 );
+
 		return $db;
 	}
+
+	function __destruct() {
+		if( isset( $this->lb ) ) {
+			$this->lb->closeAll();
+		}
+	}
 
 	function backupServer() {
 		global $wgDBserver;
@@ -288,5 +316,3 @@ class ExportProgressFilter extends DumpFilter {
 		$this->progress->revCount();
 	}
 }
-
-?>
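
Usage sketch, not part of the patch above. It only illustrates how the entry points touched by this change could be driven from a maintenance script: BackupDumper still parses its own long options (the "plugin" case is visible in the diff), the public $startId/$endId vars feed pagesByRange() or the new logsByRange(), and dump() now accepts WikiExporter::LOGS. The require lines, the $dumpLogs switch and the ID range values are illustrative assumptions, not taken from this commit.

<?php
// Hypothetical driver script; adjust paths and option handling to your install.
require_once( 'commandLine.inc' ); // assumed MediaWiki CLI bootstrap of this era
require_once( 'backup.inc' );

// Remaining command-line arguments (e.g. --output=file:dump.xml) are parsed by
// BackupDumper's own processOption() loop shown in the diff above.
$dumper = new BackupDumper( $argv );

// Optional ID range, used by pagesByRange() or, with this change, logsByRange().
$dumper->startId = 1;      // illustrative values
$dumper->endId   = 50000;

$dumpLogs = false;         // hypothetical switch for this sketch
if( $dumpLogs ) {
	// New with this change: dump log entries instead of pages.
	$dumper->dump( WikiExporter::LOGS );
} else {
	// Existing behaviour: full-history page dump with inline revision text.
	$dumper->dump( WikiExporter::FULL, WikiExporter::TEXT );
}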