<?php
/**
+ * Script that postprocesses XML dumps from dumpBackup.php to add page text
+ *
* Copyright (C) 2005 Brion Vibber <brion@pobox.com>
* http://www.mediawiki.org/
*
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/gpl.html
*
- * @package MediaWiki
- * @subpackage SpecialPage
+ * @file
+ * @ingroup Maintenance
*/
$originalDir = getcwd();
-require_once( 'commandLine.inc' );
-require_once( 'SpecialExport.php' );
-require_once( 'maintenance/backup.inc' );
+require_once( dirname( __FILE__ ) . '/commandLine.inc' );
+require_once( 'backup.inc' );
/**
- * Stream wrapper around 7za filter program.
- * Required since we can't pass an open file resource to XMLReader->open()
- * which is used for the text prefetch.
+ * @ingroup Maintenance
*/
-class SevenZipStream {
- var $stream;
-
- private function stripPath( $path ) {
- $prefix = 'mediawiki.compress.7z://';
- return substr( $path, strlen( $prefix ) );
- }
-
- function stream_open( $path, $mode, $options, &$opened_path ) {
- if( $mode{0} == 'r' ) {
- $options = 'e -bd -so';
- } elseif( $mode{0} == 'w' ) {
- $options = 'a -bd -si';
- } else {
- return false;
- }
- $arg = wfEscapeShellArg( $this->stripPath( $path ) );
- $command = "7za $options $arg";
- if( !wfIsWindows() ) {
- // Suppress the stupid messages on stderr
- $command .= ' 2>/dev/null';
- }
- $this->stream = popen( $command, $mode );
- return ($this->stream !== false);
- }
-
- function url_stat( $path, $flags ) {
- return stat( $this->stripPath( $path ) );
- }
-
- // This is all so lame; there should be a default class we can extend
-
- function stream_close() {
- return fclose( $this->stream );
- }
-
- function stream_flush() {
- return fflush( $this->stream );
- }
-
- function stream_read( $count ) {
- return fread( $this->stream, $count );
- }
-
- function stream_write( $data ) {
- return fwrite( $this->stream, $data );
- }
-
- function stream_tell() {
- return ftell( $this->stream );
- }
-
- function stream_eof() {
- return feof( $this->stream );
- }
-
- function stream_seek( $offset, $whence ) {
- return fseek( $this->stream, $offset, $whence );
- }
-}
-stream_wrapper_register( 'mediawiki.compress.7z', 'SevenZipStream' );
-
-
class TextPassDumper extends BackupDumper {
var $prefetch = null;
var $input = "php://stdin";
- var $history = MW_EXPORT_FULL;
+ var $history = WikiExporter::FULL;
var $fetchCount = 0;
var $prefetchCount = 0;
-
- var $failures = 0;
- var $maxFailures = 200;
+ var $prefetchCountLast = 0;
+ var $fetchCountLast = 0;
+
+ var $maxFailures = 5;
+ var $maxConsecutiveFailedTextRetrievals = 200;
var $failureTimeout = 5; // Seconds to sleep after db failure
- function dump() {
- # This shouldn't happen if on console... ;)
+ var $php = "php";
+ var $spawn = false;
+ var $spawnProc = false;
+ var $spawnWrite = false;
+ var $spawnRead = false;
+ var $spawnErr = false;
+
+ var $xmlwriterobj = false;
+
+ // when we spend more than maxTimeAllowed seconds on this run, we continue
+ // processing until we write out the next complete page, then save output file(s),
+ // rename it/them and open new one(s)
+ var $maxTimeAllowed = 0; // 0 = no limit
+ var $timeExceeded = false;
+ var $firstPageWritten = false;
+ var $lastPageWritten = false;
+ var $checkpointJustWritten = false;
+ var $checkpointFiles = array();
+
+ /**
+ * @var DatabaseBase
+ */
+ protected $db;
+
+
+ /**
+ * Drop the database connection $this->db and try to get a new one.
+ *
+ * This function tries to get a /different/ connection if this is
+ * possible. Hence, (if this is possible) it switches to a different
+ * failover upon each call.
+ *
+ * This function resets $this->lb and closes all connections on it.
+ *
+ * @throws MWException
+ */
+ function rotateDb() {
+ // Cleaning up old connections
+ if ( isset( $this->lb ) ) {
+ $this->lb->closeAll();
+ unset( $this->lb );
+ }
+
+ if ( isset( $this->db ) && $this->db->isOpen() )
+ {
+ throw new MWException( 'DB is set and has not been closed by the Load Balancer' );
+ }
+
+
+ unset( $this->db );
+
+ // Trying to set up new connection.
+ // We do /not/ retry upon failure, but delegate to encapsulating logic, to avoid
+ // individually retrying at different layers of code.
+
+ // 1. The LoadBalancer.
+ try {
+ $this->lb = wfGetLBFactory()->newMainLB();
+ } catch (Exception $e) {
+ throw new MWException( __METHOD__ . " rotating DB failed to obtain new load balancer (" . $e->getMessage() . ")" );
+ }
+
+
+ // 2. The Connection, through the load balancer.
+ try {
+ $this->db = $this->lb->getConnection( DB_SLAVE, 'backup' );
+ } catch (Exception $e) {
+ throw new MWException( __METHOD__ . " rotating DB failed to obtain new database (" . $e->getMessage() . ")" );
+ }
+ }
+
+
+ function initProgress( $history ) {
+ parent::initProgress();
+ $this->timeOfCheckpoint = $this->startTime;
+ }
+
+ function dump( $history, $text = WikiExporter::TEXT ) {
+ // This shouldn't happen if on console... ;)
header( 'Content-type: text/html; charset=UTF-8' );
- # Notice messages will foul up your XML output even if they're
- # relatively harmless.
-// ini_set( 'display_errors', false );
+ // Notice messages will foul up your XML output even if they're
+ // relatively harmless.
+ if ( ini_get( 'display_errors' ) )
+ ini_set( 'display_errors', 'stderr' );
$this->initProgress( $this->history );
- $this->db =& $this->backupDb();
+ // We are trying to get an initial database connection to avoid that the
+ // first try of this request's first call to getText fails. However, if
+ // obtaining a good DB connection fails it's not a serious issue, as
+ // getText does retry upon failure and can start without having a working
+ // DB connection.
+ try {
+ $this->rotateDb();
+ } catch (Exception $e) {
+ // We do not even count this as a failure. Just let any
+ // watchdogs know.
+ $this->progress( "Getting initial DB connection failed (" .
+ $e->getMessage() . ")" );
+ }
$this->egress = new ExportProgressFilter( $this->sink, $this );
+ // it would be nice to do it in the constructor, oh well. need egress set
+ $this->finalOptionCheck();
+
+ // we only want this so we know how to close a stream :-P
+ $this->xmlwriterobj = new XmlDumpWriter();
+
$input = fopen( $this->input, "rt" );
$result = $this->readDump( $input );
- if( WikiError::isError( $result ) ) {
- wfDie( $result->getMessage() );
+ if ( WikiError::isError( $result ) ) {
+ throw new MWException( $result->getMessage() );
+ }
+
+ if ( $this->spawnProc ) {
+ $this->closeSpawn();
}
$this->report( true );
}
function processOption( $opt, $val, $param ) {
+ global $IP;
$url = $this->processFileOpt( $val, $param );
-
+
switch( $opt ) {
case 'prefetch':
- require_once 'maintenance/backupPrefetch.inc';
+ require_once "$IP/maintenance/backupPrefetch.inc";
$this->prefetch = new BaseDump( $url );
break;
case 'stub':
$this->input = $url;
break;
+ case 'maxtime':
+ $this->maxTimeAllowed = intval($val)*60;
+ break;
+ case 'checkpointfile':
+ $this->checkpointFiles[] = $val;
+ break;
case 'current':
- $this->history = MW_EXPORT_CURRENT;
+ $this->history = WikiExporter::CURRENT;
break;
case 'full':
- $this->history = MW_EXPORT_FULL;
+ $this->history = WikiExporter::FULL;
+ break;
+ case 'spawn':
+ $this->spawn = true;
+ if ( $val ) {
+ $this->php = $val;
+ }
break;
}
}
-
+
function processFileOpt( $val, $param ) {
- switch( $val ) {
- case "file":
- return $param;
- case "gzip":
- return "compress.zlib://$param";
- case "bzip2":
- return "compress.bzip2://$param";
- case "7zip":
- return "mediawiki.compress.7z://$param";
- default:
- return $val;
+ $fileURIs = explode(';',$param);
+ foreach ( $fileURIs as $URI ) {
+ switch( $val ) {
+ case "file":
+ $newURI = $URI;
+ break;
+ case "gzip":
+ $newURI = "compress.zlib://$URI";
+ break;
+ case "bzip2":
+ $newURI = "compress.bzip2://$URI";
+ break;
+ case "7zip":
+ $newURI = "mediawiki.compress.7z://$URI";
+ break;
+ default:
+ $newURI = $URI;
+ }
+ $newFileURIs[] = $newURI;
}
+ $val = implode( ';', $newFileURIs );
+ return $val;
}
/**
* Overridden to include prefetch ratio if enabled.
*/
function showReport() {
- if( !$this->prefetch ) {
- return parent::showReport();
+ if ( !$this->prefetch ) {
+ parent::showReport();
+ return;
}
-
- if( $this->reporting ) {
- $delta = wfTime() - $this->startTime;
+
+ if ( $this->reporting ) {
$now = wfTimestamp( TS_DB );
- if( $delta ) {
- $rate = $this->pageCount / $delta;
- $revrate = $this->revCount / $delta;
+ $nowts = wfTime();
+ $deltaAll = wfTime() - $this->startTime;
+ $deltaPart = wfTime() - $this->lastTime;
+ $this->pageCountPart = $this->pageCount - $this->pageCountLast;
+ $this->revCountPart = $this->revCount - $this->revCountLast;
+
+ if ( $deltaAll ) {
$portion = $this->revCount / $this->maxCount;
- $eta = $this->startTime + $delta / $portion;
+ $eta = $this->startTime + $deltaAll / $portion;
$etats = wfTimestamp( TS_DB, intval( $eta ) );
- $fetchrate = 100.0 * $this->prefetchCount / $this->fetchCount;
+ if ( $this->fetchCount ) {
+ $fetchRate = 100.0 * $this->prefetchCount / $this->fetchCount;
+ } else {
+ $fetchRate = '-';
+ }
+ $pageRate = $this->pageCount / $deltaAll;
+ $revRate = $this->revCount / $deltaAll;
} else {
- $rate = '-';
- $revrate = '-';
+ $pageRate = '-';
+ $revRate = '-';
$etats = '-';
- $fetchrate = '-';
+ $fetchRate = '-';
+ }
+ if ( $deltaPart ) {
+ if ( $this->fetchCountLast ) {
+ $fetchRatePart = 100.0 * $this->prefetchCountLast / $this->fetchCountLast;
+ } else {
+ $fetchRatePart = '-';
+ }
+ $pageRatePart = $this->pageCountPart / $deltaPart;
+ $revRatePart = $this->revCountPart / $deltaPart;
+
+ } else {
+ $fetchRatePart = '-';
+ $pageRatePart = '-';
+ $revRatePart = '-';
+ }
+ $this->progress( sprintf( "%s: %s (ID %d) %d pages (%0.1f|%0.1f/sec all|curr), %d revs (%0.1f|%0.1f/sec all|curr), %0.1f%%|%0.1f%% prefetched (all|curr), ETA %s [max %d]",
+ $now, wfWikiID(), $this->ID, $this->pageCount, $pageRate, $pageRatePart, $this->revCount, $revRate, $revRatePart, $fetchRate, $fetchRatePart, $etats, $this->maxCount ) );
+ $this->lastTime = $nowts;
+ $this->revCountLast = $this->revCount;
+ $this->prefetchCountLast = $this->prefetchCount;
+ $this->fetchCountLast = $this->fetchCount;
+ }
+ }
+
+ function setTimeExceeded() {
+ $this->timeExceeded = True;
+ }
+
+ function checkIfTimeExceeded() {
+ if ( $this->maxTimeAllowed && ( $this->lastTime - $this->timeOfCheckpoint > $this->maxTimeAllowed ) ) {
+ return true;
+ }
+ return false;
+ }
+
+ function finalOptionCheck() {
+ if ( ( $this->checkpointFiles && ! $this->maxTimeAllowed ) ||
+ ( $this->maxTimeAllowed && !$this->checkpointFiles ) ) {
+ throw new MWException("Options checkpointfile and maxtime must be specified together.\n");
+ }
+ foreach ($this->checkpointFiles as $checkpointFile) {
+ $count = substr_count ( $checkpointFile,"%s" );
+ if ( $count != 2 ) {
+ throw new MWException("Option checkpointfile must contain two '%s' for substitution of first and last pageids, count is $count instead, file is $checkpointFile.\n");
+ }
+ }
+
+ if ( $this->checkpointFiles ) {
+ $filenameList = (array)$this->egress->getFilenames();
+ if ( count( $filenameList ) != count( $this->checkpointFiles ) ) {
+ throw new MWException("One checkpointfile must be specified for each output option, if maxtime is used.\n");
}
- global $wgDBname;
- $this->progress( sprintf( "%s: %s %d pages (%0.3f/sec), %d revs (%0.3f/sec), %0.1f%% prefetched, ETA %s [max %d]",
- $now, $wgDBname, $this->pageCount, $rate, $this->revCount, $revrate, $fetchrate, $etats, $this->maxCount ) );
}
}
$offset = 0; // for context extraction on error reporting
$bufferSize = 512 * 1024;
do {
+ if ($this->checkIfTimeExceeded()) {
+ $this->setTimeExceeded();
+ }
$chunk = fread( $input, $bufferSize );
- if( !xml_parse( $parser, $chunk, feof( $input ) ) ) {
+ if ( !xml_parse( $parser, $chunk, feof( $input ) ) ) {
wfDebug( "TextDumpPass::readDump encountered XML parsing error\n" );
return new WikiXmlError( $parser, 'XML import parse failure', $chunk, $offset );
}
$offset += strlen( $chunk );
- } while( $chunk !== false && !feof( $input ) );
+ } while ( $chunk !== false && !feof( $input ) );
+ if ($this->maxTimeAllowed) {
+ $filenameList = (array)$this->egress->getFilenames();
+ // we wrote some stuff after the last checkpoint that needs to be renamed
+ if (file_exists($filenameList[0])) {
+ $newFilenames = array();
+ # we might have just written the header and footer and had no
+ # pages or revisions written... perhaps they were all deleted
+ # there's no real page with ID 0, so we use 0 as a sentinel. the caller is responsible
+ # for deciding what to do with a file containing only the
+ # siteinfo information and the mw tags.
+ if (! $this->firstPageWritten) {
+ $firstPageID = str_pad(0,9,"0",STR_PAD_LEFT);
+ $lastPageID = str_pad(0,9,"0",STR_PAD_LEFT);
+ }
+ else {
+ $firstPageID = str_pad($this->firstPageWritten,9,"0",STR_PAD_LEFT);
+ $lastPageID = str_pad($this->lastPageWritten,9,"0",STR_PAD_LEFT);
+ }
+ for ( $i = 0; $i < count( $filenameList ); $i++ ) {
+ $checkpointNameFilledIn = sprintf( $this->checkpointFiles[$i], $firstPageID, $lastPageID );
+ $fileinfo = pathinfo($filenameList[$i]);
+ $newFilenames[] = $fileinfo['dirname'] . '/' . $checkpointNameFilledIn;
+ }
+ $this->egress->closeAndRename( $newFilenames );
+ }
+ }
xml_parser_free( $parser );
-
+
return true;
}
+ /**
+ * Tries to get the revision text for a revision id.
+ *
+ * Upon errors, retries (Up to $this->maxFailures tries each call).
+ * If still no good revision could be found even after this retrying, "" is returned.
+ * If no good revision text could be returned for
+ * $this->maxConsecutiveFailedTextRetrievals consecutive calls to getText, MWException
+ * is thrown.
+ *
+ * @param $id string The revision id to get the text for
+ *
+ * @return string The revision text for $id, or ""
+ * @throws MWException
+ */
function getText( $id ) {
+ $prefetchNotTried = true; // Whether or not we already tried to get the text via prefetch.
+ $text = false; // The candidate for a good text. false if no proper value.
+ $failures = 0; // The number of times, this invocation of getText already failed.
+
+ static $consecutiveFailedTextRetrievals = 0; // The number of times getText failed without
+ // yielding a good text in between.
+
$this->fetchCount++;
- if( isset( $this->prefetch ) ) {
- $text = $this->prefetch->prefetch( $this->thisPage, $this->thisRev );
- if( $text === null ) {
- // Entry missing from prefetch dump
- } elseif( $text === "" ) {
- // Blank entries may indicate that the prior dump was broken.
- // To be safe, reload it.
- } else {
- $this->prefetchCount++;
- return $text;
+
+ // To allow simply returning on success without having to worry about bookkeeping,
+ // we assume this fetch works (possibly after some retries). Nevertheless, we keep
+ // the old value, so we can restore it if problems occur (see after the while loop).
+ $oldConsecutiveFailedTextRetrievals = $consecutiveFailedTextRetrievals;
+ $consecutiveFailedTextRetrievals = 0;
+
+ while ( $failures < $this->maxFailures ) {
+
+ // As soon as we found a good text for the $id, we will return immediately.
+ // Hence, if we make it past the try catch block, we know that we did not
+ // find a good text.
+
+ try {
+ // Step 1: Get some text (or reuse from previous iteration if checking
+ // for plausibility failed)
+
+ // Trying to get prefetch, if it has not been tried before
+ if ( $text === false && isset( $this->prefetch ) && $prefetchNotTried ) {
+ $prefetchNotTried = false;
+ $tryIsPrefetch = true;
+ $text = $this->prefetch->prefetch( $this->thisPage, $this->thisRev );
+ if ( $text === null ) {
+ $text = false;
+ }
+ }
+
+ if ( $text === false ) {
+ // Fallback to asking the database
+ $tryIsPrefetch = false;
+ if ( $this->spawn ) {
+ $text = $this->getTextSpawned( $id );
+ } else {
+ $text = $this->getTextDb( $id );
+ }
+ }
+
+ if ( $text === false ) {
+ throw new MWException( "Generic error while obtaining text for id " . $id );
+ }
+
+ // We received a good candidate for the text of $id via some method
+
+ // Step 2: Checking for plausibility and return the text if it is
+ // plausible
+ $revID = intval( $this->thisRev );
+ if ( ! isset( $this->db ) ) {
+ throw new MWException( "No database available" );
+ }
+ $revLength = $this->db->selectField( 'revision', 'rev_len', array( 'rev_id' => $revID ) );
+ if( strlen( $text ) == $revLength ) {
+ if ( $tryIsPrefetch ) {
+ $this->prefetchCount++;
+ }
+ return $text;
+ }
+
+ $text = false;
+ throw new MWException( "Received text is unplausible for id " . $id );
+
+ } catch (Exception $e) {
+ $msg = "getting/checking text " . $id . " failed (".$e->getMessage().")";
+ if ( $failures + 1 < $this->maxFailures ) {
+ $msg .= " (Will retry " . ( $this->maxFailures - $failures - 1) . " more times)";
+ }
+ $this->progress( $msg );
}
- }
- while( true ) {
+
+ // Something went wrong; we did not get a text that was plausible :(
+ $failures++;
+
+
+ // After backing off for some time, we try to reboot the whole process as
+ // much as possible to not carry over failures from one part to the other
+ // parts
+ sleep( $this->failureTimeout );
try {
- return $this->doGetText( $id );
- } catch (DBQueryError $ex) {
- $this->failures++;
- if( $this->failures > $this->maxFailures ) {
- throw $ex;
- } else {
- $this->progress( "Database failure $this->failures " .
- "of allowed $this->maxFailures! " .
- "Pausing $this->failureTimeout seconds..." );
- sleep( $this->failureTimeout );
+ $this->rotateDb();
+ if ( $this->spawn ) {
+ $this->closeSpawn();
+ $this->openSpawn();
}
+ } catch (Exception $e) {
+ $this->progress( "Rebooting getText infrastructure failed (".$e->getMessage().")" .
+ " Trying to continue anyways" );
}
}
+
+ // Retrieving a good text for $id failed (at least) maxFailures times.
+ // We abort for this $id.
+
+ // Restoring the consecutive failures, and maybe aborting, if the dump
+ // is too broken.
+ $consecutiveFailedTextRetrievals = $oldConsecutiveFailedTextRetrievals + 1;
+ if ( $consecutiveFailedTextRetrievals > $this->maxConsecutiveFailedTextRetrievals ) {
+ throw new MWException( "Graceful storage failure" );
+ }
+
+ return "";
}
-
+
+
/**
* May throw a database error if, say, the server dies during query.
+ * @param $id
+ * @return bool|string
+ * @throws MWException
*/
- private function doGetText( $id ) {
- $id = intval( $id );
+ private function getTextDb( $id ) {
+ global $wgContLang;
+ if ( ! isset( $this->db ) ) {
+ throw new MWException( __METHOD__ . "No database available" );
+ }
$row = $this->db->selectRow( 'text',
array( 'old_text', 'old_flags' ),
array( 'old_id' => $id ),
- 'TextPassDumper::getText' );
+ __METHOD__ );
$text = Revision::getRevisionText( $row );
+ if ( $text === false ) {
+ return false;
+ }
$stripped = str_replace( "\r", "", $text );
- $normalized = UtfNormal::cleanUp( $stripped );
+ $normalized = $wgContLang->normalize( $stripped );
+ return $normalized;
+ }
+
+ private function getTextSpawned( $id ) {
+ wfSuppressWarnings();
+ if ( !$this->spawnProc ) {
+ // First time?
+ $this->openSpawn();
+ }
+ $text = $this->getTextSpawnedOnce( $id );
+ wfRestoreWarnings();
+ return $text;
+ }
+
+ function openSpawn() {
+ global $IP;
+
+ if ( file_exists( "$IP/../multiversion/MWScript.php" ) ) {
+ $cmd = implode( " ",
+ array_map( 'wfEscapeShellArg',
+ array(
+ $this->php,
+ "$IP/../multiversion/MWScript.php",
+ "fetchText.php",
+ '--wiki', wfWikiID() ) ) );
+ }
+ else {
+ $cmd = implode( " ",
+ array_map( 'wfEscapeShellArg',
+ array(
+ $this->php,
+ "$IP/maintenance/fetchText.php",
+ '--wiki', wfWikiID() ) ) );
+ }
+ $spec = array(
+ 0 => array( "pipe", "r" ),
+ 1 => array( "pipe", "w" ),
+ 2 => array( "file", "/dev/null", "a" ) );
+ $pipes = array();
+
+ $this->progress( "Spawning database subprocess: $cmd" );
+ $this->spawnProc = proc_open( $cmd, $spec, $pipes );
+ if ( !$this->spawnProc ) {
+ // Spawning the subprocess failed; report it so the operator can investigate.
+ $this->progress( "Subprocess spawn failed." );
+ return false;
+ }
+ list(
+ $this->spawnWrite, // -> stdin
+ $this->spawnRead, // <- stdout
+ ) = $pipes;
+
+ return true;
+ }
+
+ private function closeSpawn() {
+ wfSuppressWarnings();
+ if ( $this->spawnRead )
+ fclose( $this->spawnRead );
+ $this->spawnRead = false;
+ if ( $this->spawnWrite )
+ fclose( $this->spawnWrite );
+ $this->spawnWrite = false;
+ if ( $this->spawnErr )
+ fclose( $this->spawnErr );
+ $this->spawnErr = false;
+ if ( $this->spawnProc )
+ pclose( $this->spawnProc );
+ $this->spawnProc = false;
+ wfRestoreWarnings();
+ }
+
+ private function getTextSpawnedOnce( $id ) {
+ global $wgContLang;
+
+ $ok = fwrite( $this->spawnWrite, "$id\n" );
+ // $this->progress( ">> $id" );
+ if ( !$ok ) return false;
+
+ $ok = fflush( $this->spawnWrite );
+ // $this->progress( ">> [flush]" );
+ if ( !$ok ) return false;
+
+ // check that the text id they are sending is the one we asked for
+ // this avoids out of sync revision text errors we have encountered in the past
+ $newId = fgets( $this->spawnRead );
+ if ( $newId === false ) {
+ return false;
+ }
+ if ( $id != intval( $newId ) ) {
+ return false;
+ }
+
+ $len = fgets( $this->spawnRead );
+ // $this->progress( "<< " . trim( $len ) );
+ if ( $len === false ) return false;
+
+ $nbytes = intval( $len );
+ // actual error, not zero-length text
+ if ($nbytes < 0 ) return false;
+
+ $text = "";
+
+ // Subprocess may not send everything at once, we have to loop.
+ while ( $nbytes > strlen( $text ) ) {
+ $buffer = fread( $this->spawnRead, $nbytes - strlen( $text ) );
+ if ( $buffer === false ) break;
+ $text .= $buffer;
+ }
+
+ $gotbytes = strlen( $text );
+ if ( $gotbytes != $nbytes ) {
+ $this->progress( "Expected $nbytes bytes from database subprocess, got $gotbytes " );
+ return false;
+ }
+
+ // Do normalization in the dump thread...
+ $stripped = str_replace( "\r", "", $text );
+ $normalized = $wgContLang->normalize( $stripped );
return $normalized;
}
function startElement( $parser, $name, $attribs ) {
+ $this->checkpointJustWritten = false;
+
$this->clearOpenElement( null );
$this->lastName = $name;
- if( $name == 'revision' ) {
+ if ( $name == 'revision' ) {
$this->state = $name;
$this->egress->writeOpenPage( null, $this->buffer );
$this->buffer = "";
- } elseif( $name == 'page' ) {
+ } elseif ( $name == 'page' ) {
$this->state = $name;
- if( $this->atStart ) {
+ if ( $this->atStart ) {
$this->egress->writeOpenStream( $this->buffer );
$this->buffer = "";
$this->atStart = false;
}
}
- if( $name == "text" && isset( $attribs['id'] ) ) {
+ if ( $name == "text" && isset( $attribs['id'] ) ) {
$text = $this->getText( $attribs['id'] );
$this->openElement = array( $name, array( 'xml:space' => 'preserve' ) );
- if( strlen( $text ) > 0 ) {
+ if ( strlen( $text ) > 0 ) {
$this->characterData( $parser, $text );
}
} else {
}
function endElement( $parser, $name ) {
- if( $this->openElement ) {
+ $this->checkpointJustWritten = false;
+
+ if ( $this->openElement ) {
$this->clearOpenElement( "" );
} else {
$this->buffer .= "</$name>";
}
- if( $name == 'revision' ) {
+ if ( $name == 'revision' ) {
$this->egress->writeRevision( null, $this->buffer );
$this->buffer = "";
$this->thisRev = "";
- } elseif( $name == 'page' ) {
- $this->egress->writeClosePage( $this->buffer );
- $this->buffer = "";
- $this->thisPage = "";
- } elseif( $name == 'mediawiki' ) {
+ } elseif ( $name == 'page' ) {
+ if (! $this->firstPageWritten) {
+ $this->firstPageWritten = trim($this->thisPage);
+ }
+ $this->lastPageWritten = trim($this->thisPage);
+ if ($this->timeExceeded) {
+ $this->egress->writeClosePage( $this->buffer );
+ // nasty hack, we can't just write the chardata after the
+ // page tag, it will include leading blanks from the next line
+ $this->egress->sink->write("\n");
+
+ $this->buffer = $this->xmlwriterobj->closeStream();
+ $this->egress->writeCloseStream( $this->buffer );
+
+ $this->buffer = "";
+ $this->thisPage = "";
+ // this could be more than one file if we had more than one output arg
+
+ $filenameList = (array)$this->egress->getFilenames();
+ $newFilenames = array();
+ $firstPageID = str_pad($this->firstPageWritten,9,"0",STR_PAD_LEFT);
+ $lastPageID = str_pad($this->lastPageWritten,9,"0",STR_PAD_LEFT);
+ for ( $i = 0; $i < count( $filenameList ); $i++ ) {
+ $checkpointNameFilledIn = sprintf( $this->checkpointFiles[$i], $firstPageID, $lastPageID );
+ $fileinfo = pathinfo($filenameList[$i]);
+ $newFilenames[] = $fileinfo['dirname'] . '/' . $checkpointNameFilledIn;
+ }
+ $this->egress->closeRenameAndReopen( $newFilenames );
+ $this->buffer = $this->xmlwriterobj->openStream();
+ $this->timeExceeded = false;
+ $this->timeOfCheckpoint = $this->lastTime;
+ $this->firstPageWritten = false;
+ $this->checkpointJustWritten = true;
+ }
+ else {
+ $this->egress->writeClosePage( $this->buffer );
+ $this->buffer = "";
+ $this->thisPage = "";
+ }
+
+ } elseif ( $name == 'mediawiki' ) {
$this->egress->writeCloseStream( $this->buffer );
$this->buffer = "";
}
function characterData( $parser, $data ) {
$this->clearOpenElement( null );
- if( $this->lastName == "id" ) {
- if( $this->state == "revision" ) {
+ if ( $this->lastName == "id" ) {
+ if ( $this->state == "revision" ) {
$this->thisRev .= $data;
- } elseif( $this->state == "page" ) {
+ } elseif ( $this->state == "page" ) {
$this->thisPage .= $data;
}
}
+ // have to skip the newline left over from closepagetag line of
+ // end of checkpoint files. nasty hack!!
+ if ($this->checkpointJustWritten) {
+ if ($data[0] == "\n") {
+ $data = substr($data,1);
+ }
+ $this->checkpointJustWritten = false;
+ }
$this->buffer .= htmlspecialchars( $data );
}
function clearOpenElement( $style ) {
- if( $this->openElement ) {
- $this->buffer .= wfElement( $this->openElement[0], $this->openElement[1], $style );
+ if ( $this->openElement ) {
+ $this->buffer .= Xml::element( $this->openElement[0], $this->openElement[1], $style );
$this->openElement = false;
}
}
$dumper = new TextPassDumper( $argv );
-if( true ) {
- $dumper->dump();
+if ( !isset( $options['help'] ) ) {
+ $dumper->dump( true );
} else {
- $dumper->progress( <<<END
+ $dumper->progress( <<<ENDS
This script postprocesses XML dumps from dumpBackup.php to add
page text which was stubbed out (using --stub).
Options:
--stub=<type>:<file> To load a compressed stub dump instead of stdin
--prefetch=<type>:<file> Use a prior dump file as a text source, to save
- pressure on the database.
- (Requires PHP 5.0+ and the XMLReader PECL extension)
- --quiet Don't dump status reports to stderr.
+ pressure on the database.
+ (Requires the XMLReader extension)
+ --maxtime=<minutes> Write out checkpoint file after this many minutes (writing
+ out complete page, closing xml file properly, and opening new one
+ with header). This option requires the checkpointfile option.
+ --checkpointfile=<filenamepattern> Use this string for checkpoint filenames,
+ substituting first pageid written for the first %s (required) and the
+ last pageid written for the second %s if it exists.
+ --quiet Don't dump status reports to stderr.
--report=n Report position and speed after every n pages processed.
- (Default: 100)
+ (Default: 100)
--server=h Force reading from MySQL server h
- --current Base ETA on number of pages in database instead of all revisions
-END
+ --current Base ETA on number of pages in database instead of all revisions
+ --spawn Spawn a subprocess for loading text records
+ --help Display this help message
+ENDS
);
}
-?>
+