<?php
/**
 * Script that postprocesses XML dumps from dumpBackup.php to add page text
 *
 * Copyright © 2005 Brion Vibber <brion@pobox.com>, 2010 Alexandre Emsenhuber
 * http://www.mediawiki.org/
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 * http://www.gnu.org/copyleft/gpl.html
 *
 * @file
 * @ingroup Maintenance
 */
$originalDir = getcwd();
-$optionsWithArgs = array( 'server', 'pagelist', 'start', 'end' );
-
-require_once( 'commandLine.inc' );
-require_once( 'SpecialExport.php' );
-require_once( 'maintenance/backup.inc' );
+require_once( dirname( __FILE__ ) . '/commandLine.inc' );
+require_once( 'backup.inc' );
+/**
+ * @ingroup Maintenance
+ */
class TextPassDumper extends BackupDumper {
var $prefetch = null;
-
- function dump() {
+ var $input = "php://stdin";
+ var $fetchCount = 0;
+ var $prefetchCount = 0;
+
+ var $failures = 0;
+ var $maxFailures = 5;
+ var $failedTextRetrievals = 0;
+ var $maxConsecutiveFailedTextRetrievals = 200;
+ var $failureTimeout = 5; // Seconds to sleep after db failure
+
+ var $php = "php";
+ var $spawn = false;
+ var $spawnProc = false;
+ var $spawnWrite = false;
+ var $spawnRead = false;
+ var $spawnErr = false;
+
+ function dump( $history, $text = WikiExporter::TEXT ) {
# This shouldn't happen if on console... ;)
header( 'Content-type: text/html; charset=UTF-8' );
-
+
# Notice messages will foul up your XML output even if they're
# relatively harmless.
-// ini_set( 'display_errors', false );
-
- $this->startTime = wfTime();
-
- $this->db =& wfGetDB( DB_SLAVE );
- $this->maxCount = $this->db->selectField( 'page', 'MAX(page_id)', '', 'BackupDumper::dump' );
- $this->startTime = wfTime();
-
- $this->egress = new ExportProgressFilter( $this->sink, $this );
-
- $input = fopen( "php://stdin", "rt" );
- $result = $this->readDump( $input );
-
- if( WikiError::isError( $result ) ) {
- $this->progress( $result->getMessage() );
- }
-
+ if ( ini_get( 'display_errors' ) )
+ ini_set( 'display_errors', 'stderr' );
+
+ $this->initProgress( $history );
+
+ $this->db = $this->backupDb();
+
+ $this->readDump();
+
+ if ( $this->spawnProc ) {
+ $this->closeSpawn();
+ }
+
$this->report( true );
}
-
+
function processOption( $opt, $val, $param ) {
- if( $opt == 'prefetch' ) {
- require_once 'maintenance/backupPrefetch.inc';
- $this->prefetch = new BaseDump( $val );
+ global $IP;
+ $url = $this->processFileOpt( $val, $param );
+
+ switch( $opt ) {
+ case 'prefetch':
+ require_once "$IP/maintenance/backupPrefetch.inc";
+ $this->prefetch = new BaseDump( $url );
+ break;
+ case 'stub':
+ $this->input = $url;
+ break;
+ case 'spawn':
+ $this->spawn = true;
+ if ( $val ) {
+ $this->php = $val;
+ }
+ break;
}
}
-
- function readDump( $input ) {
- $this->buffer = "";
- $this->openElement = false;
- $this->atStart = true;
- $this->state = "";
- $this->lastName = "";
+
+ function processFileOpt( $val, $param ) {
+ $fileURIs = explode(';',$param);
+ foreach ( $fileURIs as $URI ) {
+ switch( $val ) {
+ case "file":
+ $newURI = $URI;
+ break;
+ case "gzip":
+ $newURI = "compress.zlib://$URI";
+ break;
+ case "bzip2":
+ $newURI = "compress.bzip2://$URI";
+ break;
+ case "7zip":
+ $newURI = "mediawiki.compress.7z://$URI";
+ break;
+ default:
+ $newURI = $URI;
+ }
+ $newFileURIs[] = $newURI;
+ }
+ $val = implode( ';', $newFileURIs );
+ return $val;
+ }
+
+ /**
+ * Overridden to include prefetch ratio if enabled.
+ */
+ function showReport() {
+ if ( !$this->prefetch ) {
+ return parent::showReport();
+ }
+
+ if ( $this->reporting ) {
+ $delta = wfTime() - $this->startTime;
+ $now = wfTimestamp( TS_DB );
+ if ( $delta ) {
+ $rate = $this->pageCount / $delta;
+ $revrate = $this->revCount / $delta;
+ $portion = $this->revCount / $this->maxCount;
+ $eta = $this->startTime + $delta / $portion;
+ $etats = wfTimestamp( TS_DB, intval( $eta ) );
+ $fetchrate = 100.0 * $this->prefetchCount / $this->fetchCount;
+ } else {
+ $rate = '-';
+ $revrate = '-';
+ $etats = '-';
+ $fetchrate = '-';
+ }
+ $this->progress( sprintf( "%s: %s %d pages (%0.3f/sec), %d revs (%0.3f/sec), %0.1f%% prefetched, ETA %s [max %d]",
+ $now, wfWikiID(), $this->pageCount, $rate, $this->revCount, $revrate, $fetchrate, $etats, $this->maxCount ) );
+ }
+ }
+
+ function readDump() {
+ $state = '';
+ $lastName = '';
$this->thisPage = 0;
$this->thisRev = 0;
-
- $parser = xml_parser_create( "UTF-8" );
- xml_parser_set_option( $parser, XML_OPTION_CASE_FOLDING, false );
-
- xml_set_element_handler( $parser, array( &$this, 'startElement' ), array( &$this, 'endElement' ) );
- xml_set_character_data_handler( $parser, array( &$this, 'characterData' ) );
-
- $offset = 0; // for context extraction on error reporting
- $bufferSize = 512 * 1024;
- do {
- $chunk = fread( $input, $bufferSize );
- if( !xml_parse( $parser, $chunk, feof( $input ) ) ) {
- wfDebug( "TextDumpPass::readDump encountered XML parsing error\n" );
- return new WikiXmlError( $parser, 'XML import parse failure', $chunk, $offset );
+
+ $reader = new XMLReader();
+ $reader->open( $this->input );
+ $writer = new XMLWriter();
+ $writer->openMemory();
+
+
+ while ( $reader->read() ) {
+ $tag = $reader->name;
+ $type = $reader->nodeType;
+
+ if ( $type == XmlReader::END_ELEMENT ) {
+ $writer->endElement();
+
+ if ( $tag == 'revision' ) {
+ $this->revCount();
+ $this->thisRev = '';
+ } elseif ( $tag == 'page' ) {
+ $this->reportPage();
+ $this->thisPage = '';
+ }
+ } elseif ( $type == XmlReader::ELEMENT ) {
+ $attribs = array();
+ if ( $reader->hasAttributes ) {
+ for ( $i = 0; $reader->moveToAttributeNo( $i ); $i++ ) {
+ $attribs[$reader->name] = $reader->value;
+ }
+ }
+
+ if ( $reader->isEmptyElement && $tag == 'text' && isset( $attribs['id'] ) ) {
+ $writer->startElement( 'text' );
+ $writer->writeAttribute( 'xml:space', 'preserve' );
+ $text = $this->getText( $attribs['id'] );
+ if ( strlen( $text ) ) {
+ $writer->text( $text );
+ }
+ $writer->endElement();
+ } else {
+ $writer->startElement( $tag );
+ foreach( $attribs as $name => $val ) {
+ $writer->writeAttribute( $name, $val );
+ }
+ if ( $reader->isEmptyElement ) {
+ $writer->endElement();
+ }
+ }
+
+ $lastName = $tag;
+ if ( $tag == 'revision' ) {
+ $state = 'revision';
+ } elseif ( $tag == 'page' ) {
+ $state = 'page';
+ }
+ } elseif ( $type == XMLReader::SIGNIFICANT_WHITESPACE || $type = XMLReader::TEXT ) {
+ if ( $lastName == 'id' ) {
+ if ( $state == 'revision' ) {
+ $this->thisRev .= $reader->value;
+ } elseif ( $state == 'page' ) {
+ $this->thisPage .= $reader->value;
+ }
+ }
+ $writer->text( $reader->value );
}
- $offset += strlen( $chunk );
- } while( $chunk !== false && !feof( $input ) );
- xml_parser_free( $parser );
+ $this->sink->write( $writer->outputMemory() );
+ }
}
-
+
function getText( $id ) {
- if( isset( $this->prefetch ) ) {
+ $this->fetchCount++;
+ if ( isset( $this->prefetch ) ) {
$text = $this->prefetch->prefetch( $this->thisPage, $this->thisRev );
- if( !is_null( $text ) )
- return $text;
+ if ( $text !== null ) { // Entry missing from prefetch dump
+ $dbr = wfGetDB( DB_SLAVE );
+ $revID = intval( $this->thisRev );
+ $revLength = $dbr->selectField( 'revision', 'rev_len', array( 'rev_id' => $revID ) );
+ // if length of rev text in file doesn't match length in db, we reload
+ // this avoids carrying forward broken data from previous xml dumps
+ if( strlen( $text ) == $revLength ) {
+ $this->prefetchCount++;
+ return $text;
+ }
+ }
}
- $id = intval( $id );
- $row = $this->db->selectRow( 'text',
- array( 'old_text', 'old_flags' ),
- array( 'old_id' => $id ),
- 'TextPassDumper::getText' );
- return UtfNormal::cleanUp( strval( Revision::getRevisionText( $row ) ) );
+ return $this->doGetText( $id );
}
-
- function startElement( $parser, $name, $attribs ) {
- $this->clearOpenElement( null );
- $this->lastName = $name;
-
- if( $name == 'revision' ) {
- $this->state = $name;
- $this->egress->writeOpenPage( null, $this->buffer );
- $this->buffer = "";
- } elseif( $name == 'page' ) {
- $this->state = $name;
- if( $this->atStart ) {
- $this->egress->writeOpenStream( $this->buffer );
- $this->buffer = "";
- $this->atStart = false;
+
+ private function doGetText( $id ) {
+ $id = intval( $id );
+ $this->failures = 0;
+ $ex = new MWException( "Graceful storage failure" );
+ while (true) {
+ if ( $this->spawn ) {
+ if ($this->failures) {
+ // we don't know why it failed, could be the child process
+ // borked, could be db entry busted, could be db server out to lunch,
+ // so cover all bases
+ $this->closeSpawn();
+ $this->openSpawn();
+ }
+ $text = $this->getTextSpawned( $id );
+ } else {
+ $text = $this->getTextDbSafe( $id );
+ }
+ if ( $text === false ) {
+ $this->failures++;
+ if ( $this->failures > $this->maxFailures) {
+ $this->progress( "Failed to retrieve revision text for text id ".
+ "$id after $this->maxFailures tries, giving up" );
+ // were there so many bad retrievals in a row we want to bail?
+ // at some point we have to declare the dump irretrievably broken
+ $this->failedTextRetrievals++;
+ if ($this->failedTextRetrievals > $this->maxConsecutiveFailedTextRetrievals) {
+ throw $ex;
+ }
+ else {
+ // would be nice to return something better to the caller someday,
+ // log what we know about the failure and about the revision
+ return("");
+ }
+ } else {
+ $this->progress( "Error $this->failures " .
+ "of allowed $this->maxFailures retrieving revision text for text id $id! " .
+ "Pausing $this->failureTimeout seconds before retry..." );
+ sleep( $this->failureTimeout );
+ }
+ } else {
+ $this->failedTextRetrievals= 0;
+ return( $text );
}
}
-
- if( $name == "text" && isset( $attribs['id'] ) ) {
- $text = $this->getText( $attribs['id'] );
- $this->openElement = array( $name, array( 'xml:space' => 'preserve' ) );
- if( strlen( $text ) > 0 ) {
- $this->characterData( $parser, $text );
+
+ }
+
+ /**
+ * Fetch a text revision from the database, retrying in case of failure.
+ * This may survive some transitory errors by reconnecting, but
+ * may not survive a long-term server outage.
+ */
+ private function getTextDbSafe( $id ) {
+ while ( true ) {
+ try {
+ $text = $this->getTextDb( $id );
+ } catch ( DBQueryError $ex ) {
+ $text = false;
}
- } else {
- $this->openElement = array( $name, $attribs );
+ return $text;
}
}
-
- function endElement( $parser, $name ) {
- if( $this->openElement ) {
- $this->clearOpenElement( "" );
- } else {
- $this->buffer .= "</$name>";
- }
-
- if( $name == 'revision' ) {
- $this->egress->writeRevision( null, $this->buffer );
- $this->buffer = "";
- } elseif( $name == 'page' ) {
- $this->egress->writeClosePage( $this->buffer );
- $this->buffer = "";
- } elseif( $name == 'mediawiki' ) {
- $this->egress->writeCloseStream( $this->buffer );
- $this->buffer = "";
+
+ /**
+ * May throw a database error if, say, the server dies during query.
+ */
+ private function getTextDb( $id ) {
+ global $wgContLang;
+ $row = $this->db->selectRow( 'text',
+ array( 'old_text', 'old_flags' ),
+ array( 'old_id' => $id ),
+ __METHOD__ );
+ $text = Revision::getRevisionText( $row );
+ if ( $text === false ) {
+ return false;
}
+ $stripped = str_replace( "\r", "", $text );
+ $normalized = $wgContLang->normalize( $stripped );
+ return $normalized;
}
-
- function characterData( $parser, $data ) {
- $this->clearOpenElement( null );
- if( $this->lastName == "id" ) {
- if( $this->state == "revision" ) {
- $this->thisRev = intval( $data );
- } elseif( $this->state == "page" ) {
- $this->thisPage = intval( $data );
- }
+
+ private function getTextSpawned( $id ) {
+ wfSuppressWarnings();
+ if ( !$this->spawnProc ) {
+ // First time?
+ $this->openSpawn();
}
- $this->buffer .= htmlspecialchars( $data );
+ $text = $this->getTextSpawnedOnce( $id );
+ wfRestoreWarnings();
+ return $text;
}
-
- function clearOpenElement( $style ) {
- if( $this->openElement ) {
- $this->buffer .= wfElement( $this->openElement[0], $this->openElement[1], $style );
- $this->openElement = false;
+
+ function openSpawn() {
+ global $IP;
+
+ $cmd = implode( " ",
+ array_map( 'wfEscapeShellArg',
+ array(
+ $this->php,
+ "$IP/maintenance/fetchText.php",
+ '--wiki', wfWikiID() ) ) );
+ $spec = array(
+ 0 => array( "pipe", "r" ),
+ 1 => array( "pipe", "w" ),
+ 2 => array( "file", "/dev/null", "a" ) );
+ $pipes = array();
+
+ $this->progress( "Spawning database subprocess: $cmd" );
+ $this->spawnProc = proc_open( $cmd, $spec, $pipes );
+ if ( !$this->spawnProc ) {
+ // shit
+ $this->progress( "Subprocess spawn failed." );
+ return false;
}
+ list(
+ $this->spawnWrite, // -> stdin
+ $this->spawnRead, // <- stdout
+ ) = $pipes;
+
+ return true;
+ }
+
+ private function closeSpawn() {
+ wfSuppressWarnings();
+ if ( $this->spawnRead )
+ fclose( $this->spawnRead );
+ $this->spawnRead = false;
+ if ( $this->spawnWrite )
+ fclose( $this->spawnWrite );
+ $this->spawnWrite = false;
+ if ( $this->spawnErr )
+ fclose( $this->spawnErr );
+ $this->spawnErr = false;
+ if ( $this->spawnProc )
+ pclose( $this->spawnProc );
+ $this->spawnProc = false;
+ wfRestoreWarnings();
+ }
+
+ private function getTextSpawnedOnce( $id ) {
+ global $wgContLang;
+
+ $ok = fwrite( $this->spawnWrite, "$id\n" );
+ // $this->progress( ">> $id" );
+ if ( !$ok ) return false;
+
+ $ok = fflush( $this->spawnWrite );
+ // $this->progress( ">> [flush]" );
+ if ( !$ok ) return false;
+
+ // check that the text id they are sending is the one we asked for
+ // this avoids out of sync revision text errors we have encountered in the past
+ $newId = fgets( $this->spawnRead );
+ if ( $newId === false ) {
+ return false;
+ }
+ if ( $id != intval( $newId ) ) {
+ return false;
+ }
+
+ $len = fgets( $this->spawnRead );
+ // $this->progress( "<< " . trim( $len ) );
+ if ( $len === false ) return false;
+
+ $nbytes = intval( $len );
+ // actual error, not zero-length text
+ if ($nbytes < 0 ) return false;
+
+ $text = "";
+
+ // Subprocess may not send everything at once, we have to loop.
+ while ( $nbytes > strlen( $text ) ) {
+ $buffer = fread( $this->spawnRead, $nbytes - strlen( $text ) );
+ if ( $buffer === false ) break;
+ $text .= $buffer;
+ }
+
+ $gotbytes = strlen( $text );
+ if ( $gotbytes != $nbytes ) {
+ $this->progress( "Expected $nbytes bytes from database subprocess, got $gotbytes " );
+ return false;
+ }
+
+ // Do normalization in the dump thread...
+ $stripped = str_replace( "\r", "", $text );
+ $normalized = $wgContLang->normalize( $stripped );
+ return $normalized;
}
}
$dumper = new TextPassDumper( $argv );
-if( true ) {
- $dumper->dump();
+if ( !isset( $options['help'] ) ) {
+ $dumper->dump( WikiExporter::FULL );
} else {
- $dumper->progress( <<<END
+ $dumper->progress( <<<ENDS
This script postprocesses XML dumps from dumpBackup.php to add
page text which was stubbed out (using --stub).
Usage: php dumpTextPass.php [<options>]
Options:
- --prefetch <file> Use a prior dump file as a text source where possible.
- (Requires PHP 5.0+ and the XMLReader PECL extension)
- --quiet Don't dump status reports to stderr.
+ --stub=<type>:<file> To load a compressed stub dump instead of stdin
+ --prefetch=<type>:<file> Use a prior dump file as a text source, to save
+ pressure on the database.
+ --quiet Don't dump status reports to stderr.
--report=n Report position and speed after every n pages processed.
- (Default: 100)
-END
+ (Default: 100)
+ --server=h Force reading from MySQL server h
+ --output=<type>:<file> Write to a file instead of stdout
+ <type>s: file, gzip, bzip2, 7zip
+ --current Base ETA on number of pages in database instead of all revisions
+ --spawn Spawn a subprocess for loading text records
+ --help Display this help message
+ENDS
);
}
-?>
+