 * BackupDumper that postprocesses XML dumps from dumpBackup.php to add page text
 *
 * Copyright (C) 2005 Brion Vibber <brion@pobox.com>
 * https://www.mediawiki.org/
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 * http://www.gnu.org/copyleft/gpl.html
 *
 * @file
 * @ingroup Maintenance
28 require_once __DIR__
. '/BackupDumper.php';
29 require_once __DIR__
. '/SevenZipStream.php';
30 require_once __DIR__
. '/../../includes/export/WikiExporter.php';
32 use MediaWiki\MediaWikiServices
;
33 use MediaWiki\Shell\Shell
;
34 use MediaWiki\Storage\BlobAccessException
;
35 use MediaWiki\Storage\SqlBlobStore
;
36 use Wikimedia\Rdbms\IMaintainableDatabase
;
39 * @ingroup Maintenance
41 class TextPassDumper
extends BackupDumper
{
43 public $prefetch = null;
44 /** @var string|bool */
46 /** @var string|bool */
49 // when we spend more than maxTimeAllowed seconds on this run, we continue
50 // processing until we write out the next complete page, then save output file(s),
51 // rename it/them and open new one(s)
52 public $maxTimeAllowed = 0; // 0 = no limit
54 protected $input = "php://stdin";
55 protected $history = WikiExporter
::FULL
;
56 protected $fetchCount = 0;
57 protected $prefetchCount = 0;
58 protected $prefetchCountLast = 0;
59 protected $fetchCountLast = 0;
61 protected $maxFailures = 5;
62 protected $maxConsecutiveFailedTextRetrievals = 200;
63 protected $failureTimeout = 5; // Seconds to sleep after db failure
65 protected $bufferSize = 524288; // In bytes. Maximum size to read from the stub in on go.
67 protected $php = "php";
68 protected $spawn = false;
73 protected $spawnProc = false;
78 protected $spawnWrite = false;
83 protected $spawnRead = false;
88 protected $spawnErr = false;
91 * @var bool|XmlDumpWriter
93 protected $xmlwriterobj = false;
95 protected $timeExceeded = false;
96 protected $firstPageWritten = false;
97 protected $lastPageWritten = false;
98 protected $checkpointJustWritten = false;
99 protected $checkpointFiles = [];
102 * @var IMaintainableDatabase
107 * @param array|null $args For backward compatibility
109 function __construct( $args = null ) {
110 parent
::__construct();
112 $this->addDescription( <<<TEXT
113 This script postprocesses XML dumps from dumpBackup.php to add
114 page text which was stubbed out (using --stub).
116 XML input is accepted on stdin.
117 XML output is sent to stdout; progress reports are sent to stderr.
120 $this->stderr
= fopen( "php://stderr", "wt" );
122 $this->addOption( 'stub', 'To load a compressed stub dump instead of stdin. ' .
123 'Specify as --stub=<type>:<file>.', false, true );
124 $this->addOption( 'prefetch', 'Use a prior dump file as a text source, to savepressure on the ' .
125 'database. (Requires the XMLReader extension). Specify as --prefetch=<type>:<file>',
127 $this->addOption( 'maxtime', 'Write out checkpoint file after this many minutes (writing' .
128 'out complete page, closing xml file properly, and opening new one' .
129 'with header). This option requires the checkpointfile option.', false, true );
130 $this->addOption( 'checkpointfile', 'Use this string for checkpoint filenames,substituting ' .
131 'first pageid written for the first %s (required) and the last pageid written for the ' .
132 'second %s if it exists.', false, true, false, true ); // This can be specified multiple times
133 $this->addOption( 'quiet', 'Don\'t dump status reports to stderr.' );
134 $this->addOption( 'full', 'Dump all revisions of every page' );
135 $this->addOption( 'current', 'Base ETA on number of pages in database instead of all revisions' );
136 $this->addOption( 'spawn', 'Spawn a subprocess for loading text records, optionally specify ' .
137 'php[,mwscript] paths' );
138 $this->addOption( 'buffersize', 'Buffer size in bytes to use for reading the stub. ' .
139 '(Default: 512KB, Minimum: 4KB)', false, true );
142 $this->loadWithArgv( $args );
143 $this->processOptions();
148 * @return SqlBlobStore
150 private function getBlobStore() {
151 return MediaWikiServices
::getInstance()->getBlobStore();
155 $this->processOptions();
159 function processOptions() {
160 parent
::processOptions();
162 if ( $this->hasOption( 'buffersize' ) ) {
163 $this->bufferSize
= max( intval( $this->getOption( 'buffersize' ) ), 4 * 1024 );
166 if ( $this->hasOption( 'prefetch' ) ) {
167 $url = $this->processFileOpt( $this->getOption( 'prefetch' ) );
168 $this->prefetch
= new BaseDump( $url );
171 if ( $this->hasOption( 'stub' ) ) {
172 $this->input
= $this->processFileOpt( $this->getOption( 'stub' ) );
175 if ( $this->hasOption( 'maxtime' ) ) {
176 $this->maxTimeAllowed
= intval( $this->getOption( 'maxtime' ) ) * 60;
179 if ( $this->hasOption( 'checkpointfile' ) ) {
180 $this->checkpointFiles
= $this->getOption( 'checkpointfile' );
183 if ( $this->hasOption( 'current' ) ) {
184 $this->history
= WikiExporter
::CURRENT
;
187 if ( $this->hasOption( 'full' ) ) {
188 $this->history
= WikiExporter
::FULL
;
191 if ( $this->hasOption( 'spawn' ) ) {
193 $val = $this->getOption( 'spawn' );
195 $this->php
= explode( ',', $val, 2 );
201 * Drop the database connection $this->db and try to get a new one.
203 * This function tries to get a /different/ connection if this is
204 * possible. Hence, (if this is possible) it switches to a different
205 * failover upon each call.
207 * This function resets $this->lb and closes all connections on it.
209 * @throws MWException
211 function rotateDb() {
212 // Cleaning up old connections
213 if ( isset( $this->lb
) ) {
214 $this->lb
->closeAll();
218 if ( $this->forcedDb
!== null ) {
219 $this->db
= $this->forcedDb
;
224 if ( isset( $this->db
) && $this->db
->isOpen() ) {
225 throw new MWException( 'DB is set and has not been closed by the Load Balancer' );
230 // Trying to set up new connection.
231 // We do /not/ retry upon failure, but delegate to encapsulating logic, to avoid
232 // individually retrying at different layers of code.
235 $lbFactory = MediaWikiServices
::getInstance()->getDBLoadBalancerFactory();
236 $this->lb
= $lbFactory->newMainLB();
237 } catch ( Exception
$e ) {
238 throw new MWException( __METHOD__
239 . " rotating DB failed to obtain new load balancer (" . $e->getMessage() . ")" );
243 $this->db
= $this->lb
->getMaintenanceConnectionRef( DB_REPLICA
, 'dump' );
244 } catch ( Exception
$e ) {
245 throw new MWException( __METHOD__
246 . " rotating DB failed to obtain new database (" . $e->getMessage() . ")" );
250 function initProgress( $history = WikiExporter
::FULL
) {
251 parent
::initProgress();
252 $this->timeOfCheckpoint
= $this->startTime
;
255 function dump( $history, $text = WikiExporter
::TEXT
) {
256 // Notice messages will foul up your XML output even if they're
257 // relatively harmless.
258 if ( ini_get( 'display_errors' ) ) {
259 ini_set( 'display_errors', 'stderr' );
262 $this->initProgress( $this->history
);
264 // We are trying to get an initial database connection to avoid that the
265 // first try of this request's first call to getText fails. However, if
266 // obtaining a good DB connection fails it's not a serious issue, as
267 // getText does retry upon failure and can start without having a working
271 } catch ( Exception
$e ) {
272 // We do not even count this as failure. Just let eventual
274 $this->progress( "Getting initial DB connection failed (" .
275 $e->getMessage() . ")" );
278 $this->egress
= new ExportProgressFilter( $this->sink
, $this );
280 // it would be nice to do it in the constructor, oh well. need egress set
281 $this->finalOptionCheck();
283 // we only want this so we know how to close a stream :-P
284 $this->xmlwriterobj
= new XmlDumpWriter( XmlDumpWriter
::WRITE_CONTENT
, $this->schemaVersion
);
286 $input = fopen( $this->input
, "rt" );
287 $this->readDump( $input );
289 if ( $this->spawnProc
) {
293 $this->report( true );
296 function processFileOpt( $opt ) {
297 $split = explode( ':', $opt, 2 );
300 if ( count( $split ) === 2 ) {
303 $fileURIs = explode( ';', $param );
304 foreach ( $fileURIs as $URI ) {
310 $newURI = "compress.zlib://$URI";
313 $newURI = "compress.bzip2://$URI";
316 $newURI = "mediawiki.compress.7z://$URI";
321 $newFileURIs[] = $newURI;
323 $val = implode( ';', $newFileURIs );
329 * Overridden to include prefetch ratio if enabled.
331 function showReport() {
332 if ( !$this->prefetch
) {
333 parent
::showReport();
338 if ( $this->reporting
) {
339 $now = wfTimestamp( TS_DB
);
340 $nowts = microtime( true );
341 $deltaAll = $nowts - $this->startTime
;
342 $deltaPart = $nowts - $this->lastTime
;
343 $this->pageCountPart
= $this->pageCount
- $this->pageCountLast
;
344 $this->revCountPart
= $this->revCount
- $this->revCountLast
;
347 $portion = $this->revCount
/ $this->maxCount
;
348 $eta = $this->startTime +
$deltaAll / $portion;
349 $etats = wfTimestamp( TS_DB
, intval( $eta ) );
350 if ( $this->fetchCount
) {
351 $fetchRate = 100.0 * $this->prefetchCount
/ $this->fetchCount
;
355 $pageRate = $this->pageCount
/ $deltaAll;
356 $revRate = $this->revCount
/ $deltaAll;
364 if ( $this->fetchCountLast
) {
365 $fetchRatePart = 100.0 * $this->prefetchCountLast
/ $this->fetchCountLast
;
367 $fetchRatePart = '-';
369 $pageRatePart = $this->pageCountPart
/ $deltaPart;
370 $revRatePart = $this->revCountPart
/ $deltaPart;
372 $fetchRatePart = '-';
377 $dbDomain = WikiMap
::getCurrentWikiDbDomain()->getId();
378 $this->progress( sprintf(
379 "%s: %s (ID %d) %d pages (%0.1f|%0.1f/sec all|curr), "
380 . "%d revs (%0.1f|%0.1f/sec all|curr), %0.1f%%|%0.1f%% "
381 . "prefetched (all|curr), ETA %s [max %d]",
382 $now, $dbDomain, $this->ID
, $this->pageCount
, $pageRate,
383 $pageRatePart, $this->revCount
, $revRate, $revRatePart,
384 $fetchRate, $fetchRatePart, $etats, $this->maxCount
386 $this->lastTime
= $nowts;
387 $this->revCountLast
= $this->revCount
;
388 $this->prefetchCountLast
= $this->prefetchCount
;
389 $this->fetchCountLast
= $this->fetchCount
;
393 function setTimeExceeded() {
394 $this->timeExceeded
= true;
397 function checkIfTimeExceeded() {
398 if ( $this->maxTimeAllowed
399 && ( $this->lastTime
- $this->timeOfCheckpoint
> $this->maxTimeAllowed
)
407 function finalOptionCheck() {
408 if ( ( $this->checkpointFiles
&& !$this->maxTimeAllowed
)
409 ||
( $this->maxTimeAllowed
&& !$this->checkpointFiles
)
411 throw new MWException( "Options checkpointfile and maxtime must be specified together.\n" );
413 foreach ( $this->checkpointFiles
as $checkpointFile ) {
414 $count = substr_count( $checkpointFile, "%s" );
416 throw new MWException( "Option checkpointfile must contain two '%s' "
417 . "for substitution of first and last pageids, count is $count instead, "
418 . "file is $checkpointFile.\n" );
422 if ( $this->checkpointFiles
) {
423 $filenameList = (array)$this->egress
->getFilenames();
424 if ( count( $filenameList ) != count( $this->checkpointFiles
) ) {
425 throw new MWException( "One checkpointfile must be specified "
426 . "for each output option, if maxtime is used.\n" );
432 * @throws MWException Failure to parse XML input
433 * @param string $input
436 function readDump( $input ) {
438 $this->openElement
= false;
439 $this->atStart
= true;
441 $this->lastName
= "";
444 $this->thisRevModel
= null;
445 $this->thisRevFormat
= null;
447 $parser = xml_parser_create( "UTF-8" );
448 xml_parser_set_option( $parser, XML_OPTION_CASE_FOLDING
, false );
450 xml_set_element_handler(
452 [ $this, 'startElement' ],
453 [ $this, 'endElement' ]
455 xml_set_character_data_handler( $parser, [ $this, 'characterData' ] );
457 $offset = 0; // for context extraction on error reporting
459 if ( $this->checkIfTimeExceeded() ) {
460 $this->setTimeExceeded();
462 $chunk = fread( $input, $this->bufferSize
);
463 if ( !xml_parse( $parser, $chunk, feof( $input ) ) ) {
464 wfDebug( "TextDumpPass::readDump encountered XML parsing error\n" );
466 $byte = xml_get_current_byte_index( $parser );
467 $msg = wfMessage( 'xml-error-string',
468 'XML import parse failure',
469 xml_get_current_line_number( $parser ),
470 xml_get_current_column_number( $parser ),
471 $byte . ( is_null( $chunk ) ?
null : ( '; "' . substr( $chunk, $byte - $offset, 16 ) . '"' ) ),
472 xml_error_string( xml_get_error_code( $parser ) ) )->escaped();
474 xml_parser_free( $parser );
476 throw new MWException( $msg );
478 $offset +
= strlen( $chunk );
479 } while ( $chunk !== false && !feof( $input ) );
480 if ( $this->maxTimeAllowed
) {
481 $filenameList = (array)$this->egress
->getFilenames();
482 // we wrote some stuff after last checkpoint that needs renamed
483 if ( file_exists( $filenameList[0] ) ) {
485 # we might have just written the header and footer and had no
486 # pages or revisions written... perhaps they were all deleted
487 # there's no pageID 0 so we use that. the caller is responsible
488 # for deciding what to do with a file containing only the
489 # siteinfo information and the mw tags.
490 if ( !$this->firstPageWritten
) {
491 $firstPageID = str_pad( 0, 9, "0", STR_PAD_LEFT
);
492 $lastPageID = str_pad( 0, 9, "0", STR_PAD_LEFT
);
494 $firstPageID = str_pad( $this->firstPageWritten
, 9, "0", STR_PAD_LEFT
);
495 $lastPageID = str_pad( $this->lastPageWritten
, 9, "0", STR_PAD_LEFT
);
498 $filenameCount = count( $filenameList );
499 for ( $i = 0; $i < $filenameCount; $i++
) {
500 $checkpointNameFilledIn = sprintf( $this->checkpointFiles
[$i], $firstPageID, $lastPageID );
501 $fileinfo = pathinfo( $filenameList[$i] );
502 $newFilenames[] = $fileinfo['dirname'] . '/' . $checkpointNameFilledIn;
504 $this->egress
->closeAndRename( $newFilenames );
507 xml_parser_free( $parser );
513 * Applies applicable export transformations to $text.
515 * @param string $text
516 * @param string $model
517 * @param string|null $format
521 private function exportTransform( $text, $model, $format = null ) {
523 $handler = ContentHandler
::getForModelID( $model );
524 $text = $handler->exportTransform( $text, $format );
526 catch ( MWException
$ex ) {
528 "Unable to apply export transformation for content model '$model': " .
537 * Tries to load revision text.
538 * Export transformations are applied if the content model is given or can be
539 * determined from the database.
541 * Upon errors, retries (Up to $this->maxFailures tries each call).
542 * If still no good revision could be found even after this retrying, "" is returned.
543 * If no good revision text could be returned for
544 * $this->maxConsecutiveFailedTextRetrievals consecutive calls to getText, MWException
547 * @param int|string $id Content address, or text row ID.
548 * @param string|bool|null $model The content model used to determine
549 * applicable export transformations.
550 * If $model is null, it will be determined from the database.
551 * @param string|null $format The content format used when applying export transformations.
553 * @throws MWException
554 * @return string The revision text for $id, or ""
556 function getText( $id, $model = null, $format = null ) {
557 global $wgContentHandlerUseDB;
559 $prefetchNotTried = true; // Whether or not we already tried to get the text via prefetch.
560 $text = false; // The candidate for a good text. false if no proper value.
561 $failures = 0; // The number of times, this invocation of getText already failed.
563 // The number of times getText failed without yielding a good text in between.
564 static $consecutiveFailedTextRetrievals = 0;
568 // To allow to simply return on success and do not have to worry about book keeping,
569 // we assume, this fetch works (possible after some retries). Nevertheless, we koop
570 // the old value, so we can restore it, if problems occur (See after the while loop).
571 $oldConsecutiveFailedTextRetrievals = $consecutiveFailedTextRetrievals;
572 $consecutiveFailedTextRetrievals = 0;
574 if ( $model === null && $wgContentHandlerUseDB ) {
575 // TODO: MCR: use content table
576 $row = $this->db
->selectRow(
578 [ 'rev_content_model', 'rev_content_format' ],
579 [ 'rev_id' => $this->thisRev
],
584 $model = $row->rev_content_model
;
585 $format = $row->rev_content_format
;
589 if ( $model === null ||
$model === '' ) {
593 while ( $failures < $this->maxFailures
) {
594 // As soon as we found a good text for the $id, we will return immediately.
595 // Hence, if we make it past the try catch block, we know that we did not
599 // Step 1: Get some text (or reuse from previous iteratuon if checking
600 // for plausibility failed)
602 // Trying to get prefetch, if it has not been tried before
603 if ( $text === false && isset( $this->prefetch
) && $prefetchNotTried ) {
604 $prefetchNotTried = false;
605 $tryIsPrefetch = true;
606 $text = $this->prefetch
->prefetch( (int)$this->thisPage
, (int)$this->thisRev
);
608 if ( $text === null ) {
612 if ( is_string( $text ) && $model !== false ) {
613 // Apply export transformation to text coming from an old dump.
614 // The purpose of this transformation is to convert up from legacy
615 // formats, which may still be used in the older dump that is used
616 // for pre-fetching. Applying the transformation again should not
617 // interfere with content that is already in the correct form.
618 $text = $this->exportTransform( $text, $model, $format );
622 if ( $text === false ) {
623 // Fallback to asking the database
624 $tryIsPrefetch = false;
625 if ( $this->spawn
) {
626 $text = $this->getTextSpawned( $id );
628 $text = $this->getTextDb( $id );
631 if ( $text !== false && $model !== false ) {
632 // Apply export transformation to text coming from the database.
633 // Prefetched text should already have transformations applied.
634 $text = $this->exportTransform( $text, $model, $format );
637 // No more checks for texts from DB for now.
638 // If we received something that is not false,
639 // We treat it as good text, regardless of whether it actually is or is not
640 if ( $text !== false ) {
645 if ( $text === false ) {
646 throw new MWException( "Generic error while obtaining text for id " . $id );
649 // We received a good candidate for the text of $id via some method
651 // Step 2: Checking for plausibility and return the text if it is
653 $revID = intval( $this->thisRev
);
654 if ( !isset( $this->db
) ) {
655 throw new MWException( "No database available" );
658 if ( $model !== CONTENT_MODEL_WIKITEXT
) {
659 $revLength = strlen( $text );
661 $revLength = $this->db
->selectField( 'revision', 'rev_len', [ 'rev_id' => $revID ] );
664 if ( strlen( $text ) == $revLength ) {
665 if ( $tryIsPrefetch ) {
666 $this->prefetchCount++
;
673 throw new MWException( "Received text is unplausible for id " . $id );
674 } catch ( Exception
$e ) {
675 $msg = "getting/checking text " . $id . " failed (" . $e->getMessage() . ")";
676 if ( $failures +
1 < $this->maxFailures
) {
677 $msg .= " (Will retry " . ( $this->maxFailures
- $failures - 1 ) . " more times)";
679 $this->progress( $msg );
682 // Something went wrong; we did not a text that was plausible :(
685 // A failure in a prefetch hit does not warrant resetting db connection etc.
686 if ( !$tryIsPrefetch ) {
687 // After backing off for some time, we try to reboot the whole process as
688 // much as possible to not carry over failures from one part to the other
690 sleep( $this->failureTimeout
);
693 if ( $this->spawn
) {
697 } catch ( Exception
$e ) {
698 $this->progress( "Rebooting getText infrastructure failed (" . $e->getMessage() . ")" .
699 " Trying to continue anyways" );
704 // Retirieving a good text for $id failed (at least) maxFailures times.
705 // We abort for this $id.
707 // Restoring the consecutive failures, and maybe aborting, if the dump
709 $consecutiveFailedTextRetrievals = $oldConsecutiveFailedTextRetrievals +
1;
710 if ( $consecutiveFailedTextRetrievals > $this->maxConsecutiveFailedTextRetrievals
) {
711 throw new MWException( "Graceful storage failure" );
718 * Loads the serialized content from storage.
720 * @param int|string $id Content address, or text row ID.
721 * @return bool|string
723 private function getTextDb( $id ) {
724 $store = $this->getBlobStore();
725 $address = ( is_int( $id ) ||
strpos( $id, ':' ) === false )
726 ? SqlBlobStore
::makeAddressFromTextId( (int)$id )
730 $text = $store->getBlob( $address );
732 $stripped = str_replace( "\r", "", $text );
733 $normalized = MediaWikiServices
::getInstance()->getContentLanguage()
734 ->normalize( $stripped );
737 } catch ( BlobAccessException
$ex ) {
738 // XXX: log a warning?
744 * @param int|string $address Content address, or text row ID.
745 * @return bool|string
747 private function getTextSpawned( $address ) {
748 Wikimedia\
suppressWarnings();
749 if ( !$this->spawnProc
) {
753 $text = $this->getTextSpawnedOnce( $address );
754 Wikimedia\restoreWarnings
();
759 function openSpawn() {
762 $wiki = WikiMap
::getWikiIdFromDbDomain( WikiMap
::getCurrentWikiDbDomain() );
763 if ( count( $this->php
) == 2 ) {
764 $mwscriptpath = $this->php
[1];
766 $mwscriptpath = "$IP/../multiversion/MWScript.php";
768 if ( file_exists( $mwscriptpath ) ) {
770 array_map( [ Shell
::class, 'escape' ],
775 '--wiki', $wiki ] ) );
778 array_map( [ Shell
::class, 'escape' ],
781 "$IP/maintenance/fetchText.php",
782 '--wiki', $wiki ] ) );
785 0 => [ "pipe", "r" ],
786 1 => [ "pipe", "w" ],
787 2 => [ "file", "/dev/null", "a" ] ];
790 $this->progress( "Spawning database subprocess: $cmd" );
791 $this->spawnProc
= proc_open( $cmd, $spec, $pipes );
792 if ( !$this->spawnProc
) {
793 $this->progress( "Subprocess spawn failed." );
798 $this->spawnWrite
, // -> stdin
799 $this->spawnRead
, // <- stdout
805 private function closeSpawn() {
806 Wikimedia\
suppressWarnings();
807 if ( $this->spawnRead
) {
808 fclose( $this->spawnRead
);
810 $this->spawnRead
= false;
811 if ( $this->spawnWrite
) {
812 fclose( $this->spawnWrite
);
814 $this->spawnWrite
= false;
815 if ( $this->spawnErr
) {
816 fclose( $this->spawnErr
);
818 $this->spawnErr
= false;
819 if ( $this->spawnProc
) {
820 pclose( $this->spawnProc
);
822 $this->spawnProc
= false;
823 Wikimedia\restoreWarnings
();
827 * @param int|string $address Content address, or text row ID.
828 * @return bool|string
830 private function getTextSpawnedOnce( $address ) {
831 if ( is_int( $address ) ||
intval( $address ) ) {
832 $address = SqlBlobStore
::makeAddressFromTextId( (int)$address );
835 $ok = fwrite( $this->spawnWrite
, "$address\n" );
836 // $this->progress( ">> $id" );
841 $ok = fflush( $this->spawnWrite
);
842 // $this->progress( ">> [flush]" );
847 // check that the text address they are sending is the one we asked for
848 // this avoids out of sync revision text errors we have encountered in the past
849 $newAddress = fgets( $this->spawnRead
);
850 if ( $newAddress === false ) {
853 $newAddress = trim( $newAddress );
854 if ( strpos( $newAddress, ':' ) === false ) {
855 $newAddress = SqlBlobStore
::makeAddressFromTextId( intval( $newAddress ) );
858 if ( $newAddress !== $address ) {
862 $len = fgets( $this->spawnRead
);
863 // $this->progress( "<< " . trim( $len ) );
864 if ( $len === false ) {
868 $nbytes = intval( $len );
869 // actual error, not zero-length text
876 // Subprocess may not send everything at once, we have to loop.
877 while ( $nbytes > strlen( $text ) ) {
878 $buffer = fread( $this->spawnRead
, $nbytes - strlen( $text ) );
879 if ( $buffer === false ) {
885 $gotbytes = strlen( $text );
886 if ( $gotbytes != $nbytes ) {
887 $this->progress( "Expected $nbytes bytes from database subprocess, got $gotbytes " );
892 // Do normalization in the dump thread...
893 $stripped = str_replace( "\r", "", $text );
894 $normalized = MediaWikiServices
::getInstance()->getContentLanguage()->
895 normalize( $stripped );
900 function startElement( $parser, $name, $attribs ) {
901 $this->checkpointJustWritten
= false;
903 $this->clearOpenElement( null );
904 $this->lastName
= $name;
906 if ( $name == 'revision' ) {
907 $this->state
= $name;
908 $this->egress
->writeOpenPage( null, $this->buffer
);
910 } elseif ( $name == 'page' ) {
911 $this->state
= $name;
912 if ( $this->atStart
) {
913 $this->egress
->writeOpenStream( $this->buffer
);
915 $this->atStart
= false;
919 if ( $name == "text" && isset( $attribs['id'] ) ) {
920 $id = $attribs['id'];
921 $model = trim( $this->thisRevModel
);
922 $format = trim( $this->thisRevFormat
);
924 $model = $model === '' ?
null : $model;
925 $format = $format === '' ?
null : $format;
927 $text = $this->getText( $id, $model, $format );
928 $this->openElement
= [ $name, [ 'xml:space' => 'preserve' ] ];
929 if ( strlen( $text ) > 0 ) {
930 $this->characterData( $parser, $text );
933 $this->openElement
= [ $name, $attribs ];
937 function endElement( $parser, $name ) {
938 $this->checkpointJustWritten
= false;
940 if ( $this->openElement
) {
941 $this->clearOpenElement( "" );
943 $this->buffer
.= "</$name>";
946 if ( $name == 'revision' ) {
947 $this->egress
->writeRevision( null, $this->buffer
);
950 $this->thisRevModel
= null;
951 $this->thisRevFormat
= null;
952 } elseif ( $name == 'page' ) {
953 if ( !$this->firstPageWritten
) {
954 $this->firstPageWritten
= trim( $this->thisPage
);
956 $this->lastPageWritten
= trim( $this->thisPage
);
957 if ( $this->timeExceeded
) {
958 $this->egress
->writeClosePage( $this->buffer
);
959 // nasty hack, we can't just write the chardata after the
960 // page tag, it will include leading blanks from the next line
961 $this->egress
->sink
->write( "\n" );
963 $this->buffer
= $this->xmlwriterobj
->closeStream();
964 $this->egress
->writeCloseStream( $this->buffer
);
967 $this->thisPage
= "";
968 // this could be more than one file if we had more than one output arg
970 $filenameList = (array)$this->egress
->getFilenames();
972 $firstPageID = str_pad( $this->firstPageWritten
, 9, "0", STR_PAD_LEFT
);
973 $lastPageID = str_pad( $this->lastPageWritten
, 9, "0", STR_PAD_LEFT
);
974 $filenamesCount = count( $filenameList );
975 for ( $i = 0; $i < $filenamesCount; $i++
) {
976 $checkpointNameFilledIn = sprintf( $this->checkpointFiles
[$i], $firstPageID, $lastPageID );
977 $fileinfo = pathinfo( $filenameList[$i] );
978 $newFilenames[] = $fileinfo['dirname'] . '/' . $checkpointNameFilledIn;
980 $this->egress
->closeRenameAndReopen( $newFilenames );
981 $this->buffer
= $this->xmlwriterobj
->openStream();
982 $this->timeExceeded
= false;
983 $this->timeOfCheckpoint
= $this->lastTime
;
984 $this->firstPageWritten
= false;
985 $this->checkpointJustWritten
= true;
987 $this->egress
->writeClosePage( $this->buffer
);
989 $this->thisPage
= "";
991 } elseif ( $name == 'mediawiki' ) {
992 $this->egress
->writeCloseStream( $this->buffer
);
997 function characterData( $parser, $data ) {
998 $this->clearOpenElement( null );
999 if ( $this->lastName
== "id" ) {
1000 if ( $this->state
== "revision" ) {
1001 $this->thisRev
.= $data;
1002 } elseif ( $this->state
== "page" ) {
1003 $this->thisPage
.= $data;
1005 } elseif ( $this->lastName
== "model" ) {
1006 $this->thisRevModel
.= $data;
1007 } elseif ( $this->lastName
== "format" ) {
1008 $this->thisRevFormat
.= $data;
1011 // have to skip the newline left over from closepagetag line of
1012 // end of checkpoint files. nasty hack!!
1013 if ( $this->checkpointJustWritten
) {
1014 if ( $data[0] == "\n" ) {
1015 $data = substr( $data, 1 );
1017 $this->checkpointJustWritten
= false;
1019 $this->buffer
.= htmlspecialchars( $data );
1022 function clearOpenElement( $style ) {
1023 if ( $this->openElement
) {
1024 $this->buffer
.= Xml
::element( $this->openElement
[0], $this->openElement
[1], $style );
1025 $this->openElement
= false;