$wgOut->disable();
header( "Content-type: application/xml; charset=utf-8" );
$pages = explode( "\n", $page );
- $xml = pages2xml( $pages, $curonly );
- echo $xml;
+
+ $db =& wfGetDB( DB_SLAVE );
+ $history = $curonly ? MW_EXPORT_CURRENT : MW_EXPORT_FULL;
+ $exporter = new WikiExporter( $db, $history );
+ $exporter->openStream();
+ $exporter->pagesByName( $pages );
+ $exporter->closeStream();
return;
}
" );
}
define( 'MW_EXPORT_FULL', 0 );
define( 'MW_EXPORT_CURRENT', 1 );

define( 'MW_EXPORT_BUFFER', 0 );
define( 'MW_EXPORT_STREAM', 1 );

/**
 * Dumps page and revision records from the database as XML in the
 * MediaWiki export-0.1 format. Output is printed directly to stdout;
 * use PHP output buffering to capture it as a string.
 */
class WikiExporter {
	var $pageCallback = null; # called after each closed <page>, if set
	var $revCallback = null;  # called after each <revision>, if set
	var $db = null;           # Database connection to read from
	var $history;             # MW_EXPORT_FULL or MW_EXPORT_CURRENT
	var $buffer;              # MW_EXPORT_BUFFER or MW_EXPORT_STREAM

	/**
	 * If using MW_EXPORT_STREAM to stream a large amount of data,
	 * provide a database connection which is not managed by
	 * LoadBalancer to read from: some history blob types will
	 * make additional queries to pull source data while the
	 * main query is still running.
	 *
	 * @param Database $db
	 * @param int $history one of MW_EXPORT_FULL or MW_EXPORT_CURRENT
	 * @param int $buffer one of MW_EXPORT_BUFFER or MW_EXPORT_STREAM
	 */
	function WikiExporter( &$db, $history = MW_EXPORT_CURRENT,
			$buffer = MW_EXPORT_BUFFER ) {
		$this->db =& $db;
		$this->history = $history;
		$this->buffer = $buffer;
	}

	/**
	 * Set a callback to be called after each page in the output
	 * stream is closed. The callback will be passed a database row
	 * object with the last revision output.
	 *
	 * A set callback can be removed by passing null here.
	 *
	 * @param mixed $callback
	 */
	function setPageCallback( $callback ) {
		$this->pageCallback = $callback;
	}

	/**
	 * Set a callback to be called after each revision in the output
	 * stream is closed. The callback will be passed a database row
	 * object with the revision data.
	 *
	 * A set callback can be removed by passing null here.
	 *
	 * @param mixed $callback
	 */
	function setRevCallback( $callback ) {
		$this->revCallback = $callback;
	}

	/**
	 * Opens the XML output stream's root <mediawiki> element.
	 * This does not include an xml directive, so is safe to include
	 * as a subelement in a larger XML stream. Namespace and XML Schema
	 * references are included.
	 *
	 * To capture the stream to a string, use PHP's output buffering
	 * functions. Output will be encoded in UTF-8.
	 */
	function openStream() {
		global $wgContLanguageCode;
		print wfElement( 'mediawiki', array(
			'xmlns'              => 'http://www.mediawiki.org/xml/export-0.1/',
			'xmlns:xsi'          => 'http://www.w3.org/2001/XMLSchema-instance',
			'xsi:schemaLocation' => 'http://www.mediawiki.org/xml/export-0.1/ ' .
			                        'http://www.mediawiki.org/xml/export-0.1.xsd',
			'version'            => '0.1',
			'xml:lang'           => $wgContLanguageCode ),
			null ) . "\n";
	}

	/**
	 * Closes the output stream with the closing root element.
	 * Call when finished dumping things.
	 */
	function closeStream() {
		print "</mediawiki>\n";
	}

	/**
	 * Dumps a series of page and revision records for all pages
	 * in the database, either including complete history or only
	 * the most recent version, depending on the history mode given
	 * to the constructor.
	 */
	function allPages() {
		return $this->dumpFrom( '' );
	}

	/**
	 * Dumps the single page described by a Title object.
	 *
	 * @param Title $title
	 */
	function pageByTitle( $title ) {
		return $this->dumpFrom(
			'page_namespace=' . $title->getNamespace() .
			' AND page_title=' . $this->db->addQuotes( $title->getDbKey() ) );
	}

	/**
	 * Dumps the single page with the given (prefixed) name.
	 *
	 * @param string $name
	 * @return mixed WikiError on an unparseable title
	 */
	function pageByName( $name ) {
		$title = Title::newFromText( $name );
		if( is_null( $title ) ) {
			// Fix: 'new' was missing, so WikiError was invoked as an
			// (undefined) function instead of constructing the error object.
			return new WikiError( "Can't export invalid title" );
		} else {
			return $this->pageByTitle( $title );
		}
	}

	/**
	 * Dumps each of the named pages in turn. Errors from individual
	 * bad titles are deliberately ignored so one bad name doesn't
	 * abort the rest of the batch.
	 *
	 * @param array $names list of page name strings
	 */
	function pagesByName( $names ) {
		foreach( $names as $name ) {
			$this->pageByName( $name );
		}
	}

	// -------------------- private implementation below --------------------

	/**
	 * Runs the page/revision/text join restricted by the given SQL
	 * condition and streams the results as XML.
	 *
	 * @param string $cond extra WHERE condition, or '' for all pages
	 * @return mixed WikiError on an invalid history dump type
	 * @access private
	 */
	function dumpFrom( $cond = '' ) {
		$fname = 'WikiExporter::dumpFrom';
		wfProfileIn( $fname );

		$page     = $this->db->tableName( 'page' );
		$revision = $this->db->tableName( 'revision' );
		$text     = $this->db->tableName( 'text' );

		if( $this->history == MW_EXPORT_FULL ) {
			$join = 'page_id=rev_page';
		} elseif( $this->history == MW_EXPORT_CURRENT ) {
			$join = 'page_id=rev_page AND page_latest=rev_id';
		} else {
			wfProfileOut( $fname );
			return new WikiError( "$fname given invalid history dump type." );
		}
		$where = ( $cond == '' ) ? '' : "$cond AND";

		if( $this->buffer == MW_EXPORT_STREAM ) {
			// Remember the previous buffering mode so we can restore it.
			$prev = $this->db->bufferResults( false );
		}
		$result = $this->db->query(
			"SELECT * FROM
				$page FORCE INDEX (PRIMARY),
				$revision FORCE INDEX(page_timestamp),
				$text
				WHERE $where $join AND rev_text_id=old_id
				ORDER BY page_id", $fname );
		$wrapper = $this->db->resultObject( $result );
		$this->outputStream( $wrapper );

		if( $this->buffer == MW_EXPORT_STREAM ) {
			$this->db->bufferResults( $prev );
		}
		wfProfileOut( $fname );
	}

	/**
	 * Runs through a query result set dumping page and revision records.
	 * The result set should be sorted/grouped by page to avoid duplicate
	 * page records in the output.
	 *
	 * The result set will be freed once complete. Should be safe for
	 * streaming (non-buffered) queries, as long as it was made on a
	 * separate database connection not managed by LoadBalancer; some
	 * blob storage types will make queries to pull source data.
	 *
	 * @param ResultWrapper $resultset
	 * @access private
	 */
	function outputStream( $resultset ) {
		$last = null;
		while( $row = $resultset->fetchObject() ) {
			// A new page starts whenever the namespace/title changes.
			if( is_null( $last ) ||
				$last->page_namespace != $row->page_namespace ||
				$last->page_title != $row->page_title ) {
				if( isset( $last ) ) {
					$this->closePage( $last );
				}
				$this->openPage( $row );
				$last = $row;
			}
			$this->dumpRev( $row );
		}
		if( isset( $last ) ) {
			$this->closePage( $last );
		}
		$resultset->free();
	}

	/**
	 * Opens a <page> section on the output stream, with data
	 * from the given database row.
	 *
	 * @param object $row
	 * @access private
	 */
	function openPage( $row ) {
		print "<page>\n";
		$title = Title::makeTitle( $row->page_namespace, $row->page_title );
		print '  ' . wfElementClean( 'title', array(), $title->getPrefixedText() ) . "\n";
		print '  ' . wfElement( 'id', array(), $row->page_id ) . "\n";
		if( '' != $row->page_restrictions ) {
			print '  ' . wfElement( 'restrictions', array(),
				$row->page_restrictions ) . "\n";
		}
	}

	/**
	 * Closes a <page> section on the output stream.
	 * If a per-page callback has been set, it will be called
	 * and passed the last database row used for this page.
	 *
	 * @param object $row
	 * @access private
	 */
	function closePage( $row ) {
		print "</page>\n";
		if( isset( $this->pageCallback ) ) {
			call_user_func( $this->pageCallback, $row );
		}
	}

	/**
	 * Dumps a <revision> section on the output stream, with
	 * data filled in from the given database row.
	 *
	 * @param object $row
	 * @access private
	 */
	function dumpRev( $row ) {
		$fname = 'WikiExporter::dumpRev';
		wfProfileIn( $fname );

		print "  <revision>\n";
		print "    " . wfElement( 'id', null, $row->rev_id ) . "\n";

		$ts = wfTimestamp2ISO8601( $row->rev_timestamp );
		print "    " . wfElement( 'timestamp', null, $ts ) . "\n";

		print "    <contributor>";
		if( $row->rev_user ) {
			// Logged-in edit: record both name and user id.
			print wfElementClean( 'username', null, $row->rev_user_text );
			print wfElement( 'id', null, $row->rev_user );
		} else {
			// Anonymous edit: rev_user_text holds the IP address.
			print wfElementClean( 'ip', null, $row->rev_user_text );
		}
		print "</contributor>\n";

		if( $row->rev_minor_edit ) {
			print  "    <minor/>\n";
		}
		if( $row->rev_comment != '' ) {
			print "    " . wfElementClean( 'comment', null, $row->rev_comment ) . "\n";
		}

		// May make additional queries for external/compressed blob storage.
		$text = Revision::getRevisionText( $row );
		print "    " . wfElementClean( 'text', array(), $text ) . "\n";
		print "  </revision>\n";

		wfProfileOut( $fname );

		if( isset( $this->revCallback ) ) {
			call_user_func( $this->revCallback, $row );
		}
	}
}
/**
 * Convert a MediaWiki 14-character timestamp (yyyymmddhhmmss, UTC)
 * into ISO 8601 format, e.g. "2003-08-05T18:30:02Z".
 *
 * Fixes the previous body, which referenced undefined variables
 * ($fname, $string) and performed no conversion at all.
 *
 * @param string $ts timestamp in yyyymmddhhmmss form
 * @return string ISO 8601 timestamp; input returned unchanged if it
 *                does not match the expected 14-digit form
 */
function wfTimestamp2ISO8601( $ts ) {
	#2003-08-05T18:30:02Z
	return preg_replace( '/^(\d{4})(\d\d)(\d\d)(\d\d)(\d\d)(\d\d)$/',
		'$1-$2-$3T$4:$5:$6Z', $ts );
}
+
?>
--- /dev/null
+<?php
+/**
+ * Copyright (C) 2005 Brion Vibber <brion@pobox.com>
+ * http://www.mediawiki.org/
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License along
+ * with this program; if not, write to the Free Software Foundation, Inc.,
+ * 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+ * http://www.gnu.org/copyleft/gpl.html
+ *
+ * @package MediaWiki
+ * @subpackage SpecialPage
+ */
+
# Long options recognised by this maintenance script; commandLine.inc
# parses the command line into the global $options array keyed by these.
$options = array( 'full', 'current' );

require_once( 'commandLine.inc' );
require_once( 'SpecialExport.php' );
+
/**
 * Drives a full-database XML dump from the command line, writing the
 * XML to stdout and periodic progress reports to stderr.
 */
class BackupDumper {
	var $reportingInterval = 100; # pages between progress reports
	var $reporting = true;        # set false to silence progress output
	var $pageCount = 0;
	var $revCount = 0;

	function BackupDumper() {
		// Progress output goes to stderr so the XML on stdout stays clean.
		$this->stderr = fopen( "php://stderr", "wt" );
	}

	/**
	 * Runs the dump with the given history mode
	 * (MW_EXPORT_FULL or MW_EXPORT_CURRENT).
	 */
	function dump( $history ) {
		# This shouldn't happen if on console... ;)
		header( 'Content-type: text/html; charset=UTF-8' );

		# Notice messages will foul up your XML output even if they're
		# relatively harmless.
		ini_set( 'display_errors', false );

		$this->startTime = wfTime();

		$database =& $this->backupDb();
		$exporter = new WikiExporter( $database, $history, MW_EXPORT_STREAM );
		$exporter->setPageCallback( array( &$this, 'reportPage' ) );
		$exporter->setRevCallback( array( &$this, 'revCount' ) );

		$exporter->openStream();
		$exporter->allPages();
		$exporter->closeStream();

		$this->report( true );
	}

	/**
	 * Opens a dedicated admin connection, outside LoadBalancer's
	 * control, as required for streaming (unbuffered) queries.
	 */
	function &backupDb() {
		global $wgDBadminuser, $wgDBadminpassword;
		global $wgDBserver, $wgDBname;
		$conn =& new Database( $wgDBserver, $wgDBadminuser, $wgDBadminpassword, $wgDBname );
		return $conn;
	}

	// Per-page callback from WikiExporter.
	function reportPage( $page ) {
		$this->pageCount++;
		$this->report();
	}

	// Per-revision callback from WikiExporter.
	function revCount( $rev ) {
		$this->revCount++;
	}

	/**
	 * Emits a progress report on interval boundaries, plus one final
	 * report at the end -- but not twice when the final page count
	 * lands exactly on a boundary (hence the xor).
	 */
	function report( $final = false ) {
		$onBoundary = ( $this->pageCount % $this->reportingInterval ) == 0;
		if( $final xor $onBoundary ) {
			$this->showReport();
		}
	}

	function showReport() {
		if( !$this->reporting ) {
			return;
		}
		$elapsed = wfTime() - $this->startTime;
		if( $elapsed ) {
			$rate = $this->pageCount / $elapsed;
			$revrate = $this->revCount / $elapsed;
		} else {
			// Avoid division by zero on an instantaneous run.
			$rate = '-';
			$revrate = '-';
		}
		$this->progress( "$this->pageCount ($rate pages/sec $revrate revs/sec)" );
	}

	// Writes one line to stderr.
	function progress( $string ) {
		fwrite( $this->stderr, $string . "\n" );
	}
}
+
# Command-line driver: configure the dumper from $options (filled in by
# commandLine.inc) and run the requested dump action.
$dumper = new BackupDumper();
if( isset( $options['quiet'] ) ) {
	$dumper->reporting = false;
}
if( isset( $options['report'] ) ) {
	// Guard against --report=0 or junk, which would later cause a
	// modulo-by-zero in BackupDumper::report(); keep the default instead.
	$interval = intval( $options['report'] );
	if( $interval > 0 ) {
		$dumper->reportingInterval = $interval;
	}
}
if( isset( $options['full'] ) ) {
	$dumper->dump( MW_EXPORT_FULL );
} elseif( isset( $options['current'] ) ) {
	$dumper->dump( MW_EXPORT_CURRENT );
} else {
	// No action given: show usage on stderr and do nothing.
	$dumper->progress( <<<END
This script dumps the wiki page database into an XML interchange wrapper
format for export or backup.

XML output is sent to stdout; progress reports are sent to stderr.

Usage: php dumpBackup.php <action> [<options>]
Actions:
  --full      Dump complete history of every page.
  --current   Includes only the latest revision of each page.
Options:
  --quiet     Don't dump status reports to stderr.
  --report=n  Report position and speed after every n pages processed.
              (Default: 100)
END
);
}

?>
\ No newline at end of file