for ($id = $start; $id <= $end; $id++) {
+ wfWaitForSlaves( 20 );
if ( !($id % REPORTING_INTERVAL) ) {
print "Processing ID: $id\r";
}
+ if ( !($id % (REPORTING_INTERVAL*10) ) ) {
+ print "\n";
+ }
$title = Title::newFromID( $id );
if ( $title ) {
$ns = $title->getNamespace() ;
print "Writing image description pages for local images\n";
$num = $dbr->numRows( $res );
while ( $row = $dbr->fetchObject( $res ) ) {
+ wfWaitForSlaves( 10 );
if ( !( ++$i % REPORTING_INTERVAL ) ) {
print "Done $i of $num\r";
}
print "\nWriting " . $dbr->numRows( $res ). " category pages\n";
$i = 0;
while ( $row = $dbr->fetchObject( $res ) ) {
+ wfWaitForSlaves( 10 );
if ( !(++$i % REPORTING_INTERVAL ) ) {
print "$i\r";
}
$wgHideInterlangageLinks = !$this->interwiki;
$wgThumbnailScriptPath = $wgSharedThumbnailScriptPath = false;
$wgEnableParserCache = false;
+ $wgMathPath = "$wgScriptPath/math";
$wgUser = new User;
$wgUser->setOption( 'skin', 'htmldump' );
/** Reads the content of a title object, executes the skin and captures the result */
function getArticleHTML( &$title ) {
- global $wgOut, $wgTitle, $wgArticle, $wgUser, $wgUseCategoryMagic;
+ global $wgOut, $wgTitle, $wgArticle, $wgUser, $wgUseCategoryMagic, $wgLinkCache;
$wgOut = new OutputPage;
$wgOut->setParserOptions( new ParserOptions );
+ $wgLinkCache = new LinkCache;
$wgTitle = $title;
if ( is_null( $wgTitle ) ) {
} else {
$wgArticle = new Article( $wgTitle );
}
- $wgArticle->view();
+ $rt = Title::newFromRedirect( $wgArticle->fetchContent() );
+ if ( $rt != NULL ) {
+ $wgOut->addMeta( 'http:Refresh', '3;url=' . $rt->escapeLocalURL() );
+ $wgOut->setPageTitle( $wgTitle->getPrefixedText() );
+ $wgOut->addWikiText( wfMsg( 'redirectingto', $rt->getPrefixedText() ) );
+ } else {
+ $wgArticle->view();
+ }
}
$sk =& $wgUser->getSkin();
* This is necessary even if you intend to distribute all of commons, because
 * the directory contents are used to work out which image description pages
* are needed.
+ *
+ * Also copies math images
+ *
*/
function copyImages( $images ) {
- global $wgSharedUploadPath, $wgSharedUploadDirectory;
+ global $wgSharedUploadPath, $wgSharedUploadDirectory, $wgMathPath, $wgMathDirectory;
# Find shared uploads and copy them into the static directory
$sharedPathLength = strlen( $wgSharedUploadPath );
- foreach ( $images as $image => $dummy ) {
+ $mathPathLength = strlen( $wgMathPath );
+ foreach ( $images as $escapedImage => $dummy ) {
+ $image = urldecode( $escapedImage );
+
# Is it shared?
if ( substr( $image, 0, $sharedPathLength ) == $wgSharedUploadPath ) {
# Reconstruct full filename
}
}
}
+ } else
+ # Is it math?
+ if ( substr( $image, 0, $mathPathLength ) == $wgMathPath ) {
+ $rel = substr( $image, $mathPathLength + 1 ); // +1 for slash
+ $source = "$wgMathDirectory/$rel";
+ $dest = "{$this->dest}/math/$rel";
+ if ( !file_exists( $dest ) ) {
+ copy( $source, $dest );
+ }
}
}
}
$optionsWithArgs = array( 's', 'd', 'e' );
+$profiling = false;
+
+if ( $profiling ) {
+ define( 'MW_CMDLINE_CALLBACK', 'wfSetupDump' );
+ function wfSetupDump() {
+ global $wgProfiling, $wgProfileToDatabase, $wgProfileSampleRate;
+ $wgProfiling = true;
+ $wgProfileToDatabase = false;
+ $wgProfileSampleRate = 1;
+ }
+}
+
require_once( "commandLine.inc" );
require_once( "dumpHTML.inc" );
} else {
print("Creating static HTML dump in directory $dest. \n".
"Starting from page_id $start of $end.\n");
+
+ $dbr =& wfGetDB( DB_SLAVE );
+ print "Using database {$dbr->mServer}\n";
+
$d->doArticles( $start, $end );
- $d->doImageDescriptions();
- $d->doCategories();
- $d->doSpecials();
+ if ( !isset( $options['e'] ) ) {
+ $d->doImageDescriptions();
+ $d->doCategories();
+ $d->doSpecials();
+ }
/*
if ( $end - $start > CHUNK_SIZE * 2 ) {
*/
}
-exit();
+if ( isset( $options['debug'] ) ) {
+ print_r($GLOBALS);
+}
+
+if ( $profiling ) {
+ echo $wgProfiler->getOutput();
+}
?>