X-Git-Url: http://git.cyclocoop.org/?a=blobdiff_plain;f=maintenance%2FimportImages.php;h=ae70441f1b509ec57a004ed1edd05a0edac5f48d;hb=fb7b3eebeb8de47eb42e8d6ccf204106a2d6d9e4;hp=54fd4e2d88ded8a5655ae5d5f7ce5c20e64d3cfb;hpb=bcb8fee4a9524cb0e47e8334c1963ac9e721d963;p=lhc%2Fweb%2Fwiklou.git

diff --git a/maintenance/importImages.php b/maintenance/importImages.php
index 54fd4e2d88..f0c6569ace 100644
--- a/maintenance/importImages.php
+++ b/maintenance/importImages.php
@@ -4,7 +4,8 @@
 * using the web-based interface.
 *
 * "Smart import" additions:
- * - aim: preserve the essential metadata (user, description) when importing medias from an existing wiki
+ * - aim: preserve the essential metadata (user, description) when importing media
+ * files from an existing wiki.
 * - process:
 * - interface with the source wiki, don't use bare files only (see --source-wiki-url).
 * - fetch metadata from source wiki for each file to import.
@@ -31,10 +32,10 @@
 * @author Mij
 */
 
-$optionsWithArgs = array(
+$optionsWithArgs = [
 'extensions', 'comment', 'comment-file', 'comment-ext', 'summary', 'user',
 'license', 'sleep', 'limit', 'from', 'source-wiki-url', 'timestamp',
-);
+];
 require_once __DIR__ . '/commandLine.inc';
 require_once __DIR__ . '/importImages.inc';
 $processed = $added = $ignored = $skipped = $overwritten = $failed = 0;
@@ -69,9 +70,9 @@ $files = findFiles( $dir, $extensions, isset( $options['search-recursively'] ) )
 # Initialise the user for this operation
 $user = isset( $options['user'] )
 ? User::newFromName( $options['user'] )
- : User::newFromName( 'Maintenance script' );
+ : User::newSystemUser( 'Maintenance script', [ 'steal' => true ] );
 if ( !$user instanceof User ) {
- $user = User::newFromName( 'Maintenance script' );
+ $user = User::newSystemUser( 'Maintenance script', [ 'steal' => true ] );
 }
 $wgUser = $user;
 
@@ -87,16 +88,24 @@ if ( isset( $options['check-userblock'] ) ) {
 }
 
 # Get --from
-$from = @$options['from'];
+MediaWiki\suppressWarnings();
+$from = $options['from'];
+MediaWiki\restoreWarnings();
 
 # Get sleep time.
-$sleep = @$options['sleep'];
+MediaWiki\suppressWarnings();
+$sleep = $options['sleep'];
+MediaWiki\restoreWarnings();
+
 if ( $sleep ) {
 $sleep = (int)$sleep;
 }
 
 # Get limit number
-$limit = @$options['limit'];
+MediaWiki\suppressWarnings();
+$limit = $options['limit'];
+MediaWiki\restoreWarnings();
+
 if ( $limit ) {
 $limit = (int)$limit;
 }
@@ -127,7 +136,7 @@ $count = count( $files );
 if ( $count > 0 ) {
 
 foreach ( $files as $file ) {
- $base = wfBaseName( $file );
+ $base = UtfNormal\Validator::cleanUp( wfBaseName( $file ) );
 
 # Validate a title
 $title = Title::makeTitleSafe( NS_FILE, $base );
@@ -167,7 +176,8 @@ if ( $count > 0 ) {
 } else {
 if ( isset( $options['skip-dupes'] ) ) {
 $repo = $image->getRepo();
- $sha1 = File::sha1Base36( $file ); # XXX: we end up calculating this again when actually uploading. that sucks.
+ # XXX: we end up calculating this again when actually uploading. that sucks.
+ $sha1 = FSFile::getSha1Base36FromPath( $file );
 
 $dupes = $repo->findBySha1( $sha1 );
 
@@ -210,7 +220,8 @@ if ( $count > 0 ) {
 if ( $commentExt ) {
 $f = findAuxFile( $file, $commentExt );
 if ( !$f ) {
- echo " No comment file with extension {$commentExt} found for {$file}, using default comment. ";
+ echo " No comment file with extension {$commentExt} found "
+ . "for {$file}, using default comment. ";
 } else {
 $commentText = file_get_contents( $f );
 if ( !$commentText ) {
@@ -230,12 +241,12 @@ if ( $count > 0 ) {
 } else {
 $props = FSFile::getPropsFromPath( $file );
 $flags = 0;
- $publishOptions = array();
+ $publishOptions = [];
 $handler = MediaHandler::getHandler( $props['mime'] );
 if ( $handler ) {
 $publishOptions['headers'] = $handler->getStreamHeaders( $props['metadata'] );
 } else {
- $publishOptions['headers'] = array();
+ $publishOptions['headers'] = [];
 }
 $archive = $image->publish( $file, $flags, $publishOptions );
 if ( !$archive->isGood() ) {
@@ -254,7 +265,13 @@ if ( $count > 0 ) {
 
 if ( isset( $options['dry'] ) ) {
 echo "done.\n";
- } elseif ( $image->recordUpload2( $archive->value, $summary, $commentText, $props, $timestamp ) ) {
+ } elseif ( $image->recordUpload2(
+ $archive->value,
+ $summary,
+ $commentText,
+ $props,
+ $timestamp
+ ) ) {
 # We're done!
 echo "done.\n";
 
@@ -273,25 +290,24 @@ if ( $count > 0 ) {
 }
 
 if ( $doProtect ) {
- # Protect the file
- echo "\nWaiting for slaves...\n";
- // Wait for slaves.
- sleep( 2.0 ); # Why this sleep?
- wfWaitForSlaves();
-
- echo "\nSetting image restrictions ... ";
-
- $cascade = false;
- $restrictions = array();
- foreach ( $title->getRestrictionTypes() as $type ) {
- $restrictions[$type] = $protectLevel;
- }
+ # Protect the file
+ echo "\nWaiting for slaves...\n";
+ // Wait for slaves.
+ sleep( 2.0 ); # Why this sleep?
+ wfWaitForSlaves();
+
+ echo "\nSetting image restrictions ... ";
+
+ $cascade = false;
+ $restrictions = [];
+ foreach ( $title->getRestrictionTypes() as $type ) {
+ $restrictions[$type] = $protectLevel;
+ }
 
- $page = WikiPage::factory( $title );
- $status = $page->doUpdateRestrictions( $restrictions, array(), $cascade, '', $user );
- echo ( $status->isOK() ? 'done' : 'failed' ) . "\n";
+ $page = WikiPage::factory( $title );
+ $status = $page->doUpdateRestrictions( $restrictions, [], $cascade, '', $user );
+ echo ( $status->isOK() ? 'done' : 'failed' ) . "\n";
 }
-
 } else {
 echo "failed. (at recordUpload stage)\n";
 $svar = 'failed';
@@ -311,14 +327,21 @@ if ( $count > 0 ) {
 
 # Print out some statistics
 echo "\n";
- foreach ( array( 'count' => 'Found', 'limit' => 'Limit', 'ignored' => 'Ignored',
- 'added' => 'Added', 'skipped' => 'Skipped', 'overwritten' => 'Overwritten',
- 'failed' => 'Failed' ) as $var => $desc ) {
+ foreach (
+ [
+ 'count' => 'Found',
+ 'limit' => 'Limit',
+ 'ignored' => 'Ignored',
+ 'added' => 'Added',
+ 'skipped' => 'Skipped',
+ 'overwritten' => 'Overwritten',
+ 'failed' => 'Failed'
+ ] as $var => $desc
+ ) {
 if ( $$var > 0 ) {
 echo "{$desc}: {$$var}\n";
 }
 }
-
 } else {
 echo "No suitable files could be found for import.\n";
 }
@@ -337,28 +360,37 @@ USAGE: php importImages.php [options]
 
 : Path to the directory containing images to be imported
 
 Options:
---extensions= Comma-separated list of allowable extensions, defaults to \$wgFileExtensions
---overwrite Overwrite existing images with the same name (default is to skip them)
---limit= Limit the number of images to process. Ignored or skipped images are not counted.
---from= Ignore all files until the one with the given name. Useful for resuming
- aborted imports. should be the file's canonical database form.
---skip-dupes Skip images that were already uploaded under a different name (check SHA1)
---search-recursively Search recursively for files in subdirectories
+--extensions= Comma-separated list of allowable extensions, defaults
+ to \$wgFileExtensions.
+--overwrite Overwrite existing images with the same name (default
+ is to skip them).
+--limit= Limit the number of images to process. Ignored or
+ skipped images are not counted.
+--from= Ignore all files until the one with the given name.
+ Useful for resuming aborted imports. should be
+ the file's canonical database form.
+--skip-dupes Skip images that were already uploaded under a different
+ name (check SHA1).
+--search-recursively Search recursively for files in subdirectories.
 --sleep= Sleep between files. Useful mostly for debugging.
---user= Set username of uploader, default 'Maintenance script'
+--user= Set username of uploader, default 'Maintenance script'.
 --check-userblock Check if the user got blocked during import.
 --comment= Set file description, default 'Importing file'.
 --comment-file= Set description to the content of .
---comment-ext= Causes the description for each file to be loaded from a file with the same name
- but the extension . If a global description is also given, it is appended.
---license= Use an optional license template
---dry Dry run, don't import anything
---protect= Specify the protect value (autoconfirmed,sysop)
---summary= Upload summary, description will be used if not provided
---timestamp= Override upload time/date, all MediaWiki timestamp formats are accepted
---unprotect Unprotects all uploaded images
---source-wiki-url If specified, take User and Comment data for each imported file from this URL.
- For example, --source-wiki-url="http://en.wikipedia.org/"
+--comment-ext= Causes the description for each file to be loaded from a
+ file with the same name, but the extension . If a
+ global description is also given, it is appended.
+--license= Use an optional license template.
+--dry Dry run, don't import anything.
+--protect= Specify the protect value (autoconfirmed,sysop).
+--summary= Upload summary, description will be used if not
+ provided.
+--timestamp= Override upload time/date, all MediaWiki timestamp
+ formats are accepted.
+--unprotect Unprotects all uploaded images.
+--source-wiki-url If specified, take User and Comment data for each
+ imported file from this URL. For example,
+ --source-wiki-url="http://en.wikipedia.org/."
 TEXT;
 exit( 1 );
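
Example invocation using the options documented in the help text above (a sketch for illustration only, not part of the patch; the extension list, the 'ImportBot' account name, the limit, and the directory path are placeholders):

  php importImages.php --extensions=jpg,png --user=ImportBot \
      --comment="Importing files from the old wiki" --limit=100 --sleep=1 /path/to/images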