From: Ori Livneh
Date: Mon, 5 Oct 2015 17:56:32 +0000 (-0700)
Subject: Small optimization for FileContentsHasher
X-Git-Tag: 1.31.0-rc.0~9510^2
X-Git-Url: https://git.cyclocoop.org/%242?a=commitdiff_plain;h=3621ad0f82f8b6b26f034f79d60ac0007327cd79;p=lhc%2Fweb%2Fwiklou.git

Small optimization for FileContentsHasher

Make sure that PHP warnings are suppressed and restored once per
invocation of FileContentsHasher::getFileContentsHash(), rather than
once or twice per file.

Change-Id: I814093f226d62e5e479411d0c3a7bbbe4998255a
---

diff --git a/includes/utils/FileContentsHasher.php b/includes/utils/FileContentsHasher.php
index 655c1d0b07..c86691903b 100644
--- a/includes/utils/FileContentsHasher.php
+++ b/includes/utils/FileContentsHasher.php
@@ -57,7 +57,7 @@ class FileContentsHasher {
	 * @return string|bool Hash of file contents, or false if the file could not be read.
	 */
	public function getFileContentsHashInternal( $filePath, $algo = 'md4' ) {
-		$mtime = MediaWiki\quietCall( 'filemtime', $filePath );
+		$mtime = filemtime( $filePath );
		if ( $mtime === false ) {
			return false;
		}
@@ -69,7 +69,7 @@ class FileContentsHasher {
			return $hash;
		}

-		$contents = MediaWiki\quietCall( 'file_get_contents', $filePath );
+		$contents = file_get_contents( $filePath );
		if ( $contents === false ) {
			return false;
		}
@@ -96,8 +96,12 @@ class FileContentsHasher {
			$filePaths = (array)$filePaths;
		}

+		MediaWiki\suppressWarnings();
+
		if ( count( $filePaths ) === 1 ) {
-			return $instance->getFileContentsHashInternal( $filePaths[0], $algo );
+			$hash = $instance->getFileContentsHashInternal( $filePaths[0], $algo );
+			MediaWiki\restoreWarnings();
+			return $hash;
		}

		sort( $filePaths );
@@ -105,6 +109,8 @@
			return $instance->getFileContentsHashInternal( $filePath, $algo ) ?: '';
		}, $filePaths );

+		MediaWiki\restoreWarnings();
+
		$hashes = implode( '', $hashes );
		return $hashes ? hash( $algo, $hashes ) : false;
	}
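
The shape of the change, as a minimal standalone PHP sketch rather than
MediaWiki code: hashOne() and hashMany() are hypothetical stand-ins for
getFileContentsHashInternal() and getFileContentsHash(), and toggling
error_reporting() stands in for MediaWiki\suppressWarnings() /
MediaWiki\restoreWarnings(). The point is that the suppress/restore pair
now brackets the whole batch instead of each native filesystem call:

<?php
// Hypothetical names for illustration only; see the note above.

function hashOne( $filePath, $algo = 'md4' ) {
	// The caller is expected to have silenced warnings already,
	// mirroring the patched getFileContentsHashInternal(), which now
	// calls the native functions directly instead of wrapping each one.
	$contents = file_get_contents( $filePath );
	return $contents === false ? false : hash( $algo, $contents );
}

function hashMany( array $filePaths, $algo = 'md4' ) {
	// Silence E_WARNING once for the whole batch rather than once or
	// twice per file; MediaWiki does this with suppressWarnings() and
	// restoreWarnings().
	$oldLevel = error_reporting( error_reporting() & ~E_WARNING );
	$hashes = array_map( function ( $filePath ) use ( $algo ) {
		return hashOne( $filePath, $algo ) ?: '';
	}, $filePaths );
	error_reporting( $oldLevel );
	$hashes = implode( '', $hashes );
	return $hashes ? hash( $algo, $hashes ) : false;
}

The trade-off visible in the diff is that getFileContentsHashInternal()
no longer silences its own filemtime() and file_get_contents() calls, so
any caller that bypasses getFileContentsHash() would see PHP warnings
for unreadable files.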