From 64d4e75208953bda72cee850f5287a6929c119f3 Mon Sep 17 00:00:00 2001
From: Brion Vibber
Date: Mon, 4 Apr 2011 20:59:04 +0000
Subject: [PATCH] Workaround for bug 28146: running out of memory during
 Unicode validation/normalization when uploading a DjVu file with lots of
 embedded page text

This provisional workaround runs a page at a time through UtfNormal::cleanUp()
instead of running the entire file's dumped text through it at once. This avoids
exploding memory during the preg_match_all() used to divide up ASCII and
non-ASCII runs for validation, which is very wasteful for long texts in
Latin-script languages with many mixed-in non-ASCII characters (such as French
and German text).

This won't fix legitimate cases of huge texts, such as really long text on a
single page, which would still be run through validation in one giant chunk at
web input time.
---
 includes/DjVuImage.php | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/includes/DjVuImage.php b/includes/DjVuImage.php
index 7a8b5e4e80..f2effcb666 100644
--- a/includes/DjVuImage.php
+++ b/includes/DjVuImage.php
@@ -254,8 +254,7 @@ class DjVuImage {
 			$txt = wfShellExec( $cmd, $retval );
 			wfProfileOut( 'djvutxt' );
 			if( $retval == 0) {
-				# Get rid of invalid UTF-8, strip control characters
-				$txt = UtfNormal::cleanUp( $txt );
+				# Strip some control characters
 				$txt = preg_replace( "/[\013\035\037]/", "", $txt );
 				$reg = <<<EOR
@@ ... @@
 	function pageTextCallback( $matches ) {
-		return '<PAGE value="' . htmlspecialchars( $matches[1] ) . '" />';
+		# Get rid of invalid UTF-8, strip control characters
+		return '<PAGE value="' . htmlspecialchars( UtfNormal::cleanUp( $matches[1] ) ) . '" />';
 	}
 
 	/**
-- 
2.20.1
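
For reference (not part of the patch itself), a minimal sketch of the
page-at-a-time cleanup pattern the commit message describes, assuming a
page-matching regex like the heredoc in the first hunk. The function name
normalizePageText() and the $pageRegex parameter are illustrative only;
UtfNormal::cleanUp() (MediaWiki's normalizer), preg_replace_callback(), and
htmlspecialchars() are the same calls the patched code relies on.

    <?php
    # Sketch only: normalize dumped DjVu text one page at a time instead of in
    # one pass over the whole dump. Each callback invocation hands cleanUp()
    # a single page's text, so peak memory is bounded by the largest page.
    function normalizePageText( $txt, $pageRegex ) {
        return preg_replace_callback( $pageRegex, function ( $m ) {
            # Validate/normalize just this page, then wrap it for the XML dump.
            return '<PAGE value="' . htmlspecialchars( UtfNormal::cleanUp( $m[1] ) ) . '" />';
        }, $txt );
    }

Because the callback sees one match at a time, the preg_match_all() pass that
splits ASCII from non-ASCII runs during validation only ever operates on a
single page of text rather than the whole multi-page dump.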