From edde0a423bcb5635b1b8a807e026a1106f91775c Mon Sep 17 00:00:00 2001
From: Catrope
Date: Fri, 6 Apr 2012 11:20:04 -0700
Subject: [PATCH] Remove duplicate URLs in SquidUpdate.php

This has to be done in 3 places, because there are 3 public entry
points. I originally submitted this to fight duplicates I thought I was
seeing in production, but it turns out I'm blind and the URLs weren't
duplicates after all. Nevertheless, preventing duplicate purges in the
SquidUpdate class is a good idea.

Change-Id: Idc21dd7d0b3b79572853b787fac746454d9178ea
---
 includes/cache/SquidUpdate.php | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/includes/cache/SquidUpdate.php b/includes/cache/SquidUpdate.php
index d47b5b5e24..bd70095b84 100644
--- a/includes/cache/SquidUpdate.php
+++ b/includes/cache/SquidUpdate.php
@@ -19,6 +19,7 @@ class SquidUpdate {
 		} else {
 			$this->mMaxTitles = $maxTitles;
 		}
+		$urlArr = array_unique( $urlArr ); // Remove duplicates
 		if ( count( $urlArr ) > $this->mMaxTitles ) {
 			$urlArr = array_slice( $urlArr, 0, $this->mMaxTitles );
 		}
@@ -119,6 +120,7 @@ class SquidUpdate {
 
 		wfProfileIn( __METHOD__ );
 
+		$urlArr = array_unique( $urlArr ); // Remove duplicates
 		$maxSocketsPerSquid = 8; // socket cap per Squid
 		$urlsPerSocket = 400; // 400 seems to be a good tradeoff, opening a socket takes a while
 		$socketsPerSquid = ceil( count( $urlArr ) / $urlsPerSocket );
@@ -168,6 +170,7 @@ class SquidUpdate {
 			socket_set_option( $conn, IPPROTO_IP, IP_MULTICAST_TTL,
 				$wgHTCPMulticastTTL );
 
+			$urlArr = array_unique( $urlArr ); // Remove duplicates
 			foreach ( $urlArr as $url ) {
 				if( !is_string( $url ) ) {
 					throw new MWException( 'Bad purge URL' );
-- 
2.20.1
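
As a quick standalone sketch (not part of the patch above; the example.org URLs are made up for illustration), this is the deduplication behaviour array_unique() gives a purge list: repeated values are dropped while the original keys are kept, so a caller that needs a gap-free numeric index would additionally pass the result through array_values().

<?php
// Illustrative sketch only: deduplicating a list of purge URLs the same
// way the patch does, using array_unique().
$urlArr = array(
	'http://example.org/wiki/Foo',
	'http://example.org/wiki/Bar',
	'http://example.org/wiki/Foo', // duplicate entry
);

// array_unique() removes repeated values but preserves the surviving
// keys, so the result here is array( 0 => ...Foo, 1 => ...Bar ).
$urlArr = array_unique( $urlArr );

// Reindex explicitly if contiguous integer keys matter to the caller.
$urlArr = array_values( $urlArr );

print_r( $urlArr );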