'CdbException' => __DIR__ . '/includes/compat/CdbCompat.php',
'CdbReader' => __DIR__ . '/includes/compat/CdbCompat.php',
'CdbWriter' => __DIR__ . '/includes/compat/CdbCompat.php',
+ 'CdnCacheUpdate' => __DIR__ . '/includes/deferred/CdnCacheUpdate.php',
'CentralIdLookup' => __DIR__ . '/includes/user/CentralIdLookup.php',
'CgzCopyTransaction' => __DIR__ . '/maintenance/storage/recompressTracked.php',
'ChangePassword' => __DIR__ . '/maintenance/changePassword.php',
'GanConverter' => __DIR__ . '/languages/classes/LanguageGan.php',
'GenderCache' => __DIR__ . '/includes/cache/GenderCache.php',
'GenerateCollationData' => __DIR__ . '/maintenance/language/generateCollationData.php',
+ 'GenerateCommonPassword' => __DIR__ . '/maintenance/createCommonPasswordCdb.php',
'GenerateJsonI18n' => __DIR__ . '/maintenance/generateJsonI18n.php',
'GenerateNormalizerDataAr' => __DIR__ . '/maintenance/language/generateNormalizerDataAr.php',
'GenerateNormalizerDataMl' => __DIR__ . '/maintenance/language/generateNormalizerDataMl.php',
'SqliteUpdater' => __DIR__ . '/includes/installer/SqliteUpdater.php',
'SquidPurgeClient' => __DIR__ . '/includes/clientpool/SquidPurgeClient.php',
'SquidPurgeClientPool' => __DIR__ . '/includes/clientpool/SquidPurgeClientPool.php',
- 'SquidUpdate' => __DIR__ . '/includes/deferred/SquidUpdate.php',
+ 'SquidUpdate' => __DIR__ . '/includes/deferred/CdnCacheUpdate.php',
'SrConverter' => __DIR__ . '/languages/classes/LanguageSr.php',
'StatsOutput' => __DIR__ . '/maintenance/language/StatOutputs.php',
'Status' => __DIR__ . '/includes/Status.php',
*/
public function purgeSquid() {
DeferredUpdates::addUpdate(
- new SquidUpdate( $this->getSquidURLs() ),
+ new CdnCacheUpdate( $this->getSquidURLs() ),
DeferredUpdates::PRESEND
);
}
*/
public function queuePurge( $url ) {
global $wgSquidPurgeUseHostHeader;
- $url = SquidUpdate::expand( str_replace( "\n", '', $url ) );
+ $url = CdnCacheUpdate::expand( str_replace( "\n", '', $url ) );
$request = array();
if ( $wgSquidPurgeUseHostHeader ) {
$url = wfParseUrl( $url );
--- /dev/null
+<?php
+/**
+ * Squid cache purging.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License along
+ * with this program; if not, write to the Free Software Foundation, Inc.,
+ * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ * http://www.gnu.org/copyleft/gpl.html
+ *
+ * @file
+ * @ingroup Cache
+ */
+
+use Wikimedia\Assert\Assert;
+
+/**
+ * Handles purging appropriate Squid URLs given a title (or titles)
+ * @ingroup Cache
+ */
+class CdnCacheUpdate implements DeferrableUpdate, MergeableUpdate {
+	/** @var string[] Collection of URLs to purge */
+	protected $urls = array();
+
+	/**
+	 * @param string[] $urlArr Collection of URLs to purge
+	 */
+	public function __construct( array $urlArr ) {
+		$this->urls = $urlArr;
+	}
+
+	/**
+	 * Create an update object from an array of Title objects, or a TitleArray object
+	 *
+	 * @param Traversable|array $titles
+	 * @param string[] $urlArr Additional URLs to purge along with the title URLs
+	 * @return CdnCacheUpdate
+	 */
+	public static function newFromTitles( $titles, $urlArr = array() ) {
+		/** @var Title $title */
+		foreach ( $titles as $title ) {
+			$urlArr = array_merge( $urlArr, $title->getSquidURLs() );
+		}
+
+		return new CdnCacheUpdate( $urlArr );
+	}
+
+	/**
+	 * @param Title $title
+	 * @return CdnCacheUpdate
+	 * @deprecated since 1.27
+	 */
+	public static function newSimplePurge( Title $title ) {
+		return new CdnCacheUpdate( $title->getSquidURLs() );
+	}
+
+	/**
+	 * Purges the list of URLs passed to the constructor.
+	 */
+	public function doUpdate() {
+		self::purge( $this->urls );
+	}
+
+	/**
+	 * Merge the URL list of another CdnCacheUpdate into this one,
+	 * so batched deferred updates result in a single purge pass.
+	 *
+	 * @param MergeableUpdate $update Must be a CdnCacheUpdate instance
+	 */
+	public function merge( MergeableUpdate $update ) {
+		/** @var CdnCacheUpdate $update */
+		Assert::parameterType( __CLASS__, $update, '$update' );
+
+		$this->urls = array_merge( $this->urls, $update->urls );
+	}
+
+	/**
+	 * Purges a list of Squids defined in $wgSquidServers.
+	 * $urlArr should contain the full URLs to purge as values
+	 * (example: $urlArr[] = 'http://my.host/something')
+	 * XXX report broken Squids per mail or log
+	 *
+	 * @param string[] $urlArr List of full URLs to purge
+	 */
+	public static function purge( array $urlArr ) {
+		global $wgSquidServers, $wgHTCPRouting;
+
+		if ( !$urlArr ) {
+			return;
+		}
+
+		// Remove duplicate URLs from list
+		$urlArr = array_unique( $urlArr );
+
+		wfDebugLog( 'squid', __METHOD__ . ': ' . implode( ' ', $urlArr ) );
+
+		if ( $wgHTCPRouting ) {
+			self::HTCPPurge( $urlArr );
+		}
+
+		if ( $wgSquidServers ) {
+			// Maximum number of parallel connections per squid
+			$maxSocketsPerSquid = 8;
+			// Number of requests to send per socket
+			// 400 seems to be a good tradeoff, opening a socket takes a while
+			$urlsPerSocket = 400;
+			$socketsPerSquid = ceil( count( $urlArr ) / $urlsPerSocket );
+			if ( $socketsPerSquid > $maxSocketsPerSquid ) {
+				$socketsPerSquid = $maxSocketsPerSquid;
+			}
+
+			$pool = new SquidPurgeClientPool;
+			$chunks = array_chunk( $urlArr, ceil( count( $urlArr ) / $socketsPerSquid ) );
+			foreach ( $wgSquidServers as $server ) {
+				foreach ( $chunks as $chunk ) {
+					$client = new SquidPurgeClient( $server );
+					foreach ( $chunk as $url ) {
+						$client->queuePurge( $url );
+					}
+					$pool->addClient( $client );
+				}
+			}
+
+			$pool->run();
+		}
+	}
+
+	/**
+	 * Send Hyper Text Caching Protocol (HTCP) CLR requests.
+	 *
+	 * @throws MWException
+	 * @param string[] $urlArr Collection of URLs to purge
+	 */
+	private static function HTCPPurge( array $urlArr ) {
+		global $wgHTCPRouting, $wgHTCPMulticastTTL;
+
+		// HTCP CLR operation
+		$htcpOpCLR = 4;
+
+		// @todo FIXME: PHP doesn't support these socket constants (include/linux/in.h)
+		if ( !defined( "IPPROTO_IP" ) ) {
+			define( "IPPROTO_IP", 0 );
+			define( "IP_MULTICAST_LOOP", 34 );
+			define( "IP_MULTICAST_TTL", 33 );
+		}
+
+		// pfsockopen doesn't work because we need set_sock_opt
+		$conn = socket_create( AF_INET, SOCK_DGRAM, SOL_UDP );
+		if ( !$conn ) {
+			$errstr = socket_strerror( socket_last_error() );
+			wfDebugLog( 'squid', __METHOD__ .
+				": Error opening UDP socket: $errstr" );
+
+			return;
+		}
+
+		// Set socket options
+		socket_set_option( $conn, IPPROTO_IP, IP_MULTICAST_LOOP, 0 );
+		if ( $wgHTCPMulticastTTL != 1 ) {
+			// Set multicast time to live (hop count) option on socket
+			socket_set_option( $conn, IPPROTO_IP, IP_MULTICAST_TTL,
+				$wgHTCPMulticastTTL );
+		}
+
+		// Get sequential trx IDs for packet loss counting
+		$ids = UIDGenerator::newSequentialPerNodeIDs(
+			'squidhtcppurge', 32, count( $urlArr ), UIDGenerator::QUICK_VOLATILE
+		);
+
+		foreach ( $urlArr as $url ) {
+			if ( !is_string( $url ) ) {
+				throw new MWException( 'Bad purge URL' );
+			}
+			$url = self::expand( $url );
+			$conf = self::getRuleForURL( $url, $wgHTCPRouting );
+			if ( !$conf ) {
+				// Missing ": " separator would fuse the method name into the message
+				wfDebugLog( 'squid', __METHOD__ .
+					": No HTCP rule configured for URL {$url}, skipping" );
+				continue;
+			}
+
+			if ( isset( $conf['host'] ) && isset( $conf['port'] ) ) {
+				// Normalize single entries
+				$conf = array( $conf );
+			}
+			foreach ( $conf as $subconf ) {
+				if ( !isset( $subconf['host'] ) || !isset( $subconf['port'] ) ) {
+					throw new MWException( "Invalid HTCP rule for URL $url\n" );
+				}
+			}
+
+			// Construct a minimal HTCP request diagram
+			// as per RFC 2756
+			// Opcode 'CLR', no response desired, no auth
+			$htcpTransID = current( $ids );
+			next( $ids );
+
+			$htcpSpecifier = pack( 'na4na*na8n',
+				4, 'HEAD', strlen( $url ), $url,
+				8, 'HTTP/1.0', 0 );
+
+			$htcpDataLen = 8 + 2 + strlen( $htcpSpecifier );
+			$htcpLen = 4 + $htcpDataLen + 2;
+
+			// Note! Squid gets the bit order of the first
+			// word wrong, wrt the RFC. Apparently no other
+			// implementation exists, so adapt to Squid
+			$htcpPacket = pack( 'nxxnCxNxxa*n',
+				$htcpLen, $htcpDataLen, $htcpOpCLR,
+				$htcpTransID, $htcpSpecifier, 2 );
+
+			wfDebugLog( 'squid', __METHOD__ .
+				": Purging URL $url via HTCP" );
+			foreach ( $conf as $subconf ) {
+				socket_sendto( $conn, $htcpPacket, $htcpLen, 0,
+					$subconf['host'], $subconf['port'] );
+			}
+		}
+	}
+
+	/**
+	 * Expand local URLs to fully-qualified URLs using the internal protocol
+	 * and host defined in $wgInternalServer. Input that's already fully-
+	 * qualified will be passed through unchanged.
+	 *
+	 * This is used to generate purge URLs that may be either local to the
+	 * main wiki or include a non-native host, such as images hosted on a
+	 * second internal server.
+	 *
+	 * Client functions should not need to call this.
+	 *
+	 * @param string $url
+	 * @return string
+	 */
+	public static function expand( $url ) {
+		return wfExpandUrl( $url, PROTO_INTERNAL );
+	}
+
+	/**
+	 * Find the HTCP routing rule to use for a given URL.
+	 * @param string $url URL to match
+	 * @param array $rules Array of rules, see $wgHTCPRouting for format and behavior
+	 * @return mixed Element of $rules that matched, or false if nothing matched
+	 */
+	private static function getRuleForURL( $url, $rules ) {
+		foreach ( $rules as $regex => $routing ) {
+			if ( $regex === '' || preg_match( $regex, $url ) ) {
+				return $routing;
+			}
+		}
+
+		return false;
+	}
+}
+
+/**
+ * @deprecated since 1.27 Use CdnCacheUpdate instead
+ */
+class SquidUpdate extends CdnCacheUpdate {
+	// Empty subclass: keeps the old class name resolvable for b/c
+	// (callers and extensions constructing SquidUpdate keep working)
+}
+++ /dev/null
-<?php
-/**
- * Squid cache purging.
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; either version 2 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License along
- * with this program; if not, write to the Free Software Foundation, Inc.,
- * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- * http://www.gnu.org/copyleft/gpl.html
- *
- * @file
- * @ingroup Cache
- */
-
-use Wikimedia\Assert\Assert;
-
-/**
- * Handles purging appropriate Squid URLs given a title (or titles)
- * @ingroup Cache
- */
-class SquidUpdate implements DeferrableUpdate, MergeableUpdate {
- /** @var string[] Collection of URLs to purge */
- protected $urls = array();
-
- /**
- * @param string[] $urlArr Collection of URLs to purge
- */
- public function __construct( array $urlArr ) {
- $this->urls = $urlArr;
- }
-
- /**
- * Create a SquidUpdate from an array of Title objects, or a TitleArray object
- *
- * @param Traversable|array $titles
- * @param string[] $urlArr
- * @return SquidUpdate
- */
- public static function newFromTitles( $titles, $urlArr = array() ) {
- /** @var Title $title */
- foreach ( $titles as $title ) {
- $urlArr = array_merge( $urlArr, $title->getSquidURLs() );
- }
-
- return new SquidUpdate( $urlArr );
- }
-
- /**
- * @param Title $title
- * @return SquidUpdate
- * @deprecated 1.27
- */
- public static function newSimplePurge( Title $title ) {
- return new SquidUpdate( $title->getSquidURLs() );
- }
-
- /**
- * Purges the list of URLs passed to the constructor.
- */
- public function doUpdate() {
- self::purge( $this->urls );
- }
-
- public function merge( MergeableUpdate $update ) {
- /** @var SquidUpdate $update */
- Assert::parameterType( __CLASS__, $update, '$update' );
-
- $this->urls = array_merge( $this->urls, $update->urls );
- }
-
- /**
- * Purges a list of Squids defined in $wgSquidServers.
- * $urlArr should contain the full URLs to purge as values
- * (example: $urlArr[] = 'http://my.host/something')
- * XXX report broken Squids per mail or log
- *
- * @param string[] $urlArr List of full URLs to purge
- */
- public static function purge( array $urlArr ) {
- global $wgSquidServers, $wgHTCPRouting;
-
- if ( !$urlArr ) {
- return;
- }
-
- // Remove duplicate URLs from list
- $urlArr = array_unique( $urlArr );
-
- wfDebugLog( 'squid', __METHOD__ . ': ' . implode( ' ', $urlArr ) );
-
- if ( $wgHTCPRouting ) {
- self::HTCPPurge( $urlArr );
- }
-
- if ( $wgSquidServers ) {
- // Maximum number of parallel connections per squid
- $maxSocketsPerSquid = 8;
- // Number of requests to send per socket
- // 400 seems to be a good tradeoff, opening a socket takes a while
- $urlsPerSocket = 400;
- $socketsPerSquid = ceil( count( $urlArr ) / $urlsPerSocket );
- if ( $socketsPerSquid > $maxSocketsPerSquid ) {
- $socketsPerSquid = $maxSocketsPerSquid;
- }
-
- $pool = new SquidPurgeClientPool;
- $chunks = array_chunk( $urlArr, ceil( count( $urlArr ) / $socketsPerSquid ) );
- foreach ( $wgSquidServers as $server ) {
- foreach ( $chunks as $chunk ) {
- $client = new SquidPurgeClient( $server );
- foreach ( $chunk as $url ) {
- $client->queuePurge( $url );
- }
- $pool->addClient( $client );
- }
- }
-
- $pool->run();
- }
- }
-
- /**
- * Send Hyper Text Caching Protocol (HTCP) CLR requests.
- *
- * @throws MWException
- * @param string[] $urlArr Collection of URLs to purge
- */
- private static function HTCPPurge( array $urlArr ) {
- global $wgHTCPRouting, $wgHTCPMulticastTTL;
-
- // HTCP CLR operation
- $htcpOpCLR = 4;
-
- // @todo FIXME: PHP doesn't support these socket constants (include/linux/in.h)
- if ( !defined( "IPPROTO_IP" ) ) {
- define( "IPPROTO_IP", 0 );
- define( "IP_MULTICAST_LOOP", 34 );
- define( "IP_MULTICAST_TTL", 33 );
- }
-
- // pfsockopen doesn't work because we need set_sock_opt
- $conn = socket_create( AF_INET, SOCK_DGRAM, SOL_UDP );
- if ( !$conn ) {
- $errstr = socket_strerror( socket_last_error() );
- wfDebugLog( 'squid', __METHOD__ .
- ": Error opening UDP socket: $errstr" );
-
- return;
- }
-
- // Set socket options
- socket_set_option( $conn, IPPROTO_IP, IP_MULTICAST_LOOP, 0 );
- if ( $wgHTCPMulticastTTL != 1 ) {
- // Set multicast time to live (hop count) option on socket
- socket_set_option( $conn, IPPROTO_IP, IP_MULTICAST_TTL,
- $wgHTCPMulticastTTL );
- }
-
- // Get sequential trx IDs for packet loss counting
- $ids = UIDGenerator::newSequentialPerNodeIDs(
- 'squidhtcppurge', 32, count( $urlArr ), UIDGenerator::QUICK_VOLATILE
- );
-
- foreach ( $urlArr as $url ) {
- if ( !is_string( $url ) ) {
- throw new MWException( 'Bad purge URL' );
- }
- $url = self::expand( $url );
- $conf = self::getRuleForURL( $url, $wgHTCPRouting );
- if ( !$conf ) {
- wfDebugLog( 'squid', __METHOD__ .
- "No HTCP rule configured for URL {$url} , skipping" );
- continue;
- }
-
- if ( isset( $conf['host'] ) && isset( $conf['port'] ) ) {
- // Normalize single entries
- $conf = array( $conf );
- }
- foreach ( $conf as $subconf ) {
- if ( !isset( $subconf['host'] ) || !isset( $subconf['port'] ) ) {
- throw new MWException( "Invalid HTCP rule for URL $url\n" );
- }
- }
-
- // Construct a minimal HTCP request diagram
- // as per RFC 2756
- // Opcode 'CLR', no response desired, no auth
- $htcpTransID = current( $ids );
- next( $ids );
-
- $htcpSpecifier = pack( 'na4na*na8n',
- 4, 'HEAD', strlen( $url ), $url,
- 8, 'HTTP/1.0', 0 );
-
- $htcpDataLen = 8 + 2 + strlen( $htcpSpecifier );
- $htcpLen = 4 + $htcpDataLen + 2;
-
- // Note! Squid gets the bit order of the first
- // word wrong, wrt the RFC. Apparently no other
- // implementation exists, so adapt to Squid
- $htcpPacket = pack( 'nxxnCxNxxa*n',
- $htcpLen, $htcpDataLen, $htcpOpCLR,
- $htcpTransID, $htcpSpecifier, 2 );
-
- wfDebugLog( 'squid', __METHOD__ .
- "Purging URL $url via HTCP" );
- foreach ( $conf as $subconf ) {
- socket_sendto( $conn, $htcpPacket, $htcpLen, 0,
- $subconf['host'], $subconf['port'] );
- }
- }
- }
-
- /**
- * Expand local URLs to fully-qualified URLs using the internal protocol
- * and host defined in $wgInternalServer. Input that's already fully-
- * qualified will be passed through unchanged.
- *
- * This is used to generate purge URLs that may be either local to the
- * main wiki or include a non-native host, such as images hosted on a
- * second internal server.
- *
- * Client functions should not need to call this.
- *
- * @param string $url
- * @return string
- */
- public static function expand( $url ) {
- return wfExpandUrl( $url, PROTO_INTERNAL );
- }
-
- /**
- * Find the HTCP routing rule to use for a given URL.
- * @param string $url URL to match
- * @param array $rules Array of rules, see $wgHTCPRouting for format and behavior
- * @return mixed Element of $rules that matched, or false if nothing matched
- */
- private static function getRuleForURL( $url, $rules ) {
- foreach ( $rules as $regex => $routing ) {
- if ( $regex === '' || preg_match( $regex, $url ) ) {
- return $routing;
- }
- }
-
- return false;
- }
-}
// Purge squid cache for this file
DeferredUpdates::addUpdate(
- new SquidUpdate( array( $this->getUrl() ) ),
+ new CdnCacheUpdate( array( $this->getUrl() ) ),
DeferredUpdates::PRESEND
);
}
foreach ( $files as $file ) {
$urls[] = $this->getArchiveThumbUrl( $archiveName, $file );
}
- DeferredUpdates::addUpdate( new SquidUpdate( $urls ), DeferredUpdates::PRESEND );
+ DeferredUpdates::addUpdate( new CdnCacheUpdate( $urls ), DeferredUpdates::PRESEND );
}
/**
$this->purgeThumbList( $dir, $files );
// Purge the squid
- DeferredUpdates::addUpdate( new SquidUpdate( $urls ), DeferredUpdates::PRESEND );
+ DeferredUpdates::addUpdate( new CdnCacheUpdate( $urls ), DeferredUpdates::PRESEND );
}
/**
$that->purgeThumbnails();
# Remove the old file from the squid cache
DeferredUpdates::addUpdate(
- new SquidUpdate( array( $that->getUrl() ) ),
+ new CdnCacheUpdate( array( $that->getUrl() ) ),
DeferredUpdates::PRESEND
);
} else {
foreach ( $archiveNames as $archiveName ) {
$purgeUrls[] = $this->getArchiveUrl( $archiveName );
}
- DeferredUpdates::addUpdate( new SquidUpdate( $purgeUrls ), DeferredUpdates::PRESEND );
+ DeferredUpdates::addUpdate( new CdnCacheUpdate( $purgeUrls ), DeferredUpdates::PRESEND );
return $status;
}
}
DeferredUpdates::addUpdate(
- new SquidUpdate( array( $this->getArchiveUrl( $archiveName ) ) ),
+ new CdnCacheUpdate( array( $this->getArchiveUrl( $archiveName ) ) ),
DeferredUpdates::PRESEND
);
) );
// Update squid
- $u = SquidUpdate::newFromTitles( $titleArray );
+ $u = CdnCacheUpdate::newFromTitles( $titleArray );
$u->doUpdate();
// Update file cache
// Send purge after above page_touched update was committed
DeferredUpdates::addUpdate(
- new SquidUpdate( $title->getSquidURLs() ),
+ new CdnCacheUpdate( $title->getSquidURLs() ),
DeferredUpdates::PRESEND
);
$purgeUrls[] = $file->getArchiveUrl( $archiveName );
}
DeferredUpdates::addUpdate(
- new SquidUpdate( $purgeUrls ),
+ new CdnCacheUpdate( $purgeUrls ),
DeferredUpdates::PRESEND
);
private function benchSquid( $urls, $trials = 1 ) {
$start = microtime( true );
for ( $i = 0; $i < $trials; $i++ ) {
- SquidUpdate::purge( $urls );
+ CdnCacheUpdate::purge( $urls );
}
$delta = microtime( true ) - $start;
$pertrial = $delta / $trials;
}
// Send batch of purge requests out to squids
- $squid = new SquidUpdate( $urls, count( $urls ) );
+ $squid = new CdnCacheUpdate( $urls, count( $urls ) );
$squid->doUpdate();
if ( $this->hasOption( 'sleep-per-batch' ) ) {
if ( $this->hasOption( 'verbose' ) ) {
$this->output( $url . "\n" );
}
- $u = new SquidUpdate( array( $url ) );
+ $u = new CdnCacheUpdate( array( $url ) );
$u->doUpdate();
usleep( $delay * 1e6 );
}
if ( $this->hasOption( 'verbose' ) ) {
$this->output( implode( "\n", $urls ) . "\n" );
}
- $u = new SquidUpdate( $urls );
+ $u = new CdnCacheUpdate( $urls );
$u->doUpdate();
}
}
--- /dev/null
+<?php
+
+class CdnCacheUpdateTest extends MediaWikiTestCase {
+	/**
+	 * Two CdnCacheUpdate objects queued via DeferredUpdates should be merged
+	 * (MergeableUpdate) into the first update's URL list.
+	 */
+	public function testPurgeMergeWeb() {
+		// Web mode: deferred updates are queued rather than run immediately,
+		// which is what allows the second update to be merged into the first
+		$this->setMwGlobals( 'wgCommandLineMode', false );
+
+		$urls1 = array();
+		$title = Title::newMainPage();
+		$urls1[] = $title->getCanonicalURL( '?x=1' );
+		$urls1[] = $title->getCanonicalURL( '?x=2' );
+		$urls1[] = $title->getCanonicalURL( '?x=3' );
+		$update1 = new CdnCacheUpdate( $urls1 );
+		DeferredUpdates::addUpdate( $update1 );
+
+		// Overlapping URL set; merge() uses array_merge, so duplicates
+		// (?x=2, ?x=3) are kept here and deduplicated later by purge()
+		$urls2 = array();
+		$urls2[] = $title->getCanonicalURL( '?x=2' );
+		$urls2[] = $title->getCanonicalURL( '?x=3' );
+		$urls2[] = $title->getCanonicalURL( '?x=4' );
+		$update2 = new CdnCacheUpdate( $urls2 );
+		DeferredUpdates::addUpdate( $update2 );
+
+		// Inspect the protected $urls member of the first (merged-into) update
+		$wrapper = TestingAccessWrapper::newFromObject( $update1 );
+		$this->assertEquals( array_merge( $urls1, $urls2 ), $wrapper->urls );
+	}
+}
+++ /dev/null
-<?php
-
-class SquidUpdatesTest extends MediaWikiTestCase {
- public function testPurgeMergeWeb() {
- $this->setMwGlobals( 'wgCommandLineMode', false );
-
- $urls1 = array();
- $title = Title::newMainPage();
- $urls1[] = $title->getCanonicalURL( '?x=1' );
- $urls1[] = $title->getCanonicalURL( '?x=2' );
- $urls1[] = $title->getCanonicalURL( '?x=3' );
- $update1 = new SquidUpdate( $urls1 );
- DeferredUpdates::addUpdate( $update1 );
-
- $urls2 = array();
- $urls2[] = $title->getCanonicalURL( '?x=2' );
- $urls2[] = $title->getCanonicalURL( '?x=3' );
- $urls2[] = $title->getCanonicalURL( '?x=4' );
- $update2 = new SquidUpdate( $urls2 );
- DeferredUpdates::addUpdate( $update2 );
-
- $wrapper = TestingAccessWrapper::newFromObject( $update1 );
- $this->assertEquals( array_merge( $urls1, $urls2 ), $wrapper->urls );
- }
-}