Remove duplicate URLs in SquidUpdate.php
[lhc/web/wiklou.git] / includes / cache / SquidUpdate.php
1 <?php
2 /**
3 * See deferred.txt
4 * @file
5 * @ingroup Cache
6 */
7
8 /**
9 * Handles purging appropriate Squid URLs given a title (or titles)
10 * @ingroup Cache
11 */
class SquidUpdate {
	/** @var array Fully-qualified URLs queued for purging (deduplicated, capped) */
	public $urlArr;
	/** @var int Maximum number of URLs this update will carry */
	public $mMaxTitles;

	/**
	 * @param $urlArr array List of URLs to purge. Duplicates are removed and
	 *   the list is truncated to the title cap.
	 * @param $maxTitles int|bool Cap on the number of URLs, or false to use
	 *   the $wgMaxSquidPurgeTitles global.
	 */
	function __construct( $urlArr = array(), $maxTitles = false ) {
		global $wgMaxSquidPurgeTitles;
		if ( $maxTitles === false ) {
			$this->mMaxTitles = $wgMaxSquidPurgeTitles;
		} else {
			$this->mMaxTitles = $maxTitles;
		}
		$urlArr = array_unique( $urlArr ); // Remove duplicates
		if ( count( $urlArr ) > $this->mMaxTitles ) {
			$urlArr = array_slice( $urlArr, 0, $this->mMaxTitles );
		}
		$this->urlArr = $urlArr;
	}

	/**
	 * Create a SquidUpdate for a title plus (up to the cap) the pages that
	 * link to it, so cached copies of linking pages are refreshed too.
	 *
	 * @param $title Title
	 *
	 * @return SquidUpdate
	 */
	static function newFromLinksTo( &$title ) {
		global $wgMaxSquidPurgeTitles;
		wfProfileIn( __METHOD__ );

		# Get a list of URLs linking to this page
		$dbr = wfGetDB( DB_SLAVE );
		$res = $dbr->select( array( 'links', 'page' ),
			array( 'page_namespace', 'page_title' ),
			array(
				'pl_namespace' => $title->getNamespace(),
				'pl_title'     => $title->getDBkey(),
				'pl_from=page_id' ),
			__METHOD__ );
		$blurlArr = $title->getSquidURLs();
		// Only add the backlinks when they fit under the cap; a huge backlink
		// set would be truncated arbitrarily by the constructor anyway.
		if ( $dbr->numRows( $res ) <= $wgMaxSquidPurgeTitles ) {
			foreach ( $res as $BL ) {
				$tobj = Title::makeTitle( $BL->page_namespace, $BL->page_title );
				$blurlArr[] = $tobj->getInternalURL();
			}
		}

		wfProfileOut( __METHOD__ );
		return new SquidUpdate( $blurlArr );
	}

	/**
	 * Create a SquidUpdate from an array of Title objects, or a TitleArray object
	 *
	 * @param $titles array
	 * @param $urlArr array Pre-existing URLs to include in the update
	 *
	 * @return SquidUpdate
	 */
	static function newFromTitles( $titles, $urlArr = array() ) {
		global $wgMaxSquidPurgeTitles;
		$i = 0;
		foreach ( $titles as $title ) {
			// Stop *before* appending once the cap is reached; previously the
			// check ran after the append with '>', collecting up to two URLs
			// past the cap only for the constructor to slice them off again.
			if ( $i++ >= $wgMaxSquidPurgeTitles ) {
				break;
			}
			$urlArr[] = $title->getInternalURL();
		}
		return new SquidUpdate( $urlArr );
	}

	/**
	 * Create a SquidUpdate covering just the given title's own Squid URLs.
	 *
	 * @param $title Title
	 *
	 * @return SquidUpdate
	 */
	static function newSimplePurge( &$title ) {
		$urlArr = $title->getSquidURLs();
		return new SquidUpdate( $urlArr );
	}

	/**
	 * Purges the list of URLs passed to the constructor
	 */
	function doUpdate() {
		SquidUpdate::purge( $this->urlArr );
	}

	/**
	 * Purges a list of Squids defined in $wgSquidServers.
	 * $urlArr should contain the full URLs to purge as values
	 * (example: $urlArr[] = 'http://my.host/something')
	 * XXX report broken Squids per mail or log
	 *
	 * @param $urlArr array
	 * @return void
	 */
	static function purge( $urlArr ) {
		global $wgSquidServers, $wgHTCPMulticastAddress, $wgHTCPPort;

		if ( !$urlArr ) {
			return;
		}

		// HTCP multicast purge runs in addition to any direct-socket purge
		if ( $wgHTCPMulticastAddress && $wgHTCPPort ) {
			SquidUpdate::HTCPPurge( $urlArr );
		}

		wfProfileIn( __METHOD__ );

		// Skip the socket-pool work entirely when no servers are configured
		if ( $wgSquidServers ) {
			$urlArr = array_unique( $urlArr ); // Remove duplicates
			$maxSocketsPerSquid = 8; // socket cap per Squid
			$urlsPerSocket = 400; // 400 seems to be a good tradeoff, opening a socket takes a while
			$socketsPerSquid = ceil( count( $urlArr ) / $urlsPerSocket );
			if ( $socketsPerSquid > $maxSocketsPerSquid ) {
				$socketsPerSquid = $maxSocketsPerSquid;
			}

			$pool = new SquidPurgeClientPool;
			$chunks = array_chunk( $urlArr, ceil( count( $urlArr ) / $socketsPerSquid ) );
			foreach ( $wgSquidServers as $server ) {
				foreach ( $chunks as $chunk ) {
					$client = new SquidPurgeClient( $server );
					foreach ( $chunk as $url ) {
						$client->queuePurge( $url );
					}
					$pool->addClient( $client );
				}
			}
			$pool->run();
		}

		wfProfileOut( __METHOD__ );
	}

	/**
	 * Send HTCP CLR (purge) packets for each URL to the configured
	 * multicast address, per RFC 2756.
	 *
	 * @throws MWException If a non-string URL is encountered
	 * @param $urlArr array
	 */
	static function HTCPPurge( $urlArr ) {
		global $wgHTCPMulticastAddress, $wgHTCPMulticastTTL, $wgHTCPPort;
		wfProfileIn( __METHOD__ );

		$htcpOpCLR = 4; // HTCP CLR

		// @todo FIXME: PHP doesn't support these socket constants (include/linux/in.h)
		if ( !defined( "IPPROTO_IP" ) ) {
			define( "IPPROTO_IP", 0 );
			define( "IP_MULTICAST_LOOP", 34 );
			define( "IP_MULTICAST_TTL", 33 );
		}

		// pfsockopen doesn't work because we need set_sock_opt
		$conn = socket_create( AF_INET, SOCK_DGRAM, SOL_UDP );
		if ( $conn ) {
			// Set socket options: don't loop packets back to ourselves,
			// and honour a configured multicast TTL other than the default 1
			socket_set_option( $conn, IPPROTO_IP, IP_MULTICAST_LOOP, 0 );
			if ( $wgHTCPMulticastTTL != 1 ) {
				socket_set_option( $conn, IPPROTO_IP, IP_MULTICAST_TTL,
					$wgHTCPMulticastTTL );
			}

			$urlArr = array_unique( $urlArr ); // Remove duplicates
			foreach ( $urlArr as $url ) {
				if ( !is_string( $url ) ) {
					throw new MWException( 'Bad purge URL' );
				}
				$url = SquidUpdate::expand( $url );

				// Construct a minimal HTCP request diagram
				// as per RFC 2756
				// Opcode 'CLR', no response desired, no auth
				$htcpTransID = rand();

				$htcpSpecifier = pack( 'na4na*na8n',
					4, 'HEAD', strlen( $url ), $url,
					8, 'HTTP/1.0', 0 );

				$htcpDataLen = 8 + 2 + strlen( $htcpSpecifier );
				$htcpLen = 4 + $htcpDataLen + 2;

				// Note! Squid gets the bit order of the first
				// word wrong, wrt the RFC. Apparently no other
				// implementation exists, so adapt to Squid
				$htcpPacket = pack( 'nxxnCxNxxa*n',
					$htcpLen, $htcpDataLen, $htcpOpCLR,
					$htcpTransID, $htcpSpecifier, 2 );

				// Send out
				wfDebug( "Purging URL $url via HTCP\n" );
				socket_sendto( $conn, $htcpPacket, $htcpLen, 0,
					$wgHTCPMulticastAddress, $wgHTCPPort );
			}
			// Release the socket rather than waiting for request shutdown
			socket_close( $conn );
		} else {
			$errstr = socket_strerror( socket_last_error() );
			wfDebug( __METHOD__ . "(): Error opening UDP socket: $errstr\n" );
		}
		wfProfileOut( __METHOD__ );
	}

	/**
	 * Expand local URLs to fully-qualified URLs using the internal protocol
	 * and host defined in $wgInternalServer. Input that's already fully-
	 * qualified will be passed through unchanged.
	 *
	 * This is used to generate purge URLs that may be either local to the
	 * main wiki or include a non-native host, such as images hosted on a
	 * second internal server.
	 *
	 * Client functions should not need to call this.
	 *
	 * @param $url string
	 *
	 * @return string
	 */
	static function expand( $url ) {
		return wfExpandUrl( $url, PROTO_INTERNAL );
	}
}