Merge "Replace "squid" with "CDN" in various comments"
[lhc/web/wiklou.git] / includes / deferred / CdnCacheUpdate.php
1 <?php
2 /**
3 * CDN cache purging.
4 *
5 * This program is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License as published by
7 * the Free Software Foundation; either version 2 of the License, or
8 * (at your option) any later version.
9 *
10 * This program is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 * GNU General Public License for more details.
14 *
15 * You should have received a copy of the GNU General Public License along
16 * with this program; if not, write to the Free Software Foundation, Inc.,
17 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
18 * http://www.gnu.org/copyleft/gpl.html
19 *
20 * @file
21 * @ingroup Cache
22 */
23
24 use Wikimedia\Assert\Assert;
25
26 /**
27 * Handles purging appropriate CDN URLs given a title (or titles)
28 * @ingroup Cache
29 */
class CdnCacheUpdate implements DeferrableUpdate, MergeableUpdate {
	/** @var string[] Collection of URLs to purge */
	protected $urls = array();

	/**
	 * @param string[] $urlArr Collection of URLs to purge
	 */
	public function __construct( array $urlArr ) {
		$this->urls = $urlArr;
	}

	/**
	 * Create an update object from an array of Title objects, or a TitleArray object
	 *
	 * @param Traversable|array $titles Titles whose CDN URLs should be purged
	 * @param string[] $urlArr Additional URLs to purge along with the title URLs
	 * @return CdnCacheUpdate
	 */
	public static function newFromTitles( $titles, $urlArr = array() ) {
		/** @var Title $title */
		foreach ( $titles as $title ) {
			$urlArr = array_merge( $urlArr, $title->getSquidURLs() );
		}

		return new CdnCacheUpdate( $urlArr );
	}

	/**
	 * @param Title $title
	 * @return CdnCacheUpdate
	 * @deprecated since 1.27 Use "new CdnCacheUpdate( $title->getSquidURLs() )"
	 */
	public static function newSimplePurge( Title $title ) {
		return new CdnCacheUpdate( $title->getSquidURLs() );
	}

	/**
	 * Purges the list of URLs passed to the constructor.
	 */
	public function doUpdate() {
		self::purge( $this->urls );
	}

	/**
	 * Absorb the URL list of another CdnCacheUpdate so both can be
	 * executed as a single batched purge.
	 *
	 * @param MergeableUpdate $update Must itself be a CdnCacheUpdate
	 */
	public function merge( MergeableUpdate $update ) {
		/** @var CdnCacheUpdate $update */
		Assert::parameterType( __CLASS__, $update, '$update' );

		$this->urls = array_merge( $this->urls, $update->urls );
	}

	/**
	 * Purges a list of CDN nodes defined in $wgSquidServers.
	 * $urlArr should contain the full URLs to purge as values
	 * (example: $urlArr[] = 'http://my.host/something')
	 *
	 * Sends HTCP CLR packets when $wgHTCPRouting is configured, and queues
	 * HTTP PURGE requests to every server in $wgSquidServers.
	 *
	 * @param string[] $urlArr List of full URLs to purge
	 */
	public static function purge( array $urlArr ) {
		global $wgSquidServers, $wgHTCPRouting;

		if ( !$urlArr ) {
			return;
		}

		// Remove duplicate URLs from list
		$urlArr = array_unique( $urlArr );

		wfDebugLog( 'squid', __METHOD__ . ': ' . implode( ' ', $urlArr ) );

		if ( $wgHTCPRouting ) {
			self::HTCPPurge( $urlArr );
		}

		if ( $wgSquidServers ) {
			// Maximum number of parallel connections per squid
			$maxSocketsPerSquid = 8;
			// Number of requests to send per socket
			// 400 seems to be a good tradeoff, opening a socket takes a while
			$urlsPerSocket = 400;
			$socketsPerSquid = ceil( count( $urlArr ) / $urlsPerSocket );
			if ( $socketsPerSquid > $maxSocketsPerSquid ) {
				$socketsPerSquid = $maxSocketsPerSquid;
			}

			// Split the URL list into per-socket chunks and queue each chunk
			// on its own client; every server gets the full set of URLs.
			$pool = new SquidPurgeClientPool;
			$chunks = array_chunk( $urlArr, ceil( count( $urlArr ) / $socketsPerSquid ) );
			foreach ( $wgSquidServers as $server ) {
				foreach ( $chunks as $chunk ) {
					$client = new SquidPurgeClient( $server );
					foreach ( $chunk as $url ) {
						$client->queuePurge( $url );
					}
					$pool->addClient( $client );
				}
			}

			$pool->run();
		}
	}

	/**
	 * Send Hyper Text Caching Protocol (HTCP) CLR requests.
	 *
	 * Builds one CLR packet per URL and sends it via UDP (typically
	 * multicast) to every host/port pair that $wgHTCPRouting maps the
	 * URL to. Errors opening the socket are logged and swallowed;
	 * malformed URLs or rules throw.
	 *
	 * @throws MWException On a non-string URL or an HTCP rule missing host/port
	 * @param string[] $urlArr Collection of URLs to purge
	 */
	private static function HTCPPurge( array $urlArr ) {
		global $wgHTCPRouting, $wgHTCPMulticastTTL;

		// HTCP CLR operation
		$htcpOpCLR = 4;

		// @todo FIXME: PHP doesn't support these socket constants (include/linux/in.h)
		if ( !defined( "IPPROTO_IP" ) ) {
			define( "IPPROTO_IP", 0 );
			define( "IP_MULTICAST_LOOP", 34 );
			define( "IP_MULTICAST_TTL", 33 );
		}

		// pfsockopen doesn't work because we need set_sock_opt
		$conn = socket_create( AF_INET, SOCK_DGRAM, SOL_UDP );
		if ( !$conn ) {
			$errstr = socket_strerror( socket_last_error() );
			wfDebugLog( 'squid', __METHOD__ .
				": Error opening UDP socket: $errstr" );

			return;
		}

		// Set socket options
		socket_set_option( $conn, IPPROTO_IP, IP_MULTICAST_LOOP, 0 );
		if ( $wgHTCPMulticastTTL != 1 ) {
			// Set multicast time to live (hop count) option on socket
			socket_set_option( $conn, IPPROTO_IP, IP_MULTICAST_TTL,
				$wgHTCPMulticastTTL );
		}

		// Get sequential trx IDs for packet loss counting
		$ids = UIDGenerator::newSequentialPerNodeIDs(
			'squidhtcppurge', 32, count( $urlArr ), UIDGenerator::QUICK_VOLATILE
		);

		foreach ( $urlArr as $url ) {
			if ( !is_string( $url ) ) {
				throw new MWException( 'Bad purge URL' );
			}
			$url = self::expand( $url );
			$conf = self::getRuleForURL( $url, $wgHTCPRouting );
			if ( !$conf ) {
				// FIX: this message used to be concatenated directly onto
				// __METHOD__ with no separator and had a stray space before
				// the comma, producing garbled log lines.
				wfDebugLog( 'squid', __METHOD__ .
					": No HTCP rule configured for URL {$url}, skipping" );
				continue;
			}

			if ( isset( $conf['host'] ) && isset( $conf['port'] ) ) {
				// Normalize single entries
				$conf = array( $conf );
			}
			// Validate every routing entry up front so we do not send a
			// partial set of packets before hitting a bad rule.
			foreach ( $conf as $subconf ) {
				if ( !isset( $subconf['host'] ) || !isset( $subconf['port'] ) ) {
					throw new MWException( "Invalid HTCP rule for URL $url\n" );
				}
			}

			// Construct a minimal HTCP request diagram
			// as per RFC 2756
			// Opcode 'CLR', no response desired, no auth
			$htcpTransID = current( $ids );
			next( $ids );

			$htcpSpecifier = pack( 'na4na*na8n',
				4, 'HEAD', strlen( $url ), $url,
				8, 'HTTP/1.0', 0 );

			$htcpDataLen = 8 + 2 + strlen( $htcpSpecifier );
			$htcpLen = 4 + $htcpDataLen + 2;

			// Note! Squid gets the bit order of the first
			// word wrong, wrt the RFC. Apparently no other
			// implementation exists, so adapt to Squid
			$htcpPacket = pack( 'nxxnCxNxxa*n',
				$htcpLen, $htcpDataLen, $htcpOpCLR,
				$htcpTransID, $htcpSpecifier, 2 );

			// FIX: add the ": " separator missing between __METHOD__ and
			// the message (matches the other log calls in this class).
			wfDebugLog( 'squid', __METHOD__ .
				": Purging URL $url via HTCP" );
			foreach ( $conf as $subconf ) {
				socket_sendto( $conn, $htcpPacket, $htcpLen, 0,
					$subconf['host'], $subconf['port'] );
			}
		}
	}

	/**
	 * Expand local URLs to fully-qualified URLs using the internal protocol
	 * and host defined in $wgInternalServer. Input that's already fully-
	 * qualified will be passed through unchanged.
	 *
	 * This is used to generate purge URLs that may be either local to the
	 * main wiki or include a non-native host, such as images hosted on a
	 * second internal server.
	 *
	 * Client functions should not need to call this.
	 *
	 * @param string $url
	 * @return string
	 */
	public static function expand( $url ) {
		return wfExpandUrl( $url, PROTO_INTERNAL );
	}

	/**
	 * Find the HTCP routing rule to use for a given URL.
	 * @param string $url URL to match
	 * @param array $rules Array of rules, see $wgHTCPRouting for format and behavior
	 * @return mixed Element of $rules that matched, or false if nothing matched
	 */
	private static function getRuleForURL( $url, $rules ) {
		foreach ( $rules as $regex => $routing ) {
			// An empty pattern acts as a catch-all default rule
			if ( $regex === '' || preg_match( $regex, $url ) ) {
				return $routing;
			}
		}

		return false;
	}
}
257
/**
 * Backwards-compatible alias for CdnCacheUpdate.
 *
 * @deprecated since 1.27 Use CdnCacheUpdate instead
 */
class SquidUpdate extends CdnCacheUpdate {
	// Intentionally empty: the old class name is kept only so existing
	// callers referencing "SquidUpdate" keep working.
}