<?php
/**
- * See deferred.doc
+ * See deferred.txt
* @package MediaWiki
*/
$id = $title->getArticleID();
$dbr =& wfGetDB( DB_SLAVE );
- $links = $dbr->tableName( 'links' );
- $page = $dbr->tableName( 'page' );
-
- $sql = "SELECT page_namespace,page_title FROM $links,$page WHERE l_to={$id} and l_from=page_id" ;
- $res = $dbr->query( $sql, $fname ) ;
+ $res = $dbr->select( array( 'pagelinks', 'page' ),
+ array( 'page_namespace', 'page_title' ),
+ array(
+ 'pl_namespace' => $title->getNamespace(),
+ 'pl_title' => $title->getDbKey(),
+ 'pl_from=page_id' ),
+ $fname );
$blurlArr = $title->getSquidURLs();
if ( $dbr->numRows( $res ) <= $this->mMaxTitles ) {
while ( $BL = $dbr->fetchObject ( $res ) )
{
- $tobj = Title::makeTitle( $BL->page_namespace, $BL->page_title ) ;
+ $tobj = Title::makeTitle( $BL->page_namespace, $BL->page_title ) ;
$blurlArr[] = $tobj->getInternalURL();
}
}
return new SquidUpdate( $blurlArr );
}
- /* static */ function newFromBrokenLinksTo( &$title ) {
- $fname = 'SquidUpdate::newFromBrokenLinksTo';
- wfProfileIn( $fname );
-
- # Get a list of URLs linking to this (currently non-existent) page
- $dbr =& wfGetDB( DB_SLAVE );
- $brokenlinks = $dbr->tableName( 'brokenlinks' );
- $page = $dbr->tableName( 'page' );
- $encTitle = $dbr->addQuotes( $title->getPrefixedDBkey() );
-
- $sql = "SELECT page_namespace,page_title FROM $brokenlinks,$cur WHERE bl_to={$encTitle} AND bl_from=page_id";
- $res = $dbr->query( $sql, $fname );
- $blurlArr = array();
- if ( $dbr->numRows( $res ) <= $this->mMaxTitles ) {
- while ( $BL = $dbr->fetchObject( $res ) )
- {
- $tobj = Title::makeTitle( $BL->page_namespace, $BL->page_title );
- $blurlArr[] = $tobj->getInternalURL();
- }
+ /* static */ function newFromTitles( &$titles, $urlArr = array() ) {
+ foreach ( $titles as $title ) {
+ $urlArr[] = $title->getInternalURL();
}
- $dbr->freeResult( $res );
- wfProfileOut( $fname );
- return new SquidUpdate( $blurlArr );
+ return new SquidUpdate( $urlArr );
}
/* static */ function newSimplePurge( &$title ) {
$urlArr = $title->getSquidURLs();
- return new SquidUpdate( $blurlArr );
+ return new SquidUpdate( $urlArr );
}
function doUpdate() {
}
/* Purges a list of Squids defined in $wgSquidServers.
- $urlArr should contain the full URLs to purge as values
+ $urlArr should contain the full URLs to purge as values
(example: $urlArr[] = 'http://my.host/something')
XXX report broken Squids per mail or log */
$fname = 'SquidUpdate::purge';
wfProfileIn( $fname );
-
+
$maxsocketspersquid = 8; // socket cap per Squid
$urlspersocket = 400; // 400 seems to be a good tradeoff, opening a socket takes a while
$firsturl = $urlArr[0];
@list($server, $port) = explode(':', $wgSquidServers[$ss]);
if(!isset($port)) $port = 80;
#$this->debug("Opening socket to $server:$port");
+ $error = $errstr = false;
$socket = @fsockopen($server, $port, $error, $errstr, 3);
#$this->debug("\n");
if (!$socket) {
#$this->debug("...");
$res = @fread($socket,512);
#$this->debug("\n");
- /* Squid only returns http headers with 200 or 404 status,
+ /* Squid only returns http headers with 200 or 404 status,
if there's more returned something's wrong */
if (strlen($res) > 250) {
fclose($socket);
@stream_set_blocking($socket,false);
$sockets[] = $socket;
}
- }
+ }
} else {
/* open the remaining sockets for this server */
list($server, $port) = explode(':', $wgSquidServers[$ss]);