/**
 * Class to invalidate the HTML cache of all the pages linking to a given title.
 * Small numbers of links will be done immediately, large numbers are pushed onto
 * the job queue.
 *
 * This class is designed to work efficiently with small numbers of links, and
 * to work reasonably well with up to ~10^5 links. Above ~10^6 links, the memory
 * and time requirements of loading all backlinked IDs in doUpdate() might become
 * prohibitive. The requirements measured at Wikimedia are approximately:
 *
 *   memory: 48 bytes per row
 *   time: 16us per row for the query plus processing
 *
 * The reason this query is done is to support partitioning of the job
 * by backlinked ID. The memory issue could be alleviated by doing this query in
 * batches, but of course LIMIT with an offset is inefficient on the DB side.
 *
 * The class is nevertheless a vast improvement on the previous method of using
 * File::getLinksTo() and Title::touchArray(), which uses about 2KB of memory per
 * link.
 */
# Target title and the backlink table to scan; optional page_id range bounds.
# $mTitle and $mCache were assigned in the constructor but never declared —
# declare them here so they are real (non-dynamic) properties.
public $mTitle, $mTable, $mPrefix, $mStart, $mEnd;
# Batch sizing, copied from $wgUpdateRowsPerJob / $wgUpdateRowsPerQuery.
public $mRowsPerJob, $mRowsPerQuery;
# BacklinkCache for $mTitle; set in the constructor.
public $mCache;
/**
 * @param $titleTo Title: the title linked to, whose backlinks will be invalidated
 * @param $table String: backlink table name, e.g. 'templatelinks'
 * @param $start Mixed: first page_id of the range to update, or false for no bound
 * @param $end Mixed: last page_id of the range to update, or false for no bound
 */
function __construct( $titleTo, $table, $start = false, $end = false ) {
	global $wgUpdateRowsPerJob, $wgUpdateRowsPerQuery;

	$this->mTitle = $titleTo;
	$this->mTable = $table;
	$this->mStart = $start;
	# Fix: $mEnd was never assigned, although doUpdate() and
	# insertJobsFromTitles() both read it.
	$this->mEnd = $end;
	$this->mRowsPerJob = $wgUpdateRowsPerJob;
	$this->mRowsPerQuery = $wgUpdateRowsPerQuery;
	$this->mCache = $this->mTitle->getBacklinkCache();
}
/**
 * Invalidate the backlinks of $mTitle: either directly, or by queueing
 * jobs when the link count is large.
 */
public function doUpdate() {
	if ( $this->mStart || $this->mEnd ) {
		# A page_id range was given, so this object represents a partition
		# of a larger update. Handle it and stop — previously this fell
		# through into the full-update path below.
		$this->doPartialUpdate();
		return;
	}

	# Get an estimate of the number of rows from the BacklinkCache
	$numRows = $this->mCache->getNumLinks( $this->mTable );
	if ( $numRows > $this->mRowsPerJob * 2 ) {
		# Do fast cached partition — the branch previously did nothing.
		$this->insertJobs();
	} else {
		# Get the links from the DB
		$titleArray = $this->mCache->getLinks( $this->mTable );
		# Check if the row count estimate was correct
		if ( $titleArray->count() > $this->mRowsPerJob * 2 ) {
			# Not correct, do accurate partition
			wfDebug( __METHOD__.": row count estimate was incorrect, repartitioning\n" );
			$this->insertJobsFromTitles( $titleArray );
		} else {
			# Small enough to do right now.
			$this->invalidateTitles( $titleArray );
		}
	}
}
/**
 * Update some of the backlinks, defined by a page ID range
 * ($this->mStart .. $this->mEnd).
 */
protected function doPartialUpdate() {
	$titleArray = $this->mCache->getLinks( $this->mTable, $this->mStart, $this->mEnd );
	if ( $titleArray->count() <= $this->mRowsPerJob * 2 ) {
		# This partition is small enough, do the update
		$this->invalidateTitles( $titleArray );
	} else {
		# Partitioning was excessively inaccurate. Divide the job further.
		# This can occur when a large number of links are added in a short
		# period of time, say by updating a heavily-used template.
		# (Fix: the two paths must be mutually exclusive — without the
		# else, a large partition would be both invalidated and requeued.)
		$this->insertJobsFromTitles( $titleArray );
	}
}
/**
 * Partition the current range given by $this->mStart and $this->mEnd,
 * using a pre-calculated title array which gives the links in that range.
 * Queue the resulting jobs.
 *
 * @param $titleArray Iterator of Title objects for the links in the range
 */
protected function insertJobsFromTitles( $titleArray ) {
	# We make subpartitions in the sense that the start of the first job
	# will be the start of the parent partition, and the end of the last
	# job will be the end of the parent partition.
	$jobs = array();              # fix: accumulator was never initialized
	$start = $this->mStart;       # start of the current job
	$numTitles = 0;               # fix: counter was never initialized
	foreach ( $titleArray as $title ) {
		$id = $title->getArticleID();
		# $numTitles is now the number of titles in the current job not
		# including the current ID
		if ( $numTitles >= $this->mRowsPerJob ) {
			# Add a job up to but not including the current ID
			$params = array(
				'table' => $this->mTable,
				'start' => $start,
				'end' => $id - 1
			);
			$jobs[] = new HTMLCacheUpdateJob( $this->mTitle, $params );
			$start = $id;
			$numTitles = 0;
		}
		$numTitles++;
	}
	# Last job: runs to the end of the parent partition.
	$params = array(
		'table' => $this->mTable,
		'start' => $start,
		'end' => $this->mEnd
	);
	$jobs[] = new HTMLCacheUpdateJob( $this->mTitle, $params );
	wfDebug( __METHOD__.": repartitioning into " . count( $jobs ) . " jobs\n" );

	if ( count( $jobs ) < 2 ) {
		# I don't think this is possible at present, but handling this case
		# makes the code a bit more robust against future code updates and
		# avoids a potential infinite loop of repartitioning
		wfDebug( __METHOD__.": repartitioning failed!\n" );
		$this->invalidateTitles( $titleArray );
		# Fix: must stop here, otherwise the single job would also be
		# queued and the titles processed twice.
		return;
	}

	Job::batchInsert( $jobs );
}
/**
 * Partition the full backlink set using the cached partition data in
 * BacklinkCache and queue one HTMLCacheUpdateJob per batch.
 */
protected function insertJobs() {
	$batches = $this->mCache->partition( $this->mTable, $this->mRowsPerJob );
	if ( !$batches ) {
		# Nothing to do — avoid calling batchInsert with an empty list.
		return;
	}
	$jobs = array();    # fix: accumulator was never initialized
	foreach ( $batches as $batch ) {
		# Each batch is a [start, end] page_id pair from the partitioner;
		# the 'end' bound was previously dropped from the job params.
		$params = array(
			'table' => $this->mTable,
			'start' => $batch[0],
			'end' => $batch[1]
		);
		$jobs[] = new HTMLCacheUpdateJob( $this->mTitle, $params );
	}
	Job::batchInsert( $jobs );
}
/**
 * Invalidate a range of pages, right now
 *
 * @param $startId Mixed: first page_id of the range, or false for no lower bound
 * @param $endId Mixed: last page_id of the range, or false for no upper bound
 */
public function invalidate( $startId = false, $endId = false ) {
	# Fetch the backlinked titles in the requested ID range and touch
	# them immediately, bypassing the job queue.
	$this->invalidateTitles(
		$this->mCache->getLinks( $this->mTable, $startId, $endId )
	);
}
/**
 * Invalidate an array (or iterator) of Title objects, right now:
 * bump page_touched, purge Squid, and clear the file cache as configured.
 *
 * @param $titleArray Iterator of Title objects to invalidate
 */
protected function invalidateTitles( $titleArray ) {
	global $wgUseFileCache, $wgUseSquid;

	$dbw = wfGetDB( DB_MASTER );
	$timestamp = $dbw->timestamp();

	# Get all IDs in this query into an array
	$ids = array();    # fix: was never initialized before being appended to
	foreach ( $titleArray as $title ) {
		$ids[] = $title->getArticleID();
	}
	if ( !$ids ) {
		# No backlinks: skip the DB write and purges entirely.
		return;
	}

	# Update page_touched, in chunks so a huge IN() list is never sent.
	$batches = array_chunk( $ids, $this->mRowsPerQuery );
	foreach ( $batches as $batch ) {
		$dbw->update( 'page',
			array( 'page_touched' => $timestamp ),
			array( 'page_id IN (' . $dbw->makeList( $batch ) . ')' ),
			__METHOD__
		);
	}

	# Update squid. Fix: honor $wgUseSquid (it was imported but never
	# checked) and actually run the purge — the SquidUpdate was
	# previously constructed without doUpdate() ever being called.
	if ( $wgUseSquid ) {
		$u = SquidUpdate::newFromTitles( $titleArray );
		$u->doUpdate();
	}

	# Update file cache
	if ( $wgUseFileCache ) {
		foreach ( $titleArray as $title ) {
			HTMLFileCache::clearFileCache( $title );
		}
	}
}
/**
 * Job wrapper for HTMLCacheUpdate. Gets run whenever a related
 * job gets called from the queue.
 */
216 class HTMLCacheUpdateJob
extends Job
{
217 var $table, $start, $end;
/**
 * @param $title Title: the title linked to
 * @param $params Array: job parameters (table, start and end page_ids)
 * @param $id Integer: job id
 */
function __construct( $title, $params, $id = 0 ) {
	# Register with the job queue under the 'htmlCacheUpdate' type.
	parent::__construct( 'htmlCacheUpdate', $title, $params, $id );
	# Unpack the job parameters into properties for use by run().
	$this->table = $params['table'];
	$this->start = $params['start'];
	$this->end = $params['end'];
}
232 public function run() {
233 $update = new HTMLCacheUpdate( $this->title
, $this->table
, $this->start
, $this->end
);