maintenance/compressOld.inc
<?php
/**
 * @package MediaWiki
 * @subpackage Maintenance
 */

require_once( '../includes/Revision.php' );

/** Compress the text of every row in the 'old' table with gzdeflate(), starting from old_id $start. */
function compressOldPages( $start = 0 ) {
	$fname = 'compressOldPages';

	$chunksize = 50;
	print "Starting from old_id $start...\n";
	$dbw =& wfGetDB( DB_MASTER );
	$old = $dbw->tableName( 'old' );
	do {
		$end = $start + $chunksize;
		$res = $dbw->select( 'old', array( 'old_id','old_flags','old_namespace','old_title','old_text' ),
			"old_id>=$start", $fname, array( 'ORDER BY' => 'old_id', 'LIMIT' => $chunksize, 'FOR UPDATE' ) );
		if( $dbw->numRows( $res ) == 0 ) {
			break;
		}
		$last = $start;
		while( $row = $dbw->fetchObject( $res ) ) {
			# print "  {$row->old_id} - {$row->old_namespace}:{$row->old_title}\n";
			compressPage( $row );
			$last = $row->old_id;
		}
		$dbw->freeResult( $res );
		$start = $last + 1; # Deletion may leave long empty stretches
		print "$start...\n";
	} while( true );
}

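# A minimal driver sketch (hypothetical; the real entry point is
# maintenance/compressOld.php, which parses command-line options first):
#
#     $start = isset( $options['start'] ) ? intval( $options['start'] ) : 0;
#     compressOldPages( $start );
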
function compressPage( $row ) {
	$fname = 'compressPage';
	if( false !== strpos( $row->old_flags, "gzip" ) ) {
		print "Already compressed row {$row->old_id}?\n";
		return false;
	}
	$dbw =& wfGetDB( DB_MASTER );
	$flags = $row->old_flags ? "{$row->old_flags},gzip" : "gzip";
	$compress = gzdeflate( $row->old_text );
	$dbw->update( 'old',
		array( /* SET */
			'old_flags' => $flags,
			'old_text' => $compress
		), array( /* WHERE */
			'old_id' => $row->old_id
		), $fname, 'LIMIT 1'
	);
	return true;
}

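# Reading such a row back: Revision::getRevisionText() checks old_flags and
# applies gzinflate() when 'gzip' is present, so the compressed text
# round-trips transparently (illustrative sketch):
#
#     $row = $dbw->selectRow( 'old', array( 'old_flags', 'old_text' ),
#         array( 'old_id' => $id ), $fname );
#     $text = Revision::getRevisionText( $row ); # original wikitext
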
define( 'LS_INDIVIDUAL', 0 );
define( 'LS_CHUNKED', 1 );

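# Load styles for compressWithConcat(): LS_INDIVIDUAL fetches each
# revision's text with a separate SELECT on the text table, while
# LS_CHUNKED joins revision and text in the initial query (the default).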
function compressWithConcat( $startId, $maxChunkSize, $maxChunkFactor, $factorThreshold, $beginDate, $endDate )
{
	$fname = 'compressWithConcat';
	$loadStyle = LS_CHUNKED;

	$dbw =& wfGetDB( DB_MASTER );

	# First get a list of all pages
	$pageRes = $dbw->select( 'page', array( 'page_id', 'page_namespace', 'page_title' ), false, $fname );

	# For each of those, get a list of revisions which fit the criteria
	$conds = array();
	if ( $beginDate ) {
		$conds[] = "rev_timestamp>'" . $beginDate . "'";
	}
	if ( $endDate ) {
		$conds[] = "rev_timestamp<'" . $endDate . "'";
	}
	if ( $startId ) {
		$conds[] = 'rev_id>=' . $startId;
	}
	if ( $loadStyle == LS_CHUNKED ) {
		$tables = array( 'revision', 'text' );
		$fields = array( 'rev_id', 'old_flags', 'old_text' );
		$conds[] = 'rev_id=old_id';
		$revLoadOptions = 'FOR UPDATE';
	} else {
		$tables = array( 'revision' );
		$fields = array( 'rev_id' );
		$revLoadOptions = array();
	}

	while ( $pageRow = $dbw->fetchObject( $pageRes ) ) {
		# Display progress
		$titleObj = Title::makeTitle( $pageRow->page_namespace, $pageRow->page_title );
		print $titleObj->getPrefixedDBkey() . " ";
95
96 # Load revisions
97 $revRes = $dbw->select( 'old', $fields,
98 array( 'rev_page' => $pageRow->page_id ) + $conds,
99 $fname,
100 $revLoadOptions
101 );
		$revs = array();
		while ( $revRow = $dbw->fetchObject( $revRes ) ) {
			$revs[] = $revRow;
		}

		if ( count( $revs ) < 2 ) {
			# Fewer than two matching revisions, nothing to concatenate
			print "\n";
			continue;
		}

		# For each chunk
		$i = 0;
		while ( $i < count( $revs ) ) {
			if ( $i < count( $revs ) - $maxChunkSize ) {
				$thisChunkSize = $maxChunkSize;
			} else {
				$thisChunkSize = count( $revs ) - $i;
			}

			$chunk = new ConcatenatedGzipHistoryBlob();
			$stubs = array();
			$dbw->begin();
			$usedChunk = false;
			$primaryOldid = $revs[$i]->rev_id;
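			# Scheme: the first revision in the chunk keeps the full
			# ConcatenatedGzipHistoryBlob; each later revision is replaced
			# by a small serialized stub pointing back at this primary row.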

			# Get the text of each revision and add it to the object
			for ( $j = 0; $j < $thisChunkSize && $chunk->isHappy( $maxChunkFactor, $factorThreshold ); $j++ ) {
				$oldid = $revs[$i + $j]->rev_id;

				# Get text
				if ( $loadStyle == LS_INDIVIDUAL ) {
					$textRow = $dbw->selectRow( 'text',
						array( 'old_flags', 'old_text' ),
						array( 'old_id' => $oldid ),
						$fname,
						'FOR UPDATE'
					);
					$text = Revision::getRevisionText( $textRow );
				} else {
					$text = Revision::getRevisionText( $revs[$i + $j] );
				}

				if ( $text === false ) {
					print "\nError, unable to get text in old_id $oldid\n";
					#$dbw->delete( 'old', array( 'old_id' => $oldid ) );
				}

				if ( $j == 0 ) {
					$chunk->setText( $text );
					print '.';
				} else {
					# Don't make a stub if it's going to be longer than the article
					# Stubs are typically about 100 bytes
					if ( strlen( $text ) < 120 ) {
						$stub = false;
						print 'x';
					} else {
						$stub = $chunk->addItem( $text );
						$stub->setLocation( $primaryOldid );
						$hash = $stub->getHash();
						$stub = serialize( $stub );
						print '.';
						$usedChunk = true;
					}
					$stubs[$j] = $stub;
				}
			}
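			# The loop above may stop early if the chunk stopped being
			# "happy" (compression-factor check), so record how many
			# revisions were actually consumed.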
			$thisChunkSize = $j;

			# If we couldn't actually use any stubs because the pages were too small, do nothing
			if ( $usedChunk ) {
				# Store the main object
				$dbw->update( 'text',
					array( /* SET */
						'old_text' => serialize( $chunk ),
						'old_flags' => 'object'
					), array( /* WHERE */
						'old_id' => $primaryOldid
					), $fname
				);

				# Store the stub objects
				for ( $j = 1; $j < $thisChunkSize; $j++ ) {
					# Skip if not compressing
					if ( $stubs[$j] !== false ) {
						$dbw->update( 'text',
							array( /* SET */
								'old_text' => $stubs[$j],
								'old_flags' => 'object'
							), array( /* WHERE */
								'old_id' => $revs[$i + $j]->rev_id
							), $fname
						);
					}
				}
			}
			# Done, next
			print "/";
			$dbw->commit();
			$i += $thisChunkSize;
		}
		print "\n";
	}
	return true;
}
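
# Hypothetical driver call (argument values invented for illustration; the
# real ones come from the command-line parsing in maintenance/compressOld.php):
#
#     compressWithConcat( 0, 20, 3, 5, '20040101000000', '20050101000000' );
#
# i.e. start at rev_id 0, group up to 20 revisions per chunk, pass the two
# factor parameters through to ConcatenatedGzipHistoryBlob::isHappy(), and
# only touch revisions whose rev_timestamp (TS_MW format) lies between the
# two dates.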

?>