<?php
/**
 * Helper functions for the compressOld.php maintenance script
 *
 * @package MediaWiki
 * @subpackage Maintenance
 */

/** Compress each row of the 'old' text table in batches, starting at $start */
function compressOldPages( $start = 0 ) {
	$fname = 'compressOldPages';

	$chunksize = 50;
	print "Starting from old_id $start...\n";
	$dbw =& wfGetDB( DB_MASTER );
	do {
		$res = $dbw->select( 'old', array( 'old_id', 'old_flags', 'old_namespace', 'old_title', 'old_text' ),
			"old_id>=$start", $fname, array( 'ORDER BY' => 'old_id', 'LIMIT' => $chunksize, 'FOR UPDATE' ) );
		if ( $dbw->numRows( $res ) == 0 ) {
			break;
		}
		$last = $start;
		while ( $row = $dbw->fetchObject( $res ) ) {
			# print "  {$row->old_id} - {$row->old_namespace}:{$row->old_title}\n";
			compressPage( $row );
			$last = $row->old_id;
		}
		$dbw->freeResult( $res );
		$start = $last + 1; # Deletion may leave long empty stretches
		print "$start...\n";
	} while ( true );
}
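
/*
 * Note: each batch locks its rows FOR UPDATE and the loop advances past
 * the highest old_id seen, so the "$start..." values printed above can
 * be fed back in as $start to resume an interrupted run.
 */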
33
function compressPage( $row ) {
	$fname = 'compressPage';
	if ( false !== strpos( $row->old_flags, 'gzip' ) ) {
		print "Already compressed row {$row->old_id}?\n";
		return false;
	}
	$dbw =& wfGetDB( DB_MASTER );
	$flags = $row->old_flags ? "{$row->old_flags},gzip" : 'gzip';
	$compress = gzdeflate( $row->old_text );
	$dbw->update( 'old',
		array( /* SET */
			'old_flags' => $flags,
			'old_text' => $compress
		), array( /* WHERE */
			'old_id' => $row->old_id
		), $fname, 'LIMIT 1'
	);
	return true;
}
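
/*
 * For reference, a minimal sketch of the inverse operation. This mirrors
 * what the reader (Article::getRevisionText) is expected to do when it
 * sees 'gzip' in old_flags; the function name here is hypothetical and
 * not part of this file.
 */
function uncompressPageText( $row ) {
	$text = $row->old_text;
	if ( false !== strpos( $row->old_flags, 'gzip' ) ) {
		# gzinflate() undoes the gzdeflate() applied in compressPage()
		$text = gzinflate( $text );
	}
	return $text;
}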
53
define( 'LS_INDIVIDUAL', 0 );
define( 'LS_CHUNKED', 1 );

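# LS_CHUNKED fetches old_flags/old_text together with the revision rows
# in one query; LS_INDIVIDUAL fetches each text row separately inside
# the chunk loop. Chunked loading saves round trips but holds a whole
# chunk's worth of text in memory at once.
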
function compressWithConcat( $startId, $maxChunkSize, $maxChunkFactor, $factorThreshold, $beginDate, $endDate ) {
	$fname = 'compressWithConcat';
	$loadStyle = LS_CHUNKED;

	$dbw =& wfGetDB( DB_MASTER );

	# First get a list of all pages, with the fields needed for the
	# progress display below
	$pageRes = $dbw->select( 'page',
		array( 'page_id', 'page_namespace', 'page_title' ),
		false, $fname );

	# For each of those, get a list of revisions which fit the criteria
	$conds = array();
	if ( $beginDate ) {
		$conds[] = "rev_timestamp>'" . $beginDate . "'";
	}
	if ( $endDate ) {
		$conds[] = "rev_timestamp<'" . $endDate . "'";
	}
	if ( $startId ) {
		$conds[] = 'rev_id>=' . $startId;
	}
	if ( $loadStyle == LS_CHUNKED ) {
		$tables = array( 'revision', 'text' );
		$fields = array( 'rev_id', 'old_flags', 'old_text' );
		$conds[] = 'rev_id=old_id';
		$revLoadOptions = 'FOR UPDATE';
	} else {
		$tables = array( 'revision' );
		$fields = array( 'rev_id' );
		$revLoadOptions = array();
	}

	while ( $pageRow = $dbw->fetchObject( $pageRes ) ) {
		# Display progress
		$titleObj = Title::makeTitle( $pageRow->page_namespace, $pageRow->page_title );
		print $titleObj->getPrefixedDBkey() . " ";

		# Load revisions
		$revRes = $dbw->select( $tables, $fields,
			array( 'rev_page' => $pageRow->page_id ) + $conds,
			$fname,
			$revLoadOptions
		);
		$revs = array();
		while ( $revRow = $dbw->fetchObject( $revRes ) ) {
			$revs[] = $revRow;
		}

		if ( count( $revs ) < 2 ) {
			# Fewer than two matching revisions, nothing to concatenate
			print "\n";
			continue;
		}

		# For each chunk
		$i = 0;
		while ( $i < count( $revs ) ) {
			if ( $i < count( $revs ) - $maxChunkSize ) {
				$thisChunkSize = $maxChunkSize;
			} else {
				$thisChunkSize = count( $revs ) - $i;
			}

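			# ConcatenatedGzipHistoryBlob packs many revision texts into a
			# single gzipped object stored under the chunk's first old_id;
			# addItem() is expected to hand back a stub recording where the
			# full text lives, which replaces the other rows' text below.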
			$chunk = new ConcatenatedGzipHistoryBlob();
			$stubs = array();
			$dbw->begin();
			$usedChunk = false;
			$primaryOldid = $revs[$i]->rev_id;

			# Get the text of each revision and add it to the object
			for ( $j = 0; $j < $thisChunkSize && $chunk->isHappy( $maxChunkFactor, $factorThreshold ); $j++ ) {
				$oldid = $revs[$i + $j]->rev_id;

				# Get text
				if ( $loadStyle == LS_INDIVIDUAL ) {
					$textRow = $dbw->selectRow( 'text',
						array( 'old_flags', 'old_text' ),
						array( 'old_id' => $oldid ),
						$fname,
						'FOR UPDATE'
					);
					$text = Article::getRevisionText( $textRow );
				} else {
					$text = Article::getRevisionText( $revs[$i + $j] );
				}

				if ( $text === false ) {
					# FIXME: the false value still flows into the chunk below
					print "\nError, unable to get text in old_id $oldid\n";
					#$dbw->delete( 'old', array( 'old_id' => $oldid ) );
				}

				if ( $j == 0 ) {
					$chunk->setText( $text );
					print '.';
				} else {
					# Don't make a stub if it's going to be longer than the article
					# Stubs are typically about 100 bytes
					if ( strlen( $text ) < 120 ) {
						$stub = false;
						print 'x';
					} else {
						$stub = $chunk->addItem( $text );
						$stub->setLocation( $primaryOldid );
						$hash = $stub->getHash();
						$stub = serialize( $stub );
						print '.';
						$usedChunk = true;
					}
					$stubs[$j] = $stub;
				}
			}
			# The for loop above may stop early if the chunk stops being
			# "happy", so record how many revisions actually went in
			$thisChunkSize = $j;

			# If we couldn't actually use any stubs because the pages were too small, do nothing
			if ( $usedChunk ) {
				# Store the main object
				$dbw->update( 'text',
					array( /* SET */
						'old_text' => serialize( $chunk ),
						'old_flags' => 'object'
					), array( /* WHERE */
						'old_id' => $primaryOldid
					), $fname
				);

				# Store the stub objects
				for ( $j = 1; $j < $thisChunkSize; $j++ ) {
					# Skip if not compressing
					if ( $stubs[$j] !== false ) {
						$dbw->update( 'text',
							array( /* SET */
								'old_text' => $stubs[$j],
								'old_flags' => 'object'
							), array( /* WHERE */
								'old_id' => $revs[$i + $j]->rev_id
							), $fname
						);
					}
				}
			}
			# Done, next
			print "/";
			$dbw->commit();
			$i += $thisChunkSize;
		}
		print "\n";
	}
	return true;
}
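
/*
 * For reference, a minimal sketch of how a wrapper script might drive
 * these entry points; the option values are illustrative, not the real
 * compressOld.php defaults:
 *
 *     if ( $type == 'concat' ) {
 *         compressWithConcat( $startId, $maxChunkSize, $maxChunkFactor,
 *             $factorThreshold, $beginDate, $endDate );
 *     } else {
 *         compressOldPages( $startId );
 *     }
 */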

?>