[lhc/web/wiklou.git] / maintenance / compressOld.inc
<?php
/**
 * @package MediaWiki
 * @subpackage Maintenance
 */

/** */
require_once( '../includes/Revision.php' );

/**
 * Fetch old revisions in batches of 50, starting from the given old_id,
 * and gzip-compress each one in place via compressPage().
 */
function compressOldPages( $start = 0 ) {
	$fname = 'compressOldPages';

	$chunksize = 50;
	print "Starting from old_id $start...\n";
	$dbw =& wfGetDB( DB_MASTER );
	do {
		$res = $dbw->select( 'old', array( 'old_id', 'old_flags', 'old_namespace', 'old_title', 'old_text' ),
			"old_id>=$start", $fname, array( 'ORDER BY' => 'old_id', 'LIMIT' => $chunksize, 'FOR UPDATE' ) );
		if ( $dbw->numRows( $res ) == 0 ) {
			break;
		}
		$last = $start;
		while ( $row = $dbw->fetchObject( $res ) ) {
			# print "  {$row->old_id} - {$row->old_namespace}:{$row->old_title}\n";
			compressPage( $row );
			$last = $row->old_id;
		}
		$dbw->freeResult( $res );
		$start = $last + 1; # Deletion may leave long empty stretches
		print "$start...\n";
	} while ( true );
}

/**
 * Gzip-compress the text of a single row from the old table and
 * record the change in old_flags.
 */
function compressPage( $row ) {
	$fname = 'compressPage';
	if ( false !== strpos( $row->old_flags, 'gzip' ) ) {
		print "Already compressed row {$row->old_id}?\n";
		return false;
	}
	$dbw =& wfGetDB( DB_MASTER );
	$flags = $row->old_flags ? "{$row->old_flags},gzip" : "gzip";
	$compress = gzdeflate( $row->old_text );
	$dbw->update( 'old',
		array( /* SET */
			'old_flags' => $flags,
			'old_text' => $compress
		), array( /* WHERE */
			'old_id' => $row->old_id
		), $fname, 'LIMIT 1'
	);
	return true;
}

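/**
 * Illustrative sketch, not part of the original tool: how a row written
 * by compressPage() can be read back. In MediaWiki itself this is done
 * by Revision::getRevisionText(); the helper name below is hypothetical.
 */
function expandCompressedRow( $row ) {
	$text = $row->old_text;
	$flags = explode( ',', $row->old_flags );
	if ( in_array( 'gzip', $flags ) ) {
		# gzinflate() reverses gzdeflate()
		$text = gzinflate( $text );
	}
	return $text;
}
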
define( 'LS_INDIVIDUAL', 0 );
define( 'LS_CHUNKED', 1 );
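
# Revision-loading strategies for compressWithConcat(): LS_INDIVIDUAL
# fetches each revision's text with a separate query inside the chunk
# loop, while LS_CHUNKED joins the text table into the initial revision
# query. The code below is hard-wired to LS_CHUNKED.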

/**
 * Compress the revisions of each page into ConcatenatedGzipHistoryBlob
 * chunks of at most $maxChunkSize revisions: the first revision of each
 * chunk stores the full blob, and the rest are reduced to stubs that
 * point into it.
 */
function compressWithConcat( $startId, $maxChunkSize, $maxChunkFactor, $factorThreshold, $beginDate, $endDate )
{
	$fname = 'compressWithConcat';
	$loadStyle = LS_CHUNKED;

	$dbw =& wfGetDB( DB_MASTER );

	# First get a list of all pages; namespace and title are needed
	# for the progress display below
	$pageRes = $dbw->select( 'page', array( 'page_id', 'page_namespace', 'page_title' ), false, $fname );

	# For each of those, get a list of revisions which fit the criteria
	$conds = array();
	if ( $beginDate ) {
		$conds[] = "rev_timestamp>'" . $beginDate . "'";
	}
	if ( $endDate ) {
		$conds[] = "rev_timestamp<'" . $endDate . "'";
	}
	if ( $startId ) {
		$conds[] = 'rev_id>=' . $startId;
	}
	if ( $loadStyle == LS_CHUNKED ) {
		$tables = array( 'revision', 'text' );
		$fields = array( 'rev_id', 'old_flags', 'old_text' );
		$conds[] = 'rev_id=old_id';
		$revLoadOptions = 'FOR UPDATE';
	} else {
		$tables = array( 'revision' );
		$fields = array( 'rev_id' );
		$revLoadOptions = array();
	}

	while ( $pageRow = $dbw->fetchObject( $pageRes ) ) {
		# Display progress
		$titleObj = Title::makeTitle( $pageRow->page_namespace, $pageRow->page_title );
		print $titleObj->getPrefixedDBkey() . " ";

		# Load revisions
		$revRes = $dbw->select( $tables, $fields,
			array( 'rev_page' => $pageRow->page_id ) + $conds,
			$fname,
			$revLoadOptions
		);
		$revs = array();
		while ( $revRow = $dbw->fetchObject( $revRes ) ) {
			$revs[] = $revRow;
		}

		if ( count( $revs ) < 2 ) {
			# No revisions matching, no further processing
			print "\n";
			continue;
		}

		# For each chunk
		$i = 0;
		while ( $i < count( $revs ) ) {
			if ( $i < count( $revs ) - $maxChunkSize ) {
				$thisChunkSize = $maxChunkSize;
			} else {
				$thisChunkSize = count( $revs ) - $i;
			}

			$chunk = new ConcatenatedGzipHistoryBlob();
			$stubs = array();
			$dbw->begin();
			$usedChunk = false;
			$primaryOldid = $revs[$i]->rev_id;
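			# Storage scheme: the first revision of the chunk keeps the full
			# serialized ConcatenatedGzipHistoryBlob under its own old_id;
			# each later revision is replaced by a small serialized
			# HistoryBlobStub that points back at that old_id.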

			# Get the text of each revision and add it to the object
			for ( $j = 0; $j < $thisChunkSize && $chunk->isHappy( $maxChunkFactor, $factorThreshold ); $j++ ) {
				$oldid = $revs[$i + $j]->rev_id;

				# Get text
				if ( $loadStyle == LS_INDIVIDUAL ) {
					$textRow = $dbw->selectRow( 'text',
						array( 'old_flags', 'old_text' ),
						array( 'old_id' => $oldid ),
						$fname,
						'FOR UPDATE'
					);
					$text = Revision::getRevisionText( $textRow );
				} else {
					$text = Revision::getRevisionText( $revs[$i + $j] );
				}

				if ( $text === false ) {
					print "\nError, unable to get text in old_id $oldid\n";
					#$dbw->delete( 'old', array( 'old_id' => $oldid ) );
				}

				if ( $j == 0 ) {
					$chunk->setText( $text );
					print '.';
				} else {
					# Don't make a stub if it's going to be longer than the article
					# Stubs are typically about 100 bytes
					if ( strlen( $text ) < 120 ) {
						$stub = false;
						print 'x';
					} else {
						$stub = $chunk->addItem( $text );
						$stub->setLocation( $primaryOldid );
						$stub = serialize( $stub );
						print '.';
						$usedChunk = true;
					}
					$stubs[$j] = $stub;
				}
			}
			$thisChunkSize = $j;

			# If we couldn't actually use any stubs because the pages were too small, do nothing
			if ( $usedChunk ) {
				# Store the main object
				$dbw->update( 'text',
					array( /* SET */
						'old_text' => serialize( $chunk ),
						'old_flags' => 'object'
					), array( /* WHERE */
						'old_id' => $primaryOldid
					), $fname
				);

				# Store the stub objects
				for ( $j = 1; $j < $thisChunkSize; $j++ ) {
					# Skip if not compressing
					if ( $stubs[$j] !== false ) {
						$dbw->update( 'text',
							array( /* SET */
								'old_text' => $stubs[$j],
								'old_flags' => 'object'
							), array( /* WHERE */
								# rev_id equals old_id here, via the rev_id=old_id join
								'old_id' => $revs[$i + $j]->rev_id
							), $fname
						);
					}
				}
			}
			# Done, next
			print "/";
			$dbw->commit();
			$i += $thisChunkSize;
		}
		print "\n";
	}
	return true;
}
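
/*
 * Example driver (hypothetical): in MediaWiki this include is driven by
 * maintenance/compressOld.php; the option name and parameter values below
 * are illustrative assumptions, not the script's actual defaults.
 *
 *     require_once( 'commandLine.inc' );
 *     require_once( 'compressOld.inc' );
 *
 *     if ( isset( $options['type'] ) && $options['type'] == 'concat' ) {
 *         # startId, maxChunkSize, maxChunkFactor, factorThreshold, beginDate, endDate
 *         compressWithConcat( 0, 20, 3, 5.0, '20040101000000', '20050101000000' );
 *     } else {
 *         compressOldPages( 0 );
 *     }
 */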
?>