remove inverse_timestamp wherever it is found and disable wfInvertTimestamp()
[lhc/web/wiklou.git] maintenance/importUseModWiki.php
<?php

/**
 * Import data from a UseModWiki into a MediaWiki wiki
 * 2003-02-09 Brion VIBBER <brion@pobox.com>
 * Based loosely on Magnus's code from 2001-2002
 *
 * Updated limited version to get something working temporarily
 * 2003-10-09
 * Be sure to run the link & index rebuilding scripts!
 *
 * Some more munging for charsets etc
 * 2003-11-28
 *
 * Partial fix for pages starting with lowercase letters (??)
 * and CamelCase and /Subpage link conversion
 * 2004-11-17
 *
 * @todo document
 * @package MediaWiki
 * @subpackage Maintenance
 */
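
/*
 * Rough usage sketch (an assumption, not part of the original header): the
 * script prints SQL INSERT statements plus "--" progress comments to stdout,
 * so the output is normally redirected to a file and fed to MySQL, e.g.
 *
 *   php importUseModWiki.php > usemod.sql
 *   mysql -u wikiuser -p wikidb < usemod.sql
 *
 * ("wikiuser"/"wikidb" are placeholders.) Set $wgImportEncoding and
 * $wgRootDirectory below before running.
 */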

if( php_sapi_name() != 'cli' ) {
    die( "Please customize the settings and run me from the command line." );
}

/** Set these correctly! */
$wgImportEncoding = "CP1252"; /* We convert all to UTF-8 */
$wgRootDirectory = "/kalman/Projects/wiki2002/wiki/lib-http/db/wiki";

/* On a large wiki, you might run out of memory */
@ini_set( 'memory_limit', '40M' );

/* globals */
$wgFieldSeparator = "\xb3"; # Some wikis may use different char
$FS = $wgFieldSeparator ;
$FS1 = $FS."1" ;
$FS2 = $FS."2" ;
$FS3 = $FS."3" ;
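
/*
 * Orientation note (inferred from the parsing code below, not from UseModWiki
 * documentation): each page record is a flat alternation of keys and values
 * joined by these separators, nested three levels deep. The keys read here are:
 *
 *   FS1 level: text_default
 *   FS2 level: ts, username, host, data
 *   FS3 level: text, summary, minor
 *
 * splitHash() turns each level into an associative array.
 */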

$conversiontime = wfTimestampNow(); # Conversions will be marked with this timestamp
$usercache = array();

wfSeedRandom();
importPages();

# ------------------------------------------------------------------------------

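/**
 * Walk the UseModWiki page database and emit SQL for every page found.
 * UseModWiki buckets pages under page/A ... page/Z plus page/other, which is
 * why the letters are enumerated here instead of globbing the directory.
 */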
function importPages()
{
    global $wgRootDirectory;

    $letters = array(
        'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I',
        'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R',
        'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'other' );
    foreach( $letters as $letter ) {
        $dir = "$wgRootDirectory/page/$letter";
        if( is_dir( $dir ) )
            importPageDirectory( $dir );
    }
}

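/**
 * Import every .db page file in one bucket directory, recursing into
 * subdirectories; subpage files appear to live in a directory named after the
 * parent page, so $prefix is prepended to form titles like Parent/Subpage.
 */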
function importPageDirectory( $dir, $prefix = "" )
{
    echo "\n-- Checking page directory $dir\n";
    $mydir = opendir( $dir );
    while( $entry = readdir( $mydir ) ) {
        if( preg_match( '/^(.+)\.db$/', $entry, $m ) ) {
            echo importPage( $prefix . $m[1] );
        } else {
            if( is_dir( "$dir/$entry" ) ) {
                if( $entry != '.' && $entry != '..' ) {
                    importPageDirectory( "$dir/$entry", "$entry/" );
                }
            } else {
                echo "-- File '$entry' doesn't seem to contain an article. Skipping.\n";
            }
        }
    }
}


# ------------------------------------------------------------------------------

/* fetch_ functions
   Grab a given item from the database
*/
function fetchUser( $uid )
{
    die ("fetchUser not implemented" );

    global $FS,$FS2,$FS3, $wgRootDirectory;

    $fname = $wgRootDirectory . "/page/" . $title;
    if( !file_exists( $fname ) ) return false;

    $data = splitHash( implode( "", file( $fname ) ) );
    # enough?

    return $data;
}

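/**
 * Map a page title to its bucket inside the page/ (or keep/) tree: titles
 * starting with a letter go under the uppercased first letter, everything
 * else under "other". For example, "foo" maps to "F/foo" and "2004" maps to
 * "other/2004".
 */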
function useModFilename( $title ) {
    $c = substr( $title, 0, 1 );
    if(preg_match( '/[A-Z]/i', $c ) ) {
        return strtoupper( $c ) . "/$title";
    }
    return "other/$title";
}

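/**
 * Read the current revision of a page from its .db file and unpack the nested
 * field-separator record into an object with text, summary, minor, ts,
 * username and host members. Dies if the page file is missing.
 */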
function fetchPage( $title )
{
    global $FS,$FS1,$FS2,$FS3, $wgRootDirectory;

    $fname = $wgRootDirectory . "/page/" . useModFilename( $title ) . ".db";
    if( !file_exists( $fname ) ) {
        die( "Couldn't open file '$fname' for page '$title'.\n" );
    }

    $page = splitHash( $FS1, file_get_contents( $fname ) );
    $section = splitHash( $FS2, $page["text_default"] );
    $text = splitHash( $FS3, $section["data"] );

    return array2object( array( "text" => $text["text"] , "summary" => $text["summary"] ,
        "minor" => $text["minor"] , "ts" => $section["ts"] ,
        "username" => $section["username"] , "host" => $section["host"] ) );
}

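/**
 * Read the old revisions kept for a page from its .kp file under keep/.
 * Returns an array of revision objects shaped like fetchPage()'s result;
 * incomplete revisions (missing text, minor flag or a usable timestamp) are
 * skipped with a notice.
 */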
function fetchKeptPages( $title )
{
    global $FS,$FS1,$FS2,$FS3, $wgRootDirectory, $wgTimezoneCorrection;

    $fname = $wgRootDirectory . "/keep/" . useModFilename( $title ) . ".kp";
    if( !file_exists( $fname ) ) return array();

    $keptlist = explode( $FS1, file_get_contents( $fname ) );
    array_shift( $keptlist ); # Drop the junk at beginning of file

    $revisions = array();
    foreach( $keptlist as $rev ) {
        $section = splitHash( $FS2, $rev );
        $text = splitHash( $FS3, $section["data"] );
        if ( $text["text"] && $text["minor"] != "" && ( $section["ts"]*1 > 0 ) ) {
            array_push( $revisions, array2object( array ( "text" => $text["text"] , "summary" => $text["summary"] ,
                "minor" => $text["minor"] , "ts" => $section["ts"] ,
                "username" => $section["username"] , "host" => $section["host"] ) ) );
        } else {
            echo "-- skipped a bad old revision\n";
        }
    }
    return $revisions;
}

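/**
 * Split a separator-delimited UseModWiki record into an associative array.
 * The record alternates key, value, key, value, ... For illustration, with a
 * plain "|" as separator:
 *
 *   splitHash( '|', "text|Hello|summary|fix typo" )
 *     => array( 'text' => 'Hello', 'summary' => 'fix typo' )
 */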
function splitHash( $sep, $str ) {
    $temp = explode( $sep, $str );
    $ret = array();
    # Pair up key/value elements explicitly rather than relying on the
    # evaluation order of "$ret[$temp[$i]] = $temp[++$i]".
    for ( $i = 0; $i + 1 < count( $temp ); $i += 2 ) {
        $ret[$temp[$i]] = $temp[$i + 1];
    }
    return $ret;
}


/* import_ functions
   Take a fetched item and produce SQL
*/

/* importUser
   $uid is the UseMod user id number.
   The new ones will be assigned arbitrarily and are for internal use only.

   THIS IS DELAYED SINCE PUBLIC DUMPS DON'T INCLUDE USER DIR
*/
function importUser( $uid )
{
    global $last_uid, $user_list, $wgTimestampCorrection;
    die("importUser NYI");
    return "";

    $stuff = fetchUser( $uid );
    $last_uid++;

    $name = wfStrencode( $stuff->username );
    $hash = md5hash( $stuff->password ); # Doable?
    $tzoffset = $stuff['tzoffset'] - ($wgTimestampCorrection / 3600); # -8 to 0; +9 to +1
    $hideminor = ($stuff['rcall'] ? 0 : 1);
    $options = "cols={$stuff['editcols']}
rows={$stuff['editrows']}
rcdays={$stuff['rcdays']}
timecorrection={$tzoffset}
hideminor={$hideminor}
";

    $sql = "INSERT
    INTO user (user_id,user_name,user_password,user_options)
    VALUES ({$last_uid},'{$name}','{$hash}','{$options}');\n";
    return $sql;
}

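/**
 * Resolve a UseModWiki username/host pair to an (id, user_text) pair for the
 * SQL output. Since user accounts are not imported yet, the id is effectively
 * always 0 and edits are attributed by name, or by host when no name is set.
 */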
function checkUserCache( $name, $host )
{
    global $usercache;

    if( $name ) {
        if( isset( $usercache[$name] ) ) { # cache is keyed by name
            $userid = $usercache[$name];
        } else {
            # If we haven't imported user accounts
            $userid = 0;
        }
        $username = wfStrencode( $name );
    } else {
        $userid = 0;
        $username = wfStrencode( $host );
    }
    return array( $userid, $username );
}

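/**
 * Import a single page: emit one INSERT into cur for the current revision
 * and, if there is any history, one multi-row INSERT into old. When the text
 * needed CamelCase/subpage link munging, the munged text is saved as a new
 * "link fix" revision by the conversion script and the previous current
 * revision is pushed into the history instead.
 *
 * The output is shaped roughly like this (values are illustrative only, not
 * from a real import):
 *
 *   INSERT INTO cur (cur_namespace,cur_title,cur_text,...,cur_random) VALUES
 *     (0,'SamplePage','Some text','a summary',0,'SomeUser','20031009123456','20031128000000',0,0,0.123);
 *   INSERT INTO old (old_namespace,old_title,old_text,...,old_minor_edit) VALUES
 *     (0,'SamplePage','Older text','',0,'SomeUser','20031001120000',0);
 */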
function importPage( $title )
{
    global $usercache;
    global $conversiontime;

    echo "\n-- Importing page $title\n";
    $page = fetchPage( $title );

    $newtitle = wfStrencode( recodeText( $title ) );
    $namespace = 0;

    $munged = mungeFormat( $page->text );
    if( $munged != $page->text ) {
        /**
         * Save a *new* revision with the conversion, and put the
         * previous last version into the history.
         */
        $text = wfStrencode( recodeText( $munged ) );
        $comment = "link fix";
        $minor = 1;
        $userid = 0;
        $username = "Conversion script";
        $timestamp = wfUnix2Timestamp( time() );
        $redirect = ( preg_match( '/^#REDIRECT/', $page->text ) ? 1 : 0 );
        $random = mt_rand() / mt_getrandmax();

        $revisions = array( $page );
    } else {
        /**
         * Current revision:
         */
        $text = wfStrencode( recodeText( $page->text ) );
        $comment = wfStrencode( recodeText( $page->summary ) );
        $minor = ($page->minor ? 1 : 0);
        list( $userid, $username ) = checkUserCache( $page->username, $page->host );
        $username = wfStrencode( recodeText( $username ) );
        $timestamp = wfUnix2Timestamp( $page->ts );
        $redirect = ( preg_match( '/^#REDIRECT/', $page->text ) ? 1 : 0 );
        $random = mt_rand() / mt_getrandmax();

        $revisions = array();
    }
    $sql = "
INSERT
INTO cur (cur_namespace,cur_title,cur_text,cur_comment,cur_user,cur_user_text,cur_timestamp,cur_touched,cur_minor_edit,cur_is_redirect,cur_random) VALUES
($namespace,'$newtitle','$text','$comment',$userid,'$username','$timestamp','$conversiontime',$minor,$redirect,$random);\n";

    # History
    $revisions = array_merge( $revisions, fetchKeptPages( $title ) );
    if(count( $revisions ) == 0 ) {
        return $sql;
    }

    $any = false;
    $sql .= "INSERT
INTO old (old_namespace,old_title,old_text,old_comment,old_user,old_user_text,old_timestamp,old_minor_edit) VALUES\n";
    foreach( $revisions as $rev ) {
        $text = wfStrencode( recodeText( $rev->text ) );
        $minor = ($rev->minor ? 1 : 0);
        list( $userid, $username ) = checkUserCache( $rev->username, $rev->host );
        $username = wfStrencode( recodeText( $username ) );
        $timestamp = wfUnix2Timestamp( $rev->ts );
        $comment = wfStrencode( recodeText( $rev->summary ) );

        if($any) $sql .= ",";
        $sql .= "\n\t($namespace,'$newtitle','$text','$comment',$userid,'$username','$timestamp',$minor)";
        $any = true;
    }
    $sql .= ";\n\n";
    return $sql;
}

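/**
 * Convert page text from the source wiki's encoding ($wgImportEncoding,
 * CP1252 by default here) to UTF-8, normalise line endings and expand numeric
 * character references.
 */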
# Whee!
function recodeText( $string ) {
    global $wgImportEncoding;
    # For currently latin-1 wikis
    $string = str_replace( "\r\n", "\n", $string );
    $string = @iconv( $wgImportEncoding, "UTF-8", $string );
    $string = wfMungeToUtf8( $string ); # Any old &#1234; stuff
    return $string;
}

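/**
 * Encode a single Unicode codepoint as a UTF-8 byte sequence; for example,
 * wfUtf8Sequence( 0xE9 ) returns "\xc3\xa9" (é). Codepoints that cannot be
 * encoded are left as a numeric character reference.
 */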
function wfUtf8Sequence($codepoint) {
    if($codepoint < 0x80) return chr($codepoint);
    if($codepoint < 0x800) return chr($codepoint >> 6 & 0x3f | 0xc0) .
        chr($codepoint & 0x3f | 0x80);
    if($codepoint < 0x10000) return chr($codepoint >> 12 & 0x0f | 0xe0) .
        chr($codepoint >> 6 & 0x3f | 0x80) .
        chr($codepoint & 0x3f | 0x80);
    if($codepoint < 0x110000) return chr($codepoint >> 18 & 0x07 | 0xf0) . # 4-byte form covers the rest of Unicode, up to U+10FFFF
        chr($codepoint >> 12 & 0x3f | 0x80) .
        chr($codepoint >> 6 & 0x3f | 0x80) .
        chr($codepoint & 0x3f | 0x80);
    # Not a valid Unicode codepoint; keep it as an entity
    return "&#$codepoint;";
}

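/**
 * Expand decimal and hexadecimal character references (&#233; / &#xe9;) into
 * their UTF-8 encoding. Named entities are left untouched for now.
 */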
function wfMungeToUtf8($string) {
    $string = preg_replace ( '/&#([0-9]+);/e', 'wfUtf8Sequence($1)', $string );
    $string = preg_replace ( '/&#x([0-9a-f]+);/ie', 'wfUtf8Sequence(0x$1)', $string );
    # Should also do named entities here
    return $string;
}

function wfStrencode( $string ) {
    return mysql_escape_string( $string );
}

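/**
 * Convert between Unix epoch seconds and the 14-digit YYYYMMDDHHMMSS
 * timestamp format used in the SQL output; for example, wfUnix2Timestamp( 0 )
 * gives "19700101000000" and wfTimestamp2Unix( "19700101000000" ) gives 0.
 * wfTimestampNow() returns the current time in the same format.
 */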
function wfUnix2Timestamp( $unixtime ) {
    return gmdate( "YmdHis", $unixtime );
}

function wfTimestamp2Unix( $ts )
{
    return gmmktime( ( (int)substr( $ts, 8, 2) ),
        (int)substr( $ts, 10, 2 ), (int)substr( $ts, 12, 2 ),
        (int)substr( $ts, 4, 2 ), (int)substr( $ts, 6, 2 ),
        (int)substr( $ts, 0, 4 ) );
}

function wfTimestampNow() {
    # return NOW
    return gmdate( "YmdHis" );
}

# Sorting hack for MySQL 3, which doesn't use index sorts for DESC
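# Note: since this change no longer writes inverse_timestamp fields, nothing in
# this script calls wfInvertTimestamp() any more; it is kept only for reference.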
function wfInvertTimestamp( $ts ) {
    return strtr(
        $ts,
        "0123456789",
        "9876543210"
    );
}

function wfSeedRandom()
{
    # Pull in the global flag so the assignment below actually marks the
    # generator as seeded outside this function.
    global $wgRandomSeeded;

    $seed = hexdec(substr(md5(microtime()),-8)) & 0x7fffffff;
    mt_srand( $seed );
    $wgRandomSeeded = true;
}

function array2object( $arr ) {
    $o = (object)0;
    foreach( $arr as $x => $y ) {
        $o->$x = $y;
    }
    return $o;
}


/**
 * Make CamelCase and /Talk links work
 */
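/*
 * For example (my reading of the regexes below, not output from a real run):
 * "Sandbox" is left alone, "SandBox" becomes "[[SandBox]]" and "/SubPage"
 * becomes "[[/SubPage]]". Anything already inside [[...]] or <nowiki> tags,
 * and bare URLs, are protected by the placeholder pass and restored unchanged
 * afterwards.
 */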
function mungeFormat( $text ) {
    global $nowiki;
    $nowiki = array();
    $staged = preg_replace_callback(
        '/(<nowiki>.*?<\\/nowiki>|(?:http|https|ftp):\\S+|\[\[[^]\\n]+]])/s',
        'nowikiPlaceholder', $text );

    # This is probably not 100% correct, I'm just
    # glancing at the UseModWiki code.
    $upper = "[A-Z]";
    $lower = "[a-z_0-9]";
    $any = "[A-Za-z_0-9]";
    $camel = "(?:$upper+$lower+$upper+$any*)";
    $subpage = "(?:\\/$any+)";
    $substart = "(?:\\/$upper$any*)";

    $munged = preg_replace( "/(?!\\[\\[)($camel$subpage*|$substart$subpage*)\\b(?!\\]\\]|>)/",
        '[[$1]]', $staged );

    $final = preg_replace( '/' . preg_quote( placeholder() ) . '/es',
        'array_shift( $nowiki )', $munged );
    return $final;
}


function placeholder( $x = null ) {
    return '\xffplaceholder\xff';
}

function nowikiPlaceholder( $matches ) {
    global $nowiki;
    $nowiki[] = $matches[1];
    return placeholder();
}

?>