update all core usage of deprecated Title::userCanRead()
[lhc/web/wiklou.git] / includes/specials/SpecialExport.php
<?php
/**
 * Implements Special:Export
 *
 * Copyright © 2003-2008 Brion Vibber <brion@pobox.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 * http://www.gnu.org/copyleft/gpl.html
 *
 * @file
 * @ingroup SpecialPage
 */

/**
 * A special page that allows users to export pages in an XML file
 *
 * @ingroup SpecialPage
 */
class SpecialExport extends SpecialPage {

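	// Request-derived state, set up in execute():
	// - $curonly: export only the current revision of each page
	// - $doExport: whether an export dump should actually be produced
	// - $pageLinkDepth: how many levels of linked pages to include
	// - $templates: whether to also include templates used by the listed pages
	// - $images: whether to include images (not yet implemented)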
	private $curonly, $doExport, $pageLinkDepth, $templates;
	private $images;

	public function __construct() {
		parent::__construct( 'Export' );
	}

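	/**
	 * @param $par string|null Subpage passed after Special:Export/, used as
	 *        the default page list for GET requests
	 */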
	public function execute( $par ) {
		global $wgSitename, $wgExportAllowListContributors, $wgExportFromNamespaces;
		global $wgExportAllowHistory, $wgExportMaxHistory, $wgExportMaxLinkDepth;

		$this->setHeaders();
		$this->outputHeader();

		// Set some variables
		$this->curonly = true;
		$this->doExport = false;
		$request = $this->getRequest();
		$this->templates = $request->getCheck( 'templates' );
		$this->images = $request->getCheck( 'images' ); // Doesn't do anything yet
		$this->pageLinkDepth = $this->validateLinkDepth(
			$request->getIntOrNull( 'pagelink-depth' )
		);
		$nsindex = '';

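		// Work out what was requested: add a category's pages to the list,
		// add a namespace's pages, run a POSTed export, or handle a plain
		// GET request (including Special:Export/Some_page).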
		if ( $request->getCheck( 'addcat' ) ) {
			$page = $request->getText( 'pages' );
			$catname = $request->getText( 'catname' );

			if ( $catname !== '' && $catname !== null && $catname !== false ) {
				$t = Title::makeTitleSafe( NS_MAIN, $catname );
				if ( $t ) {
					/**
					 * @todo FIXME: This can lead to hitting memory limit for very large
					 * categories. Ideally we would do the lookup synchronously
					 * during the export in a single query.
					 */
					$catpages = $this->getPagesFromCategory( $t );
					if ( $catpages ) {
						$page .= "\n" . implode( "\n", $catpages );
					}
				}
			}
		} elseif ( $request->getCheck( 'addns' ) && $wgExportFromNamespaces ) {
			$page = $request->getText( 'pages' );
			$nsindex = $request->getText( 'nsindex', '' );

			if ( strval( $nsindex ) !== '' ) {
				/**
				 * Same implementation as above, so same @todo
				 */
				$nspages = $this->getPagesFromNamespace( $nsindex );
				if ( $nspages ) {
					$page .= "\n" . implode( "\n", $nspages );
				}
			}
		} elseif ( $request->wasPosted() && $par == '' ) {
			$page = $request->getText( 'pages' );
			$this->curonly = $request->getCheck( 'curonly' );
			$rawOffset = $request->getVal( 'offset' );

			if ( $rawOffset ) {
				$offset = wfTimestamp( TS_MW, $rawOffset );
			} else {
				$offset = null;
			}

			$limit = $request->getInt( 'limit' );
			$dir = $request->getVal( 'dir' );
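			// $history is later passed to WikiExporter: either one of its
			// constants (e.g. WikiExporter::CURRENT) or, for bounded history
			// exports, an associative array of 'dir', 'offset' and 'limit'.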
			$history = array(
				'dir' => 'asc',
				'offset' => false,
				'limit' => $wgExportMaxHistory,
			);
			$historyCheck = $request->getCheck( 'history' );

			if ( $this->curonly ) {
				$history = WikiExporter::CURRENT;
			} elseif ( !$historyCheck ) {
				if ( $limit > 0 && ( $wgExportMaxHistory == 0 || $limit < $wgExportMaxHistory ) ) {
					$history['limit'] = $limit;
				}
				if ( !is_null( $offset ) ) {
					$history['offset'] = $offset;
				}
				if ( strtolower( $dir ) == 'desc' ) {
					$history['dir'] = 'desc';
				}
			}

			if ( $page != '' ) {
				$this->doExport = true;
			}
		} else {
			// Default to current-only for GET requests.
			$page = $request->getText( 'pages', $par );
			$historyCheck = $request->getCheck( 'history' );

			if ( $historyCheck ) {
				$history = WikiExporter::FULL;
			} else {
				$history = WikiExporter::CURRENT;
			}

			if ( $page != '' ) {
				$this->doExport = true;
			}
		}

		if ( !$wgExportAllowHistory ) {
			// Override
			$history = WikiExporter::CURRENT;
		}

		$list_authors = $request->getCheck( 'listauthors' );
		if ( !$this->curonly || !$wgExportAllowListContributors ) {
			$list_authors = false;
		}

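		// Something to export: stream the XML dump directly and skip the form.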
		if ( $this->doExport ) {
			$this->getOutput()->disable();

			// Cancel output buffering and gzipping if set
			// This should provide safer streaming for pages with history
			wfResetOutputBuffers();
			$request->response()->header( "Content-type: application/xml; charset=utf-8" );

			if ( $request->getCheck( 'wpDownload' ) ) {
				// Provide a sane filename suggestion
				$filename = urlencode( $wgSitename . '-' . wfTimestampNow() . '.xml' );
				$request->response()->header( "Content-disposition: attachment;filename={$filename}" );
			}

			$this->doExport( $page, $history, $list_authors );

			return;
		}

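		// No export requested yet: show the export form.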
		$out = $this->getOutput();
		$out->addWikiMsg( 'exporttext' );

		$form = Xml::openElement( 'form', array( 'method' => 'post',
			'action' => $this->getTitle()->getLocalUrl( 'action=submit' ) ) );
		$form .= Xml::inputLabel( wfMsg( 'export-addcattext' ), 'catname', 'catname', 40 ) . '&#160;';
		$form .= Xml::submitButton( wfMsg( 'export-addcat' ), array( 'name' => 'addcat' ) ) . '<br />';

		if ( $wgExportFromNamespaces ) {
			$form .= Xml::namespaceSelector( $nsindex, null, 'nsindex', wfMsg( 'export-addnstext' ) ) . '&#160;';
			$form .= Xml::submitButton( wfMsg( 'export-addns' ), array( 'name' => 'addns' ) ) . '<br />';
		}

		$form .= Xml::element( 'textarea', array( 'name' => 'pages', 'cols' => 40, 'rows' => 10 ), $page, false );
		$form .= '<br />';

		if ( $wgExportAllowHistory ) {
			$form .= Xml::checkLabel(
				wfMsg( 'exportcuronly' ),
				'curonly',
				'curonly',
				$request->wasPosted() ? $request->getCheck( 'curonly' ) : true
			) . '<br />';
		} else {
			$out->addHTML( wfMsgExt( 'exportnohistory', 'parse' ) );
		}

		$form .= Xml::checkLabel(
			wfMsg( 'export-templates' ),
			'templates',
			'wpExportTemplates',
			$request->wasPosted() ? $request->getCheck( 'templates' ) : false
		) . '<br />';

		if ( $wgExportMaxLinkDepth || $this->userCanOverrideExportDepth() ) {
			$form .= Xml::inputLabel( wfMsg( 'export-pagelinks' ), 'pagelink-depth', 'pagelink-depth', 20, 0 ) . '<br />';
		}
		// Enable this when we can do something useful exporting/importing image information. :)
		//$form .= Xml::checkLabel( wfMsg( 'export-images' ), 'images', 'wpExportImages', false ) . '<br />';
		$form .= Xml::checkLabel(
			wfMsg( 'export-download' ),
			'wpDownload',
			'wpDownload',
			$request->wasPosted() ? $request->getCheck( 'wpDownload' ) : true
		) . '<br />';

		if ( $wgExportAllowListContributors ) {
			$form .= Xml::checkLabel(
				wfMsg( 'exportlistauthors' ),
				'listauthors',
				'listauthors',
				$request->wasPosted() ? $request->getCheck( 'listauthors' ) : false
			) . '<br />';
		}

		$form .= Xml::submitButton( wfMsg( 'export-submit' ), Linker::tooltipAndAccesskeyAttribs( 'export' ) );
		$form .= Xml::closeElement( 'form' );

		$out->addHTML( $form );
	}

	/**
	 * @return bool
	 */
	private function userCanOverrideExportDepth() {
		return $this->getUser()->isAllowed( 'override-export-depth' );
	}

	/**
	 * Do the actual page exporting
	 *
	 * @param $page String: user input on what page(s) to export
	 * @param $history Mixed: one of the WikiExporter history export constants
	 * @param $list_authors Boolean: Whether to add distinct author list (when
	 *                      not returning full history)
	 */
	private function doExport( $page, $history, $list_authors ) {
		$pageSet = array(); // Inverted index of all pages to look up

		// Split up and normalize input
		foreach ( explode( "\n", $page ) as $pageName ) {
			$pageName = trim( $pageName );
			$title = Title::newFromText( $pageName );
			if ( $title && $title->getInterwiki() == '' && $title->getText() !== '' ) {
				// Only record each page once!
				$pageSet[$title->getPrefixedText()] = true;
			}
		}

		// Set of original pages to pass on to further manipulation...
		$inputPages = array_keys( $pageSet );

		// Look up any linked pages if asked...
		if ( $this->templates ) {
			$pageSet = $this->getTemplates( $inputPages, $pageSet );
		}
		$linkDepth = $this->pageLinkDepth;
		if ( $linkDepth ) {
			$pageSet = $this->getPageLinks( $inputPages, $pageSet, $linkDepth );
		}

		/*
		// Enable this when we can do something useful exporting/importing image information. :)
		if ( $this->images ) {
			$pageSet = $this->getImages( $inputPages, $pageSet );
		}
		*/

		$pages = array_keys( $pageSet );

		// Normalize titles to the same format and remove dupes, see bug 17374
		foreach ( $pages as $k => $v ) {
			$pages[$k] = str_replace( " ", "_", $v );
		}

		$pages = array_unique( $pages );

		/* Ok, let's get to it... */
		if ( $history == WikiExporter::CURRENT ) {
			$lb = false;
			$db = wfGetDB( DB_SLAVE );
			$buffer = WikiExporter::BUFFER;
		} else {
			// Use an unbuffered query; histories may be very long!
			$lb = wfGetLBFactory()->newMainLB();
			$db = $lb->getConnection( DB_SLAVE );
			$buffer = WikiExporter::STREAM;

			// This might take a while... :D
			wfSuppressWarnings();
			set_time_limit( 0 );
			wfRestoreWarnings();
		}

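		// Write the dump: open the XML stream, emit each exportable page, close the stream.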
		$exporter = new WikiExporter( $db, $history, $buffer );
		$exporter->list_authors = $list_authors;
		$exporter->openStream();

		foreach ( $pages as $page ) {
			/*
			if( $wgExportMaxHistory && !$this->curonly ) {
				$title = Title::newFromText( $page );
				if( $title ) {
					$count = Revision::countByTitle( $db, $title );
					if( $count > $wgExportMaxHistory ) {
						wfDebug( __FUNCTION__ .
							": Skipped $page, $count revisions too big\n" );
						continue;
					}
				}
			}*/
			# Bug 8824: Only export pages the user can read
			$title = Title::newFromText( $page );
			if ( is_null( $title ) ) {
				continue; # TODO: perhaps output an <error> tag or something.
			}
			if ( !$title->userCan( 'read' ) ) {
				continue; # TODO: perhaps output an <error> tag or something.
			}

			$exporter->pageByTitle( $title );
		}

		$exporter->closeStream();

		if ( $lb ) {
			$lb->closeAll();
		}
	}

	/**
	 * @param $title Title
	 * @return array
	 */
	private function getPagesFromCategory( $title ) {
		global $wgContLang;

		$name = $title->getDBkey();

		$dbr = wfGetDB( DB_SLAVE );
		$res = $dbr->select(
			array( 'page', 'categorylinks' ),
			array( 'page_namespace', 'page_title' ),
			array( 'cl_from=page_id', 'cl_to' => $name ),
			__METHOD__,
			array( 'LIMIT' => '5000' )
		);

		$pages = array();

		foreach ( $res as $row ) {
			$n = $row->page_title;
			if ( $row->page_namespace ) {
				$ns = $wgContLang->getNsText( $row->page_namespace );
				$n = $ns . ':' . $n;
			}

			$pages[] = $n;
		}
		return $pages;
	}

	/**
	 * @param $nsindex int
	 * @return array
	 */
	private function getPagesFromNamespace( $nsindex ) {
		global $wgContLang;

		$dbr = wfGetDB( DB_SLAVE );
		$res = $dbr->select(
			'page',
			array( 'page_namespace', 'page_title' ),
			array( 'page_namespace' => $nsindex ),
			__METHOD__,
			array( 'LIMIT' => '5000' )
		);

		$pages = array();

		foreach ( $res as $row ) {
			$n = $row->page_title;

			if ( $row->page_namespace ) {
				$ns = $wgContLang->getNsText( $row->page_namespace );
				$n = $ns . ':' . $n;
			}

			$pages[] = $n;
		}
		return $pages;
	}

	/**
	 * Expand a list of pages to include templates used in those pages.
	 * @param $inputPages array, list of titles to look up
	 * @param $pageSet array, associative array indexed by titles for output
	 * @return array associative array indexed by titles
	 */
	private function getTemplates( $inputPages, $pageSet ) {
		return $this->getLinks( $inputPages, $pageSet,
			'templatelinks',
			array( 'tl_namespace AS namespace', 'tl_title AS title' ),
			array( 'page_id=tl_from' )
		);
	}

	/**
	 * Validate link depth setting, if available.
	 * @param $depth int
	 * @return int
	 */
	private function validateLinkDepth( $depth ) {
		global $wgExportMaxLinkDepth;

		if ( $depth < 0 ) {
			return 0;
		}

		if ( !$this->userCanOverrideExportDepth() ) {
			if ( $depth > $wgExportMaxLinkDepth ) {
				return $wgExportMaxLinkDepth;
			}
		}

		/*
		 * There's a HARD CODED limit of 5 levels of recursion here to prevent a
		 * crazy-big export from being done by someone setting the depth
		 * number too high. In other words, last resort safety net.
		 */
		return intval( min( $depth, 5 ) );
	}

	/**
	 * Expand a list of pages to include pages linked to from that page.
	 * @param $inputPages array
	 * @param $pageSet array
	 * @param $depth int
	 * @return array
	 */
	private function getPageLinks( $inputPages, $pageSet, $depth ) {
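		// Each pass expands one level of links; everything gathered so far
		// becomes the input set for the next pass.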
		for ( ; $depth > 0; --$depth ) {
			$pageSet = $this->getLinks(
				$inputPages, $pageSet, 'pagelinks',
				array( 'pl_namespace AS namespace', 'pl_title AS title' ),
				array( 'page_id=pl_from' )
			);
			$inputPages = array_keys( $pageSet );
		}

		return $pageSet;
	}

	/**
	 * Expand a list of pages to include images used in those pages.
	 *
	 * @param $inputPages array, list of titles to look up
	 * @param $pageSet array, associative array indexed by titles for output
	 *
	 * @return array associative array indexed by titles
	 */
	private function getImages( $inputPages, $pageSet ) {
		return $this->getLinks(
			$inputPages,
			$pageSet,
			'imagelinks',
			array( NS_FILE . ' AS namespace', 'il_to AS title' ),
			array( 'page_id=il_from' )
		);
	}

	/**
	 * Expand a list of pages to include items used in those pages.
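	 *
	 * @param $inputPages array, list of titles to look up
	 * @param $pageSet array, associative array indexed by titles for output
	 * @param $table string, link table to join against the page table
	 * @param $fields array, fields to select, aliased as 'namespace' and 'title'
	 * @param $join array, join condition between the page table and $table
	 * @return array associative array indexed by titles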
	 */
	private function getLinks( $inputPages, $pageSet, $table, $fields, $join ) {
		$dbr = wfGetDB( DB_SLAVE );

		foreach ( $inputPages as $page ) {
			$title = Title::newFromText( $page );

			if ( $title ) {
				$pageSet[$title->getPrefixedText()] = true;
				/// @todo FIXME: May or may not be more efficient to batch these
				/// by namespace when given multiple input pages.
				$result = $dbr->select(
					array( 'page', $table ),
					$fields,
					array_merge(
						$join,
						array(
							'page_namespace' => $title->getNamespace(),
							'page_title' => $title->getDBkey()
						)
					),
					__METHOD__
				);

				foreach ( $result as $row ) {
					$template = Title::makeTitle( $row->namespace, $row->title );
					$pageSet[$template->getPrefixedText()] = true;
				}
			}
		}

		return $pageSet;
	}

}