GPL Headers for all!
[lhc/web/wiklou.git] / includes / specials / SpecialExport.php
1 <?php
2 /**
3 * Copyright (C) 2003-2008 Brion Vibber <brion@pobox.com>
4 *
5 * This program is free software; you can redistribute it and/or modify
6 * it under the terms of the GNU General Public License as published by
7 * the Free Software Foundation; either version 2 of the License, or
8 * (at your option) any later version.
9 *
10 * This program is distributed in the hope that it will be useful,
11 * but WITHOUT ANY WARRANTY; without even the implied warranty of
12 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13 * GNU General Public License for more details.
14 *
15 * You should have received a copy of the GNU General Public License along
16 * with this program; if not, write to the Free Software Foundation, Inc.,
17 * 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
18 * http://www.gnu.org/copyleft/gpl.html
19 */
20
21 /**
22 * @file
23 * @ingroup SpecialPage
24 */
25
class SpecialExport extends SpecialPage {

	// Options captured from the web request in execute():
	private $curonly;       // bool: export only the current revision of each page
	private $doExport;      // bool: stream the XML dump instead of rendering the form
	private $pageLinkDepth; // int: levels of page links to follow (0 = none)
	private $templates;     // bool: also export templates used by the listed pages
	private $images;        // bool: placeholder for image export; not acted on yet

	public function __construct() {
		// Registers this page as Special:Export.
		parent::__construct( 'Export' );
	}
35 public function execute( $par ) {
36 global $wgOut, $wgRequest, $wgSitename, $wgExportAllowListContributors;
37 global $wgExportAllowHistory, $wgExportMaxHistory, $wgExportMaxLinkDepth;
38 global $wgExportFromNamespaces, $wgUser;
39
40 $this->setHeaders();
41 $this->outputHeader();
42
43 // Set some variables
44 $this->curonly = true;
45 $this->doExport = false;
46 $this->templates = $wgRequest->getCheck( 'templates' );
47 $this->images = $wgRequest->getCheck( 'images' ); // Doesn't do anything yet
48 $this->pageLinkDepth = $this->validateLinkDepth(
49 $wgRequest->getIntOrNull( 'pagelink-depth' ) );
50 $nsindex = '';
51
52 if ( $wgRequest->getCheck( 'addcat' ) ) {
53 $page = $wgRequest->getText( 'pages' );
54 $catname = $wgRequest->getText( 'catname' );
55
56 if ( $catname !== '' && $catname !== null && $catname !== false ) {
57 $t = Title::makeTitleSafe( NS_MAIN, $catname );
58 if ( $t ) {
59 /**
60 * @todo Fixme: this can lead to hitting memory limit for very large
61 * categories. Ideally we would do the lookup synchronously
62 * during the export in a single query.
63 */
64 $catpages = $this->getPagesFromCategory( $t );
65 if ( $catpages ) $page .= "\n" . implode( "\n", $catpages );
66 }
67 }
68 }
69 else if( $wgRequest->getCheck( 'addns' ) && $wgExportFromNamespaces ) {
70 $page = $wgRequest->getText( 'pages' );
71 $nsindex = $wgRequest->getText( 'nsindex', '' );
72
73 if ( strval( $nsindex ) !== '' ) {
74 /**
75 * Same implementation as above, so same @todo
76 */
77 $nspages = $this->getPagesFromNamespace( $nsindex );
78 if ( $nspages ) $page .= "\n" . implode( "\n", $nspages );
79 }
80 }
81 else if( $wgRequest->wasPosted() && $par == '' ) {
82 $page = $wgRequest->getText( 'pages' );
83 $this->curonly = $wgRequest->getCheck( 'curonly' );
84 $rawOffset = $wgRequest->getVal( 'offset' );
85 if( $rawOffset ) {
86 $offset = wfTimestamp( TS_MW, $rawOffset );
87 } else {
88 $offset = null;
89 }
90 $limit = $wgRequest->getInt( 'limit' );
91 $dir = $wgRequest->getVal( 'dir' );
92 $history = array(
93 'dir' => 'asc',
94 'offset' => false,
95 'limit' => $wgExportMaxHistory,
96 );
97 $historyCheck = $wgRequest->getCheck( 'history' );
98 if ( $this->curonly ) {
99 $history = WikiExporter::CURRENT;
100 } elseif ( !$historyCheck ) {
101 if ( $limit > 0 && ($wgExportMaxHistory == 0 || $limit < $wgExportMaxHistory ) ) {
102 $history['limit'] = $limit;
103 }
104 if ( !is_null( $offset ) ) {
105 $history['offset'] = $offset;
106 }
107 if ( strtolower( $dir ) == 'desc' ) {
108 $history['dir'] = 'desc';
109 }
110 }
111
112 if( $page != '' ) $this->doExport = true;
113 } else {
114 // Default to current-only for GET requests
115 $page = $wgRequest->getText( 'pages', $par );
116 $historyCheck = $wgRequest->getCheck( 'history' );
117 if( $historyCheck ) {
118 $history = WikiExporter::FULL;
119 } else {
120 $history = WikiExporter::CURRENT;
121 }
122
123 if( $page != '' ) $this->doExport = true;
124 }
125
126 if( !$wgExportAllowHistory ) {
127 // Override
128 $history = WikiExporter::CURRENT;
129 }
130
131 $list_authors = $wgRequest->getCheck( 'listauthors' );
132 if ( !$this->curonly || !$wgExportAllowListContributors ) $list_authors = false ;
133
134 if ( $this->doExport ) {
135 $wgOut->disable();
136 // Cancel output buffering and gzipping if set
137 // This should provide safer streaming for pages with history
138 wfResetOutputBuffers();
139 $wgRequest->response()->header( "Content-type: application/xml; charset=utf-8" );
140 if( $wgRequest->getCheck( 'wpDownload' ) ) {
141 // Provide a sane filename suggestion
142 $filename = urlencode( $wgSitename . '-' . wfTimestampNow() . '.xml' );
143 $wgRequest->response()->header( "Content-disposition: attachment;filename={$filename}" );
144 }
145 $this->doExport( $page, $history, $list_authors );
146 return;
147 }
148
149 $wgOut->addWikiMsg( 'exporttext' );
150
151 $form = Xml::openElement( 'form', array( 'method' => 'post',
152 'action' => $this->getTitle()->getLocalUrl( 'action=submit' ) ) );
153 $form .= Xml::inputLabel( wfMsg( 'export-addcattext' ) , 'catname', 'catname', 40 ) . '&#160;';
154 $form .= Xml::submitButton( wfMsg( 'export-addcat' ), array( 'name' => 'addcat' ) ) . '<br />';
155
156 if ( $wgExportFromNamespaces ) {
157 $form .= Xml::namespaceSelector( $nsindex, null, 'nsindex', wfMsg( 'export-addnstext' ) ) . '&#160;';
158 $form .= Xml::submitButton( wfMsg( 'export-addns' ), array( 'name' => 'addns' ) ) . '<br />';
159 }
160
161 $form .= Xml::element( 'textarea', array( 'name' => 'pages', 'cols' => 40, 'rows' => 10 ), $page, false );
162 $form .= '<br />';
163
164 if( $wgExportAllowHistory ) {
165 $form .= Xml::checkLabel( wfMsg( 'exportcuronly' ), 'curonly', 'curonly', true ) . '<br />';
166 } else {
167 $wgOut->addHTML( wfMsgExt( 'exportnohistory', 'parse' ) );
168 }
169 $form .= Xml::checkLabel( wfMsg( 'export-templates' ), 'templates', 'wpExportTemplates', false ) . '<br />';
170 if( $wgExportMaxLinkDepth || $this->userCanOverrideExportDepth() ) {
171 $form .= Xml::inputLabel( wfMsg( 'export-pagelinks' ), 'pagelink-depth', 'pagelink-depth', 20, 0 ) . '<br />';
172 }
173 // Enable this when we can do something useful exporting/importing image information. :)
174 //$form .= Xml::checkLabel( wfMsg( 'export-images' ), 'images', 'wpExportImages', false ) . '<br />';
175 $form .= Xml::checkLabel( wfMsg( 'export-download' ), 'wpDownload', 'wpDownload', true ) . '<br />';
176
177 $form .= Xml::submitButton( wfMsg( 'export-submit' ), $wgUser->getSkin()->tooltipAndAccessKeyAttribs( 'export' ) );
178 $form .= Xml::closeElement( 'form' );
179 $wgOut->addHTML( $form );
180 }
181
182 private function userCanOverrideExportDepth() {
183 global $wgUser;
184
185 return $wgUser->isAllowed( 'override-export-depth' );
186 }
187
188 /**
189 * Do the actual page exporting
190 *
191 * @param $page String: user input on what page(s) to export
192 * @param $history Mixed: one of the WikiExporter history export constants
193 * @param $list_authors Boolean: Whether to add distinct author list (when
194 * not returning full history)
195 */
196 private function doExport( $page, $history, $list_authors ) {
197 global $wgExportMaxHistory;
198
199 $pageSet = array(); // Inverted index of all pages to look up
200
201 // Split up and normalize input
202 foreach( explode( "\n", $page ) as $pageName ) {
203 $pageName = trim( $pageName );
204 $title = Title::newFromText( $pageName );
205 if( $title && $title->getInterwiki() == '' && $title->getText() !== '' ) {
206 // Only record each page once!
207 $pageSet[$title->getPrefixedText()] = true;
208 }
209 }
210
211 // Set of original pages to pass on to further manipulation...
212 $inputPages = array_keys( $pageSet );
213
214 // Look up any linked pages if asked...
215 if( $this->templates ) {
216 $pageSet = $this->getTemplates( $inputPages, $pageSet );
217 }
218
219 if( $linkDepth = $this->pageLinkDepth ) {
220 $pageSet = $this->getPageLinks( $inputPages, $pageSet, $linkDepth );
221 }
222
223 /*
224 // Enable this when we can do something useful exporting/importing image information. :)
225 if( $this->images ) ) {
226 $pageSet = $this->getImages( $inputPages, $pageSet );
227 }
228 */
229
230 $pages = array_keys( $pageSet );
231
232 // Normalize titles to the same format and remove dupes, see bug 17374
233 foreach( $pages as $k => $v ) {
234 $pages[$k] = str_replace( " ", "_", $v );
235 }
236 $pages = array_unique( $pages );
237
238 /* Ok, let's get to it... */
239 if( $history == WikiExporter::CURRENT ) {
240 $lb = false;
241 $db = wfGetDB( DB_SLAVE );
242 $buffer = WikiExporter::BUFFER;
243 } else {
244 // Use an unbuffered query; histories may be very long!
245 $lb = wfGetLBFactory()->newMainLB();
246 $db = $lb->getConnection( DB_SLAVE );
247 $buffer = WikiExporter::STREAM;
248
249 // This might take a while... :D
250 wfSuppressWarnings();
251 set_time_limit(0);
252 wfRestoreWarnings();
253 }
254 $exporter = new WikiExporter( $db, $history, $buffer );
255 $exporter->list_authors = $list_authors;
256 $exporter->openStream();
257 foreach( $pages as $page ) {
258 /*
259 if( $wgExportMaxHistory && !$this->curonly ) {
260 $title = Title::newFromText( $page );
261 if( $title ) {
262 $count = Revision::countByTitle( $db, $title );
263 if( $count > $wgExportMaxHistory ) {
264 wfDebug( __FUNCTION__ .
265 ": Skipped $page, $count revisions too big\n" );
266 continue;
267 }
268 }
269 }*/
270 #Bug 8824: Only export pages the user can read
271 $title = Title::newFromText( $page );
272 if( is_null( $title ) ) continue; #TODO: perhaps output an <error> tag or something.
273 if( !$title->userCanRead() ) continue; #TODO: perhaps output an <error> tag or something.
274
275 $exporter->pageByTitle( $title );
276 }
277
278 $exporter->closeStream();
279 if( $lb ) {
280 $lb->closeAll();
281 }
282 }
283
284 private function getPagesFromCategory( $title ) {
285 global $wgContLang;
286
287 $name = $title->getDBkey();
288
289 $dbr = wfGetDB( DB_SLAVE );
290 $res = $dbr->select( array('page', 'categorylinks' ),
291 array( 'page_namespace', 'page_title' ),
292 array('cl_from=page_id', 'cl_to' => $name ),
293 __METHOD__, array('LIMIT' => '5000'));
294
295 $pages = array();
296 while ( $row = $dbr->fetchObject( $res ) ) {
297 $n = $row->page_title;
298 if ($row->page_namespace) {
299 $ns = $wgContLang->getNsText( $row->page_namespace );
300 $n = $ns . ':' . $n;
301 }
302
303 $pages[] = $n;
304 }
305 $dbr->freeResult($res);
306
307 return $pages;
308 }
309
310 private function getPagesFromNamespace( $nsindex ) {
311 global $wgContLang;
312
313 $dbr = wfGetDB( DB_SLAVE );
314 $res = $dbr->select( 'page', array('page_namespace', 'page_title'),
315 array('page_namespace' => $nsindex),
316 __METHOD__, array('LIMIT' => '5000') );
317
318 $pages = array();
319 while ( $row = $dbr->fetchObject( $res ) ) {
320 $n = $row->page_title;
321 if ($row->page_namespace) {
322 $ns = $wgContLang->getNsText( $row->page_namespace );
323 $n = $ns . ':' . $n;
324 }
325
326 $pages[] = $n;
327 }
328 $dbr->freeResult($res);
329
330 return $pages;
331 }
332
333 /**
334 * Expand a list of pages to include templates used in those pages.
335 * @param $inputPages array, list of titles to look up
336 * @param $pageSet array, associative array indexed by titles for output
337 * @return array associative array index by titles
338 */
339 private function getTemplates( $inputPages, $pageSet ) {
340 return $this->getLinks( $inputPages, $pageSet,
341 'templatelinks',
342 array( 'tl_namespace AS namespace', 'tl_title AS title' ),
343 array( 'page_id=tl_from' ) );
344 }
345
346 /**
347 * Validate link depth setting, if available.
348 */
349 private function validateLinkDepth( $depth ) {
350 global $wgExportMaxLinkDepth, $wgExportMaxLinkDepthLimit;
351 if( $depth < 0 ) {
352 return 0;
353 }
354 if ( !$this->userCanOverrideExportDepth() ) {
355 if( $depth > $wgExportMaxLinkDepth ) {
356 return $wgExportMaxLinkDepth;
357 }
358 }
359 /*
360 * There's a HARD CODED limit of 5 levels of recursion here to prevent a
361 * crazy-big export from being done by someone setting the depth
362 * number too high. In other words, last resort safety net.
363 */
364 return intval( min( $depth, 5 ) );
365 }
366
367 /** Expand a list of pages to include pages linked to from that page. */
368 private function getPageLinks( $inputPages, $pageSet, $depth ) {
369 for( $depth=$depth; $depth>0; --$depth ) {
370 $pageSet = $this->getLinks( $inputPages, $pageSet, 'pagelinks',
371 array( 'pl_namespace AS namespace', 'pl_title AS title' ),
372 array( 'page_id=pl_from' ) );
373 $inputPages = array_keys( $pageSet );
374 }
375 return $pageSet;
376 }
377
378 /**
379 * Expand a list of pages to include images used in those pages.
380 * @param $inputPages array, list of titles to look up
381 * @param $pageSet array, associative array indexed by titles for output
382 * @return array associative array index by titles
383 */
384 private function getImages( $inputPages, $pageSet ) {
385 return $this->getLinks( $inputPages, $pageSet,
386 'imagelinks',
387 array( NS_FILE . ' AS namespace', 'il_to AS title' ),
388 array( 'page_id=il_from' ) );
389 }
390
391 /**
392 * Expand a list of pages to include items used in those pages.
393 * @private
394 */
395 private function getLinks( $inputPages, $pageSet, $table, $fields, $join ) {
396 $dbr = wfGetDB( DB_SLAVE );
397 foreach( $inputPages as $page ) {
398 $title = Title::newFromText( $page );
399 if( $title ) {
400 $pageSet[$title->getPrefixedText()] = true;
401 /// @todo Fixme: May or may not be more efficient to batch these
402 /// by namespace when given multiple input pages.
403 $result = $dbr->select(
404 array( 'page', $table ),
405 $fields,
406 array_merge( $join,
407 array(
408 'page_namespace' => $title->getNamespace(),
409 'page_title' => $title->getDBkey() ) ),
410 __METHOD__ );
411 foreach( $result as $row ) {
412 $template = Title::makeTitle( $row->namespace, $row->title );
413 $pageSet[$template->getPrefixedText()] = true;
414 }
415 }
416 }
417 return $pageSet;
418 }
419 }
420