"Fossies" - the Fresh Open Source Software Archive  

Source code changes of the file "includes/specials/SpecialExport.php" between
mediawiki-1.31.1.tar.gz and mediawiki-1.32.0.tar.gz

About: MediaWiki is a wiki engine (the collaborative editing software behind, for example, Wikipedia, the free encyclopedia).

SpecialExport.php (mediawiki-1.31.1) vs. SpecialExport.php (mediawiki-1.32.0)
skipping to change at line 26 (both versions)

   *
   * You should have received a copy of the GNU General Public License along
   * with this program; if not, write to the Free Software Foundation, Inc.,
   * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
   * http://www.gnu.org/copyleft/gpl.html
   *
   * @file
   * @ingroup SpecialPage
   */
-  use MediaWiki\MediaWikiServices;
+  use MediaWiki\Logger\LoggerFactory;
   /**
    * A special page that allows users to export pages in a XML file
    *
    * @ingroup SpecialPage
    */
   class SpecialExport extends SpecialPage {
   	private $curonly, $doExport, $pageLinkDepth, $templates;
   	public function __construct() {
skipping to change at line 101 (both versions)

   		$this->doExport = true;
   		$exportall = true;
   		/* Although $page and $history are not used later on, we
   		nevertheless set them to avoid that PHP notices about using
   		undefined variables foul up our XML output (see call to
   		doExport(...) further down) */
   		$page = '';
   		$history = '';
   	} elseif ( $request->wasPosted() && $par == '' ) {
+  		// Log to see if certain parameters are actually used.
+  		// If not, we could deprecate them and do some cleanup, here and in WikiExporter.
+  		LoggerFactory::getInstance( 'export' )->debug(
+  			'Special:Export POST, dir: [{dir}], offset: [{offset}], limit: [{limit}]', [
+  				'dir' => $request->getRawVal( 'dir' ),
+  				'offset' => $request->getRawVal( 'offset' ),
+  				'limit' => $request->getRawVal( 'limit' ),
+  		] );
   		$page = $request->getText( 'pages' );
   		$this->curonly = $request->getCheck( 'curonly' );
   		$rawOffset = $request->getVal( 'offset' );
   		if ( $rawOffset ) {
   			$offset = wfTimestamp( TS_MW, $rawOffset );
   		} else {
   			$offset = null;
   		}
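The lines added in 1.32.0 obtain a PSR-3 logger from MediaWiki's LoggerFactory; the {dir}, {offset} and {limit} tokens are PSR-3 message placeholders filled in from the context array when the message is formatted. A minimal standalone sketch of the same pattern, assuming Monolog as an arbitrary PSR-3 backend (none of the class names below come from this diff):

    <?php
    // Sketch only: inside MediaWiki this logger comes from LoggerFactory::getInstance( 'export' );
    // here a generic PSR-3 backend (Monolog) is assumed instead.
    require __DIR__ . '/vendor/autoload.php';

    use Monolog\Logger;
    use Monolog\Handler\StreamHandler;
    use Monolog\Processor\PsrLogMessageProcessor;

    $logger = new Logger( 'export' );
    $logger->pushHandler( new StreamHandler( 'php://stderr', Logger::DEBUG ) );
    // Interpolates {dir}, {offset} and {limit} from the context array, per PSR-3.
    $logger->pushProcessor( new PsrLogMessageProcessor() );

    $logger->debug(
    	'Special:Export POST, dir: [{dir}], offset: [{offset}], limit: [{limit}]',
    	[ 'dir' => 'desc', 'offset' => '20190101000000', 'limit' => '5' ]
    );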
skipping to change at line 372 (1.31.1) / line 381 (1.32.0)

   		// Normalize titles to the same format and remove dupes, see T19374
   		foreach ( $pages as $k => $v ) {
   			$pages[$k] = str_replace( " ", "_", $v );
   		}
   		$pages = array_unique( $pages );
   	}
   	/* Ok, let's get to it... */
-  	if ( $history == WikiExporter::CURRENT ) {
-  		$lb = false;
-  		$db = wfGetDB( DB_REPLICA );
-  		$buffer = WikiExporter::BUFFER;
-  	} else {
-  		// Use an unbuffered query; histories may be very long!
-  		$lb = MediaWikiServices::getInstance()->getDBLoadBalancerFactory()->newMainLB();
-  		$db = $lb->getConnection( DB_REPLICA );
-  		$buffer = WikiExporter::STREAM;
-  		// This might take a while... :D
-  		Wikimedia\suppressWarnings();
-  		set_time_limit( 0 );
-  		Wikimedia\restoreWarnings();
-  	}
-  	$exporter = new WikiExporter( $db, $history, $buffer );
+  	$lb = false;
+  	$db = wfGetDB( DB_REPLICA );
+  	$exporter = new WikiExporter( $db, $history );
   	$exporter->list_authors = $list_authors;
   	$exporter->openStream();
   	if ( $exportall ) {
   		$exporter->allPages();
   	} else {
   		foreach ( $pages as $page ) {
   			# T10824: Only export pages the user can read
   			$title = Title::newFromText( $page );
   			if ( is_null( $title ) ) {

skipping to change at line 421 (1.31.1) / line 417 (1.32.0)

   		$exporter->closeStream();
   		if ( $lb ) {
   			$lb->closeAll();
   		}
   	}
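The block removed on the 1.31.1 side opted into a streaming (unbuffered) database read and lifted PHP's execution time limit whenever full histories were requested, since a page history can be arbitrarily long; in 1.32.0 the buffer argument is gone from the WikiExporter constructor call, so the caller no longer makes that choice here. A rough sketch of that general long-export pattern in plain PHP with PDO, assuming a MySQL backend and made-up connection details (an illustration only, not MediaWiki's database layer):

    <?php
    // Let a long-running export run without a wall-clock limit; the @ ignores the
    // warning set_time_limit() can raise in restricted configurations (the removed
    // code wrapped this in Wikimedia\suppressWarnings()/restoreWarnings()).
    @set_time_limit( 0 );

    // An unbuffered ("streaming") MySQL query hands rows to PHP one at a time
    // instead of buffering the whole result set client-side first.
    $pdo = new PDO( 'mysql:host=localhost;dbname=wiki', 'user', 'secret', [
    	PDO::MYSQL_ATTR_USE_BUFFERED_QUERY => false,
    ] );

    $stmt = $pdo->query( 'SELECT rev_id FROM revision ORDER BY rev_id' );
    foreach ( $stmt as $row ) {
    	// Stream each revision out as it arrives.
    	echo $row['rev_id'], "\n";
    }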
   	/**
   	 * @param Title $title
-  	 * @return array
+  	 * @return string[]
   	 */
   	private function getPagesFromCategory( $title ) {
-  		global $wgContLang;
   		$maxPages = $this->getConfig()->get( 'ExportPagelistLimit' );
   		$name = $title->getDBkey();
   		$dbr = wfGetDB( DB_REPLICA );
   		$res = $dbr->select(
   			[ 'page', 'categorylinks' ],
   			[ 'page_namespace', 'page_title' ],
   			[ 'cl_from=page_id', 'cl_to' => $name ],
   			__METHOD__,
   			[ 'LIMIT' => $maxPages ]
   		);
   		$pages = [];
   		foreach ( $res as $row ) {
-  			$n = $row->page_title;
-  			if ( $row->page_namespace ) {
-  				$ns = $wgContLang->getNsText( $row->page_namespace );
-  				$n = $ns . ':' . $n;
-  			}
-  			$pages[] = $n;
+  			$pages[] = Title::makeName( $row->page_namespace, $row->page_title );
   		}
   		return $pages;
   	}
   	/**
   	 * @param int $nsindex
-  	 * @return array
+  	 * @return string[]
   	 */
   	private function getPagesFromNamespace( $nsindex ) {
-  		global $wgContLang;
   		$maxPages = $this->getConfig()->get( 'ExportPagelistLimit' );
   		$dbr = wfGetDB( DB_REPLICA );
   		$res = $dbr->select(
   			'page',
   			[ 'page_namespace', 'page_title' ],
   			[ 'page_namespace' => $nsindex ],
   			__METHOD__,
   			[ 'LIMIT' => $maxPages ]
   		);
   		$pages = [];
   		foreach ( $res as $row ) {
-  			$n = $row->page_title;
-  			if ( $row->page_namespace ) {
-  				$ns = $wgContLang->getNsText( $row->page_namespace );
-  				$n = $ns . ':' . $n;
-  			}
-  			$pages[] = $n;
+  			$pages[] = Title::makeName( $row->page_namespace, $row->page_title );
   		}
   		return $pages;
   	}
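Both page-list helpers now build the prefixed page name with Title::makeName() rather than consulting the $wgContLang global and concatenating the namespace text by hand; the @return annotations tighten from array to string[] accordingly. A self-contained approximation of what the per-row call produces, using a hypothetical lookup table in place of the content language (illustration only, not code from the diff):

    <?php
    // Approximation of Title::makeName( $row->page_namespace, $row->page_title ):
    // prepend the localized namespace text (empty for the main namespace, 0)
    // to the page's DB key.
    function makePrefixedName( int $namespace, string $dbKey ): string {
    	// Hypothetical stand-in for the content language's getNsText().
    	$nsText = [ 0 => '', 2 => 'User', 10 => 'Template', 14 => 'Category' ];
    	$prefix = $nsText[$namespace] ?? "NS$namespace";
    	return $prefix === '' ? $dbKey : $prefix . ':' . $dbKey;
    }

    // makePrefixedName( 14, 'Physics' )  => 'Category:Physics'
    // makePrefixedName( 0, 'Main_Page' ) => 'Main_Page'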
   	/**
   	 * Expand a list of pages to include templates used in those pages.
   	 * @param array $inputPages List of titles to look up
   	 * @param array $pageSet Associative array indexed by titles for output
   	 * @return array Associative array index by titles
End of changes. 10 change blocks. 41 lines changed or deleted, 21 lines changed or added.
