From cecb985bee3bdd252e1b8dc0bd500b37cd52be01 Mon Sep 17 00:00:00 2001
From: Pierre Schmitz
Date: Wed, 16 May 2007 20:58:53 +0000
Subject: Update to MediaWiki 1.10.0

Plugins adapted and improved
Minor design fixes
---
 includes/Export.php | 38 +++++++++++++++++++++++++++-----------
 1 file changed, 27 insertions(+), 11 deletions(-)

(limited to 'includes/Export.php')

diff --git a/includes/Export.php b/includes/Export.php
index b7e0f9a1..9307795d 100644
--- a/includes/Export.php
+++ b/includes/Export.php
@@ -17,16 +17,15 @@
 # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 # http://www.gnu.org/copyleft/gpl.html
 
+
 /**
  *
- * @package MediaWiki
- * @subpackage SpecialPage
+ * @addtogroup SpecialPage
  */
-
 class WikiExporter {
	var $list_authors = false ; # Return distinct author list (when not returning full history)
	var $author_list = "" ;
-	
+
	const FULL = 0;
	const CURRENT = 1;
 
@@ -44,14 +43,14 @@ class WikiExporter {
	 * main query is still running.
	 *
	 * @param Database $db
-	 * @param mixed $history one of WikiExporter::FULL or WikiExporter::CURRENT, or an 
+	 * @param mixed $history one of WikiExporter::FULL or WikiExporter::CURRENT, or an
	 *                       associative array:
	 *                         offset: non-inclusive offset at which to start the query
	 *                         limit: maximum number of rows to return
	 *                         dir: "asc" or "desc" timestamp order
	 * @param int $buffer one of WikiExporter::BUFFER or WikiExporter::STREAM
	 */
-	function WikiExporter( &$db, $history = WikiExporter::CURRENT,
+	function __construct( &$db, $history = WikiExporter::CURRENT,
			$buffer = WikiExporter::BUFFER, $text = WikiExporter::TEXT ) {
		$this->db =& $db;
		$this->history = $history;
@@ -140,7 +139,10 @@ class WikiExporter {
		$fname = "do_list_authors" ;
		wfProfileIn( $fname );
		$this->author_list = "";
-		$sql = "SELECT DISTINCT rev_user_text,rev_user FROM {$page},{$revision} WHERE page_id=rev_page AND " . $cond ;
+		//rev_deleted
+		$nothidden = '(rev_deleted & '.Revision::DELETED_USER.') = 0';
+
+		$sql = "SELECT DISTINCT rev_user_text,rev_user FROM {$page},{$revision} WHERE page_id=rev_page AND $nothidden AND " . $cond ;
		$result = $this->db->query( $sql, $fname );
		$resultset = $this->db->resultObject( $result );
		while( $row = $resultset->fetchObject() ) {
@@ -164,10 +166,10 @@ class WikiExporter {
		$page = $this->db->tableName( 'page' );
		$revision = $this->db->tableName( 'revision' );
		$text = $this->db->tableName( 'text' );
-		
+
		$order = 'ORDER BY page_id';
		$limit = '';
-		
+
		if( $this->history == WikiExporter::FULL ) {
			$join = 'page_id=rev_page';
		} elseif( $this->history == WikiExporter::CURRENT ) {
@@ -185,7 +187,7 @@ class WikiExporter {
				$order .= ', rev_timestamp DESC';
			}
			if ( !empty( $this->history['offset'] ) ) {
-				$join .= " AND rev_timestamp $op " . $this->db->addQuotes( 
+				$join .= " AND rev_timestamp $op " . $this->db->addQuotes(
					$this->db->timestamp( $this->history['offset'] ) );
			}
			if ( !empty( $this->history['limit'] ) ) {
@@ -229,7 +231,7 @@ class WikiExporter {
		$result = $this->db->query( $sql, $fname );
		$wrapper = $this->db->resultObject( $result );
		$this->outputStream( $wrapper );
-		
+
		if ( $this->list_authors ) {
			$this->outputStream( $wrapper );
		}
@@ -279,6 +281,9 @@ class WikiExporter {
	}
 }
 
+/**
+ * @addtogroup Dump
+ */
 class XmlDumpWriter {
 
	/**
@@ -461,6 +466,7 @@ class XmlDumpWriter {
 
 /**
  * Base class for output stream; prints to stdout or buffer or whereever.
+ * @addtogroup Dump
  */
 class DumpOutput {
	function writeOpenStream( $string ) {
@@ -494,6 +500,7 @@ class DumpOutput {
 
 /**
  * Stream outputter to send data to a file.
+ * @addtogroup Dump
  */
 class DumpFileOutput extends DumpOutput {
	var $handle;
@@ -511,6 +518,7 @@ class DumpFileOutput extends DumpOutput {
  * Stream outputter to send data to a file via some filter program.
  * Even if compression is available in a library, using a separate
  * program can allow us to make use of a multi-processor system.
+ * @addtogroup Dump
  */
 class DumpPipeOutput extends DumpFileOutput {
	function DumpPipeOutput( $command, $file = null ) {
@@ -523,6 +531,7 @@ class DumpPipeOutput extends DumpFileOutput {
 
 /**
  * Sends dump output via the gzip compressor.
+ * @addtogroup Dump
  */
 class DumpGZipOutput extends DumpPipeOutput {
	function DumpGZipOutput( $file ) {
@@ -532,6 +541,7 @@ class DumpGZipOutput extends DumpPipeOutput {
 
 /**
  * Sends dump output via the bgzip2 compressor.
+ * @addtogroup Dump
  */
 class DumpBZip2Output extends DumpPipeOutput {
	function DumpBZip2Output( $file ) {
@@ -541,6 +551,7 @@ class DumpBZip2Output extends DumpPipeOutput {
 
 /**
  * Sends dump output via the p7zip compressor.
+ * @addtogroup Dump
  */
 class Dump7ZipOutput extends DumpPipeOutput {
	function Dump7ZipOutput( $file ) {
@@ -558,6 +569,7 @@ class Dump7ZipOutput extends DumpPipeOutput {
  * Dump output filter class.
  * This just does output filtering and streaming; XML formatting is done
  * higher up, so be careful in what you do.
+ * @addtogroup Dump
  */
 class DumpFilter {
	function DumpFilter( &$sink ) {
@@ -603,6 +615,7 @@ class DumpFilter {
 
 /**
  * Simple dump output filter to exclude all talk pages.
+ * @addtogroup Dump
  */
 class DumpNotalkFilter extends DumpFilter {
	function pass( $page ) {
@@ -612,6 +625,7 @@ class DumpNotalkFilter extends DumpFilter {
 
 /**
  * Dump output filter to include or exclude pages in a given set of namespaces.
+ * @addtogroup Dump
  */
 class DumpNamespaceFilter extends DumpFilter {
	var $invert = false;
@@ -666,6 +680,7 @@ class DumpNamespaceFilter extends DumpFilter {
 
 /**
  * Dump output filter to include only the last revision in each page sequence.
+ * @addtogroup Dump
  */
 class DumpLatestFilter extends DumpFilter {
	var $page, $pageString, $rev, $revString;
@@ -697,6 +712,7 @@ class DumpLatestFilter extends DumpFilter {
 
 /**
  * Base class for output stream; prints to stdout or buffer or whereever.
+ * @addtogroup Dump
  */
 class DumpMultiWriter {
	function DumpMultiWriter( $sinks ) {
-- 
cgit v1.2.2
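
Note on the one behavioural change in this patch (apart from the PHP5 constructor rename and the @addtogroup doc tags): do_list_authors() now adds a $nothidden condition so that revisions whose author name was hidden via the rev_deleted bit field never reach the DISTINCT author list. The sketch below is a minimal, standalone illustration of that bitmask test, not MediaWiki code; the flag names mirror the Revision constants, but the numeric values are assumptions made for this example, and only Revision::DELETED_USER itself appears in the patch.

<?php
// Standalone sketch, not MediaWiki code. The constants mimic the rev_deleted
// bit flags on Revision; the numeric values are assumed for illustration.
const DELETED_TEXT       = 1; // revision text hidden
const DELETED_COMMENT    = 2; // edit summary hidden
const DELETED_USER       = 4; // author name hidden (Revision::DELETED_USER in the patch)
const DELETED_RESTRICTED = 8; // visibility further restricted

// The same per-row test the patched SQL expresses as (rev_deleted & ...) = 0.
function authorVisible( $revDeleted ) {
	return ( $revDeleted & DELETED_USER ) === 0;
}

var_dump( authorVisible( 0 ) );                                 // bool(true)  nothing hidden
var_dump( authorVisible( DELETED_COMMENT ) );                   // bool(true)  only the edit summary is hidden
var_dump( authorVisible( DELETED_USER ) );                      // bool(false) author hidden, row excluded
var_dump( authorVisible( DELETED_USER | DELETED_RESTRICTED ) ); // bool(false)

Keeping the check inside the SQL WHERE clause, as the patch does, filters hidden authors before the DISTINCT is applied, instead of fetching every row and testing it in PHP as this sketch does.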