author    Pierre Schmitz <pierre@archlinux.de>    2006-10-11 20:21:25 +0000
committer Pierre Schmitz <pierre@archlinux.de>    2006-10-11 20:21:25 +0000
commit    d81f562b712f2387fa02290bf2ca86392ab356f2 (patch)
tree      d666cdefbe6ac320827a2c6cb473581b46e22c4c /maintenance
parent    183851b06bd6c52f3cae5375f433da720d410447 (diff)
Update to version 1.8.1
Diffstat (limited to 'maintenance')
-rw-r--r--  maintenance/FiveUpgrade.inc | 7
-rw-r--r--  maintenance/InitialiseMessages.inc | 67
-rw-r--r--  maintenance/addwiki.php | 56
-rw-r--r--  maintenance/archives/patch-ipb_anon_only.sql | 44
-rw-r--r--  maintenance/archives/patch-page_no_title_convert.sql | 0
-rw-r--r--  maintenance/archives/patch-profiling.sql | 10
-rw-r--r--  maintenance/archives/patch-recentchanges-utindex.sql | 4
-rw-r--r--  maintenance/attachLatest.php | 6
-rw-r--r--  maintenance/backup.inc | 16
-rw-r--r--  maintenance/checkUsernames.php | 4
-rw-r--r--  maintenance/cleanupDupes.inc | 9
-rw-r--r--  maintenance/cleanupImages.php | 168
-rw-r--r--  maintenance/cleanupTable.inc | 86
-rw-r--r--  maintenance/cleanupTitles.php | 83
-rw-r--r--  maintenance/cleanupWatchlist.php | 3
-rw-r--r--  maintenance/commandLine.inc | 14
-rw-r--r--  maintenance/convertLinks.inc | 4
-rw-r--r--  maintenance/createAndPromote.php | 5
-rw-r--r--  maintenance/deleteBatch.php | 8
-rw-r--r--  maintenance/deleteImageMemcached.php | 6
-rw-r--r--  maintenance/deleteRevision.php | 2
-rw-r--r--  maintenance/dumpBackup.php | 6
-rw-r--r--  maintenance/dumpHTML.inc | 539
-rw-r--r--  maintenance/dumpHTML.php | 99
-rw-r--r--  maintenance/dumpInterwiki.inc | 3
-rw-r--r--  maintenance/dumpSisterSites.php | 49
-rw-r--r--  maintenance/dumpTextPass.php | 34
-rw-r--r--  maintenance/dumpUploads.php | 116
-rw-r--r--  maintenance/fixSlaveDesync.php | 133
-rw-r--r--  maintenance/fuzz-tester.php | 2458
-rw-r--r--  maintenance/generateSitemap.php | 11
-rw-r--r--  maintenance/importImages.php | 35
-rw-r--r--  maintenance/installExtension.php | 642
-rw-r--r--  maintenance/language/alltrans.php | 16
-rw-r--r--  maintenance/language/checkLanguage.php | 177
-rw-r--r--  maintenance/language/checktrans.php | 44
-rw-r--r--  maintenance/language/date-formats.php | 45
-rw-r--r--  maintenance/language/diffLanguage.php | 159
-rw-r--r--  maintenance/language/dumpMessages.php | 20
-rw-r--r--  maintenance/language/duplicatetrans.php | 43
-rw-r--r--  maintenance/language/function-list.php | 44
-rw-r--r--  maintenance/language/lang2po.php | 154
-rw-r--r--  maintenance/language/langmemusage.php | 30
-rw-r--r--  maintenance/language/languages.inc | 686
-rw-r--r--  maintenance/language/splitLanguageFiles.inc | 1168
-rw-r--r--  maintenance/language/splitLanguageFiles.php | 13
-rw-r--r--  maintenance/language/transstat.php | 211
-rw-r--r--  maintenance/language/unusedMessages.php | 42
-rw-r--r--  maintenance/language/validate.php | 40
-rw-r--r--  maintenance/mctest.php | 15
-rw-r--r--  maintenance/mysql5/tables.sql | 23
-rw-r--r--  maintenance/namespaceDupes.php | 13
-rw-r--r--  maintenance/ourusers.php | 43
-rw-r--r--  maintenance/parserTests.inc | 12
-rw-r--r--  maintenance/parserTests.php | 2
-rw-r--r--  maintenance/parserTests.txt | 413
-rw-r--r--  maintenance/postgres/compare_schemas.pl | 181
-rw-r--r--  maintenance/postgres/tables.sql | 156
-rw-r--r--  maintenance/postgres/wp_mysql2postgres.pl | 400
-rw-r--r--  maintenance/rebuildImages.php | 4
-rw-r--r--  maintenance/refreshImageCount.php | 4
-rw-r--r--  maintenance/runJobs.php | 12
-rw-r--r--  maintenance/stats.php | 26
-rw-r--r--  maintenance/storage/checkStorage.php | 936
-rw-r--r--  maintenance/storage/compressOld.inc | 11
-rw-r--r--  maintenance/tables.sql | 121
-rw-r--r--  maintenance/update.php | 12
-rw-r--r--  maintenance/updateSpecialPages.php | 4
-rw-r--r--  maintenance/updaters.inc | 144
-rw-r--r--  maintenance/userDupes.inc | 12
70 files changed, 9108 insertions, 1055 deletions
diff --git a/maintenance/FiveUpgrade.inc b/maintenance/FiveUpgrade.inc
index 7caf6810..4bbf0733 100644
--- a/maintenance/FiveUpgrade.inc
+++ b/maintenance/FiveUpgrade.inc
@@ -64,7 +64,7 @@ class FiveUpgrade {
function &newConnection() {
global $wgDBadminuser, $wgDBadminpassword;
global $wgDBserver, $wgDBname;
- $db =& new Database( $wgDBserver, $wgDBadminuser, $wgDBadminpassword, $wgDBname );
+ $db = new Database( $wgDBserver, $wgDBadminuser, $wgDBadminpassword, $wgDBname );
return $db;
}
@@ -159,8 +159,7 @@ class FiveUpgrade {
* @access private
*/
function log( $message ) {
- global $wgDBname;
- echo $wgDBname . ' ' . wfTimestamp( TS_DB ) . ': ' . $message . "\n";
+ echo wfWikiID() . ' ' . wfTimestamp( TS_DB ) . ': ' . $message . "\n";
flush();
}
@@ -804,7 +803,7 @@ END;
array_shift( $against );
}
- array_push( $pieces, basename( $path ) );
+ array_push( $pieces, wfBaseName( $path ) );
return implode( '/', $pieces );
}
diff --git a/maintenance/InitialiseMessages.inc b/maintenance/InitialiseMessages.inc
index 189fbd25..22e26b94 100644
--- a/maintenance/InitialiseMessages.inc
+++ b/maintenance/InitialiseMessages.inc
@@ -11,9 +11,9 @@
*/
/** */
-function initialiseMessages( $overwrite = false, $messageArray = false ) {
+function initialiseMessages( $overwrite = false, $messageArray = false, $outputCallback = false ) {
global $wgContLang, $wgContLanguageCode;
- global $wgContLangClass, $wgAllMessagesEn;
+ global $wgContLangClass;
global $wgDisableLangConversion;
global $wgForceUIMsgAsContentMsg;
global $wgLanguageNames;
@@ -26,7 +26,7 @@ function initialiseMessages( $overwrite = false, $messageArray = false ) {
if ( $messageArray ) {
$sortedArray = $messageArray;
} else {
- $sortedArray = $wgAllMessagesEn;
+ $sortedArray = Language::getMessagesFor( 'en' );
}
ksort( $sortedArray );
@@ -37,11 +37,7 @@ function initialiseMessages( $overwrite = false, $messageArray = false ) {
$variants[]=$wgContLanguageCode;
foreach ($variants as $v) {
- $langclass = 'Language'. str_replace( '-', '_', ucfirst( $v ) );
- if( !class_exists($langclass) ) {
- wfDie( "class $langclass not defined. perhaps you need to include the file $langclass.php in $wgContLangClass.php?" );
- }
- $lang = new $langclass;
+ $lang = Language::factory( $v );
if($v==$wgContLanguageCode)
$suffix='';
@@ -69,14 +65,14 @@ function initialiseMessages( $overwrite = false, $messageArray = false ) {
}
}
}
- initialiseMessagesReal( $overwrite, $messages );
+ initialiseMessagesReal( $overwrite, $messages, $outputCallback );
}
/** */
-function initialiseMessagesReal( $overwrite = false, $messageArray = false ) {
- global $wgContLang, $wgScript, $wgServer, $wgAllMessagesEn;
+function initialiseMessagesReal( $overwrite = false, $messageArray = false, $outputCallback = false ) {
+ global $wgContLang, $wgScript, $wgServer, $wgLanguageCode;
global $wgOut, $wgArticle, $wgUser;
- global $wgMessageCache, $wgMemc, $wgDBname, $wgUseMemCached;
+ global $wgMessageCache, $wgMemc, $wgUseMemCached;
# Initialise $wgOut and $wgUser for a command line script
$wgOut->disable();
@@ -91,14 +87,24 @@ function initialiseMessagesReal( $overwrite = false, $messageArray = false ) {
$fname = 'initialiseMessages';
$ns = NS_MEDIAWIKI;
- # cur_user_text responsible for the modifications
+ # username responsible for the modifications
# Don't change it unless you're prepared to update the DBs accordingly, otherwise the
- # default messages won't be overwritte
+ # default messages won't be overwritten
$username = 'MediaWiki default';
+ if ( !$outputCallback ) {
+ # Print is not a function, and there doesn't appear to be any built-in
+ # workalikes, so let's just make our own anonymous function to do the
+ # same thing.
+ $outputCallback = create_function( '$s', 'print $s;' );
+ }
- print "Initialising \"MediaWiki\" namespace...\n";
+ $outputCallback( "Initialising \"MediaWiki\" namespace for language code $wgLanguageCode...\n" );
+ # Check that the serialized data files are OK
+ if ( Language::isLocalisationOutOfDate( $wgLanguageCode ) ) {
+ $outputCallback( "Warning: serialized data file may be out of date.\n" );
+ }
$dbr =& wfGetDB( DB_SLAVE );
$dbw =& wfGetDB( DB_MASTER );
@@ -107,13 +113,11 @@ function initialiseMessagesReal( $overwrite = false, $messageArray = false ) {
$timestamp = wfTimestampNow();
- #$sql = "SELECT cur_title,cur_is_new,cur_user_text FROM $cur WHERE cur_namespace=$ns AND cur_title IN(";
- # Get keys from $wgAllMessagesEn, which is more complete than the local language
$first = true;
if ( $messageArray ) {
$sortedArray = $messageArray;
} else {
- $sortedArray = $wgAllMessagesEn;
+ $sortedArray = $wgContLang->getAllMessages();
}
ksort( $sortedArray );
@@ -132,7 +136,7 @@ function initialiseMessagesReal( $overwrite = false, $messageArray = false ) {
foreach ($chunks as $chunk) {
$first = true;
$sql = "SELECT page_title,page_is_new,rev_user_text FROM $page, $revision WHERE
- page_namespace=$ns AND rev_page=page_id AND page_title IN(";
+ page_namespace=$ns AND rev_id=page_latest AND page_title IN(";
foreach ( $chunk as $key => $enMsg ) {
if ( $key == '' ) {
@@ -171,20 +175,28 @@ function initialiseMessagesReal( $overwrite = false, $messageArray = false ) {
$talk = $wgContLang->getNsText( NS_TALK );
$mwtalk = $wgContLang->getNsText( NS_MEDIAWIKI_TALK );
+ $numUpdated = 0;
+ $numKept = 0;
+ $numInserted = 0;
+
# Merge these into a single transaction for speed
$dbw->begin();
# Process each message
- foreach ( $sortedArray as $key => $enMsg ) {
+ foreach ( $sortedArray as $key => $message ) {
if ( $key == '' ) {
continue; // Skip odd members
}
# Get message text
- if ( $messageArray ) {
- $message = $enMsg;
- } else {
+ if ( !$messageArray ) {
$message = wfMsgNoDBForContent( $key );
}
+ if ( is_null( $message ) ) {
+ # This happens sometimes with out of date serialized data files
+ $outputCallback( "Warning: Skipping null message $key\n" );
+ continue;
+ }
+
$titleObj = Title::newFromText( $wgContLang->ucfirst( $key ), NS_MEDIAWIKI );
$title = $titleObj->getDBkey();
@@ -197,7 +209,12 @@ function initialiseMessagesReal( $overwrite = false, $messageArray = false ) {
if( is_null( $revision ) || $revision->getText() != $message ) {
$article = new Article( $titleObj );
$article->quickEdit( $message );
+ ++$numUpdated;
+ } else {
+ ++$numKept;
}
+ } else {
+ ++$numKept;
}
} else {
$article = new Article( $titleObj );
@@ -212,14 +229,14 @@ function initialiseMessagesReal( $overwrite = false, $messageArray = false ) {
) );
$revid = $revision->insertOn( $dbw );
$article->updateRevisionOn( $dbw, $revision );
+ ++$numInserted;
}
}
$dbw->commit();
# Clear the relevant memcached key
- print 'Clearing message cache...';
$wgMessageCache->clear();
- print "Done.\n";
+ $outputCallback( "Done. Updated: $numUpdated, inserted: $numInserted, kept: $numKept.\n" );
}
/** */
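
For reference, a minimal sketch of driving the new $outputCallback parameter from a command-line context; the "[init] " prefix is purely illustrative, only initialiseMessages() and its signature come from the hunk above:

<?php
require_once( 'commandLine.inc' );
require_once( 'InitialiseMessages.inc' );

// Route all progress output through a callback instead of bare print,
// e.g. to prefix each line (create_function matches the PHP 4/5 era
// style used in the patch itself).
$callback = create_function( '$s', 'print "[init] " . $s;' );

// Overwrite existing MediaWiki-namespace messages, reporting via $callback.
initialiseMessages( true, false, $callback );
?>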
diff --git a/maintenance/addwiki.php b/maintenance/addwiki.php
index 253033a3..b7843632 100644
--- a/maintenance/addwiki.php
+++ b/maintenance/addwiki.php
@@ -33,26 +33,40 @@ function addWiki( $lang, $site, $dbName )
print "Initialising tables\n";
dbsource( "$maintenance/tables.sql", $dbw );
dbsource( "$IP/extensions/OAI/update_table.sql", $dbw );
+ dbsource( "$IP/extensions/AntiSpoof/mysql/patch-antispoof.sql", $dbw );
$dbw->query( "INSERT INTO site_stats(ss_row_id) VALUES (1)" );
# Initialise external storage
- if ( $wgDefaultExternalStore && preg_match( '!^DB://(.*)$!', $wgDefaultExternalStore, $m ) ) {
- print "Initialising external storage...\n";
+ if ( is_array( $wgDefaultExternalStore ) ) {
+ $stores = $wgDefaultExternalStore;
+ } elseif ( $stores ) {
+ $stores = array( $wgDefaultExternalStore );
+ } else {
+ $stores = array();
+ }
+ if ( count( $stores ) ) {
require_once( 'ExternalStoreDB.php' );
+ print "Initialising external storage $store...\n";
global $wgDBuser, $wgDBpassword, $wgExternalServers;
- $cluster = $m[1];
-
- # Hack
- $wgExternalServers[$cluster][0]['user'] = $wgDBuser;
- $wgExternalServers[$cluster][0]['password'] = $wgDBpassword;
-
- $store = new ExternalStoreDB;
- $extdb =& $store->getMaster( $cluster );
- $extdb->query( "SET table_type=InnoDB" );
- $extdb->query( "CREATE DATABASE $dbName" );
- $extdb->selectDB( $dbName );
- dbsource( "$maintenance/storage/blobs.sql", $extdb );
- $extdb->immediateCommit();
+ foreach ( $stores as $storeURL ) {
+ if ( !preg_match( '!^DB://(.*)$!', $storeURL, $m ) ) {
+ continue;
+ }
+
+ $cluster = $m[1];
+
+ # Hack
+ $wgExternalServers[$cluster][0]['user'] = $wgDBuser;
+ $wgExternalServers[$cluster][0]['password'] = $wgDBpassword;
+
+ $store = new ExternalStoreDB;
+ $extdb =& $store->getMaster( $cluster );
+ $extdb->query( "SET table_type=InnoDB" );
+ $extdb->query( "CREATE DATABASE $dbName" );
+ $extdb->selectDB( $dbName );
+ dbsource( "$maintenance/storage/blobs.sql", $extdb );
+ $extdb->immediateCommit();
+ }
}
$wgTitle = Title::newMainPage();
@@ -203,7 +217,17 @@ See the [http://www.wikipedia.org Wikipedia portal] for other language Wikipedia
fclose( $file );
print "Sourcing interwiki SQL\n";
dbsource( $tempname, $dbw );
- unlink( $tempname );
+ #unlink( $tempname );
+
+ # Create the upload dir
+ global $wgUploadDirectory;
+ if( file_exists( $wgUploadDirectory ) ) {
+ echo "$wgUploadDirectory already exists.\n";
+ } else {
+ echo "Creating $wgUploadDirectory...\n";
+ mkdir( $wgUploadDirectory, 0777 );
+ chmod( $wgUploadDirectory, 0777 );
+ }
print "Script ended. You now want to run sync-common-all to publish *dblist files (check them for duplicates first)\n";
}
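
The rewritten external-storage block above accepts either a single DB:// URL or an array of them in $wgDefaultExternalStore; a minimal LocalSettings-style sketch (the cluster names are hypothetical):

<?php
// One cluster, old-style scalar form:
$wgDefaultExternalStore = 'DB://cluster1';

// Or several clusters, which addWiki() now iterates over:
$wgDefaultExternalStore = array( 'DB://cluster1', 'DB://cluster2' );
?>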
diff --git a/maintenance/archives/patch-ipb_anon_only.sql b/maintenance/archives/patch-ipb_anon_only.sql
new file mode 100644
index 00000000..709308a2
--- /dev/null
+++ b/maintenance/archives/patch-ipb_anon_only.sql
@@ -0,0 +1,44 @@
+-- Add extra option fields to the ipblocks table, add some extra indexes,
+-- convert infinity values in ipb_expiry to something that sorts better,
+-- extend ipb_address and range fields, add a unique index for block conflict
+-- detection.
+
+-- Conflicts in the new unique index can be handled by creating a new
+-- table and inserting into it instead of doing an ALTER TABLE.
+
+
+DROP TABLE IF EXISTS /*$wgDBprefix*/ipblocks_newunique;
+
+CREATE TABLE /*$wgDBprefix*/ipblocks_newunique (
+ ipb_id int(8) NOT NULL auto_increment,
+ ipb_address tinyblob NOT NULL default '',
+ ipb_user int(8) unsigned NOT NULL default '0',
+ ipb_by int(8) unsigned NOT NULL default '0',
+ ipb_reason tinyblob NOT NULL default '',
+ ipb_timestamp char(14) binary NOT NULL default '',
+ ipb_auto bool NOT NULL default 0,
+ ipb_anon_only bool NOT NULL default 0,
+ ipb_create_account bool NOT NULL default 1,
+ ipb_expiry char(14) binary NOT NULL default '',
+ ipb_range_start tinyblob NOT NULL default '',
+ ipb_range_end tinyblob NOT NULL default '',
+
+ PRIMARY KEY ipb_id (ipb_id),
+ UNIQUE INDEX ipb_address_unique (ipb_address(255), ipb_user, ipb_auto),
+ INDEX ipb_user (ipb_user),
+ INDEX ipb_range (ipb_range_start(8), ipb_range_end(8)),
+ INDEX ipb_timestamp (ipb_timestamp),
+ INDEX ipb_expiry (ipb_expiry)
+
+) TYPE=InnoDB;
+
+INSERT IGNORE INTO /*$wgDBprefix*/ipblocks_newunique
+ (ipb_id, ipb_address, ipb_user, ipb_by, ipb_reason, ipb_timestamp, ipb_auto, ipb_expiry, ipb_range_start, ipb_range_end, ipb_anon_only, ipb_create_account)
+ SELECT ipb_id, ipb_address, ipb_user, ipb_by, ipb_reason, ipb_timestamp, ipb_auto, ipb_expiry, ipb_range_start, ipb_range_end, 0 , ipb_user=0
+ FROM /*$wgDBprefix*/ipblocks;
+
+DROP TABLE IF EXISTS /*$wgDBprefix*/ipblocks_old;
+RENAME TABLE /*$wgDBprefix*/ipblocks TO /*$wgDBprefix*/ipblocks_old;
+RENAME TABLE /*$wgDBprefix*/ipblocks_newunique TO /*$wgDBprefix*/ipblocks;
+
+UPDATE /*$wgDBprefix*/ipblocks SET ipb_expiry='infinity' WHERE ipb_expiry='';
diff --git a/maintenance/archives/patch-page_no_title_convert.sql b/maintenance/archives/patch-page_no_title_convert.sql
new file mode 100644
index 00000000..e69de29b
--- /dev/null
+++ b/maintenance/archives/patch-page_no_title_convert.sql
diff --git a/maintenance/archives/patch-profiling.sql b/maintenance/archives/patch-profiling.sql
index 49b488e9..bafd2b67 100644
--- a/maintenance/archives/patch-profiling.sql
+++ b/maintenance/archives/patch-profiling.sql
@@ -2,9 +2,9 @@
-- This is optional
CREATE TABLE /*$wgDBprefix*/profiling (
- pf_count integer not null default 0,
- pf_time float not null default 0,
- pf_name varchar(255) not null default '',
- pf_server varchar(30) not null default '',
- UNIQUE KEY pf_name_server (pf_name, pf_server)
+ pf_count int NOT NULL default 0,
+ pf_time float NOT NULL default 0,
+ pf_name varchar(255) NOT NULL default '',
+ pf_server varchar(30) NOT NULL default '',
+ UNIQUE KEY pf_name_server (pf_name, pf_server)
) TYPE=HEAP;
diff --git a/maintenance/archives/patch-recentchanges-utindex.sql b/maintenance/archives/patch-recentchanges-utindex.sql
new file mode 100644
index 00000000..4ebe3165
--- /dev/null
+++ b/maintenance/archives/patch-recentchanges-utindex.sql
@@ -0,0 +1,4 @@
+--- July 2006
+--- Index on recentchanges.( rc_namespace, rc_user_text )
+--- Helps the username filtering in Special:Newpages
+ALTER TABLE /*$wgDBprefix*/recentchanges ADD INDEX `rc_ns_usertext` ( `rc_namespace` , `rc_user_text` );
\ No newline at end of file
diff --git a/maintenance/attachLatest.php b/maintenance/attachLatest.php
index 024a4fac..f4c11c01 100644
--- a/maintenance/attachLatest.php
+++ b/maintenance/attachLatest.php
@@ -47,17 +47,17 @@ while( $row = $dbw->fetchObject( $result ) ) {
array( 'rev_page' => $pageId ),
$fname );
if( !$latestTime ) {
- echo "$wgDBname $pageId [[$name]] can't find latest rev time?!\n";
+ echo wfWikiID()." $pageId [[$name]] can't find latest rev time?!\n";
continue;
}
$revision = Revision::loadFromTimestamp( $dbw, $title, $latestTime );
if( is_null( $revision ) ) {
- echo "$wgDBname $pageId [[$name]] latest time $latestTime, can't find revision id\n";
+ echo wfWikiID()." $pageId [[$name]] latest time $latestTime, can't find revision id\n";
continue;
}
$id = $revision->getId();
- echo "$wgDBname $pageId [[$name]] latest time $latestTime, rev id $id\n";
+ echo wfWikiID()." $pageId [[$name]] latest time $latestTime, rev id $id\n";
if( $fixit ) {
$article = new Article( $title );
$article->updateRevisionOn( $dbw, $revision );
diff --git a/maintenance/backup.inc b/maintenance/backup.inc
index d3603bd1..8b4b6726 100644
--- a/maintenance/backup.inc
+++ b/maintenance/backup.inc
@@ -168,9 +168,6 @@ class BackupDumper {
}
function dump( $history, $text = MW_EXPORT_TEXT ) {
- # This shouldn't happen if on console... ;)
- header( 'Content-type: text/html; charset=UTF-8' );
-
# Notice messages will foul up your XML output even if they're
# relatively harmless.
ini_set( 'display_errors', false );
@@ -206,11 +203,11 @@ class BackupDumper {
* Initialise starting time and maximum revision count.
* We'll make ETA calculations based an progress, assuming relatively
* constant per-revision rate.
- * @param int $history MW_EXPORT_CURRENT or MW_EXPORT_FULL
+ * @param int $history WikiExporter::CURRENT or WikiExporter::FULL
*/
- function initProgress( $history = MW_EXPORT_FULL ) {
- $table = ($history == MW_EXPORT_CURRENT) ? 'page' : 'revision';
- $field = ($history == MW_EXPORT_CURRENT) ? 'page_id' : 'rev_id';
+ function initProgress( $history = WikiExporter::FULL ) {
+ $table = ($history == WikiExporter::CURRENT) ? 'page' : 'revision';
+ $field = ($history == WikiExporter::CURRENT) ? 'page_id' : 'rev_id';
$dbr =& wfGetDB( DB_SLAVE );
$this->maxCount = $dbr->selectField( $table, "MAX($field)", '', 'BackupDumper::dump' );
@@ -221,7 +218,7 @@ class BackupDumper {
global $wgDBadminuser, $wgDBadminpassword;
global $wgDBname, $wgDebugDumpSql;
$flags = ($wgDebugDumpSql ? DBO_DEBUG : 0) | DBO_DEFAULT; // god-damn hack
- $db =& new Database( $this->backupServer(), $wgDBadminuser, $wgDBadminpassword, $wgDBname, false, $flags );
+ $db = new Database( $this->backupServer(), $wgDBadminuser, $wgDBadminpassword, $wgDBname, false, $flags );
$timeout = 3600 * 24;
$db->query( "SET net_read_timeout=$timeout" );
$db->query( "SET net_write_timeout=$timeout" );
@@ -265,9 +262,8 @@ class BackupDumper {
$revrate = '-';
$etats = '-';
}
- global $wgDBname;
$this->progress( sprintf( "%s: %s %d pages (%0.3f/sec), %d revs (%0.3f/sec), ETA %s [max %d]",
- $now, $wgDBname, $this->pageCount, $rate, $this->revCount, $revrate, $etats, $this->maxCount ) );
+ $now, wfWikiID(), $this->pageCount, $rate, $this->revCount, $revrate, $etats, $this->maxCount ) );
}
}
diff --git a/maintenance/checkUsernames.php b/maintenance/checkUsernames.php
index b577ebc6..4c0ecdce 100644
--- a/maintenance/checkUsernames.php
+++ b/maintenance/checkUsernames.php
@@ -10,7 +10,6 @@ class checkUsernames {
$this->log = fopen( '/home/wikipedia/logs/checkUsernames.log', 'at' );
}
function main() {
- global $wgDBname;
$fname = 'checkUsernames::main';
$dbr =& wfGetDB( DB_SLAVE );
@@ -21,10 +20,9 @@ class checkUsernames {
$fname
);
- #fwrite( $this->stderr, "Checking $wgDBname\n" );
while ( $row = $dbr->fetchObject( $res ) ) {
if ( ! User::isValidUserName( $row->user_name ) ) {
- $out = sprintf( "%s: %6d: '%s'\n", $wgDBname, $row->user_id, $row->user_name );
+ $out = sprintf( "%s: %6d: '%s'\n", wfWikiID(), $row->user_id, $row->user_name );
fwrite( $this->stderr, $out );
fwrite( $this->log, $out );
}
diff --git a/maintenance/cleanupDupes.inc b/maintenance/cleanupDupes.inc
index 18daab08..5db6bb39 100644
--- a/maintenance/cleanupDupes.inc
+++ b/maintenance/cleanupDupes.inc
@@ -113,19 +113,18 @@ END
}
function checkDupes( $fixthem = false, $indexonly = false ) {
- global $wgDBname;
$dbw =& wfGetDB( DB_MASTER );
if( $dbw->indexExists( 'cur', 'name_title' ) &&
$dbw->indexUnique( 'cur', 'name_title' ) ) {
- echo "$wgDBname: cur table has the current unique index; no duplicate entries.\n";
+ echo wfWikiID().": cur table has the current unique index; no duplicate entries.\n";
} elseif( $dbw->indexExists( 'cur', 'name_title_dup_prevention' ) ) {
- echo "$wgDBname: cur table has a temporary name_title_dup_prevention unique index; no duplicate entries.\n";
+ echo wfWikiID().": cur table has a temporary name_title_dup_prevention unique index; no duplicate entries.\n";
} else {
- echo "$wgDBname: cur table has the old non-unique index and may have duplicate entries.\n";
+ echo wfWikiID().": cur table has the old non-unique index and may have duplicate entries.\n";
if( !$indexonly ) {
fixDupes( $fixthem );
}
}
}
-?>
\ No newline at end of file
+?>
diff --git a/maintenance/cleanupImages.php b/maintenance/cleanupImages.php
new file mode 100644
index 00000000..8ae5561a
--- /dev/null
+++ b/maintenance/cleanupImages.php
@@ -0,0 +1,168 @@
+<?php
+/*
+ * Script to clean up broken, unparseable upload filenames.
+ *
+ * Usage: php cleanupImages.php [--fix]
+ * Options:
+ * --fix Actually clean up titles; otherwise just checks for them
+ *
+ * Copyright (C) 2005-2006 Brion Vibber <brion@pobox.com>
+ * http://www.mediawiki.org/
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License along
+ * with this program; if not, write to the Free Software Foundation, Inc.,
+ * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ * http://www.gnu.org/copyleft/gpl.html
+ *
+ * @author Brion Vibber <brion at pobox.com>
+ * @package MediaWiki
+ * @subpackage maintenance
+ */
+
+require_once( 'commandLine.inc' );
+require_once( 'cleanupTable.inc' );
+
+class ImageCleanup extends TableCleanup {
+ function __construct( $dryrun = false ) {
+ parent::__construct( 'image', $dryrun );
+ }
+
+ function processPage( $row ) {
+ global $wgContLang;
+
+ $source = $row->img_name;
+ if( $source == '' ) {
+ // Ye olde empty rows. Just kill them.
+ $this->killRow( $source );
+ return $this->progress( 1 );
+ }
+
+ $cleaned = $source;
+
+ // About half of old bad image names have percent-codes
+ $cleaned = rawurldecode( $cleaned );
+
+ // Some are old latin-1
+ $cleaned = $wgContLang->checkTitleEncoding( $cleaned );
+
+ // Many of remainder look like non-normalized unicode
+ $cleaned = UtfNormal::cleanUp( $cleaned );
+
+ $title = Title::makeTitleSafe( NS_IMAGE, $cleaned );
+
+ if( is_null( $title ) ) {
+ $this->log( "page $source ($cleaned) is illegal." );
+ $safe = $this->buildSafeTitle( $cleaned );
+ $this->pokeFile( $source, $safe );
+ return $this->progress( 1 );
+ }
+
+ if( $title->getDbKey() !== $source ) {
+ $munged = $title->getDbKey();
+ $this->log( "page $source ($munged) doesn't match self." );
+ $this->pokeFile( $source, $munged );
+ return $this->progress( 1 );
+ }
+
+ $this->progress( 0 );
+ }
+
+ function killRow( $name ) {
+ if( $this->dryrun ) {
+ $this->log( "DRY RUN: would delete bogus row '$name'" );
+ } else {
+ $this->log( "deleting bogus row '$name'" );
+ $db = wfGetDB( DB_MASTER );
+ $db->delete( 'image',
+ array( 'img_name' => $name ),
+ __METHOD__ );
+ }
+ }
+
+ function filePath( $name ) {
+ return wfImageDir( $name ) . "/$name";
+ }
+
+ function pokeFile( $orig, $new ) {
+ $path = $this->filePath( $orig );
+ if( !file_exists( $path ) ) {
+ $this->log( "missing file: $path" );
+ return $this->killRow( $orig );
+ }
+
+ $db = wfGetDB( DB_MASTER );
+ $version = 0;
+ $final = $new;
+
+ while( $db->selectField( 'image', 'img_name',
+ array( 'img_name' => $final ), __METHOD__ ) ) {
+ $this->log( "Rename conflicts with '$final'..." );
+ $version++;
+ $final = $this->appendTitle( $new, "_$version" );
+ }
+
+ $finalPath = $this->filePath( $final );
+
+ if( $this->dryrun ) {
+ $this->log( "DRY RUN: would rename $path to $finalPath" );
+ } else {
+ $this->log( "renaming $path to $finalPath" );
+ $db->begin();
+ $db->update( 'image',
+ array( 'img_name' => $final ),
+ array( 'img_name' => $orig ),
+ __METHOD__ );
+ $dir = dirname( $finalPath );
+ if( !file_exists( $dir ) ) {
+ if( !mkdir( $dir, 0777, true ) ) {
+ $this->log( "RENAME FAILED, COULD NOT CREATE $dir" );
+ $db->rollback();
+ return;
+ }
+ }
+ if( rename( $path, $finalPath ) ) {
+ $db->commit();
+ } else {
+ $this->log( "RENAME FAILED" );
+ $db->rollback();
+ }
+ }
+ }
+
+ function appendTitle( $name, $suffix ) {
+ return preg_replace( '/^(.*)(\..*?)$/',
+ "\\1$suffix\\2", $name );
+ }
+
+ function buildSafeTitle( $name ) {
+ global $wgLegalTitleChars;
+ $x = preg_replace_callback(
+ "/([^$wgLegalTitleChars])/",
+ array( $this, 'hexChar' ),
+ $name );
+
+ $test = Title::makeTitleSafe( NS_IMAGE, $x );
+ if( is_null( $test ) || $test->getDbKey() !== $x ) {
+ $this->log( "Unable to generate safe title from '$name', got '$x'" );
+ return false;
+ }
+
+ return $x;
+ }
+}
+
+$wgUser->setName( 'Conversion script' );
+$caps = new ImageCleanup( !isset( $options['fix'] ) );
+$caps->cleanup();
+
+?>
diff --git a/maintenance/cleanupTable.inc b/maintenance/cleanupTable.inc
new file mode 100644
index 00000000..cc551bce
--- /dev/null
+++ b/maintenance/cleanupTable.inc
@@ -0,0 +1,86 @@
+<?php
+
+require_once( 'FiveUpgrade.inc' );
+
+abstract class TableCleanup extends FiveUpgrade {
+ function __construct( $table, $dryrun = false ) {
+ parent::__construct();
+
+ $this->targetTable = $table;
+ $this->maxLag = 10; # if slaves are lagged more than 10 secs, wait
+ $this->dryrun = $dryrun;
+ }
+
+ function cleanup() {
+ if( $this->dryrun ) {
+ echo "Checking for bad titles...\n";
+ } else {
+ echo "Checking and fixing bad titles...\n";
+ }
+ $this->runTable( $this->targetTable,
+ '', //'WHERE page_namespace=0',
+ array( $this, 'processPage' ) );
+ }
+
+ function init( $count, $table ) {
+ $this->processed = 0;
+ $this->updated = 0;
+ $this->count = $count;
+ $this->startTime = wfTime();
+ $this->table = $table;
+ }
+
+ function progress( $updated ) {
+ $this->updated += $updated;
+ $this->processed++;
+ if( $this->processed % 100 != 0 ) {
+ return;
+ }
+ $portion = $this->processed / $this->count;
+ $updateRate = $this->updated / $this->processed;
+
+ $now = wfTime();
+ $delta = $now - $this->startTime;
+ $estimatedTotalTime = $delta / $portion;
+ $eta = $this->startTime + $estimatedTotalTime;
+
+ printf( "%s %s: %6.2f%% done on %s; ETA %s [%d/%d] %.2f/sec <%.2f%% updated>\n",
+ wfWikiID(),
+ wfTimestamp( TS_DB, intval( $now ) ),
+ $portion * 100.0,
+ $this->table,
+ wfTimestamp( TS_DB, intval( $eta ) ),
+ $this->processed,
+ $this->count,
+ $this->processed / $delta,
+ $updateRate * 100.0 );
+ flush();
+ }
+
+ function runTable( $table, $where, $callback ) {
+ $fname = 'CapsCleanup::buildTable';
+
+ $count = $this->dbw->selectField( $table, 'count(*)', '', $fname );
+ $this->init( $count, $table );
+ $this->log( "Processing $table..." );
+
+ $tableName = $this->dbr->tableName( $table );
+ $sql = "SELECT * FROM $tableName $where";
+ $result = $this->dbr->query( $sql, $fname );
+
+ while( $row = $this->dbr->fetchObject( $result ) ) {
+ $updated = call_user_func( $callback, $row );
+ }
+ $this->log( "Finished $table... $this->updated of $this->processed rows updated" );
+ $this->dbr->freeResult( $result );
+ }
+
+ function hexChar( $matches ) {
+ return sprintf( "\\x%02x", ord( $matches[1] ) );
+ }
+
+ abstract function processPage( $row );
+
+}
+
+?>
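
TableCleanup above is abstract; a concrete cleanup only needs a constructor naming its table and a processPage() implementation. A minimal hypothetical sketch (the 'user' table target and the validity check are illustrative, not part of this commit):

<?php
require_once( 'commandLine.inc' );
require_once( 'cleanupTable.inc' );

class UserNameCleanup extends TableCleanup {
	function __construct( $dryrun = false ) {
		// Walk the user table instead of page or image.
		parent::__construct( 'user', $dryrun );
	}

	function processPage( $row ) {
		// Report progress: 1 = row needed attention, 0 = row was fine.
		if ( !User::isValidUserName( $row->user_name ) ) {
			$this->log( "invalid user name: '{$row->user_name}'" );
			return $this->progress( 1 );
		}
		return $this->progress( 0 );
	}
}

$cleanup = new UserNameCleanup( !isset( $options['fix'] ) );
$cleanup->cleanup();
?>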
diff --git a/maintenance/cleanupTitles.php b/maintenance/cleanupTitles.php
index 930072de..12e07b67 100644
--- a/maintenance/cleanupTitles.php
+++ b/maintenance/cleanupTitles.php
@@ -2,9 +2,9 @@
/*
* Script to clean up broken, unparseable titles.
*
- * Usage: php cleanupTitles.php [--dry-run]
+ * Usage: php cleanupTitles.php [--fix]
* Options:
- * --dry-run don't actually try moving them
+ * --fix Actually clean up titles; otherwise just checks for them
*
* Copyright (C) 2005 Brion Vibber <brion@pobox.com>
* http://www.mediawiki.org/
@@ -29,77 +29,12 @@
* @subpackage maintenance
*/
-$options = array( 'dry-run' );
-
require_once( 'commandLine.inc' );
-require_once( 'FiveUpgrade.inc' );
-
-class TitleCleanup extends FiveUpgrade {
- function TitleCleanup( $dryrun = false ) {
- parent::FiveUpgrade();
-
- $this->maxLag = 10; # if slaves are lagged more than 10 secs, wait
- $this->dryrun = $dryrun;
- }
-
- function cleanup() {
- $this->runTable( 'page',
- '', //'WHERE page_namespace=0',
- array( &$this, 'processPage' ) );
- }
+require_once( 'cleanupTable.inc' );
- function init( $count, $table ) {
- $this->processed = 0;
- $this->updated = 0;
- $this->count = $count;
- $this->startTime = wfTime();
- $this->table = $table;
- }
-
- function progress( $updated ) {
- $this->updated += $updated;
- $this->processed++;
- if( $this->processed % 100 != 0 ) {
- return;
- }
- $portion = $this->processed / $this->count;
- $updateRate = $this->updated / $this->processed;
-
- $now = wfTime();
- $delta = $now - $this->startTime;
- $estimatedTotalTime = $delta / $portion;
- $eta = $this->startTime + $estimatedTotalTime;
-
- global $wgDBname;
- printf( "%s %s: %6.2f%% done on %s; ETA %s [%d/%d] %.2f/sec <%.2f%% updated>\n",
- $wgDBname,
- wfTimestamp( TS_DB, intval( $now ) ),
- $portion * 100.0,
- $this->table,
- wfTimestamp( TS_DB, intval( $eta ) ),
- $this->processed,
- $this->count,
- $this->processed / $delta,
- $updateRate * 100.0 );
- flush();
- }
-
- function runTable( $table, $where, $callback ) {
- $fname = 'CapsCleanup::buildTable';
-
- $count = $this->dbw->selectField( $table, 'count(*)', '', $fname );
- $this->init( $count, 'page' );
- $this->log( "Processing $table..." );
-
- $tableName = $this->dbr->tableName( $table );
- $sql = "SELECT * FROM $tableName $where";
- $result = $this->dbr->query( $sql, $fname );
-
- while( $row = $this->dbr->fetchObject( $result ) ) {
- $updated = call_user_func( $callback, $row );
- }
- $this->log( "Finished $table... $this->updated of $this->processed rows updated" );
- $this->dbr->freeResult( $result );
+class TitleCleanup extends TableCleanup {
+ function __construct( $dryrun = false ) {
+ parent::__construct( 'page', $dryrun );
}
function processPage( $row ) {
@@ -197,14 +132,10 @@ class TitleCleanup extends FiveUpgrade {
$linkCache->clear();
}
}
-
- function hexChar( $matches ) {
- return sprintf( "\\x%02x", ord( $matches[1] ) );
- }
}
$wgUser->setName( 'Conversion script' );
-$caps = new TitleCleanup( isset( $options['dry-run'] ) );
+$caps = new TitleCleanup( !isset( $options['fix'] ) );
$caps->cleanup();
?>
diff --git a/maintenance/cleanupWatchlist.php b/maintenance/cleanupWatchlist.php
index d2925db3..027859a4 100644
--- a/maintenance/cleanupWatchlist.php
+++ b/maintenance/cleanupWatchlist.php
@@ -70,9 +70,8 @@ class WatchlistCleanup extends FiveUpgrade {
$estimatedTotalTime = $delta / $portion;
$eta = $this->startTime + $estimatedTotalTime;
- global $wgDBname;
printf( "%s %s: %6.2f%% done on %s; ETA %s [%d/%d] %.2f/sec <%.2f%% updated>\n",
- $wgDBname,
+ wfWikiID(),
wfTimestamp( TS_DB, intval( $now ) ),
$portion * 100.0,
$this->table,
diff --git a/maintenance/commandLine.inc b/maintenance/commandLine.inc
index 2bb5389e..2549057e 100644
--- a/maintenance/commandLine.inc
+++ b/maintenance/commandLine.inc
@@ -28,16 +28,15 @@ if ( !isset( $optionsWithArgs ) ) {
$optionsWithArgs[] = 'conf'; # For specifying the location of LocalSettings.php
$self = array_shift( $argv );
-$self = __FILE__;
-$IP = realpath( dirname( $self ) . '/..' );
+$IP = realpath( dirname( __FILE__ ) . '/..' );
#chdir( $IP );
+require_once( "$IP/StartProfiler.php" );
$options = array();
$args = array();
# Parse arguments
-
for( $arg = reset( $argv ); $arg !== false; $arg = next( $argv ) ) {
if ( $arg == '--' ) {
# End of options, remainder should be considered arguments
@@ -133,6 +132,7 @@ if ( file_exists( '/home/wikipedia/common/langlist' ) ) {
# This is for the IRC scripts, which now run as the apache user
# The apache user doesn't have access to the wikiadmin_pass command
if ( $_ENV['USER'] == 'apache' ) {
+ #if ( posix_geteuid() == 48 ) {
$wgUseNormalUser = true;
}
@@ -141,7 +141,7 @@ if ( file_exists( '/home/wikipedia/common/langlist' ) ) {
$DP = $IP;
ini_set( 'include_path', ".:$IP:$IP/includes:$IP/languages:$IP/maintenance" );
- require_once( $IP.'/includes/ProfilerStub.php' );
+ #require_once( $IP.'/includes/ProfilerStub.php' );
require_once( $IP.'/includes/Defines.php' );
require_once( $IP.'/CommonSettings.php' );
@@ -168,7 +168,7 @@ if ( file_exists( '/home/wikipedia/common/langlist' ) ) {
}
$wgCommandLineMode = true;
$DP = $IP;
- require_once( $IP.'/includes/ProfilerStub.php' );
+ #require_once( $IP.'/includes/ProfilerStub.php' );
require_once( $IP.'/includes/Defines.php' );
require_once( $settingsFile );
ini_set( 'include_path', ".$sep$IP$sep$IP/includes$sep$IP/languages$sep$IP/maintenance" );
@@ -202,9 +202,11 @@ if ( defined( 'MW_CMDLINE_CALLBACK' ) ) {
ini_set( 'memory_limit', -1 );
+$wgShowSQLErrors = true;
+
require_once( 'Setup.php' );
require_once( 'install-utils.inc' );
-$wgTitle = Title::newFromText( 'Command line script' );
+$wgTitle = null; # Much much faster startup than creating a title object
set_time_limit(0);
// --------------------------------------------------------------------
diff --git a/maintenance/convertLinks.inc b/maintenance/convertLinks.inc
index f0d2c439..5f8c27a5 100644
--- a/maintenance/convertLinks.inc
+++ b/maintenance/convertLinks.inc
@@ -8,8 +8,8 @@
/** */
function convertLinks() {
global $wgDBtype;
- if( $wgDBtype == 'PostgreSQL' ) {
- print "Links table already ok on PostgreSQL.\n";
+ if( $wgDBtype == 'postgres' ) {
+ print "Links table already ok on Postgres.\n";
return;
}
diff --git a/maintenance/createAndPromote.php b/maintenance/createAndPromote.php
index df29c114..43ddcdd1 100644
--- a/maintenance/createAndPromote.php
+++ b/maintenance/createAndPromote.php
@@ -18,8 +18,7 @@ if( !count( $args ) == 2 ) {
$username = $args[0];
$password = $args[1];
-global $wgDBname;
-echo( "{$wgDBname}: Creating and promoting User:{$username}..." );
+echo( wfWikiID() . ": Creating and promoting User:{$username}..." );
# Validate username and check it doesn't exist
$user = User::newFromName( $username );
@@ -45,4 +44,4 @@ $ssu->doUpdate();
echo( "done.\n" );
-?>
\ No newline at end of file
+?>
diff --git a/maintenance/deleteBatch.php b/maintenance/deleteBatch.php
index 697dffd7..234744c3 100644
--- a/maintenance/deleteBatch.php
+++ b/maintenance/deleteBatch.php
@@ -71,9 +71,13 @@ for ( $linenum = 1; !feof( $file ); $linenum++ ) {
} else {
$art = new Article( $page );
}
- $art->doDelete( $reason );
+ $success = $art->doDeleteArticle( $reason );
$dbw->immediateCommit();
- print "\n";
+ if ( $success ) {
+ print "\n";
+ } else {
+ print " FAILED\n";
+ }
if ( $interval ) {
sleep( $interval );
diff --git a/maintenance/deleteImageMemcached.php b/maintenance/deleteImageMemcached.php
index 4e17d21e..6af0e3a9 100644
--- a/maintenance/deleteImageMemcached.php
+++ b/maintenance/deleteImageMemcached.php
@@ -14,7 +14,7 @@ class DeleteImageCache {
}
function main() {
- global $wgMemc, $wgDBname;
+ global $wgMemc;
$fname = 'DeleteImageCache::main';
ini_set( 'display_errors', false );
@@ -32,9 +32,9 @@ class DeleteImageCache {
while ( $row = $dbr->fetchObject( $res ) ) {
if ($i % $this->report == 0)
- printf("%s: %13s done (%s)\n", $wgDBname, "$i/$total", wfPercent( $i / $total * 100 ));
+ printf("%s: %13s done (%s)\n", wfWikiID(), "$i/$total", wfPercent( $i / $total * 100 ));
$md5 = md5( $row->img_name );
- $wgMemc->delete( "$wgDBname:Image:$md5" );
+ $wgMemc->delete( wfMemcKey( 'Image', $md5 ) );
if ($this->sleep != 0)
usleep( $this->sleep );
diff --git a/maintenance/deleteRevision.php b/maintenance/deleteRevision.php
index e7d005b6..eb65e234 100644
--- a/maintenance/deleteRevision.php
+++ b/maintenance/deleteRevision.php
@@ -8,7 +8,7 @@ if ( count( $args ) == 0 ) {
exit(1);
}
-echo "Deleting revision(s) " . implode( ',', $args ) . " from $wgDBname...\n";
+echo "Deleting revision(s) " . implode( ',', $args ) . " from ".wfWikiID()."...\n";
$affected = 0;
foreach ( $args as $revID ) {
diff --git a/maintenance/dumpBackup.php b/maintenance/dumpBackup.php
index 1735422d..ef5d47c9 100644
--- a/maintenance/dumpBackup.php
+++ b/maintenance/dumpBackup.php
@@ -57,12 +57,12 @@ if( isset( $options['end'] ) ) {
$dumper->skipHeader = isset( $options['skip-header'] );
$dumper->skipFooter = isset( $options['skip-footer'] );
-$textMode = isset( $options['stub'] ) ? MW_EXPORT_STUB : MW_EXPORT_TEXT;
+$textMode = isset( $options['stub'] ) ? WikiExporter::STUB : WikiExporter::TEXT;
if( isset( $options['full'] ) ) {
- $dumper->dump( MW_EXPORT_FULL, $textMode );
+ $dumper->dump( WikiExporter::FULL, $textMode );
} elseif( isset( $options['current'] ) ) {
- $dumper->dump( MW_EXPORT_CURRENT, $textMode );
+ $dumper->dump( WikiExporter::CURRENT, $textMode );
} else {
$dumper->progress( <<<END
This script dumps the wiki page database into an XML interchange wrapper
diff --git a/maintenance/dumpHTML.inc b/maintenance/dumpHTML.inc
index 2ed1e4a2..ca2a62dc 100644
--- a/maintenance/dumpHTML.inc
+++ b/maintenance/dumpHTML.inc
@@ -14,6 +14,9 @@ class DumpHTML {
# Destination directory
var $dest;
+ # Skip existing files
+ var $noOverwrite = false;
+
# Show interlanguage links?
var $interwiki = true;
@@ -21,7 +24,10 @@ class DumpHTML {
var $depth = 3;
# Directory that commons images are copied into
- var $sharedStaticPath;
+ var $sharedStaticDirectory;
+
+ # Directory that the images are in, after copying
+ var $destUploadDirectory;
# Relative path to image directory
var $imageRel = 'upload';
@@ -29,6 +35,9 @@ class DumpHTML {
# Copy commons images instead of symlinking
var $forceCopy = false;
+ # Make a copy of all images encountered
+ var $makeSnapshot = false;
+
# Make links assuming the script path is in the same directory as
# the destination
var $alternateScriptPath = false;
@@ -39,42 +48,132 @@ class DumpHTML {
# Has setupGlobals been called?
var $setupDone = false;
+ # Has to compress html pages
+ var $compress = false;
+
# List of raw pages used in the current article
var $rawPages;
-
+
# Skin to use
- var $skin = 'dumphtml';
+ var $skin = 'htmldump';
+
+ # Checkpoint stuff
+ var $checkpointFile = false, $checkpoints = false;
+
+ var $startID = 1, $endID = false;
+
+ var $sliceNumerator = 1, $sliceDenominator = 1;
+
+ # Max page ID, lazy initialised
+ var $maxPageID = false;
- function DumpHTML( $settings ) {
+ function DumpHTML( $settings = array() ) {
foreach ( $settings as $var => $value ) {
$this->$var = $value;
}
}
+ function loadCheckpoints() {
+ if ( $this->checkpoints !== false ) {
+ return true;
+ } elseif ( !$this->checkpointFile ) {
+ return false;
+ } else {
+ $lines = @file( $this->checkpointFile );
+ if ( $lines === false ) {
+ print "Starting new checkpoint file \"{$this->checkpointFile}\"\n";
+ $this->checkpoints = array();
+ } else {
+ $lines = array_map( 'trim', $lines );
+ $this->checkpoints = array();
+ foreach ( $lines as $line ) {
+ list( $name, $value ) = explode( '=', $line, 2 );
+ $this->checkpoints[$name] = $value;
+ }
+ }
+ return true;
+ }
+ }
+
+ function getCheckpoint( $type, $defValue = false ) {
+ if ( !$this->loadCheckpoints() ) {
+ return false;
+ }
+ if ( !isset( $this->checkpoints[$type] ) ) {
+ return false;
+ } else {
+ return $this->checkpoints[$type];
+ }
+ }
+
+ function setCheckpoint( $type, $value ) {
+ if ( !$this->checkpointFile ) {
+ return;
+ }
+ $this->checkpoints[$type] = $value;
+ $blob = '';
+ foreach ( $this->checkpoints as $type => $value ) {
+ $blob .= "$type=$value\n";
+ }
+ file_put_contents( $this->checkpointFile, $blob );
+ }
+
+ function doEverything() {
+ if ( $this->getCheckpoint( 'everything' ) == 'done' ) {
+ print "Checkpoint says everything is already done\n";
+ return;
+ }
+ $this->doArticles();
+ $this->doLocalImageDescriptions();
+ $this->doSharedImageDescriptions();
+ $this->doCategories();
+ $this->doRedirects();
+ if ( $this->sliceNumerator == 1 ) {
+ $this->doSpecials();
+ }
+
+ $this->setCheckpoint( 'everything', 'done' );
+ }
+
/**
* Write a set of articles specified by start and end page_id
* Skip categories and images, they will be done separately
*/
- function doArticles( $start, $end = false ) {
- $fname = 'DumpHTML::doArticles';
+ function doArticles() {
+ if ( $this->endID === false ) {
+ $end = $this->getMaxPageID();
+ } else {
+ $end = $this->endID;
+ }
+ $start = $this->startID;
+
+ # Start from the checkpoint
+ $cp = $this->getCheckpoint( 'article' );
+ if ( $cp == 'done' ) {
+ print "Articles already done\n";
+ return;
+ } elseif ( $cp !== false ) {
+ $start = $cp;
+ print "Resuming article dump from checkpoint at page_id $start of $end\n";
+ } else {
+ print "Starting from page_id $start of $end\n";
+ }
- $this->setupGlobals();
+ # Move the start point to the correct slice if it isn't there already
+ $start = $this->modSliceStart( $start );
- if ( $end === false ) {
- $dbr =& wfGetDB( DB_SLAVE );
- $end = $dbr->selectField( 'page', 'max(page_id)', false, $fname );
- }
+ $this->setupGlobals();
$mainPageObj = Title::newMainPage();
$mainPage = $mainPageObj->getPrefixedDBkey();
-
- for ($id = $start; $id <= $end; $id++) {
+ for ( $id = $start, $i = 0; $id <= $end; $id += $this->sliceDenominator, $i++ ) {
wfWaitForSlaves( 20 );
- if ( !($id % REPORTING_INTERVAL) ) {
+ if ( !( $i % REPORTING_INTERVAL) ) {
print "Processing ID: $id\r";
+ $this->setCheckpoint( 'article', $id );
}
- if ( !($id % (REPORTING_INTERVAL*10) ) ) {
+ if ( !($i % (REPORTING_INTERVAL*10) ) ) {
print "\n";
}
$title = Title::newFromID( $id );
@@ -85,6 +184,7 @@ class DumpHTML {
}
}
}
+ $this->setCheckpoint( 'article', 'done' );
print "\n";
}
@@ -107,6 +207,11 @@ class DumpHTML {
$title = Title::newMainPage();
$text = $this->getArticleHTML( $title );
+
+ # Parse the XHTML to find the images
+ $images = $this->findImages( $text );
+ $this->copyImages( $images );
+
$file = fopen( "{$this->dest}/index.html", "w" );
if ( !$file ) {
print "\nCan't open index.html for writing\n";
@@ -118,49 +223,98 @@ class DumpHTML {
}
function doImageDescriptions() {
+ $this->doLocalImageDescriptions();
+ $this->doSharedImageDescriptions();
+ }
+
+ /**
+ * Dump image description pages that don't have an associated article, but do
+ * have a local image
+ */
+ function doLocalImageDescriptions() {
global $wgSharedUploadDirectory;
+ $chunkSize = 1000;
- $fname = 'DumpHTML::doImageDescriptions';
+ $dbr =& wfGetDB( DB_SLAVE );
+
+ $cp = $this->getCheckpoint( 'local image' );
+ if ( $cp == 'done' ) {
+ print "Local image descriptions already done\n";
+ return;
+ } elseif ( $cp !== false ) {
+ print "Writing image description pages starting from $cp\n";
+ $conds = array( 'img_name >= ' . $dbr->addQuotes( $cp ) );
+ } else {
+ print "Writing image description pages for local images\n";
+ $conds = false;
+ }
$this->setupGlobals();
+ $i = 0;
- /**
- * Dump image description pages that don't have an associated article, but do
- * have a local image
- */
- $dbr =& wfGetDB( DB_SLAVE );
- extract( $dbr->tableNames( 'image', 'page' ) );
- $res = $dbr->select( 'image', array( 'img_name' ), false, $fname );
+ do {
+ $res = $dbr->select( 'image', array( 'img_name' ), $conds, __METHOD__,
+ array( 'ORDER BY' => 'img_name', 'LIMIT' => $chunkSize ) );
+ $numRows = $dbr->numRows( $res );
+
+ while ( $row = $dbr->fetchObject( $res ) ) {
+ # Update conds for the next chunk query
+ $conds = array( 'img_name > ' . $dbr->addQuotes( $row->img_name ) );
+
+ // Slice the result set with a filter
+ if ( !$this->sliceFilter( $row->img_name ) ) {
+ continue;
+ }
- $i = 0;
- print "Writing image description pages for local images\n";
- $num = $dbr->numRows( $res );
- while ( $row = $dbr->fetchObject( $res ) ) {
- wfWaitForSlaves( 10 );
- if ( !( ++$i % REPORTING_INTERVAL ) ) {
- print "Done $i of $num\r";
- }
- $title = Title::makeTitle( NS_IMAGE, $row->img_name );
- if ( $title->getArticleID() ) {
- // Already done by dumpHTML
- continue;
+ wfWaitForSlaves( 10 );
+ if ( !( ++$i % REPORTING_INTERVAL ) ) {
+ print "{$row->img_name}\n";
+ if ( $row->img_name !== 'done' ) {
+ $this->setCheckpoint( 'local image', $row->img_name );
+ }
+ }
+ $title = Title::makeTitle( NS_IMAGE, $row->img_name );
+ if ( $title->getArticleID() ) {
+ // Already done by dumpHTML
+ continue;
+ }
+ $this->doArticle( $title );
}
- $this->doArticle( $title );
- }
+ $dbr->freeResult( $res );
+ } while ( $numRows );
+
+ $this->setCheckpoint( 'local image', 'done' );
print "\n";
+ }
+
+ /**
+ * Dump images which only have a real description page on commons
+ */
+ function doSharedImageDescriptions() {
+ list( $start, $end ) = $this->sliceRange( 0, 255 );
+
+ $cp = $this->getCheckpoint( 'shared image' );
+ if ( $cp == 'done' ) {
+ print "Shared description pages already done\n";
+ return;
+ } elseif ( $cp !== false ) {
+ print "Writing description pages for commons images starting from directory $cp/255\n";
+ $start = $cp;
+ } else {
+ print "Writing description pages for commons images\n";
+ }
- /**
- * Dump images which only have a real description page on commons
- */
- print "Writing description pages for commons images\n";
+ $this->setupGlobals();
$i = 0;
- for ( $hash = 0; $hash < 256; $hash++ ) {
+ for ( $hash = $start; $hash <= $end; $hash++ ) {
+ $this->setCheckpoint( 'shared image', $hash );
+
$dir = sprintf( "%01x/%02x", intval( $hash / 16 ), $hash );
- $paths = array_merge( glob( "{$this->sharedStaticPath}/$dir/*" ),
- glob( "{$this->sharedStaticPath}/thumb/$dir/*" ) );
+ $paths = array_merge( glob( "{$this->sharedStaticDirectory}/$dir/*" ),
+ glob( "{$this->sharedStaticDirectory}/thumb/$dir/*" ) );
foreach ( $paths as $path ) {
- $file = basename( $path );
+ $file = wfBaseName( $path );
if ( !(++$i % REPORTING_INTERVAL ) ) {
print "$i\r";
}
@@ -169,49 +323,106 @@ class DumpHTML {
$this->doArticle( $title );
}
}
+ $this->setCheckpoint( 'shared image', 'done' );
print "\n";
}
function doCategories() {
- $fname = 'DumpHTML::doCategories';
+ $chunkSize = 1000;
+
$this->setupGlobals();
-
$dbr =& wfGetDB( DB_SLAVE );
- print "Selecting categories...";
- $sql = 'SELECT DISTINCT cl_to FROM ' . $dbr->tableName( 'categorylinks' );
- $res = $dbr->query( $sql, $fname );
+
+ $cp = $this->getCheckpoint( 'category' );
+ if ( $cp == 'done' ) {
+ print "Category pages already done\n";
+ return;
+ } elseif ( $cp !== false ) {
+ print "Resuming category page dump from $cp\n";
+ $conds = array( 'cl_to >= ' . $dbr->addQuotes( $cp ) );
+ } else {
+ print "Starting category pages\n";
+ $conds = false;
+ }
- print "\nWriting " . $dbr->numRows( $res ). " category pages\n";
$i = 0;
- while ( $row = $dbr->fetchObject( $res ) ) {
- wfWaitForSlaves( 10 );
- if ( !(++$i % REPORTING_INTERVAL ) ) {
- print "$i\r";
+ do {
+ $res = $dbr->select( 'categorylinks', 'DISTINCT cl_to', $conds, __METHOD__,
+ array( 'ORDER BY' => 'cl_to', 'LIMIT' => $chunkSize ) );
+ $numRows = $dbr->numRows( $res );
+
+ while ( $row = $dbr->fetchObject( $res ) ) {
+ // Set conditions for next chunk
+ $conds = array( 'cl_to > ' . $dbr->addQuotes( $row->cl_to ) );
+
+ // Filter pages from other slices
+ if ( !$this->sliceFilter( $row->cl_to ) ) {
+ continue;
+ }
+
+ wfWaitForSlaves( 10 );
+ if ( !(++$i % REPORTING_INTERVAL ) ) {
+ print "{$row->cl_to}\n";
+ if ( $row->cl_to != 'done' ) {
+ $this->setCheckpoint( 'category', $row->cl_to );
+ }
+ }
+ $title = Title::makeTitle( NS_CATEGORY, $row->cl_to );
+ $this->doArticle( $title );
}
- $title = Title::makeTitle( NS_CATEGORY, $row->cl_to );
- $this->doArticle( $title );
- }
+ $dbr->freeResult( $res );
+ } while ( $numRows );
+
+ $this->setCheckpoint( 'category', 'done' );
print "\n";
}
function doRedirects() {
print "Doing redirects...\n";
- $fname = 'DumpHTML::doRedirects';
+
+ $chunkSize = 10000;
+ $end = $this->getMaxPageID();
+ $cp = $this->getCheckpoint( 'redirect' );
+ if ( $cp == 'done' ) {
+ print "Redirects already done\n";
+ return;
+ } elseif ( $cp !== false ) {
+ print "Resuming redirect generation from page_id $cp\n";
+ $start = intval( $cp );
+ } else {
+ $start = 1;
+ }
+
$this->setupGlobals();
$dbr =& wfGetDB( DB_SLAVE );
-
- $res = $dbr->select( 'page', array( 'page_namespace', 'page_title' ),
- array( 'page_is_redirect' => 1 ), $fname );
- $num = $dbr->numRows( $res );
- print "$num redirects to do...\n";
$i = 0;
- while ( $row = $dbr->fetchObject( $res ) ) {
- $title = Title::makeTitle( $row->page_namespace, $row->page_title );
- if ( !(++$i % (REPORTING_INTERVAL*10) ) ) {
- print "Done $i of $num\n";
- }
- $this->doArticle( $title );
+
+ for ( $chunkStart = $start; $chunkStart <= $end; $chunkStart += $chunkSize ) {
+ $chunkEnd = min( $end, $chunkStart + $chunkSize - 1 );
+ $conds = array(
+ 'page_is_redirect' => 1,
+ "page_id BETWEEN $chunkStart AND $chunkEnd"
+ );
+ # Modulo slicing in SQL
+ if ( $this->sliceDenominator != 1 ) {
+ $n = intval( $this->sliceNumerator );
+ $m = intval( $this->sliceDenominator );
+ $conds[] = "page_id % $m = $n";
+ }
+ $res = $dbr->select( 'page', array( 'page_id', 'page_namespace', 'page_title' ),
+ $conds, __METHOD__ );
+
+ while ( $row = $dbr->fetchObject( $res ) ) {
+ $title = Title::makeTitle( $row->page_namespace, $row->page_title );
+ if ( !(++$i % (REPORTING_INTERVAL*10) ) ) {
+ printf( "Done %d redirects (%2.3f%%)\n", $i, $row->page_id / $end * 100 );
+ $this->setCheckpoint( 'redirect', $row->page_id );
+ }
+ $this->doArticle( $title );
+ }
+ $dbr->freeResult( $res );
}
+ $this->setCheckpoint( 'redirect', 'done' );
}
/** Write an article specified by title */
@@ -219,6 +430,13 @@ class DumpHTML {
global $wgTitle, $wgSharedUploadPath, $wgSharedUploadDirectory;
global $wgUploadDirectory;
+ if ( $this->noOverwrite ) {
+ $fileName = $this->dest.'/'.$this->getHashedFilename( $title );
+ if ( file_exists( $fileName ) ) {
+ return;
+ }
+ }
+
$this->rawPages = array();
$text = $this->getArticleHTML( $title );
@@ -263,11 +481,19 @@ class DumpHTML {
$fullName = "{$this->dest}/$filename";
$fullDir = dirname( $fullName );
+ if ( $this->compress ) {
+ $fullName .= ".gz";
+ $text = gzencode( $text, 9 );
+ }
+
wfMkdirParents( $fullDir, 0755 );
+ wfSuppressWarnings();
$file = fopen( $fullName, 'w' );
+ wfRestoreWarnings();
+
if ( !$file ) {
- print("Can't open file $fullName for writing\n");
+ die("Can't open file '$fullName' for writing.\nCheck permissions or use another destination (-d).\n");
return;
}
@@ -281,13 +507,16 @@ class DumpHTML {
global $wgUploadPath, $wgLogo, $wgMaxCredits, $wgSharedUploadPath;
global $wgHideInterlanguageLinks, $wgUploadDirectory, $wgThumbnailScriptPath;
global $wgSharedThumbnailScriptPath, $wgEnableParserCache, $wgHooks, $wgServer;
- global $wgRightsUrl, $wgRightsText, $wgCopyrightIcon;
+ global $wgRightsUrl, $wgRightsText, $wgCopyrightIcon, $wgEnableSidebarCache;
+ global $wgGenerateThumbnailOnParse;
static $oldLogo = NULL;
if ( !$this->setupDone ) {
$wgHooks['GetLocalURL'][] =& $this;
$wgHooks['GetFullURL'][] =& $this;
+ $wgHooks['SiteNoticeBefore'][] =& $this;
+ $wgHooks['SiteNoticeAfter'][] =& $this;
$this->oldArticlePath = $wgServer . $wgArticlePath;
}
@@ -331,8 +560,6 @@ class DumpHTML {
$wgCopyrightIcon = str_replace( 'src="/images',
'src="' . htmlspecialchars( $wgScriptPath ) . '/images', $this->oldCopyrightIcon );
-
-
$wgStylePath = "$wgScriptPath/skins";
$wgUploadPath = "$wgScriptPath/{$this->imageRel}";
$wgSharedUploadPath = "$wgUploadPath/shared";
@@ -341,6 +568,8 @@ class DumpHTML {
$wgThumbnailScriptPath = $wgSharedThumbnailScriptPath = false;
$wgEnableParserCache = false;
$wgMathPath = "$wgScriptPath/math";
+ $wgEnableSidebarCache = false;
+ $wgGenerateThumbnailOnParse = true;
if ( !empty( $wgRightsText ) ) {
$wgRightsUrl = "$wgScriptPath/COPYING.html";
@@ -350,7 +579,14 @@ class DumpHTML {
$wgUser->setOption( 'skin', $this->skin );
$wgUser->setOption( 'editsection', 0 );
- $this->sharedStaticPath = "$wgUploadDirectory/shared";
+ if ( $this->makeSnapshot ) {
+ $this->destUploadDirectory = "{$this->dest}/{$this->imageRel}";
+ if ( realpath( $this->destUploadDirectory == $wgUploadDirectory ) ) {
+ $this->makeSnapshot = false;
+ }
+ }
+
+ $this->sharedStaticDirectory = "{$this->destUploadDirectory}/shared";
$this->setupDone = true;
}
@@ -391,6 +627,7 @@ class DumpHTML {
}
}
+
$sk =& $wgUser->getSkin();
ob_start();
$sk->outputPage( $wgOut );
@@ -431,66 +668,71 @@ ENDTEXT;
}
/**
+ * Copy a file specified by a URL to a given directory
+ *
+ * @param string $srcPath The source URL
+ * @param string $srcPathBase The base directory of the source URL
+ * @param string $srcDirBase The base filesystem directory of the source URL
+ * @param string $destDirBase The base filesystem directory of the destination URL
+ */
+ function relativeCopy( $srcPath, $srcPathBase, $srcDirBase, $destDirBase ) {
+ $rel = substr( $srcPath, strlen( $srcPathBase ) + 1 ); // +1 for slash
+ $sourceLoc = "$srcDirBase/$rel";
+ $destLoc = "$destDirBase/$rel";
+ #print "Copying $sourceLoc to $destLoc\n";
+ if ( !file_exists( $destLoc ) ) {
+ wfMkdirParents( dirname( $destLoc ), 0755 );
+ if ( function_exists( 'symlink' ) && !$this->forceCopy ) {
+ symlink( $sourceLoc, $destLoc );
+ } else {
+ copy( $sourceLoc, $destLoc );
+ }
+ }
+ }
+
+ /**
+ * Copy an image, and if it is a thumbnail, copy its parent image too
+ */
+ function copyImage( $srcPath, $srcPathBase, $srcDirBase, $destDirBase ) {
+ global $wgUploadPath, $wgUploadDirectory, $wgSharedUploadPath;
+ $this->relativeCopy( $srcPath, $srcPathBase, $srcDirBase, $destDirBase );
+ if ( substr( $srcPath, strlen( $srcPathBase ) + 1, 6 ) == 'thumb/' ) {
+ # The image was a thumbnail
+ # Copy the source image as well
+ $rel = substr( $srcPath, strlen( $srcPathBase ) + 1 );
+ $parts = explode( '/', $rel );
+ $rel = "{$parts[1]}/{$parts[2]}/{$parts[3]}";
+ $newSrc = "$srcPathBase/$rel";
+ $this->relativeCopy( $newSrc, $srcPathBase, $srcDirBase, $destDirBase );
+ }
+ }
+
+ /**
* Copy images (or create symlinks) from commons to a static directory.
* This is necessary even if you intend to distribute all of commons, because
* the directory contents is used to work out which image description pages
* are needed.
*
- * Also copies math images
+ * Also copies math images, and full-sized images if the makeSnapshot option
+ * is specified.
*
*/
function copyImages( $images ) {
- global $wgSharedUploadPath, $wgSharedUploadDirectory, $wgMathPath, $wgMathDirectory;
+ global $wgUploadPath, $wgUploadDirectory, $wgSharedUploadPath, $wgSharedUploadDirectory,
+ $wgMathPath, $wgMathDirectory;
# Find shared uploads and copy them into the static directory
$sharedPathLength = strlen( $wgSharedUploadPath );
$mathPathLength = strlen( $wgMathPath );
+ $uploadPathLength = strlen( $wgUploadPath );
foreach ( $images as $escapedImage => $dummy ) {
$image = urldecode( $escapedImage );
- # Is it shared?
if ( substr( $image, 0, $sharedPathLength ) == $wgSharedUploadPath ) {
- # Reconstruct full filename
- $rel = substr( $image, $sharedPathLength + 1 ); // +1 for slash
- $sourceLoc = "$wgSharedUploadDirectory/$rel";
- $staticLoc = "{$this->sharedStaticPath}/$rel";
- #print "Copying $sourceLoc to $staticLoc\n";
- # Copy to static directory
- if ( !file_exists( $staticLoc ) ) {
- wfMkdirParents( dirname( $staticLoc ), 0755 );
- if ( function_exists( 'symlink' ) && !$this->forceCopy ) {
- symlink( $sourceLoc, $staticLoc );
- } else {
- copy( $sourceLoc, $staticLoc );
- }
- }
-
- if ( substr( $rel, 0, 6 ) == 'thumb/' ) {
- # That was a thumbnail
- # We will also copy the real image
- $parts = explode( '/', $rel );
- $rel = "{$parts[1]}/{$parts[2]}/{$parts[3]}";
- $sourceLoc = "$wgSharedUploadDirectory/$rel";
- $staticLoc = "{$this->sharedStaticPath}/$rel";
- #print "Copying $sourceLoc to $staticLoc\n";
- if ( !file_exists( $staticLoc ) ) {
- wfMkdirParents( dirname( $staticLoc ), 0755 );
- if ( function_exists( 'symlink' ) && !$this->forceCopy ) {
- symlink( $sourceLoc, $staticLoc );
- } else {
- copy( $sourceLoc, $staticLoc );
- }
- }
- }
- } else
- # Is it math?
- if ( substr( $image, 0, $mathPathLength ) == $wgMathPath ) {
- $rel = substr( $image, $mathPathLength + 1 ); // +1 for slash
- $source = "$wgMathDirectory/$rel";
- $dest = "{$this->dest}/math/$rel";
- @mkdir( "{$this->dest}/math", 0755 );
- if ( !file_exists( $dest ) ) {
- copy( $source, $dest );
- }
+ $this->copyImage( $image, $wgSharedUploadPath, $wgSharedUploadDirectory, $this->sharedStaticDirectory );
+ } elseif ( substr( $image, 0, $mathPathLength ) == $wgMathPath ) {
+ $this->relativeCopy( $image, $wgMathPath, $wgMathDirectory, "{$this->dest}/math" );
+ } elseif ( $this->makeSnapshot && substr( $image, 0, $uploadPathLength ) == $wgUploadPath ) {
+ $this->copyImage( $image, $wgUploadPath, $wgUploadDirectory, $this->destUploadDirectory );
}
}
}
@@ -506,6 +748,7 @@ ENDTEXT;
$url = str_replace( '$1', "../$iw/" . wfUrlencode( $this->getHashedFilename( $title ) ),
$wgArticlePath );
}
+ $url .= $this->compress ? ".gz" : "";
return false;
} else {
return true;
@@ -540,7 +783,7 @@ ENDTEXT;
if ( $url === false ) {
$url = str_replace( '$1', wfUrlencode( $this->getHashedFilename( $title ) ), $wgArticlePath );
}
-
+ $url .= $this->compress ? ".gz" : "";
return false;
}
@@ -632,6 +875,60 @@ ENDTEXT;
return $dir;
}
+ /**
+ * Calculate the start and end of a job based on the current slice
+ * @param integer $start
+ * @param integer $end
+ * @return array of integers
+ */
+ function sliceRange( $start, $end ) {
+ $count = $end - $start + 1;
+ $each = $count / $this->sliceDenominator;
+ $sliceStart = $start + intval( $each * ( $this->sliceNumerator - 1 ) );
+ if ( $this->sliceNumerator == $this->sliceDenominator ) {
+ $sliceEnd = $end;
+ } else {
+ $sliceEnd = $start + intval( $each * $this->sliceNumerator ) - 1;
+ }
+ return array( $sliceStart, $sliceEnd );
+ }
+
+ /**
+ * Adjust a start point so that it belongs to the current slice, where slices are defined by integer modulo
+ * @param integer $start
+ * @param integer $base The true start of the range; the minimum start
+ */
+ function modSliceStart( $start, $base = 1 ) {
+ return $start - ( $start % $this->sliceDenominator ) + $this->sliceNumerator - 1 + $base;
+ }
+
+ /**
+ * Determine whether a string belongs to the current slice, based on hash
+ */
+ function sliceFilter( $s ) {
+ return crc32( $s ) % $this->sliceDenominator == $this->sliceNumerator - 1;
+ }
+
+ /**
+ * No site notice
+ */
+ function onSiteNoticeBefore( &$text ) {
+ $text = '';
+ return false;
+ }
+ function onSiteNoticeAfter( &$text ) {
+ $text = '';
+ return false;
+ }
+
+ function getMaxPageID() {
+ if ( $this->maxPageID === false ) {
+ $dbr =& wfGetDB( DB_SLAVE );
+ $this->maxPageID = $dbr->selectField( 'page', 'max(page_id)', false, __METHOD__ );
+ }
+ return $this->maxPageID;
+ }
+
}
/** XML parser callback */
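The sliceRange(), modSliceStart() and sliceFilter() helpers added above let a dump be split into m roughly equal, independently runnable slices (the --slice n/m option introduced below). A minimal standalone sketch of the same arithmetic, assuming it is pulled out of the DumpHTML class; exampleSliceRange() is an illustrative name and not part of this patch:

  <?php
  // Restatement of DumpHTML::sliceRange(): slice $numerator of $denominator
  // over the inclusive ID range [$start, $end].
  function exampleSliceRange( $start, $end, $numerator, $denominator ) {
      $count = $end - $start + 1;
      $each  = $count / $denominator;                     // fractional share per slice
      $sliceStart = $start + intval( $each * ( $numerator - 1 ) );
      $sliceEnd   = ( $numerator == $denominator )
          ? $end                                          // last slice absorbs rounding leftovers
          : $start + intval( $each * $numerator ) - 1;
      return array( $sliceStart, $sliceEnd );
  }

  // Splitting page IDs 1..10 into 3 slices gives 1-3, 4-6 and 7-10.
  foreach ( array( 1, 2, 3 ) as $n ) {
      list( $a, $b ) = exampleSliceRange( 1, 10, $n, 3 );
      echo "slice $n/3: $a-$b\n";
  }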
diff --git a/maintenance/dumpHTML.php b/maintenance/dumpHTML.php
index 37a46465..5e347e4b 100644
--- a/maintenance/dumpHTML.php
+++ b/maintenance/dumpHTML.php
@@ -9,20 +9,25 @@
* Usage:
* php dumpHTML.php [options...]
*
- * -d <dest> destination directory
- * -s <start> start ID
- * -e <end> end ID
- * -k <skin> skin to use (defaults to dumphtml)
- * --images only do image description pages
- * --categories only do category pages
- * --redirects only do redirects
- * --special only do miscellaneous stuff
- * --force-copy copy commons instead of symlink, needed for Wikimedia
- * --interlang allow interlanguage links
+ * -d <dest> destination directory
+ * -s <start> start ID
+ * -e <end> end ID
+ * -k <skin> skin to use (defaults to htmldump)
+ * --no-overwrite skip existing HTML files
+ * --checkpoint <file> use a checkpoint file to allow restarting of interrupted dumps
+ * --slice <n/m> split the job into m segments and do the n'th one
+ * --images only do image description pages
+ * --categories only do category pages
+ * --redirects only do redirects
+ * --special only do miscellaneous stuff
+ * --force-copy copy commons instead of symlink, needed for Wikimedia
+ * --interlang allow interlanguage links
+ * --image-snapshot copy all images used to the destination directory
+ * --compress generate compressed version of the html pages
*/
-$optionsWithArgs = array( 's', 'd', 'e', 'k' );
+$optionsWithArgs = array( 's', 'd', 'e', 'k', 'checkpoint', 'slice' );
$profiling = false;
@@ -40,7 +45,6 @@ require_once( "commandLine.inc" );
require_once( "dumpHTML.inc" );
error_reporting( E_ALL & (~E_NOTICE) );
-define( 'CHUNK_SIZE', 50 );
if ( !empty( $options['s'] ) ) {
$start = $options['s'];
@@ -58,10 +62,22 @@ if ( !empty( $options['e'] ) ) {
if ( !empty( $options['d'] ) ) {
$dest = $options['d'];
} else {
- $dest = 'static';
+ $dest = "$IP/static";
}
-$skin = isset( $options['k'] ) ? $options['k'] : 'dumphtml';
+$skin = isset( $options['k'] ) ? $options['k'] : 'htmldump';
+
+if ( $options['slice'] ) {
+ $bits = explode( '/', $options['slice'] );
+ if ( count( $bits ) != 2 || $bits[0] < 1 || $bits[0] > $bits[1] ) {
+ print "Invalid slice specification";
+ exit;
+ }
+ $sliceNumerator = $bits[0];
+ $sliceDenominator = $bits[1];
+} else {
+ $sliceNumerator = $sliceDenominator = 1;
+}
$wgHTMLDump = new DumpHTML( array(
'dest' => $dest,
@@ -69,6 +85,14 @@ $wgHTMLDump = new DumpHTML( array(
'alternateScriptPath' => $options['interlang'],
'interwiki' => $options['interlang'],
'skin' => $skin,
+ 'makeSnapshot' => $options['image-snapshot'],
+ 'checkpointFile' => $options['checkpoint'],
+ 'startID' => $start,
+ 'endID' => $end,
+ 'sliceNumerator' => $sliceNumerator,
+ 'sliceDenominator' => $sliceDenominator,
+ 'noOverwrite' => $options['no-overwrite'],
+ 'compress' => $options['compress'],
));
@@ -81,47 +105,32 @@ if ( $options['special'] ) {
} elseif ( $options['redirects'] ) {
$wgHTMLDump->doRedirects();
} else {
- print("Creating static HTML dump in directory $dest. \n".
- "Starting from page_id $start of $end.\n");
-
+ print "Creating static HTML dump in directory $dest. \n";
$dbr =& wfGetDB( DB_SLAVE );
$server = $dbr->getProperty( 'mServer' );
print "Using database {$server}\n";
- $wgHTMLDump->doArticles( $start, $end );
if ( !isset( $options['e'] ) ) {
- $wgHTMLDump->doImageDescriptions();
- $wgHTMLDump->doCategories();
- $wgHTMLDump->doSpecials();
- }
-
- /*
- if ( $end - $start > CHUNK_SIZE * 2 ) {
- // Split the problem into smaller chunks, run them in different PHP instances
- // This is a memory/resource leak workaround
- print("Creating static HTML dump in directory $dest. \n".
- "Starting from page_id $start of $end.\n");
-
- chdir( "maintenance" );
- for ( $chunkStart = $start; $chunkStart < $end; $chunkStart += CHUNK_SIZE ) {
- $chunkEnd = $chunkStart + CHUNK_SIZE - 1;
- if ( $chunkEnd > $end ) {
- $chunkEnd = $end;
- }
- passthru( "php dumpHTML.php -d " . wfEscapeShellArg( $dest ) . " -s $chunkStart -e $chunkEnd" );
- }
- chdir( ".." );
- $d->doImageDescriptions();
- $d->doCategories();
- $d->doMainPage( $dest );
+ $wgHTMLDump->doEverything();
} else {
- $d->doArticles( $start, $end );
+ $wgHTMLDump->doArticles();
}
- */
}
if ( isset( $options['debug'] ) ) {
- print_r($GLOBALS);
+ #print_r($GLOBALS);
+ # Workaround for bug #36957
+ $globals = array_keys( $GLOBALS );
+ #sort( $globals );
+ $sizes = array();
+ foreach ( $globals as $name ) {
+ $sizes[$name] = strlen( serialize( $GLOBALS[$name] ) );
+ }
+ arsort($sizes);
+ $sizes = array_slice( $sizes, 0, 20 );
+ foreach ( $sizes as $name => $size ) {
+ printf( "%9d %s\n", $size, $name );
+ }
}
if ( $profiling ) {
diff --git a/maintenance/dumpInterwiki.inc b/maintenance/dumpInterwiki.inc
index 3cca1e02..2039f2df 100644
--- a/maintenance/dumpInterwiki.inc
+++ b/maintenance/dumpInterwiki.inc
@@ -31,7 +31,7 @@ class Site {
}
function getRebuildInterwikiDump() {
- global $langlist, $languageAliases, $prefixRewrites, $wgDBname;
+ global $langlist, $languageAliases, $prefixRewrites;
# Multi-language sites
# db suffix => db suffix, iw prefix, hostname
@@ -43,6 +43,7 @@ function getRebuildInterwikiDump() {
'wikinews' => new Site( 'wikinews', 'n', 'wikinews.org' ),
'wikisource' => new Site( 'wikisource', 's', 'wikisource.org' ),
'wikimedia' => new Site( 'wikimedia', 'chapter', 'wikimedia.org' ),
+ 'wikiversity' => new Site( 'wikiversity', 'v', 'wikiversity.org' ),
);
# List of language prefixes likely to be found in multi-language sites
diff --git a/maintenance/dumpSisterSites.php b/maintenance/dumpSisterSites.php
new file mode 100644
index 00000000..50e121e6
--- /dev/null
+++ b/maintenance/dumpSisterSites.php
@@ -0,0 +1,49 @@
+<?php
+/**
+ * Quickie page name dump script for SisterSites usage.
+ * http://www.eekim.com/cgi-bin/wiki.pl?SisterSites
+ *
+ * Copyright (C) 2006 Brion Vibber <brion@pobox.com>
+ * http://www.mediawiki.org/
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License along
+ * with this program; if not, write to the Free Software Foundation, Inc.,
+ * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ * http://www.gnu.org/copyleft/gpl.html
+ *
+ * @package MediaWiki
+ * @subpackage SpecialPage
+ */
+
+require_once( 'commandLine.inc' );
+
+$dbr = wfGetDB( DB_SLAVE );
+$dbr->bufferResults( false );
+$result = $dbr->select( 'page',
+ array( 'page_namespace', 'page_title' ),
+ array(
+ 'page_namespace' => NS_MAIN,
+ 'page_is_redirect' => 0,
+ ),
+ 'dumpSisterSites' );
+
+while( $row = $dbr->fetchObject( $result ) ) {
+ $title = Title::makeTitle( $row->page_namespace, $row->page_title );
+ $url = $title->getFullUrl();
+ $text = $title->getPrefixedText();
+ echo "$url $text\n";
+}
+
+$dbr->freeResult( $result );
+
+?>
diff --git a/maintenance/dumpTextPass.php b/maintenance/dumpTextPass.php
index 78367c0b..8c1563ad 100644
--- a/maintenance/dumpTextPass.php
+++ b/maintenance/dumpTextPass.php
@@ -99,9 +99,13 @@ stream_wrapper_register( 'mediawiki.compress.7z', 'SevenZipStream' );
class TextPassDumper extends BackupDumper {
var $prefetch = null;
var $input = "php://stdin";
- var $history = MW_EXPORT_FULL;
+ var $history = WikiExporter::FULL;
var $fetchCount = 0;
var $prefetchCount = 0;
+
+ var $failures = 0;
+ var $maxFailures = 200;
+ var $failureTimeout = 5; // Seconds to sleep after db failure
function dump() {
# This shouldn't happen if on console... ;)
@@ -139,10 +143,10 @@ class TextPassDumper extends BackupDumper {
$this->input = $url;
break;
case 'current':
- $this->history = MW_EXPORT_CURRENT;
+ $this->history = WikiExporter::CURRENT;
break;
case 'full':
- $this->history = MW_EXPORT_FULL;
+ $this->history = WikiExporter::FULL;
break;
}
}
@@ -186,9 +190,8 @@ class TextPassDumper extends BackupDumper {
$etats = '-';
$fetchrate = '-';
}
- global $wgDBname;
$this->progress( sprintf( "%s: %s %d pages (%0.3f/sec), %d revs (%0.3f/sec), %0.1f%% prefetched, ETA %s [max %d]",
- $now, $wgDBname, $this->pageCount, $rate, $this->revCount, $revrate, $fetchrate, $etats, $this->maxCount ) );
+ $now, wfWikiID(), $this->pageCount, $rate, $this->revCount, $revrate, $fetchrate, $etats, $this->maxCount ) );
}
}
@@ -236,6 +239,27 @@ class TextPassDumper extends BackupDumper {
return $text;
}
}
+ while( true ) {
+ try {
+ return $this->doGetText( $id );
+ } catch (DBQueryError $ex) {
+ $this->failures++;
+ if( $this->failures > $this->maxFailures ) {
+ throw $ex;
+ } else {
+ $this->progress( "Database failure $this->failures " .
+ "of allowed $this->maxFailures! " .
+ "Pausing $this->failureTimeout seconds..." );
+ sleep( $this->failureTimeout );
+ }
+ }
+ }
+ }
+
+ /**
+ * May throw a database error if, say, the server dies during query.
+ */
+ private function doGetText( $id ) {
$id = intval( $id );
$row = $this->db->selectRow( 'text',
 array( 'old_text', 'old_flags' ),
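The change above makes getText() retry transparently when the database throws a DBQueryError, up to $maxFailures times with a $failureTimeout-second pause between attempts, before finally re-throwing. A self-contained sketch of that bounded-retry pattern, with illustrative names (fetchWithRetry, FakeQueryError, exampleFetch) that are not part of MediaWiki:

  <?php
  // Stand-in for DBQueryError so the sketch runs on its own.
  class FakeQueryError extends Exception {}

  // Call $fetch, retrying after a pause whenever it throws FakeQueryError,
  // and give up by re-throwing once $maxFailures attempts have failed.
  function fetchWithRetry( $fetch, $maxFailures = 200, $failureTimeout = 5 ) {
      $failures = 0;
      while ( true ) {
          try {
              return call_user_func( $fetch );
          } catch ( FakeQueryError $ex ) {
              if ( ++$failures > $maxFailures ) {
                  throw $ex;  // too many consecutive failures, propagate the error
              }
              echo "Failure $failures of $maxFailures, pausing $failureTimeout s...\n";
              sleep( $failureTimeout );
          }
      }
  }

  function exampleFetch() { return "revision text"; }
  echo fetchWithRetry( 'exampleFetch' ), "\n";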
diff --git a/maintenance/dumpUploads.php b/maintenance/dumpUploads.php
new file mode 100644
index 00000000..8ba4e87b
--- /dev/null
+++ b/maintenance/dumpUploads.php
@@ -0,0 +1,116 @@
+<?php
+
+require_once 'commandLine.inc';
+
+class UploadDumper {
+
+ function __construct( $args ) {
+ global $IP, $wgUseSharedUploads;
+ $this->mAction = 'fetchUsed';
+ $this->mBasePath = $IP;
+ $this->mShared = $wgUseSharedUploads;
+
+ if( isset( $args['help'] ) ) {
+ $this->mAction = 'help';
+ }
+
+ if( isset( $args['base'] ) ) {
+ $this->mBasePath = $args['base'];
+ }
+ }
+
+ function run() {
+ $this->{$this->mAction}();
+ }
+
+ function help() {
+ echo <<<END
+Generates list of uploaded files which can be fed to tar or similar.
+By default, outputs relative paths against the parent directory of
+\$wgUploadDirectory.
+
+Usage:
+php dumpUploads.php [options] > list-o-files.txt
+
+Options:
+--base=<path> Set base relative path instead of wiki include root
+
+FIXME: other options not implemented yet ;)
+
+--local List all local files, used or not. No shared files included.
+--used Skip local images that are not used
+--shared Include images used from shared repository
+
+END;
+ }
+
+ /**
+ * Fetch the list of images used on wiki pages (via an imagelinks/image join)
+ * and output a relative path for each one, local or shared.
+ */
+ function fetchUsed() {
+ $dbr = wfGetDB( DB_SLAVE );
+ $image = $dbr->tableName( 'image' );
+ $imagelinks = $dbr->tableName( 'imagelinks' );
+
+ $sql = "SELECT DISTINCT il_to, img_name
+ FROM $imagelinks
+ LEFT OUTER JOIN $image
+ ON il_to=img_name";
+ $result = $dbr->query( $sql );
+
+ while( $row = $dbr->fetchObject( $result ) ) {
+ if( is_null( $row->img_name ) ) {
+ if( $this->mShared ) {
+ $this->outputShared( $row->il_to );
+ }
+ } else {
+ $this->outputLocal( $row->il_to );
+ }
+ }
+ $dbr->freeResult( $result );
+ }
+
+ function outputLocal( $name ) {
+ global $wgUploadDirectory;
+ return $this->outputItem( $name, $wgUploadDirectory, false );
+ }
+
+ function outputShared( $name ) {
+ global $wgSharedUploadDirectory;
+ return $this->outputItem( $name, $wgSharedUploadDirectory, true );
+ }
+
+ function outputItem( $name, $directory, $shared ) {
+ $filename = $directory .
+ wfGetHashPath( $name, $shared ) .
+ $name;
+ $rel = $this->relativePath( $filename, $this->mBasePath );
+ echo "$rel\n";
+ }
+
+ /**
+ * Return a relative path to $path from the base directory $base
+ * For instance relativePath( '/foo/bar/baz', '/foo' ) should return
+ * 'bar/baz'.
+ */
+ function relativePath( $path, $base) {
+ $path = explode( DIRECTORY_SEPARATOR, $path );
+ $base = explode( DIRECTORY_SEPARATOR, $base );
+ while( count( $base ) && $path[0] == $base[0] ) {
+ array_shift( $path );
+ array_shift( $base );
+ }
+ foreach( $base as $prefix ) {
+ array_unshift( $path, '..' );
+ }
+ return implode( DIRECTORY_SEPARATOR, $path );
+ }
+}
+
+$dumper = new UploadDumper( $options );
+$dumper->run();
+
+?> \ No newline at end of file
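relativePath() above strips the path components the file shares with the base directory and prefixes '..' for whatever remains of the base, so every line the script emits is relative to --base (the wiki include root by default) and can be fed straight to tar. A standalone sketch of the same algorithm, under an illustrative name:

  <?php
  // Same algorithm as UploadDumper::relativePath(), restated outside the class.
  function exampleRelativePath( $path, $base ) {
      $path = explode( DIRECTORY_SEPARATOR, $path );
      $base = explode( DIRECTORY_SEPARATOR, $base );
      // Drop the leading components the two paths have in common...
      while ( count( $base ) && count( $path ) && $path[0] == $base[0] ) {
          array_shift( $path );
          array_shift( $base );
      }
      // ...then climb out of whatever is left of the base.
      foreach ( $base as $prefix ) {
          array_unshift( $path, '..' );
      }
      return implode( DIRECTORY_SEPARATOR, $path );
  }

  echo exampleRelativePath( '/srv/wiki/images/a/ab/Example.png', '/srv/wiki' ), "\n";
  // -> images/a/ab/Example.png
  echo exampleRelativePath( '/srv/shared/images/Foo.png', '/srv/wiki' ), "\n";
  // -> ../shared/images/Foo.png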
diff --git a/maintenance/fixSlaveDesync.php b/maintenance/fixSlaveDesync.php
index e97f96c9..d2dffe54 100644
--- a/maintenance/fixSlaveDesync.php
+++ b/maintenance/fixSlaveDesync.php
@@ -22,12 +22,50 @@ if ( isset( $args[0] ) ) {
} else {
$dbw =& wfGetDB( DB_MASTER );
$maxPage = $dbw->selectField( 'page', 'MAX(page_id)', false, 'fixDesync.php' );
+ $corrupt = findPageLatestCorruption();
+ foreach ( $corrupt as $id => $dummy ) {
+ desyncFixPage( $id );
+ }
+ /*
for ( $i=1; $i <= $maxPage; $i++ ) {
desyncFixPage( $i );
if ( !($i % $reportingInterval) ) {
print "$i\n";
}
+ }*/
+}
+
+function findPageLatestCorruption() {
+ $desync = array();
+ $n = 0;
+ $dbw =& wfGetDB( DB_MASTER );
+ $masterIDs = array();
+ $res = $dbw->select( 'page', array( 'page_id', 'page_latest' ), array( 'page_id<6054123' ), __METHOD__ );
+ print "Number of pages: " . $dbw->numRows( $res ) . "\n";
+ while ( $row = $dbw->fetchObject( $res ) ) {
+ $masterIDs[$row->page_id] = $row->page_latest;
+ if ( !( ++$n % 10000 ) ) {
+ print "$n\r";
+ }
}
+ print "\n";
+ $dbw->freeResult( $res );
+
+ global $slaveIndexes;
+ foreach ( $slaveIndexes as $i ) {
+ $slaveIDs = array();
+ $db =& wfGetDB( $i );
+ $res = $db->select( 'page', array( 'page_id', 'page_latest' ), array( 'page_id<6054123' ), __METHOD__ );
+ while ( $row = $db->fetchObject( $res ) ) {
+ if ( isset( $masterIDs[$row->page_id] ) && $masterIDs[$row->page_id] != $row->page_latest ) {
+ $desync[$row->page_id] = true;
+ print $row->page_id . "\t";
+ }
+ }
+ $db->freeResult( $res );
+ }
+ print "\n";
+ return $desync;
}
function desyncFixPage( $pageID ) {
@@ -36,10 +74,20 @@ function desyncFixPage( $pageID ) {
# Check for a corrupted page_latest
$dbw =& wfGetDB( DB_MASTER );
- $realLatest = $dbw->selectField( 'page', 'page_latest', array( 'page_id' => $pageID ), $fname );
+ $dbw->begin();
+ $realLatest = $dbw->selectField( 'page', 'page_latest', array( 'page_id' => $pageID ),
+ $fname, 'FOR UPDATE' );
+ #list( $masterFile, $masterPos ) = $dbw->getMasterPos();
$found = false;
foreach ( $slaveIndexes as $i ) {
$db =& wfGetDB( $i );
+ /*
+ if ( !$db->masterPosWait( $masterFile, $masterPos, 10 ) ) {
+ echo "Slave is too lagged, aborting\n";
+ $dbw->commit();
+ sleep(10);
+ return;
+ }*/
$latest = $db->selectField( 'page', 'page_latest', array( 'page_id' => $pageID ), $fname );
$max = $db->selectField( 'revision', 'MAX(rev_id)', false, $fname );
if ( $latest != $realLatest && $realLatest < $max ) {
@@ -49,11 +97,14 @@ function desyncFixPage( $pageID ) {
}
}
if ( !$found ) {
+ print "page_id $pageID seems fine\n";
+ $dbw->commit();
return;
}
- # Find the missing revision
- $res = $dbw->select( 'revision', array( 'rev_id' ), array( 'rev_page' => $pageID ), $fname );
+ # Find the missing revisions
+ $res = $dbw->select( 'revision', array( 'rev_id' ), array( 'rev_page' => $pageID ),
+ $fname, 'FOR UPDATE' );
$masterIDs = array();
while ( $row = $dbw->fetchObject( $res ) ) {
$masterIDs[] = $row->rev_id;
@@ -66,35 +117,79 @@ function desyncFixPage( $pageID ) {
$slaveIDs[] = $row->rev_id;
}
$db->freeResult( $res );
- $missingIDs = array_diff( $masterIDs, $slaveIDs );
+ if ( count( $masterIDs ) < count( $slaveIDs ) ) {
+ $missingIDs = array_diff( $slaveIDs, $masterIDs );
+ if ( count( $missingIDs ) ) {
+ print "Found " . count( $missingIDs ) . " lost in master, copying from slave... ";
+ $dbFrom = $db;
+ $dbTo = $dbw;
+ $found = true;
+ $toMaster = true;
+ } else {
+ $found = false;
+ }
+ } else {
+ $missingIDs = array_diff( $masterIDs, $slaveIDs );
+ if ( count( $missingIDs ) ) {
+ print "Found " . count( $missingIDs ) . " missing revision(s), copying from master... ";
+ $dbFrom = $dbw;
+ $dbTo = $db;
+ $found = true;
+ $toMaster = false;
+ } else {
+ $found = false;
+ }
+ }
- if ( count( $missingIDs ) ) {
- print "Found " . count( $missingIDs ) . " missing revision(s), copying from master... ";
+ if ( $found ) {
foreach ( $missingIDs as $rid ) {
print "$rid ";
# Revision
- $row = $dbw->selectRow( 'revision', '*', array( 'rev_id' => $rid ), $fname );
- foreach ( $slaveIndexes as $i ) {
- $db =& wfGetDB( $i );
- $db->insert( 'revision', get_object_vars( $row ), $fname, 'IGNORE' );
+ $row = $dbFrom->selectRow( 'revision', '*', array( 'rev_id' => $rid ), $fname );
+ if ( $toMaster ) {
+ $id = $dbw->selectField( 'revision', 'rev_id', array( 'rev_id' => $rid ),
+ $fname, 'FOR UPDATE' );
+ if ( $id ) {
+ echo "Revision already exists\n";
+ $found = false;
+ break;
+ } else {
+ $dbw->insert( 'revision', get_object_vars( $row ), $fname, 'IGNORE' );
+ }
+ } else {
+ foreach ( $slaveIndexes as $i ) {
+ $db =& wfGetDB( $i );
+ $db->insert( 'revision', get_object_vars( $row ), $fname, 'IGNORE' );
+ }
}
# Text
- $row = $dbw->selectRow( 'text', '*', array( 'old_id' => $row->rev_text_id ), $fname );
- foreach ( $slaveIndexes as $i ) {
- $db =& wfGetDB( $i );
- $db->insert( 'text', get_object_vars( $row ), $fname, 'IGNORE' );
+ $row = $dbFrom->selectRow( 'text', '*', array( 'old_id' => $row->rev_text_id ), $fname );
+ if ( $toMaster ) {
+ $dbw->insert( 'text', get_object_vars( $row ), $fname, 'IGNORE' );
+ } else {
+ foreach ( $slaveIndexes as $i ) {
+ $db =& wfGetDB( $i );
+ $db->insert( 'text', get_object_vars( $row ), $fname, 'IGNORE' );
+ }
}
}
print "done\n";
}
- print "Fixing page_latest... ";
- foreach ( $slaveIndexes as $i ) {
- $db =& wfGetDB( $i );
- $db->update( 'page', array( 'page_latest' => $realLatest ), array( 'page_id' => $pageID ), $fname );
+ if ( $found ) {
+ print "Fixing page_latest... ";
+ if ( $toMaster ) {
+ #$dbw->update( 'page', array( 'page_latest' => $realLatest ), array( 'page_id' => $pageID ), $fname );
+ } else {
+ foreach ( $slaveIndexes as $i ) {
+ $db =& wfGetDB( $i );
+ $db->update( 'page', array( 'page_latest' => $realLatest ), array( 'page_id' => $pageID ), $fname );
+ }
+ }
+ print "done\n";
}
- print "done\n";
+ $dbw->commit();
}
?>
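The rewritten script now builds a page_id => page_latest map from the master, compares it against the same map on each slave, and only runs desyncFixPage() for the page IDs whose latest revision pointers disagree. A compact sketch of that comparison step over plain arrays, with an illustrative function name:

  <?php
  // Given page_id => page_latest maps for the master and one slave,
  // return the page IDs whose latest revision pointers disagree.
  function exampleFindDesyncedPages( array $masterLatest, array $slaveLatest ) {
      $desynced = array();
      foreach ( $slaveLatest as $pageId => $latest ) {
          if ( isset( $masterLatest[$pageId] ) && $masterLatest[$pageId] != $latest ) {
              $desynced[$pageId] = true;
          }
      }
      return array_keys( $desynced );
  }

  $master = array( 1 => 100, 2 => 205, 3 => 310 );
  $slave  = array( 1 => 100, 2 => 199, 3 => 310 );
  print_r( exampleFindDesyncedPages( $master, $slave ) );   // Array ( [0] => 2 )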
diff --git a/maintenance/fuzz-tester.php b/maintenance/fuzz-tester.php
new file mode 100644
index 00000000..23c3cd7c
--- /dev/null
+++ b/maintenance/fuzz-tester.php
@@ -0,0 +1,2458 @@
+<?php
+/**
+* @package MediaWiki
+* @subpackage Maintenance
+* @author Nick Jenkins ( http://nickj.org/ ).
+* @copyright 2006 Nick Jenkins
+* @licence GNU General Public Licence 2.0
+
+Started: 18 May 2006.
+
+Description:
+ Performs fuzz-style testing of MediaWiki's parser and forms.
+
+How:
+ - Generate lots of nasty wiki text.
+ - Ask the Parser to render that wiki text to HTML, or ask MediaWiki's forms
+ to deal with that wiki text.
+ - Check MediaWiki's output for problems.
+ - Repeat.
+
+Why:
+ - To help find bugs.
+ - To help find security issues, or potential security issues.
+
+What type of problems are being checked for:
+ - Unclosed tags.
+ - Errors or interesting warnings from Tidy.
+ - PHP errors / warnings / notices.
+ - MediaWiki internal errors.
+ - Very slow responses.
+ - No response from apache.
+ - Optionally checking for malformed HTML using the W3C validator.
+
+Background:
+ Many of the wikiFuzz class methods are a modified PHP port,
+ Many of the wikiFuzz class methods are a modified PHP port
+ of a "shameless" Python port of LCAMTUF'S MANGELME:
+ - http://www.securityfocus.com/archive/1/378632/2004-10-15/2004-10-21/0
+
+Video:
+ There's an XviD video discussing this fuzz tester. You can get it from:
+ http://files.nickj.org/MediaWiki/Fuzz-Testing-MediaWiki-xvid.avi
+
+Requirements:
+ To run this, you will need:
+ - Command-line PHP5, with PHP-curl enabled (not all installations have this
+ enabled - try "apt-get install php5-curl" if you're on Debian to install).
+ - the Tidy standalone executable. ("apt-get install tidy").
+
+Optional:
+ - If you want to run the curl scripts, you'll need standalone curl installed
+ ("apt-get install curl")
+ - For viewing the W3C validator output on a command line, the "html2text"
+ program may be useful ("apt-get install html2text")
+
+Saving tests and test results:
+ Any of the fuzz tests which find problems are saved for later review.
+ In order to help track down problems, tests are saved in a number of
+ different formats. The default filename extensions and their meanings are:
+ - ".test.php" : PHP script that reproduces just that one problem using PHP-Curl.
+ - ".curl.sh" : Shell script that reproduces that problem using standalone curl.
+ - ".data.bin" : The serialized PHP data so that this script can re-run the test.
+ - ".info.txt" : A human-readable text file with details of the field contents.
+
+Wiki configuration for testing:
+ You should make some additions to LocalSettings.php in order to catch the most
+ errors. Note this configuration is for **TESTING PURPOSES ONLY**, and is IN NO
+ WAY, SHAPE, OR FORM suitable for deployment on a hostile network. That said,
+ personally I find these additions to be the most helpful for testing purposes:
+
+ // --------- Start ---------
+ // Everyone can do everything. Very useful for testing, yet useless for deployment.
+ $wgGroupPermissions['*']['autoconfirmed'] = true;
+ $wgGroupPermissions['*']['block'] = true;
+ $wgGroupPermissions['*']['bot'] = true;
+ $wgGroupPermissions['*']['delete'] = true;
+ $wgGroupPermissions['*']['deletedhistory'] = true;
+ $wgGroupPermissions['*']['deleterevision'] = true;
+ $wgGroupPermissions['*']['editinterface'] = true;
+ $wgGroupPermissions['*']['hiderevision'] = true;
+ $wgGroupPermissions['*']['import'] = true;
+ $wgGroupPermissions['*']['importupload'] = true;
+ $wgGroupPermissions['*']['minoredit'] = true;
+ $wgGroupPermissions['*']['move'] = true;
+ $wgGroupPermissions['*']['patrol'] = true;
+ $wgGroupPermissions['*']['protect'] = true;
+ $wgGroupPermissions['*']['proxyunbannable'] = true;
+ $wgGroupPermissions['*']['renameuser'] = true;
+ $wgGroupPermissions['*']['reupload'] = true;
+ $wgGroupPermissions['*']['reupload-shared'] = true;
+ $wgGroupPermissions['*']['rollback'] = true;
+ $wgGroupPermissions['*']['siteadmin'] = true;
+ $wgGroupPermissions['*']['trackback'] = true;
+ $wgGroupPermissions['*']['unwatchedpages'] = true;
+ $wgGroupPermissions['*']['upload'] = true;
+ $wgGroupPermissions['*']['userrights'] = true;
+ $wgGroupPermissions['*']['renameuser'] = true;
+ $wgGroupPermissions['*']['makebot'] = true;
+ $wgGroupPermissions['*']['makesysop'] = true;
+
+ // Enable weird and wonderful options:
+ // Increase default error reporting level.
+ error_reporting (E_ALL); // At a later date could be increased to E_ALL | E_STRICT
+ $wgBlockOpenProxies = true; // Some block pages require this to be true in order to test.
+ $wgEnableUploads = true; // enable uploads.
+ //$wgUseTrackbacks = true; // enable trackbacks; However this breaks the viewPageTest, so currently disabled.
+ $wgDBerrorLog = "/root/mediawiki-db-error-log.txt"; // log DB errors, replace with suitable path.
+ $wgShowSQLErrors = true; // Show SQL errors (instead of saying the query was hidden).
+
+ // Install & enable Parser Hook extensions to increase code coverage. E.g.:
+ require_once("extensions/ParserFunctions/ParserFunctions.php");
+ require_once("extensions/Cite/Cite.php");
+ require_once("extensions/inputbox/inputbox.php");
+ require_once("extensions/Sort/Sort.php");
+ require_once("extensions/wikihiero/wikihiero.php");
+ require_once("extensions/CharInsert/CharInsert.php");
+ require_once("extensions/FixedImage/FixedImage.php");
+
+ // Install & enable Special Page extensions to increase code coverage. E.g.:
+ require_once("extensions/Cite/SpecialCite.php");
+ require_once("extensions/Filepath/SpecialFilepath.php");
+ require_once("extensions/Makebot/Makebot.php");
+ require_once("extensions/Makesysop/SpecialMakesysop.php");
+ require_once("extensions/Renameuser/SpecialRenameuser.php");
+ require_once("extensions/LinkSearch/LinkSearch.php");
+ // --------- End ---------
+
+ Also add/change this in AdminSettings.php:
+ // --------- Start ---------
+ $wgEnableProfileInfo = true;
+ $wgDBserver = "localhost"; // replace with DB server hostname
+ // --------- End ---------
+
+Usage:
+ Run with "php fuzz-tester.php".
+ To see the various command-line options, run "php fuzz-tester.php --help".
+ To stop the script, press Ctrl-C.
+
+Console output:
+ - If requested, first any previously failed tests will be rerun.
+ - Then new tests will be generated and run. Any tests that fail will be saved,
+ and a brief message about why they failed will be printed on the console.
+ - The console will show the number of tests run, time run, number of tests
+ failed, number of tests being done per minute, and the name of the current test.
+
+TODO:
+ Some known things that could improve this script:
+ - Logging in with cookie jar storage is needed for some tests (there are some
+ pages that cannot be tested without being logged in, and which are currently
+ untested - e.g. Special:Emailuser, Special:Preferences, adding to a Watchlist).
+ - Testing of Timeline extension (I cannot test as ploticus has/had issues on
+ my architecture).
+
+*/
+
+/////////////////////////// COMMAND LINE HELP ////////////////////////////////////
+
+// This is a command line script, load MediaWiki env (gives command line options);
+include('commandLine.inc');
+
+// if the user asked for an explanation of command line options.
+if ( isset( $options["help"] ) ) {
+ print <<<ENDS
+MediaWiki $wgVersion fuzz tester
+Usage: php {$_SERVER["SCRIPT_NAME"]} [--quiet] [--base-url=<url-to-test-wiki>]
+ [--directory=<failed-test-path>] [--include-binary]
+ [--w3c-validate] [--delete-passed-retests] [--help]
+ [--user=<username>] [--password=<password>]
+ [--rerun-failed-tests] [--max-errors=<int>]
+ [--max-runtime=<num-minutes>]
+ [--specific-test=<test-name>]
+
+Options:
+ --quiet : Hides passed tests, shows only failed tests.
+ --base-url : URL to a wiki on which to run the tests.
+ The "http://" is optional and can be omitted.
+ --directory : Full path to directory for storing failed tests.
+ Will be created if it does not exist.
+ --include-binary : Includes non-alphanumeric characters in the tests.
+ --w3c-validate : Validates pages using the W3C's web validator.
+ Slow. Currently many pages fail validation.
+ --user : Login name of a valid user on your test wiki.
+ --password : Password for the valid user on your test wiki.
+ --delete-passed-retests : Will delete retests that now pass.
+ Requires --rerun-failed-tests to be meaningful.
+ --rerun-failed-tests : Whether to rerun any previously failed tests.
+ --max-errors : Maximum number of errors to report before exiting.
+ Does not include errors from --rerun-failed-tests
+ --max-runtime : Maximum runtime, in minutes, to run before exiting.
+ Only applies to new tests, not --rerun-failed-tests
+ --specific-test : Runs only the specified fuzz test.
+ Only applies to new tests, not --rerun-failed-tests
+ --help : Show this help message.
+
+Example:
+ If you wanted to fuzz test a nightly MediaWiki checkout using cron for 1 hour,
+ and only wanted to be informed of errors, and did not want to redo previously
+ failed tests, and wanted a maximum of 100 errors, then you could do:
+ php {$_SERVER["SCRIPT_NAME"]} --quiet --max-errors=100 --max-runtime=60
+
+
+ENDS;
+
+ exit( 0 );
+}
+
+
+// if we got command line options, check they look valid.
+$validOptions = array ("quiet", "base-url", "directory", "include-binary",
+ "w3c-validate", "user", "password", "delete-passed-retests",
+ "rerun-failed-tests", "max-errors",
+ "max-runtime", "specific-test", "help" );
+if (!empty($options)) {
+ $unknownArgs = array_diff (array_keys($options), $validOptions);
+ foreach ($unknownArgs as $invalidArg) {
+ print "Ignoring invalid command-line option: --$invalidArg\n";
+ }
+}
+
+
+///////////////////////////// CONFIGURATION ////////////////////////////////////
+
+// URL to some wiki on which we can run our tests.
+if (!empty($options["base-url"])) {
+ define("WIKI_BASE_URL", $options["base-url"]);
+} else {
+ define("WIKI_BASE_URL", $wgServer . $wgScriptPath . '/');
+}
+
+// The directory name where we store the output.
+// Example for Windows: "c:\\temp\\wiki-fuzz"
+if (!empty($options["directory"])) {
+ define("DIRECTORY", $options["directory"] );
+} else {
+ define("DIRECTORY", "{$wgUploadDirectory}/fuzz-tests");
+}
+
+// Should our test fuzz data include binary strings?
+define("INCLUDE_BINARY", isset($options["include-binary"]) );
+
+// Whether we want to validate HTML output on the web.
+// At the moment very few generated pages will validate, so not recommended.
+define("VALIDATE_ON_WEB", isset($options["w3c-validate"]) );
+// URL to use to validate our output:
+define("VALIDATOR_URL", "http://validator.w3.org/check");
+
+// Location of Tidy standalone executable.
+define("PATH_TO_TIDY", "/usr/bin/tidy");
+
+// The name of a user who has edited on your wiki. Used
+// when testing the Special:Contributions and Special:Userlogin page.
+if (!empty($options["user"])) {
+ define("USER_ON_WIKI", $options["user"] );
+} else {
+ define("USER_ON_WIKI", "nickj");
+}
+
+// The password of the above user. Used when testing the login page,
+// and to do this we sometimes need to login successfully.
+if (!empty($options["password"])) {
+ define("USER_PASSWORD", $options["password"] );
+} else {
+ // And no, this is not a valid password on any public wiki.
+ define("USER_PASSWORD", "nickj");
+}
+
+// If we have a test that failed, and then we run it again, and it passes,
+// do you want to delete it or keep it?
+define("DELETE_PASSED_RETESTS", isset($options["delete-passed-retests"]) );
+
+// Do we want to rerun old saved tests at script startup?
+// Set to true to help catch regressions, or false if you only want new stuff.
+define("RERUN_OLD_TESTS", isset($options["rerun-failed-tests"]) );
+
+// File where the database errors are logged. Should be defined in LocalSettings.php.
+define("DB_ERROR_LOG_FILE", $wgDBerrorLog );
+
+// Run in chatty mode (all output, default), or run in quiet mode (only prints out details of failed tests)?
+define("QUIET", isset($options["quiet"]) );
+
+// The maximum runtime, if specified.
+if (!empty($options["max-runtime"]) && intval($options["max-runtime"])>0) {
+ define("MAX_RUNTIME", intval($options["max-runtime"]) );
+}
+
+// The maximum number of problems to find, if specified. Excludes retest errors.
+if (!empty($options["max-errors"]) && intval($options["max-errors"])>0) {
+ define("MAX_ERRORS", intval($options["max-errors"]) );
+}
+
+// if the user has requested a specific test (instead of all tests), and the test they asked for looks valid.
+if (!empty($options["specific-test"])) {
+ if (class_exists($options["specific-test"]) && get_parent_class($options["specific-test"])=="pageTest") {
+ define("SPECIFIC_TEST", $options["specific-test"] );
+ }
+ else {
+ print "Ignoring invalid --specific-test\n";
+ }
+}
+
+// Define the file extensions we'll use:
+define("PHP_TEST" , ".test.php");
+define("CURL_TEST", ".curl.sh" );
+define("DATA_FILE", ".data.bin");
+define("INFO_FILE", ".info.txt");
+define("HTML_FILE", ".wiki_preview.html");
+
+// If it goes wrong, we want to know about it.
+error_reporting(E_ALL | E_STRICT);
+
+//////////////// A CLASS THAT GENERATES RANDOM NASTY WIKI & HTML STRINGS //////////////////////
+
+class wikiFuzz {
+
+ // Only some HTML tags are understood with params by MediaWiki, the rest are ignored.
+ // List the tags that accept params below, as well as what those params are.
+ public static $data = array(
+ "B" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
+ "CAPTION" => array("CLASS", "ID", "STYLE", "align", "lang", "dir", "title"),
+ "CENTER" => array("CLASS", "STYLE", "ID", "lang", "dir", "title"),
+ "DIV" => array("CLASS", "STYLE", "ID", "align", "lang", "dir", "title"),
+ "FONT" => array("CLASS", "STYLE", "ID", "lang", "dir", "title", "face", "size", "color"),
+ "H1" => array("STYLE", "CLASS", "ID", "align", "lang", "dir", "title"),
+ "H2" => array("STYLE", "CLASS", "ID", "align", "lang", "dir", "title"),
+ "HR" => array("STYLE", "CLASS", "ID", "WIDTH", "lang", "dir", "title", "size", "noshade"),
+ "LI" => array("CLASS", "ID", "STYLE", "lang", "dir", "title", "type", "value"),
+ "TABLE" => array("STYLE", "CLASS", "ID", "BGCOLOR", "WIDTH", "ALIGN", "BORDER", "CELLPADDING",
+ "CELLSPACING", "lang", "dir", "title", "summary", "frame", "rules"),
+ "TD" => array("STYLE", "CLASS", "ID", "BGCOLOR", "WIDTH", "ALIGN", "COLSPAN", "ROWSPAN",
+ "VALIGN", "abbr", "axis", "headers", "scope", "nowrap", "height", "lang",
+ "dir", "title", "char", "charoff"),
+ "TH" => array("STYLE", "CLASS", "ID", "BGCOLOR", "WIDTH", "ALIGN", "COLSPAN", "ROWSPAN",
+ "VALIGN", "abbr", "axis", "headers", "scope", "nowrap", "height", "lang",
+ "dir", "title", "char", "charoff"),
+ "TR" => array("CLASS", "STYLE", "ID", "BGCOLOR", "ALIGN", "VALIGN", "lang", "dir", "title", "char", "charoff"),
+ "UL" => array("CLASS", "STYLE", "ID", "lang", "dir", "title", "type"),
+ "P" => array("style", "class", "id", "align", "lang", "dir", "title"),
+ "blockquote" => array("CLASS", "ID", "STYLE", "lang", "dir", "title", "cite"),
+ "span" => array("CLASS", "ID", "STYLE", "align", "lang", "dir", "title"),
+ "code" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
+ "tt" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
+ "small" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
+ "big" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
+ "s" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
+ "u" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
+ "del" => array("CLASS", "ID", "STYLE", "lang", "dir", "title", "datetime", "cite"),
+ "ins" => array("CLASS", "ID", "STYLE", "lang", "dir", "title", "datetime", "cite"),
+ "sub" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
+ "sup" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
+ "ol" => array("CLASS", "ID", "STYLE", "lang", "dir", "title", "type", "start"),
+ "br" => array("CLASS", "ID", "STYLE", "title", "clear"),
+ "cite" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
+ "var" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
+ "dl" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
+ "ruby" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
+ "rt" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
+ "rp" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
+ "dt" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
+ "dl" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
+ "em" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
+ "strong" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
+ "i" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
+ "thead" => array("CLASS", "ID", "STYLE", "lang", "dir", "title", 'align', 'char', 'charoff', 'valign'),
+ "tfoot" => array("CLASS", "ID", "STYLE", "lang", "dir", "title", 'align', 'char', 'charoff', 'valign'),
+ "tbody" => array("CLASS", "ID", "STYLE", "lang", "dir", "title", 'align', 'char', 'charoff', 'valign'),
+ "colgroup" => array("CLASS", "ID", "STYLE", "lang", "dir", "title", 'align', 'char', 'charoff', 'valign', 'span', 'width'),
+ "col" => array("CLASS", "ID", "STYLE", "lang", "dir", "title", 'align', 'char', 'charoff', 'valign', 'span', 'width'),
+ "pre" => array("CLASS", "ID", "STYLE", "lang", "dir", "title", "width"),
+
+ // extension tags that accept parameters:
+ "sort" => array("order", "class"),
+ "ref" => array("name"),
+ "categorytree" => array("hideroot", "mode", "style"),
+ );
+
+ // The types of HTML tags that we will be testing were defined above.
+ // Note: this needs to be initialized later to be equal to: array_keys(wikiFuzz::$data);
+ // as such, it also needs to be publicly modifiable.
+ public static $types;
+
+
+ // Some attribute values.
+ static private $other = array("&","=",":","?","\"","\n","%n%n%n%n%n%n%n%n%n%n%n%n","\\");
+ static private $ints = array(
+ // various numbers
+ "0","-1","127","-7897","89000","808080","90928345",
+ "0xfffffff","ffff",
+
+ // Different ways of saying: '
+ "&#0000039;", // Long UTF-8 Unicode encoding
+ "&#39;", // dec version.
+ "&#x27;", // hex version.
+ "&#xA7;", // malformed hex variant, MSB not zero.
+
+ // Different ways of saying: "
+ "&#0000034;", // Long UTF-8 Unicode encoding
+ "&#34;",
+ "&#x22;", // hex version.
+ "&#xA2;", // malformed hex variant, MSB not zero.
+
+ // Different ways of saying: <
+ "<",
+ "&#0000060", // Long UTF-8 Unicode encoding without semicolon (MediaWiki wants the semicolon)
+ "&#0000060;", // Long UTF-8 Unicode encoding with semicolon
+ "&#60;",
+ "&#x3C;", // hex version.
+ "&#xBC;", // malformed hex variant, MSB not zero.
+ "&#x0003C;", // mid-length hex version
+ "&#X00003C;", // slightly longer hex version, with capital "X"
+
+ // Different ways of saying: >
+ ">",
+ "&#0000062;", // Long UTF-8 Unicode encoding
+ "&#62;",
+ "&#x3E;", // hex version.
+ "&#xBE;", // malformed variant, MSB not zero.
+
+ // Different ways of saying: [
+ "&#0000091;", // Long UTF-8 Unicode encoding
+ "&#91;",
+ "&#x5B;", // hex version.
+
+ // Different ways of saying: {{
+ "&#0000123;&#0000123;", // Long UTF-8 Unicode encoding
+ "&#123;&#123;",
+ "&#x7B;&#x7B;", // hex version.
+
+ // Different ways of saying: |
+ "&#0000124;", // Long UTF-8 Unicode encoding
+ "&#124;",
+ "&#x7C;", // hex version.
+ "&#xFC;", // malformed hex variant, MSB not zero.
+
+ // a "ligature" - http://www.robinlionheart.com/stds/html4/spchars#ligature
+ "&zwnj;"
+ );
+
+ // Defines various wiki-related bits of syntax, that can potentially cause
+ // MediaWiki to do something other than just print that literal text.
+ static private $ext = array(
+ // links, templates, parameters.
+ "[[", "]]", "{{", "}}", "|", "[", "]", "{{{", "}}}", "|]]",
+
+ // wiki tables.
+ "\n{|", "\n|}",
+ "!",
+ "\n!",
+ "!!",
+ "||",
+ "\n|-", "| ", "\n|",
+
+ // section headings.
+ "=", "==", "===", "====", "=====", "======",
+
+ // lists (ordered and unordered) and indentation.
+ "\n*", "*", "\n:", ":",
+ "\n#", "#",
+
+ // definition lists (dl, dt, dd), newline, and newline with pre, and a tab.
+ "\n;", ";", "\n ",
+
+ // Whitespace: newline, tab, space.
+ "\n", "\t", " ",
+
+ // Some XSS attack vectors from http://ha.ckers.org/xss.html
+ "&#x09;", // tab
+ "&#x0A;", // newline
+ "&#x0D;", // carriage return
+ "\0", // null character
+ " &#14; ", // spaces and meta characters
+ "'';!--\"<XSS>=&{()}", // compact injection of XSS & SQL tester
+
+ // various NULL fields
+ "%00",
+ "&#00;",
+ "\0",
+
+ // horizontal rule.
+ "-----", "\n-----",
+
+ // signature, redirect, bold, italics.
+ "~~~~", "#REDIRECT [[", "'''", "''",
+
+ // comments.
+ "<!--", "-->",
+
+ // quotes.
+ "\"", "'",
+
+ // tag start and tag end.
+ "<", ">",
+
+ // implicit link creation on URIs.
+ "http://",
+ "https://",
+ "ftp://",
+ "irc://",
+ "news:",
+ 'gopher://',
+ 'telnet://',
+ 'nntp://',
+ 'worldwind://',
+ 'mailto:',
+
+ // images.
+ "[[image:",
+ ".gif",
+ ".png",
+ ".jpg",
+ ".jpeg",
+ 'thumbnail=',
+ 'thumbnail',
+ 'thumb=',
+ 'thumb',
+ 'right',
+ 'none',
+ 'left',
+ 'framed',
+ 'frame',
+ 'enframed',
+ 'centre',
+ 'center',
+ "Image:",
+ "[[:Image",
+ 'px',
+
+ // misc stuff to throw at the Parser.
+ '%08X',
+ '/',
+ ":x{|",
+ "\n|+",
+ "<noinclude>",
+ "</noinclude>",
+ " \302\273",
+ " :",
+ " !",
+ " ;",
+ "\302\253",
+ "[[category:",
+ "?=",
+ "(",
+ ")",
+ "]]]",
+ "../",
+ "{{{{",
+ "}}}}",
+ "[[Special:",
+ "<includeonly>",
+ "</includeonly>",
+ "<!--MWTEMPLATESECTION=",
+ '<!--MWTOC-->',
+
+ // implicit link creation on booknum, RFC, and PubMed ID usage (both with and without IDs)
+ "ISBN 2",
+ "RFC 000",
+ "PMID 000",
+ "ISBN ",
+ "RFC ",
+ "PMID ",
+
+ // magic words:
+ '__NOTOC__',
+ '__FORCETOC__',
+ '__NOEDITSECTION__',
+ '__START__',
+ '__NOTITLECONVERT__',
+ '__NOCONTENTCONVERT__',
+ '__END__',
+ '__TOC__',
+ '__NOTC__',
+ '__NOCC__',
+ "__FORCETOC__",
+ "__NEWSECTIONLINK__",
+ "__NOGALLERY__",
+
+ // more magic words / internal templates.
+ '{{PAGENAME}}',
+ '{{PAGENAMEE}}',
+ '{{NAMESPACE}}',
+ "{{MSG:",
+ "}}",
+ "{{MSGNW:",
+ "}}",
+ "{{INT:",
+ "}}",
+ '{{SITENAME}}',
+ "{{NS:",
+ "}}",
+ "{{LOCALURL:",
+ "}}",
+ "{{LOCALURLE:",
+ "}}",
+ "{{SCRIPTPATH}}",
+ "{{GRAMMAR:gentiv|",
+ "}}",
+ "{{REVISIONID}}",
+ "{{SUBPAGENAME}}",
+ "{{SUBPAGENAMEE}}",
+ "{{ns:0}}",
+ "{{fullurle:",
+ "}}",
+ "{{subst:",
+ "}}",
+ "{{UCFIRST:",
+ "}}",
+ "{{UC:",
+ '{{SERVERNAME}}',
+ '{{SERVER}}',
+ "{{RAW:",
+ "}}",
+ "{{PLURAL:",
+ "}}",
+ "{{LCFIRST:",
+ "}}",
+ "{{LC:",
+ "}}",
+ '{{CURRENTWEEK}}',
+ '{{CURRENTDOW}}',
+ "{{INT:{{LC:contribs-showhideminor}}|",
+ "}}",
+ "{{INT:googlesearch|",
+ "}}",
+ "{{BASEPAGENAME}}",
+ "{{CONTENTLANGUAGE}}",
+ "{{PAGESINNAMESPACE:}}",
+ "{{#language:",
+ "}}",
+
+ // Some raw link for magic words.
+ "{{NUMBEROFPAGES:R",
+ "}}",
+ "{{NUMBEROFUSERS:R",
+ "}}",
+ "{{NUMBEROFARTICLES:R",
+ "}}",
+ "{{NUMBEROFFILES:R",
+ "}}",
+ "{{NUMBEROFADMINS:R",
+ "}}",
+ "{{padleft:",
+ "}}",
+ "{{padright:",
+ "}}",
+
+ // internal Math "extension":
+ "<math>",
+ "</math>",
+
+ // Parser extension functions:
+ "{{#expr:",
+ "{{#if:",
+ "{{#ifeq:",
+ "{{#ifexist:",
+ "{{#ifexpr:",
+ "{{#switch:",
+ "{{#time:",
+ "}}",
+
+ // references table for the Cite extension.
+ "<references/>",
+
+ // Internal Parser tokens - try inserting some of these.
+ "UNIQ25f46b0524f13e67NOPARSE",
+ "UNIQ17197916557e7cd6-HTMLCommentStrip46238afc3bb0cf5f00000002",
+ "\x07UNIQ17197916557e7cd6-HTMLCommentStrip46238afc3bb0cf5f00000002-QINU",
+
+ // Inputbox extension:
+ "<inputbox>\ntype=search\nsearchbuttonlabel=\n",
+ "</inputbox>",
+
+ // charInsert extension:
+ "<charInsert>",
+ "</charInsert>",
+
+ // wikiHiero extension:
+ "<hiero>",
+ "</hiero>",
+
+ // Image gallery:
+ "<gallery>",
+ "</gallery>",
+
+ // FixedImage:
+ "<fundraising/>",
+
+ // Timeline extension: currently untested.
+
+ // Nowiki:
+ "<nOwIkI>",
+ "</nowiki>",
+
+ // an external image to test the external image displaying code
+ "http://debian.org/Pics/debian.png",
+ );
+
+ /**
+ ** @desc: Randomly returns one element of the input array.
+ */
+ static public function chooseInput(array $input) {
+ $randindex = wikiFuzz::randnum(count($input) - 1);
+ return $input[$randindex];
+ }
+
+ // Max number of parameters for HTML attributes.
+ static private $maxparams = 10;
+
+ /**
+ ** @desc: Returns random number between finish and start.
+ */
+ static public function randnum($finish,$start=0) {
+ return mt_rand($start,$finish);
+ }
+
+ /**
+ ** @desc: Returns a mix of random text and random wiki syntax.
+ */
+ static private function randstring() {
+ $thestring = "";
+
+ for ($i=0; $i<40; $i++) {
+ $what = wikiFuzz::randnum(1);
+
+ if ($what == 0) { // include some random wiki syntax
+ $which = wikiFuzz::randnum(count(wikiFuzz::$ext) - 1);
+ $thestring .= wikiFuzz::$ext[$which];
+ }
+ else { // include some random text
+ $char = INCLUDE_BINARY
+ // Decimal version:
+ // "&#" . wikiFuzz::randnum(255) . ";"
+ // Hex version:
+ ? "&#x" . str_pad(dechex(wikiFuzz::randnum(255)), wikiFuzz::randnum(2, 7), "0", STR_PAD_LEFT) . ";"
+ : chr(wikiFuzz::randnum(126,32));
+
+ $length = wikiFuzz::randnum(8);
+ $thestring .= str_repeat ($char, $length);
+ }
+ }
+ return $thestring;
+ }
+
+ /**
+ ** @desc: Returns either random text, or random wiki syntax, or random data from "ints",
+ ** or random data from "other".
+ */
+ static private function makestring() {
+ $what = wikiFuzz::randnum(2);
+ if ($what == 0) {
+ return wikiFuzz::randstring();
+ }
+ elseif ($what == 1) {
+ return wikiFuzz::$ints[wikiFuzz::randnum(count(wikiFuzz::$ints) - 1)];
+ }
+ else {
+ return wikiFuzz::$other[wikiFuzz::randnum(count(wikiFuzz::$other) - 1)];
+ }
+ }
+
+
+ /**
+ ** @desc: Strips out the stuff that Mediawiki balks at in a page's title.
+ ** Implementation copied/pasted from cleanupTable.inc & cleanupImages.php
+ */
+ static public function makeTitleSafe($str) {
+ $legalTitleChars = " %!\"$&'()*,\\-.\\/0-9:;=?@A-Z\\\\^_`a-z~\\x80-\\xFF";
+ return preg_replace_callback(
+ "/([^$legalTitleChars])/",
+ create_function(
+ // single quotes are essential here,
+ // or alternative escape all $ as \$
+ '$matches',
+ 'return sprintf( "\\x%02x", ord( $matches[1] ) );'
+ ),
+ $str );
+ }
+
+ /**
+ ** @desc: Returns a string of fuzz text.
+ */
+ static private function loop() {
+ switch ( wikiFuzz::randnum(3) ) {
+ case 1: // an opening tag, with parameters.
+ $string = "";
+ $i = wikiFuzz::randnum(count(wikiFuzz::$types) - 1);
+ $t = wikiFuzz::$types[$i];
+ $arr = wikiFuzz::$data[$t];
+ $string .= "<" . $t . " ";
+ $num_params = min(wikiFuzz::$maxparams, count($arr));
+ for ($z=0; $z<$num_params; $z++) {
+ $badparam = $arr[wikiFuzz::randnum(count($arr) - 1)];
+ $badstring = wikiFuzz::makestring();
+ $string .= $badparam . "=" . wikiFuzz::getRandQuote() . $badstring . wikiFuzz::getRandQuote() . " ";
+ }
+ $string .= ">\n";
+ return $string;
+ case 2: // a closing tag.
+ $i = wikiFuzz::randnum(count(wikiFuzz::$types) - 1);
+ return "</". wikiFuzz::$types[$i] . ">";
+ case 3: // a random string, between tags.
+ return wikiFuzz::makeString();
+ }
+ return ""; // reached when randnum() returns 0: contributes nothing this iteration.
+ }
+
+ /**
+ ** @desc: Returns one of the three styles of random quote: ', ", and nothing.
+ */
+ static private function getRandQuote() {
+ switch ( wikiFuzz::randnum(3) ) {
+ case 1 : return "'";
+ case 2 : return "\"";
+ default: return "";
+ }
+ }
+
+ /**
+ ** @desc: Returns fuzz text, with the parameter indicating approximately how many lines of text you want.
+ */
+ static public function makeFuzz($maxtypes = 2) {
+ $page = "";
+ for ($k=0; $k<$maxtypes; $k++) {
+ $page .= wikiFuzz::loop();
+ }
+ return $page;
+ }
+}
+
+
+//////// MEDIAWIKI PAGES TO TEST, AND HOW TO TEST THEM ///////
+
+/**
+ ** @desc: A page test has just these things:
+ ** 1) Form parameters.
+ ** 2) the URL we are going to test those parameters on.
+ ** 3) Any cookies required for the test.
+ ** Declared abstract because it should be extended by a class
+ ** that supplies these parameters.
+ */
+abstract class pageTest {
+ protected $params;
+ protected $pagePath;
+ protected $cookie = "";
+
+ public function getParams() {
+ return $this->params;
+ }
+
+ public function getPagePath() {
+ return $this->pagePath;
+ }
+
+ public function getCookie() {
+ return $this->cookie;
+ }
+}
+
+
+/**
+ ** @desc: a page test for the "Edit" page. Tests Parser.php and Sanitizer.php.
+ */
+class editPageTest extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php?title=WIKIFUZZ";
+
+ $this->params = array (
+ "action" => "submit",
+ "wpMinoredit" => wikiFuzz::makeFuzz(2),
+ "wpPreview" => wikiFuzz::makeFuzz(2),
+ "wpSection" => wikiFuzz::makeFuzz(2),
+ "wpEdittime" => wikiFuzz::makeFuzz(2),
+ "wpSummary" => wikiFuzz::makeFuzz(2),
+ "wpScrolltop" => wikiFuzz::makeFuzz(2),
+ "wpStarttime" => wikiFuzz::makeFuzz(2),
+ "wpAutoSummary" => wikiFuzz::makeFuzz(2),
+ "wpTextbox1" => wikiFuzz::makeFuzz(40) // the main wiki text, need lots of this.
+ );
+
+ // sometimes we don't want to specify certain parameters.
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["wpSection"]);
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["wpEdittime"]);
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["wpSummary"]);
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["wpScrolltop"]);
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["wpStarttime"]);
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["wpAutoSummary"]);
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["wpTextbox1"]);
+ }
+}
+
+
+/**
+ ** @desc: a page test for "Special:Listusers".
+ */
+class listusersTest extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php/Special:Listusers";
+
+ $this->params = array (
+ "title" => wikiFuzz::makeFuzz(2),
+ "group" => wikiFuzz::makeFuzz(2),
+ "username" => wikiFuzz::makeFuzz(2),
+ "Go" => wikiFuzz::makeFuzz(2),
+ "limit" => wikiFuzz::chooseInput( array("0", "-1", "---'----------0", "+1", "8134", wikiFuzz::makeFuzz(2)) ),
+ "offset" => wikiFuzz::chooseInput( array("0", "-1", "--------'-----0", "+1", "81343242346234234", wikiFuzz::makeFuzz(2)) )
+ );
+ }
+}
+
+
+/**
+ ** @desc: a page test for "Special:Search".
+ */
+class searchTest extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php/Special:Search";
+
+ $this->params = array (
+ "action" => "index.php/Special:Search",
+ "ns0" => wikiFuzz::makeFuzz(2),
+ "ns1" => wikiFuzz::makeFuzz(2),
+ "ns2" => wikiFuzz::makeFuzz(2),
+ "ns3" => wikiFuzz::makeFuzz(2),
+ "ns4" => wikiFuzz::makeFuzz(2),
+ "ns5" => wikiFuzz::makeFuzz(2),
+ "ns6" => wikiFuzz::makeFuzz(2),
+ "ns7" => wikiFuzz::makeFuzz(2),
+ "ns8" => wikiFuzz::makeFuzz(2),
+ "ns9" => wikiFuzz::makeFuzz(2),
+ "ns10" => wikiFuzz::makeFuzz(2),
+ "ns11" => wikiFuzz::makeFuzz(2),
+ "ns12" => wikiFuzz::makeFuzz(2),
+ "ns13" => wikiFuzz::makeFuzz(2),
+ "ns14" => wikiFuzz::makeFuzz(2),
+ "ns15" => wikiFuzz::makeFuzz(2),
+ "redirs" => wikiFuzz::makeFuzz(2),
+ "search" => wikiFuzz::makeFuzz(2),
+ "offset" => wikiFuzz::chooseInput( array("", "0", "-1", "--------'-----0", "+1", "81343242346234234", wikiFuzz::makeFuzz(2)) ),
+ "fulltext" => wikiFuzz::chooseInput( array("", "0", "1", "--------'-----0", "+1", wikiFuzz::makeFuzz(2)) ),
+ "searchx" => wikiFuzz::chooseInput( array("", "0", "1", "--------'-----0", "+1", wikiFuzz::makeFuzz(2)) )
+ );
+ }
+}
+
+
+/**
+ ** @desc: a page test for "Special:Recentchanges".
+ */
+class recentchangesTest extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php/Special:Recentchanges";
+
+ $this->params = array (
+ "action" => wikiFuzz::makeFuzz(2),
+ "title" => wikiFuzz::makeFuzz(2),
+ "namespace" => wikiFuzz::chooseInput( range(-1, 15) ),
+ "Go" => wikiFuzz::makeFuzz(2),
+ "invert" => wikiFuzz::chooseInput( array("-1", "---'----------0", "+1", "8134", wikiFuzz::makeFuzz(2)) ),
+ "hideanons" => wikiFuzz::chooseInput( array("-1", "------'-------0", "+1", "8134", wikiFuzz::makeFuzz(2)) ),
+ 'limit' => wikiFuzz::chooseInput( array("0", "-1", "---------'----0", "+1", "81340909772349234", wikiFuzz::makeFuzz(2)) ),
+ "days" => wikiFuzz::chooseInput( array("-1", "----------'---0", "+1", "8134", wikiFuzz::makeFuzz(2)) ),
+ "hideminor" => wikiFuzz::chooseInput( array("-1", "-----------'--0", "+1", "8134", wikiFuzz::makeFuzz(2)) ),
+ "hidebots" => wikiFuzz::chooseInput( array("-1", "---------'----0", "+1", "8134", wikiFuzz::makeFuzz(2)) ),
+ "hideliu" => wikiFuzz::chooseInput( array("-1", "-------'------0", "+1", "8134", wikiFuzz::makeFuzz(2)) ),
+ "hidepatrolled" => wikiFuzz::chooseInput( array("-1", "-----'--------0", "+1", "8134", wikiFuzz::makeFuzz(2)) ),
+ "hidemyself" => wikiFuzz::chooseInput( array("-1", "--'-----------0", "+1", "8134", wikiFuzz::makeFuzz(2)) ),
+ 'categories_any'=> wikiFuzz::chooseInput( array("-1", "--'-----------0", "+1", "8134", wikiFuzz::makeFuzz(2)) ),
+ 'categories' => wikiFuzz::chooseInput( array("-1", "--'-----------0", "+1", "8134", wikiFuzz::makeFuzz(2)) ),
+ 'feed' => wikiFuzz::chooseInput( array("-1", "--'-----------0", "+1", "8134", wikiFuzz::makeFuzz(2)) )
+ );
+ }
+}
+
+
+/**
+ ** @desc: a page test for "Special:Prefixindex".
+ */
+class prefixindexTest extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php/Special:Prefixindex";
+
+ $this->params = array (
+ "title" => "Special:Prefixindex",
+ "namespace" => wikiFuzz::randnum(-10,101),
+ "Go" => wikiFuzz::makeFuzz(2)
+ );
+
+ // sometimes we want 'prefix', sometimes we want 'from', and sometimes we want nothing.
+ if (wikiFuzz::randnum(3) == 0) {
+ $this->params["prefix"] = wikiFuzz::chooseInput( array("-1", "-----'--------0", "+++--+1",
+ wikiFuzz::randnum(-10,8134), wikiFuzz::makeFuzz(2)) );
+ }
+ if (wikiFuzz::randnum(3) == 0) {
+ $this->params["from"] = wikiFuzz::chooseInput( array("-1", "-----'--------0", "+++--+1",
+ wikiFuzz::randnum(-10,8134), wikiFuzz::makeFuzz(2)) );
+ }
+ }
+}
+
+
+/**
+ ** @desc: a page test for "Special:MIMEsearch".
+ */
+class mimeSearchTest extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php/Special:MIMEsearch";
+
+ $this->params = array (
+ "action" => "/wiki/index.php/Special:MIMEsearch",
+ "mime" => wikiFuzz::makeFuzz(3),
+ 'limit' => wikiFuzz::chooseInput( array("0", "-1", "-------'------0", "+1", "81342321351235325", wikiFuzz::makeFuzz(2)) ),
+ 'offset' => wikiFuzz::chooseInput( array("0", "-1", "-----'--------0", "+1", "81341231235365252234324", wikiFuzz::makeFuzz(2)) )
+ );
+ }
+}
+
+
+/**
+ ** @desc: a page test for "Special:Log".
+ */
+class specialLogTest extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php/Special:Log";
+
+ $this->params = array (
+ "type" => wikiFuzz::chooseInput( array("", wikiFuzz::makeFuzz(2)) ),
+ "par" => wikiFuzz::makeFuzz(2),
+ "user" => wikiFuzz::makeFuzz(2),
+ "page" => wikiFuzz::makeFuzz(2),
+ "from" => wikiFuzz::makeFuzz(2),
+ "until" => wikiFuzz::makeFuzz(2),
+ "title" => wikiFuzz::makeFuzz(2)
+ );
+ }
+}
+
+
+/**
+ ** @desc: a page test for "Special:Userlogin", with a successful login.
+ */
+class successfulUserLoginTest extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Userlogin&action=submitlogin&type=login&returnto=" . wikiFuzz::makeFuzz(2);
+
+ $this->params = array (
+ "wpName" => USER_ON_WIKI,
+ // sometimes real password, sometimes not:
+ 'wpPassword' => wikiFuzz::chooseInput( array( wikiFuzz::makeFuzz(2), USER_PASSWORD ) ),
+ 'wpRemember' => wikiFuzz::makeFuzz(2)
+ );
+
+ $this->cookie = "wikidb_session=" . wikiFuzz::chooseInput( array("1" , wikiFuzz::makeFuzz(2) ) );
+ }
+}
+
+
+/**
+ ** @desc: a page test for "Special:Userlogin".
+ */
+class userLoginTest extends pageTest {
+ function __construct() {
+
+ $this->pagePath = "index.php/Special:Userlogin";
+
+ $this->params = array (
+ 'wpRetype' => wikiFuzz::makeFuzz(2),
+ 'wpRemember' => wikiFuzz::makeFuzz(2),
+ 'wpRealName' => wikiFuzz::makeFuzz(2),
+ 'wpPassword' => wikiFuzz::makeFuzz(2),
+ 'wpName' => wikiFuzz::makeFuzz(2),
+ 'wpMailmypassword'=> wikiFuzz::makeFuzz(2),
+ 'wpLoginattempt' => wikiFuzz::makeFuzz(2),
+ 'wpEmail' => wikiFuzz::makeFuzz(2),
+ 'wpDomain' => wikiFuzz::chooseInput( array("", "local", wikiFuzz::makeFuzz(2)) ),
+ 'wpCreateaccountMail' => wikiFuzz::chooseInput( array("", wikiFuzz::makeFuzz(2)) ),
+ 'wpCreateaccount' => wikiFuzz::chooseInput( array("", wikiFuzz::makeFuzz(2)) ),
+ 'wpCookieCheck' => wikiFuzz::chooseInput( array("", wikiFuzz::makeFuzz(2)) ),
+ 'type' => wikiFuzz::chooseInput( array("signup", "login", "", wikiFuzz::makeFuzz(2)) ),
+ 'returnto' => wikiFuzz::makeFuzz(2),
+ 'action' => wikiFuzz::chooseInput( array("", "submitlogin", wikiFuzz::makeFuzz(2)) )
+ );
+
+ $this->cookie = "wikidb_session=" . wikiFuzz::chooseInput( array("1" , wikiFuzz::makeFuzz(2) ) );
+ }
+}
+
+
+/**
+ ** @desc: a page test for "Special:Ipblocklist" (also includes unblocking)
+ */
+class ipblocklistTest extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php/Special:Ipblocklist";
+
+ $this->params = array (
+ 'wpUnblockAddress'=> wikiFuzz::makeFuzz(2),
+ 'ip' => wikiFuzz::chooseInput( array("20398702394", "", "Nickj2", wikiFuzz::makeFuzz(2),
+ // something like an IP address, sometimes invalid:
+ ( wikiFuzz::randnum(300,-20) . "." . wikiFuzz::randnum(300,-20) . "."
+ . wikiFuzz::randnum(300,-20) . "." .wikiFuzz::randnum(300,-20) ) ) ),
+ 'id' => wikiFuzz::makeFuzz(2),
+ 'wpUnblockReason' => wikiFuzz::makeFuzz(2),
+ 'action' => wikiFuzz::chooseInput( array(wikiFuzz::makeFuzz(2), "success", "submit", "unblock") ),
+ 'wpEditToken' => wikiFuzz::makeFuzz(2),
+ 'wpBlock' => wikiFuzz::chooseInput( array(wikiFuzz::makeFuzz(2), "") ),
+ 'limit' => wikiFuzz::chooseInput( array("0", "-1", "--------'-----0", "+1",
+ "09700982312351132098234", wikiFuzz::makeFuzz(2)) ),
+ 'offset' => wikiFuzz::chooseInput( array("0", "-1", "------'-------0", "+1",
+ "09700980982341535324234234", wikiFuzz::makeFuzz(2)) )
+ );
+
+ // sometimes we don't want to specify certain parameters.
+ if (wikiFuzz::randnum(4) == 0) unset($this->params["action"]);
+ if (wikiFuzz::randnum(3) == 0) unset($this->params["ip"]);
+ if (wikiFuzz::randnum(2) == 0) unset($this->params["id"]);
+ if (wikiFuzz::randnum(3) == 0) unset($this->params["wpUnblockAddress"]);
+ }
+}
+
+
+/**
+ ** @desc: a page test for "Special:Newimages".
+ */
+class newImagesTest extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php/Special:Newimages";
+
+ $this->params = array (
+ 'hidebots' => wikiFuzz::chooseInput( array(wikiFuzz::makeFuzz(2), "1", "", "-1") ),
+ 'wpIlMatch' => wikiFuzz::makeFuzz(2),
+ 'until' => wikiFuzz::makeFuzz(2),
+ 'from' => wikiFuzz::makeFuzz(2)
+ );
+
+ // sometimes we don't want to specify certain parameters.
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["until"]);
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["from"]);
+ }
+}
+
+
+/**
+ ** @desc: a page test for the "Special:Imagelist" page.
+ */
+class imagelistTest extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php/Special:Imagelist";
+
+ $this->params = array (
+ 'sort' => wikiFuzz::chooseInput( array("bysize", "byname" , "bydate", wikiFuzz::makeFuzz(2)) ),
+ 'limit' => wikiFuzz::chooseInput( array("0", "-1", "--------'-----0", "+1", "09700982312351132098234", wikiFuzz::makeFuzz(2)) ),
+ 'offset' => wikiFuzz::chooseInput( array("0", "-1", "------'-------0", "+1", "09700980982341535324234234", wikiFuzz::makeFuzz(2)) ),
+ 'wpIlMatch' => wikiFuzz::makeFuzz(2)
+ );
+ }
+}
+
+
+/**
+ ** @desc: a page test for "Special:Export".
+ */
+class specialExportTest extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php/Special:Export";
+
+ $this->params = array (
+ 'action' => wikiFuzz::chooseInput( array("submit", "", wikiFuzz::makeFuzz(2)) ),
+ 'pages' => wikiFuzz::makeFuzz(2),
+ 'curonly' => wikiFuzz::chooseInput( array("", "0", "-1", wikiFuzz::makeFuzz(2)) ),
+ 'listauthors' => wikiFuzz::chooseInput( array("", "0", "-1", wikiFuzz::makeFuzz(2)) ),
+ 'history' => wikiFuzz::chooseInput( array("0", "-1", "------'-------0", "+1", "09700980982341535324234234", wikiFuzz::makeFuzz(2)) ),
+
+ );
+
+		// For the time being, we need to disable the "submit" action, as Tidy barfs on MediaWiki's XML export.
+ if ($this->params['action'] == 'submit') $this->params['action'] = '';
+
+ // Sometimes remove the history field.
+ if (wikiFuzz::randnum(2) == 0) unset($this->params["history"]);
+ }
+}
+
+
+/**
+ ** @desc: a page test for "Special:Booksources".
+ */
+class specialBooksourcesTest extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php/Special:Booksources";
+
+ $this->params = array (
+ 'go' => wikiFuzz::makeFuzz(2),
+				// ISBN codes have to contain some semi-numeric stuff or they will be ignored:
+ 'isbn' => "0X0" . wikiFuzz::makeFuzz(2)
+ );
+ }
+}
+
+
+/**
+ ** @desc: a page test for "Special:Allpages".
+ */
+class specialAllpagesTest extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php?title=Special%3AAllpages";
+
+ $this->params = array (
+ 'from' => wikiFuzz::makeFuzz(2),
+ 'namespace' => wikiFuzz::chooseInput( range(-1, 15) ),
+ 'go' => wikiFuzz::makeFuzz(2)
+ );
+ }
+}
+
+
+/**
+ ** @desc: a page test for the page history.
+ */
+class pageHistoryTest extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php?title=Main_Page&action=history";
+
+ $this->params = array (
+ 'limit' => wikiFuzz::chooseInput( array("-1", "0", "-------'------0", "+1", "8134", wikiFuzz::makeFuzz(2)) ),
+ 'offset' => wikiFuzz::chooseInput( array("-1", "0", "------'-------0", "+1", "9823412312312412435", wikiFuzz::makeFuzz(2)) ),
+ "go" => wikiFuzz::chooseInput( array("first", "last", wikiFuzz::makeFuzz(2)) ),
+ "dir" => wikiFuzz::chooseInput( array("prev", "next", wikiFuzz::makeFuzz(2)) ),
+ "diff" => wikiFuzz::chooseInput( array("-1", "--------'-----0", "+1", "8134", wikiFuzz::makeFuzz(2)) ),
+ "oldid" => wikiFuzz::chooseInput( array("prev", "-1", "+1", "8134", wikiFuzz::makeFuzz(2)) ),
+ "feed" => wikiFuzz::makeFuzz(2)
+ );
+ }
+}
+
+
+/**
+ ** @desc: a page test for "Special:Contributions".
+ */
+class contributionsTest extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php/Special:Contributions/" . USER_ON_WIKI;
+
+ $this->params = array (
+ 'target' => wikiFuzz::chooseInput( array(wikiFuzz::makeFuzz(2), "newbies") ),
+ 'namespace' => wikiFuzz::chooseInput( array(-1, 15, 1, wikiFuzz::makeFuzz(2)) ),
+ 'offset' => wikiFuzz::chooseInput( array("0", "-1", "------'-------0", "+1", "982342131232131231241", wikiFuzz::makeFuzz(2)) ),
+ 'bot' => wikiFuzz::chooseInput( array("", "-1", "0", "1", wikiFuzz::makeFuzz(2)) ),
+ 'go' => wikiFuzz::chooseInput( array("-1", 'prev', 'next', wikiFuzz::makeFuzz(2)) )
+ );
+ }
+}
+
+
+/**
+ ** @desc: a page test for viewing a normal page, whilst posting various params.
+ */
+class viewPageTest extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php/Main_Page";
+
+ $this->params = array (
+ "useskin" => wikiFuzz::chooseInput( array("chick", "cologneblue", "myskin",
+ "nostalgia", "simple", "standard", wikiFuzz::makeFuzz(2)) ),
+ "uselang" => wikiFuzz::chooseInput( array( wikiFuzz::makeFuzz(2),
+ "ab", "af", "an", "ar", "arc", "as", "ast", "av", "ay", "az", "ba",
+ "bat-smg", "be", "bg", "bm", "bn", "bo", "bpy", "br", "bs", "ca",
+ "ce", "cs", "csb", "cv", "cy", "da", "de", "dv", "dz", "el", "en",
+ "eo", "es", "et", "eu", "fa", "fi", "fo", "fr", "fur", "fy", "ga",
+ "gn", "gsw", "gu", "he", "hi", "hr", "hu", "ia", "id", "ii", "is",
+ "it", "ja", "jv", "ka", "km", "kn", "ko", "ks", "ku", "kv", "la",
+ "li", "lo", "lt", "lv", "mk", "ml", "ms", "nah", "nap", "nds",
+ "nds-nl", "nl", "nn", "no", "non", "nv", "oc", "or", "os", "pa",
+ "pl", "pms", "ps", "pt", "pt-br", "qu", "rmy", "ro", "ru", "sc",
+ "sd", "sk", "sl", "sq", "sr", "sr-ec", "sr-el", "sr-jc", "sr-jl",
+ "su", "sv", "ta", "te", "th", "tlh", "tr", "tt", "ty", "tyv", "udm",
+ "ug", "uk", "ur", "utf8", "vec", "vi", "wa", "xal", "yi", "za",
+					"zh", "zh-cn", "zh-hk", "zh-sg", "zh-tw") ),
+ "returnto" => wikiFuzz::makeFuzz(2),
+ "feed" => wikiFuzz::chooseInput( array("atom", "rss", wikiFuzz::makeFuzz(2)) ),
+ "rcid" => wikiFuzz::makeFuzz(2),
+			"action"      => wikiFuzz::chooseInput( array("view", "raw", "render", "purge", "markpatrolled", wikiFuzz::makeFuzz(2)) ),
+ "printable" => wikiFuzz::makeFuzz(2),
+ "oldid" => wikiFuzz::makeFuzz(2),
+ "redirect" => wikiFuzz::makeFuzz(2),
+ "diff" => wikiFuzz::makeFuzz(2),
+ "search" => wikiFuzz::makeFuzz(2),
+ "rdfrom" => wikiFuzz::makeFuzz(2), // things from Article.php from here on:
+ "token" => wikiFuzz::makeFuzz(2),
+ "tbid" => wikiFuzz::makeFuzz(2),
+ "wpReason" => wikiFuzz::makeFuzz(2),
+ "wpEditToken" => wikiFuzz::makeFuzz(2),
+ "from" => wikiFuzz::makeFuzz(2),
+ "bot" => wikiFuzz::makeFuzz(2),
+ "summary" => wikiFuzz::makeFuzz(2),
+ "direction" => wikiFuzz::chooseInput( array("next", "prev", wikiFuzz::makeFuzz(2)) ),
+ "section" => wikiFuzz::makeFuzz(2),
+ "preload" => wikiFuzz::makeFuzz(2),
+
+ );
+
+		// Tidy does not know how to validate atom or rss, so exclude from testing for the time being.
+ if ($this->params["feed"] == "atom") unset($this->params["feed"]);
+ else if ($this->params["feed"] == "rss") unset($this->params["feed"]);
+
+ // Raw pages cannot really be validated
+ if ($this->params["action"] == "raw") unset($this->params["action"]);
+
+ // sometimes we don't want to specify certain parameters.
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["rcid"]);
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["diff"]);
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["rdfrom"]);
+ if (wikiFuzz::randnum(3) == 0) unset($this->params["oldid"]);
+
+ // usually don't want action == purge.
+ if (wikiFuzz::randnum(6) > 1) unset($this->params["action"]);
+ }
+}
+
+
+/**
+ ** @desc: a page test for "Special:Allmessages".
+ */
+class specialAllmessagesTest extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Allmessages";
+
+ // only really has one parameter
+ $this->params = array (
+ "ot" => wikiFuzz::chooseInput( array("php", "html", wikiFuzz::makeFuzz(2)) )
+ );
+ }
+}
+
+/**
+ ** @desc: a page test for "Special:Newpages".
+ */
+class specialNewpages extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php/Special:Newpages";
+
+ $this->params = array (
+ "namespace" => wikiFuzz::chooseInput( range(-1, 15) ),
+ "feed" => wikiFuzz::chooseInput( array("atom", "rss", wikiFuzz::makeFuzz(2)) ),
+ 'limit' => wikiFuzz::chooseInput( array("-1", "0", "-------'------0", "+1", "8134", wikiFuzz::makeFuzz(2)) ),
+ 'offset' => wikiFuzz::chooseInput( array("-1", "0", "------'-------0", "+1", "9823412312312412435", wikiFuzz::makeFuzz(2)) )
+ );
+
+		// Tidy does not know how to validate atom or rss, so exclude from testing for the time being.
+ if ($this->params["feed"] == "atom") unset($this->params["feed"]);
+ else if ($this->params["feed"] == "rss") unset($this->params["feed"]);
+ }
+}
+
+/**
+ ** @desc: a page test for "redirect.php"
+ */
+class redirectTest extends pageTest {
+ function __construct() {
+ $this->pagePath = "redirect.php";
+
+ $this->params = array (
+ "wpDropdown" => wikiFuzz::makeFuzz(2)
+ );
+
+ // sometimes we don't want to specify certain parameters.
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["wpDropdown"]);
+ }
+}
+
+
+/**
+ ** @desc: a page test for "Special:Confirmemail"
+ */
+class confirmEmail extends pageTest {
+ function __construct() {
+ // sometimes we send a bogus confirmation code, and sometimes we don't.
+ $this->pagePath = "index.php?title=Special:Confirmemail" . wikiFuzz::chooseInput( array("", "/" . wikiFuzz::makeTitleSafe(wikiFuzz::makeFuzz(1)) ) );
+
+ $this->params = array (
+ "token" => wikiFuzz::makeFuzz(2)
+ );
+ }
+}
+
+
+/**
+ ** @desc: a page test for "Special:Watchlist"
+ ** Note: this test would be better if we were logged in.
+ */
+class watchlistTest extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Watchlist";
+
+ $this->params = array (
+ "remove" => wikiFuzz::chooseInput( array("Remove checked items from watchlist", wikiFuzz::makeFuzz(2))),
+ 'days' => wikiFuzz::chooseInput( array(0, -1, -230, "--", 3, 9, wikiFuzz::makeFuzz(2)) ),
+ 'hideOwn' => wikiFuzz::chooseInput( array("", "0", "1", wikiFuzz::makeFuzz(2)) ),
+ 'hideBots' => wikiFuzz::chooseInput( array("", "0", "1", wikiFuzz::makeFuzz(2)) ),
+ 'namespace'=> wikiFuzz::chooseInput( array("", "0", "1", wikiFuzz::makeFuzz(2)) ),
+ 'action' => wikiFuzz::chooseInput( array("submit", "clear", wikiFuzz::makeFuzz(2)) ),
+ 'id[]' => wikiFuzz::makeFuzz(2),
+ 'edit' => wikiFuzz::makeFuzz(2),
+ 'token' => wikiFuzz::chooseInput( array("", "1243213", wikiFuzz::makeFuzz(2)) )
+ );
+
+		// sometimes we specify "reset", and sometimes we don't.
+ if (wikiFuzz::randnum(3) == 0) $this->params["reset"] = wikiFuzz::chooseInput( array("", "0", "1", wikiFuzz::makeFuzz(2)) );
+ }
+}
+
+
+/**
+ ** @desc: a page test for "Special:Blockme"
+ */
+class specialBlockmeTest extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Blockme";
+
+ $this->params = array ( );
+
+ // sometimes we specify "ip", and sometimes we don't.
+ if (wikiFuzz::randnum(1) == 0) {
+ $this->params["ip"] = wikiFuzz::chooseInput( array("10.12.41.213", wikiFuzz::randnum(-10,8134), wikiFuzz::makeFuzz(2)) );
+ }
+ }
+}
+
+
+/**
+ ** @desc: a page test for "Special:Movepage"
+ */
+class specialMovePage extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Movepage";
+
+ $this->params = array (
+ "action" => wikiFuzz::chooseInput( array("success", "submit", "", wikiFuzz::makeFuzz(2)) ),
+ 'wpEditToken' => wikiFuzz::chooseInput( array('', 0, 34987987, wikiFuzz::makeFuzz(2)) ),
+ 'target' => wikiFuzz::chooseInput( array("x", wikiFuzz::makeTitleSafe(wikiFuzz::makeFuzz(2)) ) ),
+ 'wpOldTitle' => wikiFuzz::chooseInput( array("z", wikiFuzz::makeTitleSafe(wikiFuzz::makeFuzz(2)), wikiFuzz::makeFuzz(2) ) ),
+ 'wpNewTitle' => wikiFuzz::chooseInput( array("y", wikiFuzz::makeTitleSafe(wikiFuzz::makeFuzz(2)), wikiFuzz::makeFuzz(2) ) ),
+ 'wpReason' => wikiFuzz::chooseInput( array(wikiFuzz::makeFuzz(2)) ),
+ 'wpDeleteAndMove' => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
+ 'wpConfirm' => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
+ 'talkmoved' => wikiFuzz::chooseInput( array("1", wikiFuzz::makeFuzz(2), "articleexists", 'notalkpage') ),
+ 'oldtitle' => wikiFuzz::makeFuzz(2),
+ 'newtitle' => wikiFuzz::makeFuzz(2),
+ 'wpMovetalk' => wikiFuzz::chooseInput( array("1", "0", wikiFuzz::makeFuzz(2)) )
+ );
+
+ // sometimes we don't want to specify certain parameters.
+ if (wikiFuzz::randnum(2) == 0) unset($this->params["wpEditToken"]);
+ if (wikiFuzz::randnum(3) == 0) unset($this->params["target"]);
+ if (wikiFuzz::randnum(3) == 0) unset($this->params["wpNewTitle"]);
+ if (wikiFuzz::randnum(4) == 0) unset($this->params["wpReason"]);
+ if (wikiFuzz::randnum(4) == 0) unset($this->params["wpOldTitle"]);
+ }
+}
+
+
+/**
+ ** @desc: a page test for "Special:Undelete"
+ */
+class specialUndelete extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Undelete";
+
+ $this->params = array (
+ "action" => wikiFuzz::chooseInput( array("submit", "", wikiFuzz::makeFuzz(2)) ),
+ 'wpEditToken' => wikiFuzz::chooseInput( array('', 0, 34987987, wikiFuzz::makeFuzz(2)) ),
+ 'target' => wikiFuzz::chooseInput( array("x", wikiFuzz::makeTitleSafe(wikiFuzz::makeFuzz(2)) ) ),
+ 'timestamp' => wikiFuzz::chooseInput( array("125223", wikiFuzz::makeFuzz(2) ) ),
+ 'file' => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
+ 'restore' => wikiFuzz::chooseInput( array("0", "1", wikiFuzz::makeFuzz(2)) ),
+ 'preview' => wikiFuzz::chooseInput( array("0", "1", wikiFuzz::makeFuzz(2)) ),
+ 'wpComment' => wikiFuzz::makeFuzz(2)
+ );
+
+ // sometimes we don't want to specify certain parameters.
+ if (wikiFuzz::randnum(2) == 0) unset($this->params["wpEditToken"]);
+ if (wikiFuzz::randnum(4) == 0) unset($this->params["target"]);
+ if (wikiFuzz::randnum(1) == 0) unset($this->params["restore"]);
+ if (wikiFuzz::randnum(1) == 0) unset($this->params["preview"]);
+ }
+}
+
+
+/**
+ ** @desc: a page test for "Special:Unlockdb"
+ */
+class specialUnlockdb extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Unlockdb";
+
+ $this->params = array (
+ "action" => wikiFuzz::chooseInput( array("submit", "success", "", wikiFuzz::makeFuzz(2)) ),
+ 'wpEditToken' => wikiFuzz::chooseInput( array("20398702394", "", wikiFuzz::makeFuzz(2)) ),
+ 'wpLockConfirm' => wikiFuzz::chooseInput( array("0", "1", wikiFuzz::makeFuzz(2)) )
+ );
+
+ // sometimes we don't want to specify certain parameters.
+ if (wikiFuzz::randnum(4) == 0) unset($this->params["wpEditToken"]);
+ if (wikiFuzz::randnum(4) == 0) unset($this->params["action"]);
+ if (wikiFuzz::randnum(4) == 0) unset($this->params["wpLockConfirm"]);
+ }
+}
+
+
+/**
+ ** @desc: a page test for "Special:Lockdb"
+ */
+class specialLockdb extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Lockdb";
+
+ $this->params = array (
+ "action" => wikiFuzz::chooseInput( array("submit", "success", "", wikiFuzz::makeFuzz(2)) ),
+ 'wpEditToken' => wikiFuzz::chooseInput( array("20398702394", "", wikiFuzz::makeFuzz(2)) ),
+ 'wpLockReason' => wikiFuzz::makeFuzz(2),
+ 'wpLockConfirm'=> wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) )
+ );
+
+ // sometimes we don't want to specify certain parameters.
+ if (wikiFuzz::randnum(4) == 0) unset($this->params["wpEditToken"]);
+ if (wikiFuzz::randnum(4) == 0) unset($this->params["action"]);
+ if (wikiFuzz::randnum(4) == 0) unset($this->params["wpLockConfirm"]);
+ }
+}
+
+
+/**
+ ** @desc: a page test for "Special:Userrights"
+ */
+class specialUserrights extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php/Special:Userrights";
+
+ $this->params = array (
+ 'wpEditToken' => wikiFuzz::chooseInput( array("20398702394", "", wikiFuzz::makeFuzz(2)) ),
+ 'user-editname' => wikiFuzz::chooseInput( array("Nickj2", "Nickj2\n<xyz>", wikiFuzz::makeFuzz(2)) ),
+ 'ssearchuser' => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
+				'saveusergroups'=> wikiFuzz::chooseInput( array("0", "1", "++--34234", "Save User Groups", wikiFuzz::makeFuzz(2)) ),
+ 'member[]' => wikiFuzz::chooseInput( array("0", "bot", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
+ "available[]" => wikiFuzz::chooseInput( array("0", "sysop", "bureaucrat", "1", "++--34234", wikiFuzz::makeFuzz(2)) )
+ );
+
+ // sometimes we don't want to specify certain parameters.
+ if (wikiFuzz::randnum(3) == 0) unset($this->params['ssearchuser']);
+ if (wikiFuzz::randnum(3) == 0) unset($this->params['saveusergroups']);
+ }
+}
+
+
+/**
+ ** @desc: a test for page protection and unprotection.
+ */
+class pageProtectionForm extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php?title=Main_Page";
+
+ $this->params = array (
+ "action" => "protect",
+ 'wpEditToken' => wikiFuzz::chooseInput( array("20398702394", "", wikiFuzz::makeFuzz(2)) ),
+ "mwProtect-level-edit" => wikiFuzz::chooseInput( array('', 'autoconfirmed', 'sysop', wikifuzz::makeFuzz(2)) ),
+ "mwProtect-level-move" => wikiFuzz::chooseInput( array('', 'autoconfirmed', 'sysop', wikifuzz::makeFuzz(2)) ),
+ "mwProtectUnchained" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
+ 'mwProtect-reason' => wikiFuzz::chooseInput( array("because it was there", wikifuzz::makeFuzz(2)) )
+ );
+
+
+ // sometimes we don't want to specify certain parameters.
+ if (wikiFuzz::randnum(3) == 0) unset($this->params["mwProtectUnchained"]);
+ if (wikiFuzz::randnum(3) == 0) unset($this->params['mwProtect-reason']);
+ }
+}
+
+
+/**
+ ** @desc: a page test for "Special:Blockip".
+ */
+class specialBlockip extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php/Special:Blockip";
+
+ $this->params = array (
+ "action" => wikiFuzz::chooseInput( array("submit", "", wikiFuzz::makeFuzz(2)) ),
+ 'wpEditToken' => wikiFuzz::chooseInput( array("20398702394", "", wikiFuzz::makeFuzz(2)) ),
+ "wpBlockAddress" => wikiFuzz::chooseInput( array("20398702394", "", "Nickj2", wikiFuzz::makeFuzz(2),
+ // something like an IP address, sometimes invalid:
+ ( wikiFuzz::randnum(300,-20) . "." . wikiFuzz::randnum(300,-20) . "."
+ . wikiFuzz::randnum(300,-20) . "." .wikiFuzz::randnum(300,-20) ) ) ),
+ "ip" => wikiFuzz::chooseInput( array("20398702394", "", "Nickj2", wikiFuzz::makeFuzz(2),
+ // something like an IP address, sometimes invalid:
+ ( wikiFuzz::randnum(300,-20) . "." . wikiFuzz::randnum(300,-20) . "."
+ . wikiFuzz::randnum(300,-20) . "." .wikiFuzz::randnum(300,-20) ) ) ),
+ "wpBlockOther" => wikiFuzz::chooseInput( array('', 'Nickj2', wikifuzz::makeFuzz(2)) ),
+ "wpBlockExpiry" => wikiFuzz::chooseInput( array("other", "2 hours", "1 day", "3 days", "1 week", "2 weeks",
+ "1 month", "3 months", "6 months", "1 year", "infinite", wikiFuzz::makeFuzz(2)) ),
+ "wpBlockReason" => wikiFuzz::chooseInput( array("because it was there", wikifuzz::makeFuzz(2)) ),
+ "wpAnonOnly" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
+ "wpCreateAccount" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
+ "wpBlock" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) )
+ );
+
+ // sometimes we don't want to specify certain parameters.
+ if (wikiFuzz::randnum(4) == 0) unset($this->params["wpBlockOther"]);
+ if (wikiFuzz::randnum(4) == 0) unset($this->params["wpBlockExpiry"]);
+ if (wikiFuzz::randnum(4) == 0) unset($this->params["wpBlockReason"]);
+ if (wikiFuzz::randnum(4) == 0) unset($this->params["wpAnonOnly"]);
+ if (wikiFuzz::randnum(4) == 0) unset($this->params["wpCreateAccount"]);
+ if (wikiFuzz::randnum(4) == 0) unset($this->params["wpBlockAddress"]);
+ if (wikiFuzz::randnum(4) == 0) unset($this->params["ip"]);
+ }
+}
+
+
+/**
+ ** @desc: a test for the imagepage.
+ */
+class imagepageTest extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php/Image:Small-email.png";
+
+ $this->params = array (
+ "image" => wikiFuzz::chooseInput( array("Small-email.png", wikifuzz::makeFuzz(2)) ),
+ "wpReason" => wikifuzz::makeFuzz(2),
+ "oldimage" => wikiFuzz::chooseInput( array("Small-email.png", wikifuzz::makeFuzz(2)) ),
+ "wpEditToken" => wikiFuzz::chooseInput( array("20398702394", "", wikiFuzz::makeFuzz(2)) ),
+ );
+
+ // sometimes we don't want to specify certain parameters.
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["image"]);
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["wpReason"]);
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["oldimage"]);
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["wpEditToken"]);
+ }
+}
+
+
+/**
+ ** @desc: a test for the page deletion form.
+ */
+class pageDeletion extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php?title=Main_Page&action=delete";
+
+ $this->params = array (
+ "wpEditToken" => wikiFuzz::chooseInput( array("20398702394", "", wikiFuzz::makeFuzz(2)) ),
+ "wpReason" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
+ "wpConfirm" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
+ );
+
+ // sometimes we don't want to specify certain parameters.
+ if (wikiFuzz::randnum(5) == 0) unset($this->params["wpReason"]);
+ if (wikiFuzz::randnum(5) == 0) unset($this->params["wpEditToken"]);
+ if (wikiFuzz::randnum(5) == 0) unset($this->params["wpConfirm"]);
+ }
+}
+
+
+
+/**
+ ** @desc: a test for Revision Deletion.
+ */
+class specialRevisionDelete extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Revisiondelete";
+
+ $this->params = array (
+ "target" => wikiFuzz::chooseInput( array("Main Page", wikifuzz::makeFuzz(2)) ),
+ "oldid" => wikifuzz::makeFuzz(2),
+ "oldid[]" => wikifuzz::makeFuzz(2),
+ "wpReason" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
+ "revdelete-hide-text" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
+ "revdelete-hide-comment" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
+ "revdelete-hide-user" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
+ "revdelete-hide-restricted" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
+ );
+
+ // sometimes we don't want to specify certain parameters.
+ if (wikiFuzz::randnum(3) == 0) unset($this->params["target"]);
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["oldid"]);
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["oldid[]"]);
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["wpReason"]);
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["revdelete-hide-text"]);
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["revdelete-hide-comment"]);
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["revdelete-hide-user"]);
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["revdelete-hide-restricted"]);
+ }
+}
+
+
+/**
+ ** @desc: a test for Special:Import.
+ */
+class specialImport extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php/Special:Import";
+
+ $this->params = array (
+ "action" => "submit",
+ "source" => wikiFuzz::chooseInput( array("upload", "interwiki", wikifuzz::makeFuzz(2)) ),
+ "MAX_FILE_SIZE" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikifuzz::makeFuzz(2)) ),
+ "xmlimport" => wikiFuzz::chooseInput( array("/var/www/hosts/mediawiki/wiki/AdminSettings.php", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
+ "namespace" => wikiFuzz::chooseInput( array(wikiFuzz::randnum(30,-6), wikiFuzz::makeFuzz(2)) ),
+ "interwiki" => wikiFuzz::makeFuzz(2),
+ "interwikiHistory" => wikiFuzz::makeFuzz(2),
+ "frompage" => wikiFuzz::makeFuzz(2),
+ );
+
+ // sometimes we don't want to specify certain parameters.
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["action"]);
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["source"]);
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["MAX_FILE_SIZE"]);
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["xmlimport"]);
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["interwiki"]);
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["interwikiHistory"]);
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["frompage"]);
+
+ // Note: Need to do a file upload to fully test this Special page.
+ }
+}
+
+
+
+/**
+ ** @desc: a test for thumb.php
+ */
+class thumbTest extends pageTest {
+ function __construct() {
+ $this->pagePath = "thumb.php";
+
+ $this->params = array (
+ "f" => wikiFuzz::chooseInput( array("..", "\\", "small-email.png", wikifuzz::makeFuzz(2)) ),
+ "w" => wikiFuzz::chooseInput( array("80", wikiFuzz::randnum(6000,-200), wikifuzz::makeFuzz(2)) ),
+ "r" => wikiFuzz::chooseInput( array("0", wikifuzz::makeFuzz(2)) ),
+ );
+
+ // sometimes we don't want to specify certain parameters.
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["f"]);
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["w"]);
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["r"]);
+ }
+}
+
+
+/**
+ ** @desc: a test for trackback.php
+ */
+class trackbackTest extends pageTest {
+ function __construct() {
+ $this->pagePath = "trackback.php";
+
+ $this->params = array (
+ "url" => wikifuzz::makeFuzz(2),
+ "blog_name" => wikiFuzz::chooseInput( array("80", wikiFuzz::randnum(6000,-200), wikifuzz::makeFuzz(2)) ),
+ "article" => wikiFuzz::chooseInput( array("Main Page", wikifuzz::makeFuzz(2)) ),
+ "title" => wikiFuzz::chooseInput( array("Main Page", wikifuzz::makeFuzz(2)) ),
+ "excerpt" => wikifuzz::makeFuzz(2),
+ );
+
+ // sometimes we don't want to specify certain parameters.
+ if (wikiFuzz::randnum(3) == 0) unset($this->params["title"]);
+ if (wikiFuzz::randnum(3) == 0) unset($this->params["excerpt"]);
+ }
+}
+
+
+/**
+ ** @desc: a test for profileinfo.php
+ */
+class profileInfo extends pageTest {
+ function __construct() {
+ $this->pagePath = "profileinfo.php";
+
+ $this->params = array (
+ "expand" => wikifuzz::makeFuzz(2),
+ "sort" => wikiFuzz::chooseInput( array("time", "count", "name", wikifuzz::makeFuzz(2)) ),
+ "filter" => wikiFuzz::chooseInput( array("Main Page", wikifuzz::makeFuzz(2)) ),
+ );
+
+ // sometimes we don't want to specify certain parameters.
+ if (wikiFuzz::randnum(3) == 0) unset($this->params["sort"]);
+ if (wikiFuzz::randnum(3) == 0) unset($this->params["filter"]);
+ }
+}
+
+
+/**
+ ** @desc: a test for Special:Cite (extension Special page).
+ */
+class specialCite extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Cite";
+
+ $this->params = array (
+ "page" => wikiFuzz::chooseInput( array("\" onmouseover=\"alert(1);\"", "Main Page", wikifuzz::makeFuzz(2)) ),
+ "id" => wikiFuzz::chooseInput( array("-1", "0", "------'-------0", "+1", "-9823412312312412435", wikiFuzz::makeFuzz(2)) ),
+ );
+
+ // sometimes we don't want to specify certain parameters.
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["page"]);
+ if (wikiFuzz::randnum(6) == 0) unset($this->params["id"]);
+ }
+}
+
+
+/**
+ ** @desc: a test for Special:Filepath (extension Special page).
+ */
+class specialFilepath extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php/Special:Filepath";
+
+ $this->params = array (
+ "file" => wikiFuzz::chooseInput( array("Small-email.png", "Small-email.png" . wikifuzz::makeFuzz(1), wikiFuzz::makeFuzz(2)) ),
+ );
+ }
+}
+
+
+/**
+ ** @desc: a test for Special:Makebot (extension Special page).
+ */
+class specialMakebot extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php/Special:Makebot";
+
+ $this->params = array (
+ "username" => wikiFuzz::chooseInput( array("Nickj2", "192.168.0.2", wikifuzz::makeFuzz(1) ) ),
+ "dosearch" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikifuzz::makeFuzz(2)) ),
+ "grant" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikifuzz::makeFuzz(2)) ),
+ "comment" => wikiFuzz::chooseInput( array("20398702394", "", wikiFuzz::makeFuzz(2)) ),
+ "token" => wikiFuzz::chooseInput( array("20398702394", "", wikiFuzz::makeFuzz(2)) ),
+ );
+
+ // sometimes we don't want to specify certain parameters.
+ if (wikiFuzz::randnum(2) == 0) unset($this->params["dosearch"]);
+ if (wikiFuzz::randnum(2) == 0) unset($this->params["grant"]);
+ if (wikiFuzz::randnum(5) == 0) unset($this->params["token"]);
+ }
+}
+
+
+/**
+ ** @desc: a test for Special:Makesysop (extension Special page).
+ */
+class specialMakesysop extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php/Special:Makesysop";
+
+ $this->params = array (
+ "wpMakesysopUser" => wikiFuzz::chooseInput( array("Nickj2", "192.168.0.2", wikifuzz::makeFuzz(1) ) ),
+ "action" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikifuzz::makeFuzz(2)) ),
+ "wpMakesysopSubmit" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikifuzz::makeFuzz(2)) ),
+ "wpEditToken" => wikiFuzz::chooseInput( array("20398702394", "", wikiFuzz::makeFuzz(2)) ),
+ "wpSetBureaucrat" => wikiFuzz::chooseInput( array("20398702394", "", wikiFuzz::makeFuzz(2)) ),
+ );
+
+ // sometimes we don't want to specify certain parameters.
+ if (wikiFuzz::randnum(3) == 0) unset($this->params["wpMakesysopSubmit"]);
+ if (wikiFuzz::randnum(3) == 0) unset($this->params["wpEditToken"]);
+ if (wikiFuzz::randnum(3) == 0) unset($this->params["wpSetBureaucrat"]);
+ }
+}
+
+
+/**
+ ** @desc: a test for Special:Renameuser (extension Special page).
+ */
+class specialRenameuser extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php/Special:Renameuser";
+
+ $this->params = array (
+ "oldusername" => wikiFuzz::chooseInput( array("Nickj2", "192.168.0.2", wikifuzz::makeFuzz(1) ) ),
+ "newusername" => wikiFuzz::chooseInput( array("Nickj2", "192.168.0.2", wikifuzz::makeFuzz(1) ) ),
+ "token" => wikiFuzz::chooseInput( array("20398702394", "", wikiFuzz::makeFuzz(2)) ),
+ );
+ }
+}
+
+
+/**
+ ** @desc: a test for Special:Linksearch (extension Special page).
+ */
+class specialLinksearch extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php?title=Special%3ALinksearch";
+
+ $this->params = array (
+ "target" => wikifuzz::makeFuzz(2),
+ );
+
+ // sometimes we don't want to specify certain parameters.
+ if (wikiFuzz::randnum(10) == 0) unset($this->params["target"]);
+ }
+}
+
+
+/**
+ ** @desc: a test for Special:CategoryTree (extension Special page).
+ */
+class specialCategoryTree extends pageTest {
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:CategoryTree";
+
+ $this->params = array (
+ "target" => wikifuzz::makeFuzz(2),
+ "from" => wikifuzz::makeFuzz(2),
+ "until" => wikifuzz::makeFuzz(2),
+ "showas" => wikifuzz::makeFuzz(2),
+ "mode" => wikiFuzz::chooseInput( array("pages", "categories", "all", wikifuzz::makeFuzz(2)) ),
+ );
+
+ // sometimes we do want to specify certain parameters.
+ if (wikiFuzz::randnum(5) == 0) $this->params["notree"] = wikiFuzz::chooseInput( array("1", 0, "", wikiFuzz::makeFuzz(2)) );
+ }
+}
+
+
+
+/**
+ ** @desc: selects a page test to run.
+ */
+function selectPageTest($count) {
+
+ // if the user only wants a specific test, then only ever give them that.
+ if (defined("SPECIFIC_TEST")) {
+ $testType = SPECIFIC_TEST;
+ return new $testType ();
+ }
+
+ // Some of the time we test Special pages, the remaining
+ // time we test using the standard edit page.
+ switch ($count % 100) {
+ case 0 : return new successfulUserLoginTest();
+ case 1 : return new listusersTest();
+ case 2 : return new searchTest();
+ case 3 : return new recentchangesTest();
+ case 4 : return new prefixindexTest();
+ case 5 : return new mimeSearchTest();
+ case 6 : return new specialLogTest();
+ case 7 : return new userLoginTest();
+ case 8 : return new ipblocklistTest();
+ case 9 : return new newImagesTest();
+ case 10: return new imagelistTest();
+ case 11: return new specialExportTest();
+ case 12: return new specialBooksourcesTest();
+ case 13: return new specialAllpagesTest();
+ case 14: return new pageHistoryTest();
+ case 15: return new contributionsTest();
+ case 16: return new viewPageTest();
+ case 17: return new specialAllmessagesTest();
+ case 18: return new specialNewpages();
+ case 19: return new searchTest();
+ case 20: return new redirectTest();
+ case 21: return new confirmEmail();
+ case 22: return new watchlistTest();
+ case 23: return new specialBlockmeTest();
+ case 24: return new specialUndelete();
+ case 25: return new specialMovePage();
+ case 26: return new specialUnlockdb();
+ case 27: return new specialLockdb();
+ case 28: return new specialUserrights();
+ case 29: return new pageProtectionForm();
+ case 30: return new specialBlockip();
+ case 31: return new imagepageTest();
+ case 32: return new pageDeletion();
+ case 33: return new specialRevisionDelete();
+ case 34: return new specialImport();
+ case 35: return new thumbTest();
+ case 36: return new trackbackTest();
+ case 37: return new profileInfo();
+ case 38: return new specialCite();
+ case 39: return new specialFilepath();
+ case 40: return new specialMakebot();
+ case 41: return new specialMakesysop();
+ case 42: return new specialRenameuser();
+ case 43: return new specialLinksearch();
+ case 44: return new specialCategoryTree();
+ default: return new editPageTest();
+ }
+}
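+
+/* Illustrative usage sketch (not part of the fuzzer's own logic): if SPECIFIC_TEST has been
+ * defined earlier in this script, every call returns an instance of that one test class:
+ *   define( 'SPECIFIC_TEST', 'watchlistTest' );
+ *   $test = selectPageTest( 42 );   // always a watchlistTest, regardless of the count
+ */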
+
+
+/////////////////////// SAVING OUTPUT /////////////////////////
+
+/**
+ ** @desc: Utility function for saving a file. Currently has no error checking.
+ */
+function saveFile($data, $name) {
+ file_put_contents($name, $data);
+}
+
+
+/**
+ ** @desc: Returns a test as an experimental GET-to-POST URL.
+ ** This doesn't seem to always work though, and sometimes the output is too long
+ ** to be a valid GET URL, so we also save in other formats.
+ */
+function getAsURL(pageTest $test) {
+ $used_question_mark = (strpos($test->getPagePath(), "?") !== false);
+ $retval = "http://get-to-post.nickj.org/?http://" . WIKI_BASE_URL . $test->getPagePath();
+ foreach ($test->getParams() as $param => $value) {
+ if (!$used_question_mark) {
+ $retval .= "?";
+ $used_question_mark = true;
+ }
+ else {
+ $retval .= "&";
+ }
+ $retval .= $param . "=" . urlencode($value);
+ }
+ return $retval;
+}
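+
+/* Illustrative example only (the hostname and parameter values are hypothetical): for a test
+ * whose page path is "index.php/Special:Blockme" with the single parameter ip=127.0.0.1,
+ * the generated URL looks roughly like:
+ *   http://get-to-post.nickj.org/?http://example.com/wiki/index.php/Special:Blockme?ip=127.0.0.1
+ */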
+
+
+/**
+ ** @desc: Saves a plain-text human-readable version of a test.
+ */
+function saveTestAsText(pageTest $test, $filename) {
+ $str = "Test: " . $test->getPagePath();
+ foreach ($test->getParams() as $param => $value) {
+ $str .= "\n$param: $value";
+ }
+ $str .= "\nGet-to-post URL: " . getAsURL($test) . "\n";
+ saveFile($str, $filename);
+}
+
+
+/**
+ ** @desc: Saves a test as a standalone basic PHP script that shows this one problem.
+ ** Resulting script requires PHP-Curl be installed in order to work.
+ */
+function saveTestAsPHP(pageTest $test, $filename) {
+ $str = "<?php\n"
+ . "\$params = " . var_export(escapeForCurl($test->getParams()), true) . ";\n"
+ . "\$ch = curl_init();\n"
+ . "curl_setopt(\$ch, CURLOPT_POST, 1);\n"
+ . "curl_setopt(\$ch, CURLOPT_POSTFIELDS, \$params );\n"
+ . "curl_setopt(\$ch, CURLOPT_URL, " . var_export(WIKI_BASE_URL . $test->getPagePath(), true) . ");\n"
+ . "curl_setopt(\$ch, CURLOPT_RETURNTRANSFER,1);\n"
+ . ($test->getCookie() ? "curl_setopt(\$ch, CURLOPT_COOKIE, " . var_export($test->getCookie(), true) . ");\n" : "")
+ . "\$result=curl_exec(\$ch);\n"
+ . "curl_close (\$ch);\n"
+ . "print \$result;\n"
+ . "?>\n";
+ saveFile($str, $filename);
+}
+
+
+/**
+ ** @desc: Escapes a value so that it can be used on the command line by Curl.
+ ** Specifically, "<" and "@" need to be escaped if they are the first character,
+ ** otherwise curl interprets these as meaning that we want to insert a file.
+ */
+function escapeForCurl(array $input_params) {
+ $output_params = array();
+ foreach ($input_params as $param => $value) {
+ if (strlen($value) > 0 && ( $value[0] == "@" || $value[0] == "<")) {
+ $value = "\\" . $value;
+ }
+ $output_params[$param] = $value;
+ }
+ return $output_params;
+}
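+
+/* A quick illustration (the value is hypothetical): a fuzz value starting with "@" or "<"
+ * would otherwise make curl try to read a local file, so it is given a leading backslash:
+ *   escapeForCurl( array( 'wpReason' => '@something' ) );
+ *   // returns array( 'wpReason' => '\@something' )
+ */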
+
+
+/**
+ ** @desc: Saves a test as a standalone CURL shell script that shows this one problem.
+ ** Resulting script requires standalone Curl be installed in order to work.
+ */
+function saveTestAsCurl(pageTest $test, $filename) {
+ $str = "#!/bin/bash\n"
+ . "curl --silent --include --globoff \\\n"
+ . ($test->getCookie() ? " --cookie " . escapeshellarg($test->getCookie()) . " \\\n" : "");
+ foreach (escapeForCurl($test->getParams()) as $param => $value) {
+ $str .= " -F " . escapeshellarg($param) . "=" . escapeshellarg($value) . " \\\n";
+ }
+ $str .= " " . escapeshellarg(WIKI_BASE_URL . $test->getPagePath()); // beginning space matters.
+ $str .= "\n";
+ saveFile($str, $filename);
+ chmod($filename, 0755); // make executable
+}
+
+
+/**
+ ** @desc: Saves the internal data structure to file.
+ */
+function saveTestData (pageTest $test, $filename) {
+ saveFile(serialize($test), $filename);
+}
+
+
+/**
+ ** @desc: saves a test in the various formats.
+ */
+function saveTest(pageTest $test, $testname) {
+ $base_name = DIRECTORY . "/" . $testname;
+ saveTestAsText($test, $base_name . INFO_FILE);
+ saveTestAsPHP ($test, $base_name . PHP_TEST );
+ saveTestAsCurl($test, $base_name . CURL_TEST);
+ saveTestData ($test, $base_name . DATA_FILE);
+}
+
+
+//////////////////// MEDIAWIKI OUTPUT /////////////////////////
+
+/**
+ ** @desc: Asks MediaWiki for the HTML output of a test.
+ */
+function wikiTestOutput(pageTest $test) {
+
+ $ch = curl_init();
+
+ // specify the cookie, if required.
+ if ($test->getCookie()) curl_setopt($ch, CURLOPT_COOKIE, $test->getCookie());
+ curl_setopt($ch, CURLOPT_POST, 1); // save form using a POST
+
+ $params = escapeForCurl($test->getParams());
+ curl_setopt($ch, CURLOPT_POSTFIELDS, $params ); // load the POST variables
+
+ curl_setopt($ch, CURLOPT_URL, WIKI_BASE_URL . $test->getPagePath() ); // set url to post to
+ curl_setopt($ch, CURLOPT_RETURNTRANSFER,1); // return into a variable
+
+ $result=curl_exec ($ch);
+
+ // if we encountered an error, then say so, and return an empty string.
+ if (curl_error($ch)) {
+ print "\nCurl error #: " . curl_errno($ch) . " - " . curl_error ($ch);
+ $result = "";
+ }
+
+ curl_close ($ch);
+
+ return $result;
+}
+
+
+//////////////////// HTML VALIDATION /////////////////////////
+
+/**
+ ** @desc: Asks the validator whether this is valid HTML, or not.
+ */
+function validateHTML($text) {
+
+ $params = array ("fragment" => $text);
+
+ $ch = curl_init();
+
+ curl_setopt($ch, CURLOPT_POST, 1); // save form using a POST
+ curl_setopt($ch, CURLOPT_POSTFIELDS, $params); // load the POST variables
+ curl_setopt($ch, CURLOPT_URL, VALIDATOR_URL); // set url to post to
+ curl_setopt($ch, CURLOPT_RETURNTRANSFER,1); // return into a variable
+
+ $result=curl_exec ($ch);
+
+ // if we encountered an error, then log it, and exit.
+ if (curl_error($ch)) {
+ trigger_error("Curl error #: " . curl_errno($ch) . " - " . curl_error ($ch) );
+ print "Curl error #: " . curl_errno($ch) . " - " . curl_error ($ch) . " - exiting.\n";
+ exit();
+ }
+
+ curl_close ($ch);
+
+ $valid = (strpos($result, "Failed validation") === false ? true : false);
+
+ return array($valid, $result);
+}
+
+
+/**
+ ** @desc: Get Tidy to check the output file for HTML errors (e.g. unescaped strings).
+ */
+function tidyCheckFile($name) {
+ $file = DIRECTORY . "/" . $name;
+ $command = PATH_TO_TIDY . " -output /tmp/out.html -quiet $file 2>&1";
+ $x = `$command`;
+
+ // Look for the most interesting Tidy errors and warnings.
+ if ( strpos($x,"end of file while parsing attributes") !== false
+ || strpos($x,"attribute with missing trailing quote mark") !== false
+ || strpos($x,"missing '>' for end of tag") !== false
+ || strpos($x,"Error:") !== false) {
+ print "\nTidy found something - view details with: $command";
+ return false;
+ } else {
+ return true;
+ }
+}
+
+
+/**
+ ** @desc: Returns whether or not the database error log file has changed in size since
+ **        the last time this function was run. This is used to tell if a test caused a DB error.
+ */
+function dbErrorLogged() {
+ static $filesize;
+
+ // first time running this function
+ if (!isset($filesize)) {
+ // create log if it does not exist
+ if (!file_exists(DB_ERROR_LOG_FILE)) {
+ saveFile("", DB_ERROR_LOG_FILE);
+ }
+ $filesize = filesize(DB_ERROR_LOG_FILE);
+ return false;
+ }
+
+ $newsize = filesize(DB_ERROR_LOG_FILE);
+ // if the log has grown, then assume the current test caused it.
+ if ($newsize != $filesize) {
+ $filesize = $newsize;
+ return true;
+ }
+
+ return false;
+}
+
+////////////////// TOP-LEVEL PROBLEM-FINDING FUNCTION ////////////////////////
+
+/**
+ ** @desc: Takes a page test, runs it, and checks the output for problems.
+ **        Returns: false if a problem is found, or true if no problems are found.
+ */
+function runWikiTest(pageTest $test, &$testname, $can_overwrite = false) {
+
+ // by default don't overwrite a previous test of the same name.
+ while ( ! $can_overwrite && file_exists(DIRECTORY . "/" . $testname . DATA_FILE)) {
+ $testname .= "-" . mt_rand(0,9);
+ }
+
+ $filename = DIRECTORY . "/" . $testname . DATA_FILE;
+
+ // Store the time before and after, to find slow pages.
+ $before = microtime(true);
+
+ // Get MediaWiki to give us the output of this test.
+ $wiki_preview = wikiTestOutput($test);
+
+ $after = microtime(true);
+
+ // if we received no response, then that's interesting.
+ if ($wiki_preview == "") {
+ print "\nNo response received for: $filename";
+ return false;
+ }
+
+ // save output HTML to file.
+ $html_file = DIRECTORY . "/" . $testname . HTML_FILE;
+ saveFile($wiki_preview, $html_file);
+
+ // if there were PHP errors in the output, then that's interesting too.
+ if ( strpos($wiki_preview, "<b>Warning</b>: " ) !== false
+ || strpos($wiki_preview, "<b>Fatal error</b>: ") !== false
+ || strpos($wiki_preview, "<b>Notice</b>: " ) !== false
+ || strpos($wiki_preview, "<b>Error</b>: " ) !== false ) {
+ $error = substr($wiki_preview, strpos($wiki_preview, "</b>:") + 7, 50);
+ // Avoid probable PHP bug with bad session ids; http://bugs.php.net/bug.php?id=38224
+ if ($error != "Unknown: The session id contains illegal character") {
+ print "\nPHP error/warning/notice in HTML output: $html_file ; $error";
+ return false;
+ }
+ }
+
+ // if there was a MediaWiki Backtrace message in the output, then that's also interesting.
+ if (strpos($wiki_preview, "Backtrace:") !== false) {
+ print "\nInternal MediaWiki error in HTML output: $html_file";
+ return false;
+ }
+
+ // if there was a Parser error comment in the output, then that's potentially interesting.
+ if (strpos($wiki_preview, "!-- ERR") !== false) {
+ print "\nParser Error comment in HTML output: $html_file";
+ return false;
+ }
+
+ // if a database error was logged, then that's definitely interesting.
+ if (dbErrorLogged()) {
+ print "\nDatabase Error logged for: $filename";
+ return false;
+ }
+
+ // validate result
+ $valid = true;
+ if (VALIDATE_ON_WEB) {
+ list ($valid, $validator_output) = validateHTML($wiki_preview);
+ if (!$valid) print "\nW3C web validation failed - view details with: html2text " . DIRECTORY . "/" . $testname . ".validator_output.html";
+ }
+
+ // Get tidy to check the page, unless it is a test which produces XML.
+ if (!$test instanceof trackbackTest && !$test instanceof specialExportTest) {
+ $valid = tidyCheckFile( $testname . HTML_FILE ) && $valid;
+ }
+
+ // if it took more than 2 seconds to render, then it may be interesting too. (Possible DoS attack?)
+ if (($after - $before) >= 2) {
+ print "\nParticularly slow to render (" . round($after - $before, 2) . " seconds): $filename";
+ return false;
+ }
+
+ if( $valid ) {
+ // Remove temp HTML file if test was valid:
+ unlink( $html_file );
+ } elseif( VALIDATE_ON_WEB ) {
+ saveFile($validator_output, DIRECTORY . "/" . $testname . ".validator_output.html");
+ }
+
+ return $valid;
+}
+
+
+/////////////////// RERUNNING OLD TESTS ///////////////////
+
+/**
+ ** @desc: We keep our failed tests so that they can be rerun.
+ ** This function does that retesting.
+ */
+function rerunPreviousTests() {
+ print "Retesting previously found problems.\n";
+
+ $dir_contents = scandir (DIRECTORY);
+
+	// sort files into the order a normal person would use.
+ natsort ($dir_contents);
+
+ foreach ($dir_contents as $file) {
+
+		// if the file is not a test, then skip it.
+		// Note: we need to escape any periods or they will be treated as "any character".
+ $matches = array();
+ if (!ereg("(.*)" . str_replace(".", "\.", DATA_FILE) . "$", $file, $matches)) continue;
+
+ // reload the test.
+ $full_path = DIRECTORY . "/" . $file;
+ $test = unserialize(file_get_contents($full_path));
+
+ // if this is not a valid test, then skip it.
+ if (! $test instanceof pageTest) {
+ print "\nSkipping invalid test - $full_path";
+ continue;
+ }
+
+ // The date format is in Apache log format, which makes it easier to locate
+		// which retest caused which error in the Apache logs (usually only relevant if
+		// Apache segfaults).
+ if (!QUIET) print "[" . date ("D M d H:i:s Y") . "] Retesting $file (" . get_class($test) . ")";
+
+ // run test
+ $testname = $matches[1];
+ $valid = runWikiTest($test, $testname, true);
+
+ if (!$valid) {
+ saveTest($test, $testname);
+ if (QUIET) {
+ print "\nTest: " . get_class($test) . " ; Testname: $testname\n------";
+ } else {
+ print "\n";
+ }
+ }
+ else {
+ if (!QUIET) print "\r";
+ if (DELETE_PASSED_RETESTS) {
+ $prefix = DIRECTORY . "/" . $testname;
+ if (is_file($prefix . DATA_FILE)) unlink($prefix . DATA_FILE);
+ if (is_file($prefix . PHP_TEST )) unlink($prefix . PHP_TEST );
+ if (is_file($prefix . CURL_TEST)) unlink($prefix . CURL_TEST);
+ if (is_file($prefix . INFO_FILE)) unlink($prefix . INFO_FILE);
+ }
+ }
+ }
+
+ print "\nDone retesting.\n";
+}
+
+
+////////////////////// MAIN LOOP ////////////////////////
+
+
+// first check whether CURL is installed, because sometimes it's not.
+if( ! function_exists('curl_init') ) {
+ die("Could not find 'curl_init' function. Is the curl extension compiled into PHP?\n");
+}
+
+// Initialization of types. wikiFuzz doesn't have a constructor because we want to
+// access it statically and not have any globals.
+wikiFuzz::$types = array_keys(wikiFuzz::$data);
+
+// Make the directory if it doesn't exist
+if (!is_dir(DIRECTORY)) {
+ mkdir (DIRECTORY, 0700 );
+}
+// otherwise, we first retest the things that we have found in previous runs
+else if (RERUN_OLD_TESTS) {
+ rerunPreviousTests();
+}
+
+// seed the random number generator
+mt_srand(crc32(microtime()));
+
+// main loop.
+$start_time = date("U");
+$num_errors = 0;
+if (!QUIET) print "Beginning main loop. Results are stored in the " . DIRECTORY . " directory.\n";
+if (!QUIET) print "Press CTRL+C to stop testing.\n";
+
+for ($count=0; true; $count++) {
+ if (!QUIET) {
+ // spinning progress indicator.
+ switch( $count % 4 ) {
+			case 0: print "\r/";  break;
+			case 1: print "\r-";  break;
+			case 2: print "\r\\"; break;
+			case 3: print "\r|";  break;
+ }
+ print " $count";
+ }
+
+ // generate a page test to run.
+ $test = selectPageTest($count);
+
+ $mins = ( date("U") - $start_time ) / 60;
+ if (!QUIET && $mins > 0) {
+ print ". $num_errors poss errors. "
+ . floor($mins) . " mins. "
+ . round ($count / $mins, 0) . " tests/min. "
+ . get_class($test); // includes the current test name.
+ }
+
+ // run this test against MediaWiki, and see if the output was valid.
+ $testname = $count;
+ $valid = runWikiTest($test, $testname, false);
+
+ // save the failed test
+ if (!$valid) {
+ if (QUIET) {
+ print "\nTest: " . get_class($test) . " ; Testname: $testname\n------";
+ } else {
+ print "\n";
+ }
+ saveTest($test, $testname);
+ $num_errors += 1;
+ }
+
+ // stop if we have reached max number of errors.
+ if (defined("MAX_ERRORS") && $num_errors>=MAX_ERRORS) {
+ break;
+ }
+
+ // stop if we have reached max number of mins runtime.
+ if (defined("MAX_RUNTIME") && $mins>=MAX_RUNTIME) {
+ break;
+ }
+}
+
+?>
diff --git a/maintenance/generateSitemap.php b/maintenance/generateSitemap.php
index 2cf8312a..a0b6979d 100644
--- a/maintenance/generateSitemap.php
+++ b/maintenance/generateSitemap.php
@@ -145,7 +145,7 @@ class GenerateSitemap {
* @param bool $compress Whether to compress the sitemap files
*/
function GenerateSitemap( $fspath, $path, $compress ) {
- global $wgDBname, $wgScriptPath;
+ global $wgScriptPath;
$this->url_limit = 50000;
$this->size_limit = pow( 2, 20 ) * 10;
@@ -157,7 +157,7 @@ class GenerateSitemap {
$this->dbr =& wfGetDB( DB_SLAVE );
$this->generateNamespaces();
$this->timestamp = wfTimestamp( TS_ISO_8601, wfTimestampNow() );
- $this->findex = fopen( "{$this->fspath}sitemap-index-$wgDBname.xml", 'wb' );
+ $this->findex = fopen( "{$this->fspath}sitemap-index-" . wfWikiID() . ".xml", 'wb' );
}
/**
@@ -232,7 +232,7 @@ class GenerateSitemap {
* @access public
*/
function main() {
- global $wgDBname, $wgContLang;
+ global $wgContLang;
fwrite( $this->findex, $this->openIndex() );
@@ -314,11 +314,8 @@ class GenerateSitemap {
* @return string
*/
function sitemapFilename( $namespace, $count ) {
- global $wgDBname;
-
$ext = $this->compress ? '.gz' : '';
-
- return "sitemap-$wgDBname-NS_$namespace-$count.xml$ext";
+ return "sitemap-".wfWikiID()."-NS_$namespace-$count.xml$ext";
}
/**
diff --git a/maintenance/importImages.php b/maintenance/importImages.php
index 925c64b7..2cf8bd19 100644
--- a/maintenance/importImages.php
+++ b/maintenance/importImages.php
@@ -26,13 +26,25 @@ if( count( $args ) > 1 ) {
$files = findFiles( $dir, $exts );
# Set up a fake user for this operation
- $wgUser = User::newFromName( 'Image import script' );
- $wgUser->setLoaded( true );
+ if( isset( $options['user'] ) ) {
+ $wgUser = User::newFromName( $options['user'] );
+ } else {
+ $wgUser = User::newFromName( 'Image import script' );
+ $wgUser->setLoaded( true );
+ }
+
+ # Get the upload comment
+ $comment = isset( $options['comment'] )
+ ? $options['comment']
+ : 'Importing image file';
+
+ # Get the license specifier
+ $license = isset( $options['license'] ) ? $options['license'] : '';
# Batch "upload" operation
foreach( $files as $file ) {
- $base = basename( $file );
+ $base = wfBaseName( $file );
# Validate a title
$title = Title::makeTitleSafe( NS_IMAGE, $base );
@@ -59,7 +71,7 @@ if( count( $args ) > 1 ) {
$image->loadFromFile();
# Record the upload
- if( $image->recordUpload( '', 'Importing image file' ) ) {
+ if( $image->recordUpload( '', $comment, $license ) ) {
# We're done!
echo( "done.\n" );
@@ -92,9 +104,18 @@ exit();
function showUsage( $reason = false ) {
if( $reason )
echo( $reason . "\n" );
- echo( "USAGE: php importImages.php <dir> <ext1> <ext2>\n\n" );
- echo( "<dir> : Path to the directory containing images to be imported\n" );
- echo( "<ext1+> File extensions to import\n\n" );
+ echo <<<END
+USAGE: php importImages.php [options] <dir> <ext1> ...
+
+<dir> : Path to the directory containing images to be imported
+<ext1+> File extensions to import
+
+Options:
+--user=<username> Set username of uploader, default 'Image import script'
+--comment=<text> Set upload summary comment, default 'Importing image file'
+--license=<code> Use an optional license template
+
+END;
exit();
}
diff --git a/maintenance/installExtension.php b/maintenance/installExtension.php
new file mode 100644
index 00000000..f6b2dff4
--- /dev/null
+++ b/maintenance/installExtension.php
@@ -0,0 +1,642 @@
+<?php
+/**
+ * Copyright (C) 2006 Daniel Kinzler, brightbyte.de
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License along
+ * with this program; if not, write to the Free Software Foundation, Inc.,
+ * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ * http://www.gnu.org/copyleft/gpl.html
+ *
+ * @package MediaWiki
+ * @subpackage Maintenance
+ */
+
+$optionsWithArgs = array( 'target', 'repository', 'repos' );
+
+require_once( 'commandLine.inc' );
+
+define('EXTINST_NOPATCH', 0);
+define('EXTINST_WRITEPATCH', 6);
+define('EXTINST_HOTPATCH', 10);
+
+class InstallerRepository {
+ var $path;
+
+ function InstallerRepository( $path ) {
+ $this->path = $path;
+ }
+
+ function printListing( ) {
+ trigger_error( 'override InstallerRepository::printListing()', E_USER_ERROR );
+ }
+
+ function getResource( $name ) {
+ trigger_error( 'override InstallerRepository::getResource()', E_USER_ERROR );
+ }
+
+ /*static*/ function makeRepository( $path, $type = NULL ) {
+ if ( !$type ) {
+ preg_match( '!(([-+\w]+)://)?.*?(\.[-\w\d.]+)?$!', $path, $m );
+ $proto = @$m[2];
+
+ if( !$proto ) $type = 'dir';
+ else if ( ( $proto == 'http' || $proto == 'https' )
+ && preg_match( '!([^\w]svn|svn[^\w])!i', $path) ) $type = 'svn'; #HACK!
+ else $type = $proto;
+ }
+
+ if ( $type == 'dir' || $type == 'file' ) return new LocalInstallerRepository( $path );
+		else if ( $type == 'http' || $type == 'https' ) return new WebInstallerRepository( $path );
+ else return new SVNInstallerRepository( $path );
+ }
+}
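+
+# Rough illustration of how makeRepository() classifies a path (the example
+# paths are hypothetical):
+#   '/srv/extension-dump'                     no protocol          -> LocalInstallerRepository
+#   'http://example.com/extensions'           plain http/https     -> WebInstallerRepository
+#   'http://svn.example.org/repo/extensions'  http URL naming svn  -> SVNInstallerRepository
+#   'svn://example.org/repo/extensions'       any other protocol   -> SVNInstallerRepository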
+
+class LocalInstallerRepository extends InstallerRepository {
+
+ function LocalInstallerRepository ( $path ) {
+ InstallerRepository::InstallerRepository( $path );
+ }
+
+ function printListing( ) {
+ $ff = glob( "{$this->path}/*" );
+ if ( $ff === false || $ff === NULL ) {
+			ExtensionInstaller::error( "listing directory {$this->path} failed!" );
+ return false;
+ }
+
+ foreach ( $ff as $f ) {
+ $n = basename($f);
+
+ if ( !is_dir( $f ) ) {
+ if ( !preg_match( '/(.*)\.(tgz|tar\.gz|zip)/', $n, $m ) ) continue;
+ $n = $m[1];
+ }
+
+ print "\t$n\n";
+ }
+ }
+
+ function getResource( $name ) {
+ $path = $this->path . '/' . $name;
+
+ if ( !file_exists( $path ) || !is_dir( $path ) ) $path = $this->path . '/' . $name . '.tgz';
+ if ( !file_exists( $path ) ) $path = $this->path . '/' . $name . '.tar.gz';
+ if ( !file_exists( $path ) ) $path = $this->path . '/' . $name . '.zip';
+
+ return new LocalInstallerResource( $path );
+ }
+}
+
+class WebInstallerRepository extends InstallerRepository {
+
+ function WebInstallerRepository ( $path ) {
+ InstallerRepository::InstallerRepository( $path );
+ }
+
+ function printListing( ) {
+ ExtensionInstaller::note( "listing index from {$this->path}..." );
+
+ $txt = @file_get_contents( $this->path . '/index.txt' );
+ if ( $txt ) {
+ print $txt;
+ print "\n";
+ }
+ else {
+ $txt = file_get_contents( $this->path );
+ if ( !$txt ) {
+ ExtensionInstaller::error( "listing index from {$this->path} failed!" );
+ print ( $txt );
+ return false;
+ }
+
+ $ok = preg_match_all( '!<a\s[^>]*href\s*=\s*['."'".'"]([^/'."'".'"]+)\.tgz['."'".'"][^>]*>.*?</a>!si', $txt, $m, PREG_SET_ORDER );
+ if ( !$ok ) {
+ ExtensionInstaller::error( "listing index from {$this->path} does not match!" );
+ print ( $txt );
+ return false;
+ }
+
+ foreach ( $m as $l ) {
+ $n = $l[1];
+ print "\t$n\n";
+ }
+ }
+ }
+
+ function getResource( $name ) {
+ $path = $this->path . '/' . $name . '.tgz';
+ return new WebInstallerResource( $path );
+ }
+}
+
+class SVNInstallerRepository extends InstallerRepository {
+
+ function SVNInstallerRepository ( $path ) {
+ InstallerRepository::InstallerRepository( $path );
+ }
+
+ function printListing( ) {
+ ExtensionInstaller::note( "SVN list {$this->path}..." );
+ $txt = wfShellExec( 'svn ls ' . escapeshellarg( $this->path ), $code );
+ if ( $code !== 0 ) {
+ ExtensionInstaller::error( "svn list for {$this->path} failed!" );
+ return false;
+ }
+
+ $ll = preg_split('/(\s*[\r\n]\s*)+/', $txt);
+
+ foreach ( $ll as $line ) {
+ if ( !preg_match('!^(.*)/$!', $line, $m) ) continue;
+ $n = $m[1];
+
+ print "\t$n\n";
+ }
+ }
+
+ function getResource( $name ) {
+ $path = $this->path . '/' . $name;
+ return new SVNInstallerResource( $path );
+ }
+}
+
+class InstallerResource {
+ var $path;
+ var $isdir;
+ var $islocal;
+
+ function InstallerResource( $path, $isdir, $islocal ) {
+ $this->path = $path;
+
+ $this->isdir= $isdir;
+ $this->islocal = $islocal;
+
+ preg_match( '!([-+\w]+://)?.*?(\.[-\w\d.]+)?$!', $path, $m );
+
+ $this->protocol = @$m[1];
+ $this->extensions = @$m[2];
+
+ if ( $this->extensions ) $this->extensions = strtolower( $this->extensions );
+ }
+
+ function fetch( $target ) {
+ trigger_error( 'override InstallerResource::fetch()', E_USER_ERROR );
+ }
+
+ function extract( $file, $target ) {
+
+ if ( $this->extensions == '.tgz' || $this->extensions == '.tar.gz' ) { #tgz file
+ ExtensionInstaller::note( "extracting $file..." );
+ wfShellExec( 'tar zxvf ' . escapeshellarg( $file ) . ' -C ' . escapeshellarg( $target ), $code );
+
+ if ( $code !== 0 ) {
+ ExtensionInstaller::error( "failed to extract $file!" );
+ return false;
+ }
+ }
+ else if ( $this->extensions == '.zip' ) { #zip file
+ ExtensionInstaller::note( "extracting $file..." );
+ wfShellExec( 'unzip ' . escapeshellarg( $file ) . ' -d ' . escapeshellarg( $target ) , $code );
+
+ if ( $code !== 0 ) {
+ ExtensionInstaller::error( "failed to extract $file!" );
+ return false;
+ }
+ }
+ else {
+ ExtensionInstaller::error( "unknown extension {$this->extensions}!" );
+ return false;
+ }
+
+ return true;
+ }
+
+ /*static*/ function makeResource( $url ) {
+ preg_match( '!(([-+\w]+)://)?.*?(\.[-\w\d.]+)?$!', $url, $m );
+ $proto = @$m[2];
+ $ext = @$m[3];
+ if ( $ext ) $ext = strtolower( $ext );
+
+ if ( !$proto ) return new LocalInstallerResource( $url, $ext ? false : true );
+		else if ( $ext && ( $proto == 'http' || $proto == 'https' || $proto == 'ftp' ) ) return new WebInstallerResource( $url );
+ else return new SVNInstallerResource( $url );
+ }
+}
+
+class LocalInstallerResource extends InstallerResource {
+ function LocalInstallerResource( $path ) {
+ InstallerResource::InstallerResource( $path, is_dir( $path ), true );
+ }
+
+ function fetch( $target ) {
+ if ( $this->isdir ) return ExtensionInstaller::copyDir( $this->path, dirname( $target ) );
+ else return $this->extract( $this->path, dirname( $target ) );
+ }
+
+}
+
+class WebInstallerResource extends InstallerResource {
+ function WebInstallerResource( $path ) {
+ InstallerResource::InstallerResource( $path, false, false );
+ }
+
+ function fetch( $target ) {
+ $tmp = wfTempDir() . '/' . basename( $this->path );
+
+ ExtensionInstaller::note( "downloading {$this->path}..." );
+ $ok = copy( $this->path, $tmp );
+
+ if ( !$ok ) {
+ ExtensionInstaller::error( "failed to download {$this->path}" );
+ return false;
+ }
+
+ $this->extract( $tmp, dirname( $target ) );
+ unlink($tmp);
+
+ return true;
+ }
+}
+
+class SVNInstallerResource extends InstallerResource {
+ function SVNInstallerResource( $path ) {
+ InstallerResource::InstallerResource( $path, true, false );
+ }
+
+ function fetch( $target ) {
+ ExtensionInstaller::note( "SVN checkout of {$this->path}..." );
+ wfShellExec( 'svn co ' . escapeshellarg( $this->path ) . ' ' . escapeshellarg( $target ), $code );
+
+ if ( $code !== 0 ) {
+ ExtensionInstaller::error( "checkout failed for {$this->path}!" );
+ return false;
+ }
+
+ return true;
+ }
+}
+
+class ExtensionInstaller {
+ var $source;
+ var $target;
+ var $name;
+ var $dir;
+ var $tasks;
+
+ function ExtensionInstaller( $name, $source, $target ) {
+ if ( !is_object( $source ) ) $source = InstallerResource::makeResource( $source );
+
+ $this->name = $name;
+ $this->source = $source;
+ $this->target = realpath( $target );
+ $this->extdir = "$target/extensions";
+ $this->dir = "{$this->extdir}/$name";
+ $this->incpath = "extensions/$name";
+ $this->tasks = array();
+
+ #TODO: allow a subdir different from "extensions"
+ #TODO: allow a config file different from "LocalSettings.php"
+ }
+
+ function note( $msg ) {
+ print "$msg\n";
+ }
+
+ function warn( $msg ) {
+ print "WARNING: $msg\n";
+ }
+
+ function error( $msg ) {
+ print "ERROR: $msg\n";
+ }
+
+ function prompt( $msg ) {
+ if ( function_exists( 'readline' ) ) {
+ $s = readline( $msg );
+ }
+ else {
+ if ( !@$this->stdin ) $this->stdin = fopen( 'php://stdin', 'r' );
+ if ( !$this->stdin ) die( "Failed to open stdin for user interaction!\n" );
+
+ print $msg;
+ flush();
+
+ $s = fgets( $this->stdin );
+ }
+
+ $s = trim( $s );
+ return $s;
+ }
+
+ function confirm( $msg ) {
+ while ( true ) {
+ $s = $this->prompt( $msg . " [yes/no]: ");
+ $s = strtolower( trim($s) );
+
+ if ( $s == 'yes' || $s == 'y' ) return true;
+ else if ( $s == 'no' || $s == 'n' ) return false;
+ else print "bad response: $s\n";
+ }
+ }
+
+ function deleteContents( $dir ) {
+ $ff = glob( $dir . "/*" );
+ if ( !$ff ) return;
+
+ foreach ( $ff as $f ) {
+ if ( is_dir( $f ) && !is_link( $f ) ) $this->deleteContents( $f );
+ unlink( $f );
+ }
+ }
+
+ function copyDir( $dir, $tgt ) {
+ $d = $tgt . '/' . basename( $dir );
+
+ if ( !file_exists( $d ) ) {
+ $ok = mkdir( $d );
+ if ( !$ok ) {
+				ExtensionInstaller::error( "failed to create directory $d" );
+ return false;
+ }
+ }
+
+ $ff = glob( $dir . "/*" );
+ if ( $ff === false || $ff === NULL ) return false;
+
+ foreach ( $ff as $f ) {
+ if ( is_dir( $f ) && !is_link( $f ) ) {
+ $ok = ExtensionInstaller::copyDir( $f, $d );
+ if ( !$ok ) return false;
+ }
+ else {
+ $t = $d . '/' . basename( $f );
+ $ok = copy( $f, $t );
+
+ if ( !$ok ) {
+ ExtensionInstaller::error( "failed to copy $f to $t" );
+ return false;
+ }
+ }
+ }
+
+ return true;
+ }
+
+ function setPermissions( $dir, $dirbits, $filebits ) {
+		if ( !chmod( $dir, $dirbits ) ) ExtensionInstaller::warn( "failed to set permissions for $dir" );
+
+ $ff = glob( $dir . "/*" );
+ if ( $ff === false || $ff === NULL ) return false;
+
+ foreach ( $ff as $f ) {
+ $n= basename( $f );
+ if ( $n{0} == '.' ) continue; #HACK: skip dot files
+
+ if ( is_link( $f ) ) continue; #skip link
+
+ if ( is_dir( $f ) ) {
+ ExtensionInstaller::setPermissions( $f, $dirbits, $filebits );
+ }
+ else {
+				if ( !chmod( $f, $filebits ) ) ExtensionInstaller::warn( "failed to set permissions for $f" );
+ }
+ }
+
+ return true;
+ }
+
+ function fetchExtension( ) {
+ if ( $this->source->islocal && $this->source->isdir && realpath( $this->source->path ) === $this->dir ) {
+ $this->note( "files are already in the extension dir" );
+ return true;
+ }
+
+ if ( file_exists( $this->dir ) && glob( $this->dir . "/*" ) ) {
+ if ( $this->confirm( "{$this->dir} exists and is not empty.\nDelete all files in that directory?" ) ) {
+ $this->deleteContents( $this->dir );
+ }
+ else {
+ return false;
+ }
+ }
+
+ $ok = $this->source->fetch( $this->dir );
+ if ( !$ok ) return false;
+
+		if ( !file_exists( $this->dir ) || !glob( $this->dir . "/*" ) ) {
+ $this->error( "{$this->dir} does not exist or is empty. Something went wrong, sorry." );
+ return false;
+ }
+
+ if ( file_exists( $this->dir . '/README' ) ) $this->tasks[] = "read the README file in {$this->dir}";
+ if ( file_exists( $this->dir . '/INSTALL' ) ) $this->tasks[] = "read the INSTALL file in {$this->dir}";
+ if ( file_exists( $this->dir . '/RELEASE-NOTES' ) ) $this->tasks[] = "read the RELEASE-NOTES file in {$this->dir}";
+
+ #TODO: configure this smartly...?
+ $this->setPermissions( $this->dir, 0755, 0644 );
+
+ $this->note( "fetched extension to {$this->dir}" );
+ return true;
+ }
+
+ function patchLocalSettings( $mode ) {
+ #NOTE: if we get a better way to hook up extensions, that should be used instead.
+
+ $f = $this->dir . '/install.settings';
+ $t = $this->target . '/LocalSettings.php';
+
+ #TODO: assert version ?!
+ #TODO: allow custom installer scripts + sql patches
+
+ if ( !file_exists( $f ) ) {
+ $this->warn( "No install.settings file provided!" );
+ $this->tasks[] = "Please read the instructions and edit LocalSettings.php manually to activate the extension.";
+ return '?';
+ }
+ else {
+ $this->note( "applying settings patch..." );
+ }
+
+ $settings = file_get_contents( $f );
+
+ if ( !$settings ) {
+ $this->error( "failed to read settings from $f!" );
+ return false;
+ }
+
+ $settings = str_replace( '{{path}}', $this->incpath, $settings );
+
+ if ( $mode == EXTINST_NOPATCH ) {
+ $this->tasks[] = "Please put the following into your LocalSettings.php:" . "\n$settings\n";
+ $this->note( "Skipping patch phase, automatic patching is off." );
+ return true;
+ }
+
+ if ( $mode == EXTINST_HOTPATCH ) {
+ #NOTE: keep php extension for backup file!
+ $bak = $this->target . '/LocalSettings.install-' . $this->name . '-' . wfTimestamp(TS_MW) . '.bak.php';
+
+ $ok = copy( $t, $bak );
+
+ if ( !$ok ) {
+ $this->warn( "failed to create backup of LocalSettings.php!" );
+ return false;
+ }
+ else {
+ $this->note( "created backup of LocalSettings.php at $bak" );
+ }
+ }
+
+ $localsettings = file_get_contents( $t );
+
+		if ( !$localsettings ) {
+ $this->error( "failed to read $t for patching!" );
+ return false;
+ }
+
+ $marker = "<@< extension {$this->name} >@>";
+ $blockpattern = "/\n\s*#\s*BEGIN\s*$marker.*END\s*$marker\s*/smi";
+
+ if ( preg_match( $blockpattern, $localsettings ) ) {
+ $localsettings = preg_replace( $blockpattern, "\n", $localsettings );
+ $this->warn( "removed old configuration block for extension {$this->name}!" );
+ }
+
+ $newblock= "\n# BEGIN $marker\n$settings\n# END $marker\n";
+
+ $localsettings = preg_replace( "/\?>\s*$/si", "$newblock?>", $localsettings );
+
+ if ( $mode != EXTINST_HOTPATCH ) {
+ $t = $this->target . '/LocalSettings.install-' . $this->name . '-' . wfTimestamp(TS_MW) . '.php';
+ }
+
+ $ok = file_put_contents( $t, $localsettings );
+
+ if ( !$ok ) {
+ $this->error( "failed to patch $t!" );
+ return false;
+ }
+ else if ( $mode == EXTINST_HOTPATCH ) {
+ $this->note( "successfully patched $t" );
+ }
+ else {
+ $this->note( "created patched settings file $t" );
+ $this->tasks[] = "Replace your current LocalSettings.php with ".basename($t);
+ }
+
+ return true;
+ }
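+
+	# For illustration only: for a hypothetical extension "FooBar" whose
+	# install.settings contains a require_once line with {{path}}, the block
+	# injected just before the final "?>" of LocalSettings.php would look
+	# roughly like:
+	#
+	#   # BEGIN <@< extension FooBar >@>
+	#   require_once( "extensions/FooBar/FooBar.php" );
+	#   # END <@< extension FooBar >@>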
+
+ function printNotices( ) {
+ if ( !$this->tasks ) {
+ $this->note( "Installation is complete, no pending tasks" );
+ }
+ else {
+ $this->note( "" );
+ $this->note( "PENDING TASKS:" );
+ $this->note( "" );
+
+ foreach ( $this->tasks as $t ) {
+ $this->note ( "* " . $t );
+ }
+
+ $this->note( "" );
+ }
+
+ return true;
+ }
+
+}
+
+$tgt = isset ( $options['target'] ) ? $options['target'] : $IP;
+
+$repos = @$options['repository'];
+if ( !$repos ) $repos = @$options['repos'];
+if ( !$repos ) $repos = @$wgExtensionInstallerRepository;
+
+if ( !$repos && file_exists("$tgt/.svn") && is_dir("$tgt/.svn") ) {
+ $svn = file_get_contents( "$tgt/.svn/entries" );
+
+ if ( preg_match( '!url="(.*?)"!', $svn, $m ) ) {
+ $repos = dirname( $m[1] ) . '/extensions';
+ }
+}
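+
+# Example of the detection above (hypothetical URL): if .svn/entries contains
+# url="http://svn.example.org/mediawiki/trunk/phase3", the default repository
+# becomes http://svn.example.org/mediawiki/trunk/extensions.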
+
+if ( !$repos ) $repos = 'http://svn.wikimedia.org/svnroot/mediawiki/trunk/extensions';
+
+if( !isset( $args[0] ) && !@$options['list'] ) {
+ die( "USAGE: installExtension.php [options] <name> [source]\n" .
+ "OPTIONS: \n" .
+ " --list list available extensions. <name> is ignored / may be omitted.\n" .
+	     "  --repository <n>  repository to fetch extensions from. May be a local directory,\n" .
+	     "                    an SVN repository or an HTTP directory\n" .
+ " --target <dir> mediawiki installation directory to use\n" .
+ " --nopatch don't create a patched LocalSettings.php\n" .
+	     "  --hotpatch        patch LocalSettings.php directly (creates a backup)\n" .
+ "SOURCE: specifies the package source directly. If given, the repository is ignored.\n" .
+	     "        The source may be a local file (tgz or zip) or directory, the URL of a\n" .
+	     "        remote file (tgz or zip), or an SVN path.\n"
+ );
+}
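+
+# Illustrative invocations (the extension name and paths are hypothetical):
+#   php installExtension.php --list
+#   php installExtension.php FooBarExtension
+#   php installExtension.php --hotpatch --target=/srv/wiki FooBarExtension /tmp/FooBarExtension.tgz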
+
+$repository = InstallerRepository::makeRepository( $repos );
+
+if ( isset( $options['list'] ) ) {
+ $repository->printListing();
+ exit(0);
+}
+
+$name = $args[0];
+
+$src = isset( $args[1] ) ? $args[1] : $repository->getResource( $name );
+
+#TODO: detect $source mismatching $name !!
+
+$mode = EXTINST_WRITEPATCH;
+if ( isset( $options['nopatch'] ) || @$wgExtensionInstallerNoPatch ) $mode = EXTINST_NOPATCH;
+else if ( isset( $options['hotpatch'] ) || @$wgExtensionInstallerHotPatch ) $mode = EXTINST_HOTPATCH;
+
+if ( !file_exists( "$tgt/LocalSettings.php" ) ) {
+ die("can't find $tgt/LocalSettings.php\n");
+}
+
+if ( $mode == EXTINST_HOTPATCH && !is_writable( "$tgt/LocalSettings.php" ) ) {
+ die("can't write to $tgt/LocalSettings.php\n");
+}
+
+if ( !file_exists( "$tgt/extensions" ) ) {
+ die("can't find $tgt/extensions\n");
+}
+
+if ( !is_writable( "$tgt/extensions" ) ) {
+ die("can't write to $tgt/extensions\n");
+}
+
+$installer = new ExtensionInstaller( $name, $src, $tgt );
+
+$installer->note( "Installing extension {$installer->name} from {$installer->source->path} to {$installer->dir}" );
+
+print "\n";
+print "\tTHIS TOOL IS EXPERIMENTAL!\n";
+print "\tEXPECT THE UNEXPECTED!\n";
+print "\n";
+
+if ( !$installer->confirm("continue") ) die("aborted\n");
+
+$ok = $installer->fetchExtension();
+
+if ( $ok ) $ok = $installer->patchLocalSettings( $mode );
+
+if ( $ok ) $ok = $installer->printNotices();
+
+if ( $ok ) $installer->note( "$name extension installed." );
+?>
diff --git a/maintenance/language/alltrans.php b/maintenance/language/alltrans.php
new file mode 100644
index 00000000..f8db9c0d
--- /dev/null
+++ b/maintenance/language/alltrans.php
@@ -0,0 +1,16 @@
+<?php
+/**
+ * @package MediaWiki
+ * @subpackage Maintenance
+ *
+ * Get all the translations messages, as defined in the English language file.
+ */
+
+require_once( dirname(__FILE__).'/../commandLine.inc' );
+
+$wgEnglishMessages = array_keys( Language::getMessagesFor( 'en' ) );
+foreach( $wgEnglishMessages as $key ) {
+ echo "$key\n";
+}
+
+?>
diff --git a/maintenance/language/checkLanguage.php b/maintenance/language/checkLanguage.php
new file mode 100644
index 00000000..11c8ec92
--- /dev/null
+++ b/maintenance/language/checkLanguage.php
@@ -0,0 +1,177 @@
+<?php
+/**
+ * Check a language file.
+ *
+ * @package MediaWiki
+ * @subpackage Maintenance
+ */
+
+require_once( dirname(__FILE__).'/../commandLine.inc' );
+require_once( 'languages.inc' );
+
+/**
+ * Check a language.
+ *
+ * @param $code The language code.
+ */
+function checkLanguage( $code ) {
+ global $wgLanguages, $wgGeneralMessages, $wgRequiredMessagesNumber, $wgDisplayLevel, $wgLinks, $wgWikiLanguage, $wgChecks;
+
+ # Get messages
+ $messages = $wgLanguages->getMessages( $code );
+ $messagesNumber = count( $messages['translated'] );
+
+ # Skip the checks if specified
+ if ( $wgDisplayLevel == 0 ) {
+ return;
+ }
+
+ # Untranslated messages
+ if ( in_array( 'untranslated', $wgChecks ) ) {
+ $untranslatedMessages = $wgLanguages->getUntranslatedMessages( $code );
+ $untranslatedMessagesNumber = count( $untranslatedMessages );
+ $wgLanguages->outputMessagesList( $untranslatedMessages, $code, "\n$untranslatedMessagesNumber messages of $wgRequiredMessagesNumber are not translated to $code, but exist in en:", $wgDisplayLevel, $wgLinks, $wgWikiLanguage );
+ }
+
+ # Duplicate messages
+ if ( in_array( 'duplicate', $wgChecks ) ) {
+ $duplicateMessages = $wgLanguages->getDuplicateMessages( $code );
+ $duplicateMessagesNumber = count( $duplicateMessages );
+ $wgLanguages->outputMessagesList( $duplicateMessages, $code, "\n$duplicateMessagesNumber messages of $messagesNumber are translated the same in en and $code:", $wgDisplayLevel, $wgLinks, $wgWikiLanguage );
+ }
+
+ # Obsolete messages
+ if ( in_array( 'obsolete', $wgChecks ) ) {
+ $obsoleteMessages = $messages['obsolete'];
+ $obsoleteMessagesNumber = count( $obsoleteMessages );
+		$wgLanguages->outputMessagesList( $obsoleteMessages, $code, "\n$obsoleteMessagesNumber messages of $messagesNumber do not exist in en (or are in the ignored list), but still exist in $code:", $wgDisplayLevel, $wgLinks, $wgWikiLanguage );
+ }
+
+ # Messages without variables
+ if ( in_array( 'variables', $wgChecks ) ) {
+ $messagesWithoutVariables = $wgLanguages->getMessagesWithoutVariables( $code );
+ $messagesWithoutVariablesNumber = count( $messagesWithoutVariables );
+ $wgLanguages->outputMessagesList( $messagesWithoutVariables, $code, "\n$messagesWithoutVariablesNumber messages of $messagesNumber in $code don't use some variables while en uses them:", $wgDisplayLevel, $wgLinks, $wgWikiLanguage );
+ }
+
+ # Empty messages
+ if ( in_array( 'empty', $wgChecks ) ) {
+ $emptyMessages = $wgLanguages->getEmptyMessages( $code );
+ $emptyMessagesNumber = count( $emptyMessages );
+ $wgLanguages->outputMessagesList( $emptyMessages, $code, "\n$emptyMessagesNumber messages of $messagesNumber in $code are empty or -:", $wgDisplayLevel, $wgLinks, $wgWikiLanguage );
+ }
+
+ # Messages with whitespace
+ if ( in_array( 'whitespace', $wgChecks ) ) {
+ $messagesWithWhitespace = $wgLanguages->getMessagesWithWhitespace( $code );
+ $messagesWithWhitespaceNumber = count( $messagesWithWhitespace );
+		$wgLanguages->outputMessagesList( $messagesWithWhitespace, $code, "\n$messagesWithWhitespaceNumber messages of $messagesNumber in $code have trailing whitespace:", $wgDisplayLevel, $wgLinks, $wgWikiLanguage );
+ }
+
+ # Non-XHTML messages
+ if ( in_array( 'xhtml', $wgChecks ) ) {
+ $nonXHTMLMessages = $wgLanguages->getNonXHTMLMessages( $code );
+ $nonXHTMLMessagesNumber = count( $nonXHTMLMessages );
+ $wgLanguages->outputMessagesList( $nonXHTMLMessages, $code, "\n$nonXHTMLMessagesNumber messages of $messagesNumber in $code are not well-formed XHTML:", $wgDisplayLevel, $wgLinks, $wgWikiLanguage );
+ }
+
+ # Messages with wrong characters
+ if ( in_array( 'chars', $wgChecks ) ) {
+ $messagesWithWrongChars = $wgLanguages->getMessagesWithWrongChars( $code );
+ $messagesWithWrongCharsNumber = count( $messagesWithWrongChars );
+ $wgLanguages->outputMessagesList( $messagesWithWrongChars, $code, "\n$messagesWithWrongCharsNumber messages of $messagesNumber in $code include hidden chars which should not be used in the messages:", $wgDisplayLevel, $wgLinks, $wgWikiLanguage );
+ }
+}
+
+# Show help
+if ( isset( $options['help'] ) ) {
+ echo <<<END
+Run this script to check a specific language file, or all of them.
+Parameters:
+ * lang: Language code (default: the installation default language). You can also specify "all" to check all the languages.
+ * help: Show this help.
+ * level: Show the following level (default: 2).
+ * links: Link the message values (default off).
+ * wikilang: For the links, what is the content language of the wiki to display the output in (default en).
+ * whitelist: Make only the following checks (form: code,code).
+ * blacklist: Don't make the following checks (form: code,code).
+ * duplicate: Additionally check for messages which are translated the same as in English (default off).
+ * noexif: Don't check for EXIF messages (a bit hard and boring to translate), if you know that they are currently not translated and want to focus on other problems (default off).
+Check codes (ideally, all of them should result in 0; all the checks are executed by default):
+ * untranslated: Messages which are required to be translated, but are not translated.
+ * obsolete: Messages which should not be translated (they do not exist in en or are ignored), but are translated.
+ * variables: Messages which do not use variables that the corresponding English messages use.
+ * empty: Empty messages.
+ * whitespace: Messages which have trailing whitespace.
+ * xhtml: Messages which are not well-formed XHTML.
+ * chars: Messages with hidden characters.
+Display levels (default: 2):
+ * 0: Skip the checks (useful for checking syntax).
+ * 1: Show only the stub headers and number of wrong messages, without list of messages.
+ * 2: Show only the headers and the message keys, without the message values.
+ * 3: Show both the headers and the complete messages, with both keys and values.
+
+END;
+ exit();
+}
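+
+# Illustrative invocations (the language codes are examples only):
+#   php checkLanguage.php --lang=de
+#   php checkLanguage.php --lang=all --whitelist=untranslated,obsolete --level=1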
+
+# Get the language code
+if ( isset( $options['lang'] ) ) {
+ $wgCode = $options['lang'];
+} else {
+ $wgCode = $wgContLang->getCode();
+}
+
+# Get the display level
+if ( isset( $options['level'] ) ) {
+ $wgDisplayLevel = $options['level'];
+} else {
+ $wgDisplayLevel = 2;
+}
+
+# Get the links options
+$wgLinks = isset( $options['links'] );
+$wgWikiLanguage = isset( $options['wikilang'] ) ? $options['wikilang'] : 'en';
+
+# Get the checks to do
+$wgChecks = array( 'untranslated', 'obsolete', 'variables', 'empty', 'whitespace', 'xhtml', 'chars' );
+if ( isset( $options['whitelist'] ) ) {
+ $wgChecks = explode( ',', $options['whitelist'] );
+} elseif ( isset( $options['blacklist'] ) ) {
+ $wgChecks = array_diff( $wgChecks, explode( ',', $options['blacklist'] ) );
+}
+
+# Add duplicate option if specified
+if ( isset( $options['duplicate'] ) ) {
+ $wgChecks[] = 'duplicate';
+}
+
+# Should check for EXIF?
+$wgCheckEXIF = !isset( $options['noexif'] );
+
+# Get language objects
+$wgLanguages = new languages( $wgCheckEXIF );
+
+# Get the general messages
+$wgGeneralMessages = $wgLanguages->getGeneralMessages();
+$wgRequiredMessagesNumber = count( $wgGeneralMessages['required'] );
+
+# Check the language
+if ( $wgCode == 'all' ) {
+ foreach ( $wgLanguages->getLanguages() as $language ) {
+ if ( $language != 'en' && $language != 'enRTL' ) {
+ checkLanguage( $language );
+ }
+ }
+} else {
+ # Can't check English
+ if ( $wgCode == 'en' ) {
+		echo "The currently selected language is English, which cannot be checked.\n";
+	} else if ( $wgCode == 'enRTL' ) {
+		echo "The currently selected language is RTL English, which cannot be checked.\n";
+ } else {
+ checkLanguage( $wgCode );
+ }
+}
+
+?>
diff --git a/maintenance/language/checktrans.php b/maintenance/language/checktrans.php
new file mode 100644
index 00000000..a5772d47
--- /dev/null
+++ b/maintenance/language/checktrans.php
@@ -0,0 +1,44 @@
+<?php
+/**
+ * @package MediaWiki
+ * @subpackage Maintenance
+ * Check to see if all messages have been translated into the selected language.
+ * To run this script, you must have a working installation, and you can specify
+ * a language, or the script will check the installation language.
+ */
+
+/** */
+require_once(dirname(__FILE__).'/../commandLine.inc');
+
+if ( isset( $args[0] ) ) {
+ $code = $args[0];
+} else {
+ $code = $wgLang->getCode();
+}
+
+if ( $code == 'en' ) {
+	print "The currently selected language is English. Cannot check translations.\n";
+ exit();
+}
+
+$filename = Language::getMessagesFileName( $code );
+if ( file_exists( $filename ) ) {
+ require( $filename );
+} else {
+ $messages = array();
+}
+
+$count = $total = 0;
+$wgEnglishMessages = Language::getMessagesFor( 'en' );
+$wgLocalMessages = $messages;
+
+foreach ( $wgEnglishMessages as $key => $msg ) {
+ ++$total;
+ if ( !isset( $wgLocalMessages[$key] ) ) {
+ print "'{$key}' => \"$msg\",\n";
+ ++$count;
+ }
+}
+
+print "{$count} messages of {$total} are not translated in the language {$code}.\n";
+?>
diff --git a/maintenance/language/date-formats.php b/maintenance/language/date-formats.php
new file mode 100644
index 00000000..962c2f8c
--- /dev/null
+++ b/maintenance/language/date-formats.php
@@ -0,0 +1,45 @@
+<?php
+
+$ts = '20010115123456';
+
+
+$IP = dirname( __FILE__ ) . '/../..';
+require_once( "$IP/maintenance/commandLine.inc" );
+
+foreach ( glob( "$IP/languages/messages/Messages*.php" ) as $filename ) {
+ $base = basename( $filename );
+ if ( !preg_match( '/Messages(.*)\.php$/', $base, $m ) ) {
+ continue;
+ }
+ $code = str_replace( '_', '-', strtolower( $m[1] ) );
+ print "$code ";
+ $lang = Language::factory( $code );
+ $prefs = $lang->getDatePreferences();
+ if ( !$prefs ) {
+ $prefs = array( 'default' );
+ }
+ print "date: ";
+ foreach ( $prefs as $index => $pref ) {
+ if ( $index > 0 ) {
+ print ' | ';
+ }
+ print $lang->date( $ts, false, $pref );
+ }
+ print "\n$code time: ";
+ foreach ( $prefs as $index => $pref ) {
+ if ( $index > 0 ) {
+ print ' | ';
+ }
+ print $lang->time( $ts, false, $pref );
+ }
+ print "\n$code both: ";
+ foreach ( $prefs as $index => $pref ) {
+ if ( $index > 0 ) {
+ print ' | ';
+ }
+ print $lang->timeanddate( $ts, false, $pref );
+ }
+ print "\n\n";
+}
+
+?>
diff --git a/maintenance/language/diffLanguage.php b/maintenance/language/diffLanguage.php
new file mode 100644
index 00000000..2aaa5902
--- /dev/null
+++ b/maintenance/language/diffLanguage.php
@@ -0,0 +1,159 @@
+<?php
+# MediaWiki web-based config/installation
+# Copyright (C) 2004 Ashar Voultoiz <thoane@altern.org> and others
+# http://www.mediawiki.org/
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+# http://www.gnu.org/copyleft/gpl.html
+
+/**
+ * Usage: php DiffLanguage.php [lang [file]]
+ *
+ * lang: Enter the language code following "Language" of the LanguageXX.php you
+ * want to check. If using linux you might need to follow case aka Zh and not
+ * zh.
+ *
+ * file: A php language file you want to include to compare mediawiki
+ * Language{Lang}.php against (for example Special:Allmessages PHP output).
+ *
+ * The goal is to get a list of messages not yet localised in a languageXX.php
+ * file using the language.php file as reference.
+ *
+ * The script then print a list of wgAllMessagesXX keys that aren't localised, a
+ * percentage of messages correctly localised and the number of messages to be
+ * translated.
+ *
+ * @package MediaWiki
+ * @subpackage Maintenance
+ */
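+
+# Illustrative invocations (the language code and file are examples only):
+#   php diffLanguage.php Zh
+#   php diffLanguage.php Fr /tmp/allmessages-fr.php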
+
+/** This script is run from the command line */
+require_once( dirname(__FILE__).'/../parserTests.inc' );
+require_once( dirname(__FILE__).'/../commandLine.inc' );
+
+if( isset($options['help']) ) { usage(); wfDie(); }
+
+$wgLanguageCode = ucfirstlcrest($wgLanguageCode);
+/** Language messages we will use as reference. By default 'en' */
+$referenceMessages = $wgAllMessagesEn;
+$referenceLanguage = 'En';
+$referenceFilename = 'Language'.$referenceLanguage.'.php';
+/** Language messages we will test. */
+$testMessages = array();
+$testLanguage = '';
+/** whereas we use an external language file */
+$externalRef = false;
+
+# FUNCTIONS
+/** @todo more information */
+function usage() {
+echo 'php DiffLanguage.php [lang [file]] [--color=(yes|no|light)]'."\n";
+}
+
+/** Return a given string with first letter upper case, the rest lowercase */
+function ucfirstlcrest($string) {
+ return strtoupper(substr($string,0,1)).strtolower(substr($string,1));
+}
+
+/**
+ * Return a $wgAllmessages array shipped in MediaWiki
+ * @param string $languageCode Formatted language code
+ * @return array The MediaWiki default $wgAllMessages array requested
+ */
+function getMediawikiMessages($languageCode = 'En') {
+
+ $foo = "wgAllMessages$languageCode";
+ global $$foo, $wgSkinNamesEn;
+
+ // it might already be loaded in LocalSettings.php
+ if(!isset($$foo)) {
+ global $IP;
+ $langFile = $IP.'/languages/classes/Language'.$languageCode.'.php';
+ if (file_exists( $langFile ) ) {
+ print "Including $langFile\n";
+ global $wgNamespaceNamesEn;
+ include($langFile);
+ } else wfDie("ERROR: The file $langFile does not exist !\n");
+ }
+ return $$foo;
+}
+
+/**
+ * Return a $wgAllmessages array in a given file. The language of the array
+ * needs to be given because we cannot detect which language it provides
+ * @param string $filename Filename of the file containing a message array
+ * @param string $languageCode Language of the external array
+ * @return array A $wgAllMessages array from an external file.
+ */
+function getExternalMessages($filename, $languageCode) {
+ print "Including external file $filename.\n";
+ include($filename);
+ $foo = "wgAllMessages$languageCode";
+ return $$foo;
+}
+
+# MAIN ENTRY
+if ( isset($args[0]) ) {
+ $lang = ucfirstlcrest($args[0],1);
+
+	// optionally use another language file as the reference instead
+	// of the default English language.
+ if( isset($args[1])) {
+		// we assume the external file contains an array of messages for the
+ // lang we are testing
+ $referenceMessages = getExternalMessages( $args[1], $lang );
+ $referenceLanguage = $lang;
+ $referenceFilename = $args[1];
+ $externalRef = true;
+ }
+
+	// Load data from MediaWiki
+ $testMessages = getMediawikiMessages($lang);
+ $testLanguage = $lang;
+} else {
+ usage();
+ wfDie();
+}
+
+/** ParserTest is used to produce the diffs */
+$myParserTest = new ParserTest();
+
+# Get all reference messages and check if they exist in the tested language
+$i = 0;
+
+$msg = "MW Language{$testLanguage}.php against ";
+if($externalRef) { $msg .= 'external file '; }
+else { $msg .= 'internal file '; }
+$msg .= $referenceFilename.' ('.$referenceLanguage."):\n----\n";
+echo $msg;
+
+// process messages
+foreach($referenceMessages as $index => $ref)
+{
+ // message is not localized
+ if(!(isset($testMessages[$index]))) {
+ $i++;
+ print "'$index' => \"$ref\",\n";
+	// Messages in the same language differ
+ } elseif( ($lang == $referenceLanguage) AND ($testMessages[$index] != $ref)) {
+ print "\n$index differs:\n";
+ print $myParserTest->quickDiff($testMessages[$index],$ref,'tested','reference');
+ }
+}
+
+echo "\n----\n".$msg;
+echo "$testLanguage language is complete at ".number_format((100 - $i/count($referenceMessages) * 100),2)."%\n";
+echo "$i unlocalised messages of the ".count($referenceMessages)." messages available.\n";
+?>
diff --git a/maintenance/language/dumpMessages.php b/maintenance/language/dumpMessages.php
new file mode 100644
index 00000000..bd7e2aed
--- /dev/null
+++ b/maintenance/language/dumpMessages.php
@@ -0,0 +1,20 @@
+<?php
+/**
+ * @todo document
+ * @package MediaWiki
+ * @subpackage Maintenance
+ */
+
+/** */
+require_once( dirname(__FILE__).'/../commandLine.inc' );
+$wgMessageCache->disableTransform();
+$messages = array();
+$wgEnglishMessages = array_keys( Language::getMessagesFor( 'en' ) );
+foreach ( $wgEnglishMessages as $key )
+{
+ $messages[$key] = wfMsg( $key );
+}
+print "MediaWiki $wgVersion language file\n";
+print serialize( $messages );
+
+?>
diff --git a/maintenance/language/duplicatetrans.php b/maintenance/language/duplicatetrans.php
new file mode 100644
index 00000000..9273ee6e
--- /dev/null
+++ b/maintenance/language/duplicatetrans.php
@@ -0,0 +1,43 @@
+<?php
+/**
+ * Prints out messages that are the same as the message with the corresponding
+ * key in the English file
+ *
+ * @package MediaWiki
+ * @subpackage Maintenance
+ */
+
+require_once(dirname(__FILE__).'/../commandLine.inc');
+
+if ( isset( $args[0] ) ) {
+ $code = $args[0];
+} else {
+ $code = $wgLang->getCode();
+}
+
+if ( $code == 'en' ) {
+	print "The currently selected language is English. Cannot check translations.\n";
+ exit();
+}
+
+$filename = Language::getMessagesFileName( $code );
+if ( file_exists( $filename ) ) {
+ require( $filename );
+} else {
+ $messages = array();
+}
+
+$count = $total = 0;
+$wgEnglishMessages = Language::getMessagesFor( 'en' );
+$wgLocalMessages = $messages;
+
+foreach ( $wgLocalMessages as $key => $msg ) {
+ ++$total;
+ if ( @$wgEnglishMessages[$key] == $msg ) {
+ echo "* $key\n";
+ ++$count;
+ }
+}
+
+echo "{$count} messages of {$total} are duplicates in the language {$code}\n";
+?>
diff --git a/maintenance/language/function-list.php b/maintenance/language/function-list.php
new file mode 100644
index 00000000..84efb29d
--- /dev/null
+++ b/maintenance/language/function-list.php
@@ -0,0 +1,44 @@
+<?php
+
+define( 'MEDIAWIKI', 1 );
+define( 'NOT_REALLY_MEDIAWIKI', 1 );
+
+class Language {}
+foreach ( glob( 'Language*.php' ) as $file ) {
+ if ( $file != 'Language.php' ) {
+ require_once( $file );
+ }
+}
+
+$removedFunctions = array( 'date', 'time', 'timeanddate', 'formatMonth', 'formatDay',
+ 'getMonthName', 'getMonthNameGen', 'getMonthAbbreviation', 'getWeekdayName',
+ 'userAdjust', 'dateFormat', 'timeSeparator', 'timeDateSeparator', 'timeBeforeDate',
+ 'monthByLatinNumber', 'getSpecialMonthName',
+
+ 'commafy'
+);
+
+$numRemoved = 0;
+$total = 0;
+$classes = get_declared_classes();
+ksort( $classes );
+foreach ( $classes as $class ) {
+ if ( !preg_match( '/^Language/', $class ) || $class == 'Language' || $class == 'LanguageConverter' ) {
+ continue;
+ }
+
+ print "$class\n";
+ $methods = get_class_methods( $class );
+ print_r( $methods );
+
+ if ( !count( array_diff( $methods, $removedFunctions ) ) ) {
+ print "removed\n";
+ $numRemoved++;
+ }
+ $total++;
+ print "\n";
+}
+
+print "$numRemoved will be removed out of $total\n";
+
+?>
diff --git a/maintenance/language/lang2po.php b/maintenance/language/lang2po.php
new file mode 100644
index 00000000..520d8d6e
--- /dev/null
+++ b/maintenance/language/lang2po.php
@@ -0,0 +1,154 @@
+<?php
+/**
+ * Convert Language files to .po files !
+ *
+ * Todo:
+ * - generate .po header
+ * - fix escaping of \
+ */
+
+/** This is a command line script */
+require_once(dirname(__FILE__).'/../commandLine.inc');
+require_once(dirname(__FILE__).'/languages.inc');
+
+define('ALL_LANGUAGES', true);
+define('XGETTEXT_BIN', 'xgettext');
+define('MSGMERGE_BIN', 'msgmerge');
+
+// used to generate the .pot
+define('XGETTEXT_OPTIONS', '-n --keyword=wfMsg --keyword=wfMsgForContent --keyword=wfMsgHtml --keyword=wfMsgWikiHtml ');
+define('MSGMERGE_OPTIONS', ' -v ');
+
+define('LOCALE_OUTPUT_DIR', $IP.'/locale');
+
+
+if( isset($options['help']) ) { usage(); wfDie(); }
+// by default, process all languages
+if( !isset($options['lang']) ) { $options['lang'] = ALL_LANGUAGES; }
+
+function usage() {
+print <<<END
+Usage: php lang2po.php [--help] [--lang=<langcode>] [--stdout]
+ --help: this message.
+ --lang: a lang code you want to generate a .po for (default: all languages).
+
+END;
+}
+
+
+/**
+ * Return a dummy header for later edition.
+ * @return string A dummy header
+ */
+function poHeader() {
+return
+'# SOME DESCRIPTIVE TITLE.
+# Copyright (C) 2005 MediaWiki
+# This file is distributed under the same license as the MediaWiki package.
+# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
+#
+#, fuzzy
+msgid ""
+msgstr ""
+"Project-Id-Version: PACKAGE VERSION\n"
+"Report-Msgid-Bugs-To: bugzilllaaaaa\n"
+"POT-Creation-Date: 2005-08-16 20:13+0200\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: VARIOUS <nobody>\n"
+"Language-Team: LANGUAGE <nobody>\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+';
+}
+
+/**
+ * generate and write a file in .po format.
+ *
+ * @param string $langcode Code of a language it will process.
+ * @param array &$messages Array containing the various messages.
+ * @return string Filename where stuff got saved or false.
+ */
+function generatePo($langcode, &$messages) {
+ $data = poHeader();
+
+ // Generate .po entries
+ foreach($messages as $identifier => $content) {
+ $data .= "msgid \"$identifier\"\n";
+
+ // Escape backslashes
+ $tmp = str_replace('\\', '\\\\', $content);
+		// Escape double quotes
+ $tmp = preg_replace( "/(?<!\\\\)\"/", '\"', $tmp);
+ // Rewrite multilines to gettext format
+ $tmp = str_replace("\n", "\"\n\"", $tmp);
+
+ $data .= 'msgstr "'. $tmp . "\"\n\n";
+ }
+
+ // Write the content to a file in locale/XX/messages.po
+ $dir = LOCALE_OUTPUT_DIR.'/'.$langcode;
+ if( !is_dir($dir) ) { mkdir( $dir, 0770 ); }
+ $filename = $dir.'/fromlanguagefile.po';
+
+ $file = fopen( $filename , 'wb' );
+ if( fwrite( $file, $data ) ) {
+ fclose( $file );
+ return $filename;
+ } else {
+ fclose( $file );
+ return false;
+ }
+}
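+
+/*
+ * For illustration (hypothetical message, not taken from a real language file):
+ * an entry such as
+ *     'editing' => 'Editing "$1"',
+ * is emitted by generatePo() as
+ *     msgid "editing"
+ *     msgstr "Editing \"$1\""
+ * after the backslash, double-quote and newline escaping above.
+ */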
+
+function generatePot() {
+ global $IP;
+ $curdir = getcwd();
+ chdir($IP);
+ exec( XGETTEXT_BIN
+ .' '.XGETTEXT_OPTIONS
+ .' -o '.LOCALE_OUTPUT_DIR.'/wfMsg.pot'
+ .' includes/*php'
+ );
+ chdir($curdir);
+}
+
+function applyPot($langcode) {
+ $langdir = LOCALE_OUTPUT_DIR.'/'.$langcode;
+
+ $from = $langdir.'/fromlanguagefile.po';
+ $pot = LOCALE_OUTPUT_DIR.'/wfMsg.pot';
+ $dest = $langdir.'/messages.po';
+
+ // Merge template and generate file to get final .po
+ exec(MSGMERGE_BIN.MSGMERGE_OPTIONS." $from $pot -o $dest ");
+ // delete no more needed file
+// unlink($from);
+}
+
+// Generate a template .pot based on source tree
+echo "Getting 'gettext' default messages from sources:";
+generatePot();
+echo "done.\n";
+
+
+$langTool = new languages();
+
+// Do all languages
+foreach ( $langTool->getLanguages() as $langcode) {
+ echo "Loading messages for $langcode:\t";
+ require_once( Language::getClassFileName( $langcode ) );
+ $arr = 'wgAllMessages'.$langcode;
+ if(!@is_array($$arr)) {
+ echo "NONE FOUND\n";
+ } else {
+ echo "ok\n";
+ if( ! generatePo($langcode, $$arr) ) {
+			echo "ERROR: Failed to write file.\n";
+ } else {
+ echo "Applying template:";
+ applyPot($langcode);
+ }
+ }
+}
+?>
diff --git a/maintenance/language/langmemusage.php b/maintenance/language/langmemusage.php
new file mode 100644
index 00000000..974bb0d8
--- /dev/null
+++ b/maintenance/language/langmemusage.php
@@ -0,0 +1,30 @@
+<?php
+/**
+ * Dumb program that tries to get the memory usage
+ * for each language file.
+ */
+
+/** This is a command line script */
+require_once(dirname(__FILE__).'/../commandLine.inc');
+require_once(dirname(__FILE__).'/languages.inc');
+
+$langtool = new languages();
+
+if ( ! function_exists( 'memory_get_usage' ) )
+ wfDie( "You must compile PHP with --enable-memory-limit\n" );
+
+$memlast = $memstart = memory_get_usage();
+
+print 'Base memory usage: '.$memstart."\n";
+
+foreach ( $langtool->getLanguages() as $langcode ) {
+ require_once( Language::getClassFileName( $langcode ) );
+ $memstep = memory_get_usage();
+ printf( "%12s: %d\n", $langcode, ($memstep- $memlast) );
+ $memlast = $memstep;
+}
+
+$memend = memory_get_usage();
+
+echo ' Total Usage: '.($memend - $memstart)."\n";
+?>
diff --git a/maintenance/language/languages.inc b/maintenance/language/languages.inc
new file mode 100644
index 00000000..86cd0869
--- /dev/null
+++ b/maintenance/language/languages.inc
@@ -0,0 +1,686 @@
+<?php
+/**
+ * Handle messages in the language files.
+ *
+ * @package MediaWiki
+ * @subpackage Maintenance
+ */
+
+class languages {
+ private $mLanguages; # List of languages
+ private $mRawMessages; # Raw list of the messages in each language
+	private $mMessages; # Messages in each language (except for English), divided into groups
+	private $mGeneralMessages; # General messages in English, divided into groups
+ private $mIgnoredMessages = array(
+ 'sidebar',
+ 'addsection',
+ 'anonnotice',
+ 'catseparator',
+ 'googlesearch',
+ 'exif-make-value',
+ 'exif-model-value',
+ 'exif-software-value',
+ 'history_copyright',
+ 'licenses',
+ 'loginend',
+ 'loginlanguagelinks',
+ 'markaspatrolledlink',
+ 'newarticletextanon',
+ 'noarticletextanon',
+ 'number_of_watching_users_RCview',
+ 'pubmedurl',
+ 'randompage-url',
+ 'recentchanges-url',
+ 'rfcurl',
+ 'shareddescriptionfollows',
+ 'signupend',
+ 'sitenotice',
+ 'sitesubtitle',
+ 'sitetitle',
+ 'talkpagetext',
+ 'trackback',
+ 'trackbackexcerpt',
+ 'widthheight',
+	); # All the messages which should exist only in the English file
+ private $mOptionalMessages = array(
+ 'imgmultigotopost',
+ 'linkprefix',
+ 'allpages-summary',
+ 'booksources-summary',
+ 'ipblocklist-summary',
+ 'listusers-summary',
+ 'longpages-summary',
+ 'preferences-summary',
+ 'specialpages-summary',
+ 'whatlinkshere-summary',
+ 'whatlinkshere-barrow',
+ 'imagelist-summary',
+ 'mimesearch-summary',
+ 'listredirects-summary',
+ 'uncategorizedpages-summary',
+ 'uncategorizedcategories-summary',
+ 'uncategorizedimages-summary',
+ 'popularpages-summary',
+ 'wantedcategories-summary',
+ 'wantedpages-summary',
+ 'mostlinked-summary',
+ 'mostlinkedcategories-summary',
+ 'mostcategories-summary',
+ 'mostimages-summary',
+ 'mostrevisions-summary',
+ 'prefixindex-summary',
+ 'shortpages-summary',
+ 'newpages-summary',
+ 'ancientpages-summary',
+ 'newimages-summary',
+ 'unwatchedpages-summary',
+ 'userrights-summary',
+ 'variantname-zh-cn',
+ 'variantname-zh-tw',
+ 'variantname-zh-hk',
+ 'variantname-zh-sg',
+ 'variantname-zh',
+ 'variantname-sr-ec',
+ 'variantname-sr-el',
+ 'variantname-sr-jc',
+ 'variantname-sr-jl',
+ 'variantname-sr',
+ 'variantname-kk-tr',
+ 'variantname-kk-kz',
+ 'variantname-kk-cn',
+ 'variantname-kk',
+ ); # All the messages which may be translated or not, depending on the language
+ private $mEXIFMessages = array(
+ 'exif-imagewidth',
+ 'exif-imagelength',
+ 'exif-bitspersample',
+ 'exif-compression',
+ 'exif-photometricinterpretation',
+ 'exif-orientation',
+ 'exif-samplesperpixel',
+ 'exif-planarconfiguration',
+ 'exif-ycbcrsubsampling',
+ 'exif-ycbcrpositioning',
+ 'exif-xresolution',
+ 'exif-yresolution',
+ 'exif-resolutionunit',
+ 'exif-stripoffsets',
+ 'exif-rowsperstrip',
+ 'exif-stripbytecounts',
+ 'exif-jpeginterchangeformat',
+ 'exif-jpeginterchangeformatlength',
+ 'exif-transferfunction',
+ 'exif-whitepoint',
+ 'exif-primarychromaticities',
+ 'exif-ycbcrcoefficients',
+ 'exif-referenceblackwhite',
+ 'exif-datetime',
+ 'exif-imagedescription',
+ 'exif-make',
+ 'exif-model',
+ 'exif-software',
+ 'exif-artist',
+ 'exif-copyright',
+ 'exif-exifversion',
+ 'exif-flashpixversion',
+ 'exif-colorspace',
+ 'exif-componentsconfiguration',
+ 'exif-compressedbitsperpixel',
+ 'exif-pixelydimension',
+ 'exif-pixelxdimension',
+ 'exif-makernote',
+ 'exif-usercomment',
+ 'exif-relatedsoundfile',
+ 'exif-datetimeoriginal',
+ 'exif-datetimedigitized',
+ 'exif-subsectime',
+ 'exif-subsectimeoriginal',
+ 'exif-subsectimedigitized',
+ 'exif-exposuretime',
+ 'exif-exposuretime-format',
+ 'exif-fnumber',
+ 'exif-fnumber-format',
+ 'exif-exposureprogram',
+ 'exif-spectralsensitivity',
+ 'exif-isospeedratings',
+ 'exif-oecf',
+ 'exif-shutterspeedvalue',
+ 'exif-aperturevalue',
+ 'exif-brightnessvalue',
+ 'exif-exposurebiasvalue',
+ 'exif-maxaperturevalue',
+ 'exif-subjectdistance',
+ 'exif-meteringmode',
+ 'exif-lightsource',
+ 'exif-flash',
+ 'exif-focallength',
+ 'exif-focallength-format',
+ 'exif-subjectarea',
+ 'exif-flashenergy',
+ 'exif-spatialfrequencyresponse',
+ 'exif-focalplanexresolution',
+ 'exif-focalplaneyresolution',
+ 'exif-focalplaneresolutionunit',
+ 'exif-subjectlocation',
+ 'exif-exposureindex',
+ 'exif-sensingmethod',
+ 'exif-filesource',
+ 'exif-scenetype',
+ 'exif-cfapattern',
+ 'exif-customrendered',
+ 'exif-exposuremode',
+ 'exif-whitebalance',
+ 'exif-digitalzoomratio',
+ 'exif-focallengthin35mmfilm',
+ 'exif-scenecapturetype',
+ 'exif-gaincontrol',
+ 'exif-contrast',
+ 'exif-saturation',
+ 'exif-sharpness',
+ 'exif-devicesettingdescription',
+ 'exif-subjectdistancerange',
+ 'exif-imageuniqueid',
+ 'exif-gpsversionid',
+ 'exif-gpslatituderef',
+ 'exif-gpslatitude',
+ 'exif-gpslongituderef',
+ 'exif-gpslongitude',
+ 'exif-gpsaltituderef',
+ 'exif-gpsaltitude',
+ 'exif-gpstimestamp',
+ 'exif-gpssatellites',
+ 'exif-gpsstatus',
+ 'exif-gpsmeasuremode',
+ 'exif-gpsdop',
+ 'exif-gpsspeedref',
+ 'exif-gpsspeed',
+ 'exif-gpstrackref',
+ 'exif-gpstrack',
+ 'exif-gpsimgdirectionref',
+ 'exif-gpsimgdirection',
+ 'exif-gpsmapdatum',
+ 'exif-gpsdestlatituderef',
+ 'exif-gpsdestlatitude',
+ 'exif-gpsdestlongituderef',
+ 'exif-gpsdestlongitude',
+ 'exif-gpsdestbearingref',
+ 'exif-gpsdestbearing',
+ 'exif-gpsdestdistanceref',
+ 'exif-gpsdestdistance',
+ 'exif-gpsprocessingmethod',
+ 'exif-gpsareainformation',
+ 'exif-gpsdatestamp',
+ 'exif-gpsdifferential',
+ 'exif-compression-1',
+ 'exif-compression-6',
+ 'exif-photometricinterpretation-2',
+ 'exif-photometricinterpretation-6',
+ 'exif-orientation-1',
+ 'exif-orientation-2',
+ 'exif-orientation-3',
+ 'exif-orientation-4',
+ 'exif-orientation-5',
+ 'exif-orientation-6',
+ 'exif-orientation-7',
+ 'exif-orientation-8',
+ 'exif-planarconfiguration-1',
+ 'exif-planarconfiguration-2',
+ 'exif-xyresolution-i',
+ 'exif-xyresolution-c',
+ 'exif-colorspace-1',
+ 'exif-colorspace-ffff.h',
+ 'exif-componentsconfiguration-0',
+ 'exif-componentsconfiguration-1',
+ 'exif-componentsconfiguration-2',
+ 'exif-componentsconfiguration-3',
+ 'exif-componentsconfiguration-4',
+ 'exif-componentsconfiguration-5',
+ 'exif-componentsconfiguration-6',
+ 'exif-exposureprogram-0',
+ 'exif-exposureprogram-1',
+ 'exif-exposureprogram-2',
+ 'exif-exposureprogram-3',
+ 'exif-exposureprogram-4',
+ 'exif-exposureprogram-5',
+ 'exif-exposureprogram-6',
+ 'exif-exposureprogram-7',
+ 'exif-exposureprogram-8',
+ 'exif-subjectdistance-value',
+ 'exif-meteringmode-0',
+ 'exif-meteringmode-1',
+ 'exif-meteringmode-2',
+ 'exif-meteringmode-3',
+ 'exif-meteringmode-4',
+ 'exif-meteringmode-5',
+ 'exif-meteringmode-6',
+ 'exif-meteringmode-255',
+ 'exif-lightsource-0',
+ 'exif-lightsource-1',
+ 'exif-lightsource-2',
+ 'exif-lightsource-3',
+ 'exif-lightsource-4',
+ 'exif-lightsource-9',
+ 'exif-lightsource-10',
+ 'exif-lightsource-11',
+ 'exif-lightsource-12',
+ 'exif-lightsource-13',
+ 'exif-lightsource-14',
+ 'exif-lightsource-15',
+ 'exif-lightsource-17',
+ 'exif-lightsource-18',
+ 'exif-lightsource-19',
+ 'exif-lightsource-20',
+ 'exif-lightsource-21',
+ 'exif-lightsource-22',
+ 'exif-lightsource-23',
+ 'exif-lightsource-24',
+ 'exif-lightsource-255',
+ 'exif-focalplaneresolutionunit-2',
+ 'exif-sensingmethod-1',
+ 'exif-sensingmethod-2',
+ 'exif-sensingmethod-3',
+ 'exif-sensingmethod-4',
+ 'exif-sensingmethod-5',
+ 'exif-sensingmethod-7',
+ 'exif-sensingmethod-8',
+ 'exif-filesource-3',
+ 'exif-scenetype-1',
+ 'exif-customrendered-0',
+ 'exif-customrendered-1',
+ 'exif-exposuremode-0',
+ 'exif-exposuremode-1',
+ 'exif-exposuremode-2',
+ 'exif-whitebalance-0',
+ 'exif-whitebalance-1',
+ 'exif-scenecapturetype-0',
+ 'exif-scenecapturetype-1',
+ 'exif-scenecapturetype-2',
+ 'exif-scenecapturetype-3',
+ 'exif-gaincontrol-0',
+ 'exif-gaincontrol-1',
+ 'exif-gaincontrol-2',
+ 'exif-gaincontrol-3',
+ 'exif-gaincontrol-4',
+ 'exif-contrast-0',
+ 'exif-contrast-1',
+ 'exif-contrast-2',
+ 'exif-saturation-0',
+ 'exif-saturation-1',
+ 'exif-saturation-2',
+ 'exif-sharpness-0',
+ 'exif-sharpness-1',
+ 'exif-sharpness-2',
+ 'exif-subjectdistancerange-0',
+ 'exif-subjectdistancerange-1',
+ 'exif-subjectdistancerange-2',
+ 'exif-subjectdistancerange-3',
+ 'exif-gpslatitude-n',
+ 'exif-gpslatitude-s',
+ 'exif-gpslongitude-e',
+ 'exif-gpslongitude-w',
+ 'exif-gpsstatus-a',
+ 'exif-gpsstatus-v',
+ 'exif-gpsmeasuremode-2',
+ 'exif-gpsmeasuremode-3',
+ 'exif-gpsspeed-k',
+ 'exif-gpsspeed-m',
+ 'exif-gpsspeed-n',
+ 'exif-gpsdirection-t',
+ 'exif-gpsdirection-m',
+ ); # All the EXIF messages, may be set as optional if defined as such
+
+ /**
+ * Load the list of languages: all the Messages*.php
+ * files in the languages directory.
+ *
+	 * @param $exif Whether to treat the EXIF messages as required rather than optional (default true).
+ */
+ function __construct( $exif = true ) {
+ $this->mLanguages = array_keys( Language::getLanguageNames( true ) );
+ sort( $this->mLanguages );
+ if ( !$exif ) {
+ $this->mOptionalMessages = array_merge( $this->mOptionalMessages, $this->mEXIFMessages );
+ }
+ }
+
+ /**
+ * Get the language list.
+ *
+ * @return The language list.
+ */
+ public function getLanguages() {
+ return $this->mLanguages;
+ }
+
+ /**
+	 * Load the raw messages for a specific language from the messages file.
+	 *
+	 * @param $code The language code.
+ */
+ private function loadRawMessages( $code ) {
+ if ( isset( $this->mRawMessages[$code] ) ) {
+ return;
+ }
+ $filename = Language::getMessagesFileName( $code );
+ if ( file_exists( $filename ) ) {
+ require( $filename );
+ if ( isset( $messages ) ) {
+ $this->mRawMessages[$code] = $messages;
+ } else {
+ $this->mRawMessages[$code] = array();
+ }
+ } else {
+ $this->mRawMessages[$code] = array();
+ }
+ }
+
+ /**
+	 * Load the messages for a specific language (which is not English) and divide them into groups:
+	 * all - all the messages.
+	 * required - messages which should be translated in order to get a complete translation.
+	 * optional - messages which can be translated, the fallback translation is used if not translated.
+	 * obsolete - messages which should not be translated, either because they do not exist, or because they are ignored messages.
+	 * translated - messages which are either required or optional and are present in this language.
+ *
+ * @param $code The language code.
+ */
+ private function loadMessages( $code ) {
+ if ( isset( $this->mMessages[$code] ) ) {
+ return;
+ }
+ $this->loadRawMessages( $code );
+ $this->loadGeneralMessages();
+ $this->mMessages[$code]['all'] = $this->mRawMessages[$code];
+ $this->mMessages[$code]['required'] = array();
+ $this->mMessages[$code]['optional'] = array();
+ $this->mMessages[$code]['obsolete'] = array();
+ $this->mMessages[$code]['translated'] = array();
+ foreach ( $this->mMessages[$code]['all'] as $key => $value ) {
+ if ( isset( $this->mGeneralMessages['required'][$key] ) ) {
+ $this->mMessages[$code]['required'][$key] = $value;
+ $this->mMessages[$code]['translated'][$key] = $value;
+ } else if ( isset( $this->mGeneralMessages['optional'][$key] ) ) {
+ $this->mMessages[$code]['optional'][$key] = $value;
+ $this->mMessages[$code]['translated'][$key] = $value;
+ } else {
+ $this->mMessages[$code]['obsolete'][$key] = $value;
+ }
+ }
+ }
+
+ /**
+	 * Load the messages for English and divide them into groups:
+ * all - all the messages.
+ * required - messages which should be translated to other languages in order to get a complete translation.
+ * optional - messages which can be translated to other languages, but it's not required for a complete translation.
+ * ignored - messages which should not be translated to other languages.
+ * translatable - messages which are either required or optional, but can be translated from English.
+ */
+ private function loadGeneralMessages() {
+ if ( isset( $this->mGeneralMessages ) ) {
+ return;
+ }
+ $this->loadRawMessages( 'en' );
+ $this->mGeneralMessages['all'] = $this->mRawMessages['en'];
+ $this->mGeneralMessages['required'] = array();
+ $this->mGeneralMessages['optional'] = array();
+ $this->mGeneralMessages['ignored'] = array();
+ $this->mGeneralMessages['translatable'] = array();
+ foreach ( $this->mGeneralMessages['all'] as $key => $value ) {
+ if ( in_array( $key, $this->mIgnoredMessages ) ) {
+ $this->mGeneralMessages['ignored'][$key] = $value;
+ } else if ( in_array( $key, $this->mOptionalMessages ) ) {
+ $this->mGeneralMessages['optional'][$key] = $value;
+ $this->mGeneralMessages['translatable'][$key] = $value;
+ } else {
+ $this->mGeneralMessages['required'][$key] = $value;
+ $this->mGeneralMessages['translatable'][$key] = $value;
+ }
+ }
+ }
+
+ /**
+	 * Get all the messages for a specific language (not English), without the
+	 * fallback language messages, divided into groups:
+	 * all - all the messages.
+	 * required - messages which should be translated in order to get a complete translation.
+	 * optional - messages which can be translated, the fallback translation is used if not translated.
+	 * obsolete - messages which should not be translated, either because they do not exist, or because they are ignored messages.
+	 * translated - messages which are either required or optional and are present in this language.
+ *
+	 * @param $code The language code.
+ *
+ * @return The messages in this language.
+ */
+ public function getMessages( $code ) {
+ $this->loadMessages( $code );
+ return $this->mMessages[$code];
+ }
+
+ /**
+	 * Get all the general English messages, divided into groups:
+ * all - all the messages.
+ * required - messages which should be translated to other languages in order to get a complete translation.
+ * optional - messages which can be translated to other languages, but it's not required for a complete translation.
+ * ignored - messages which should not be translated to other languages.
+ * translatable - messages which are either required or optional, but can be translated from English.
+ *
+ * @return The general English messages.
+ */
+ public function getGeneralMessages() {
+ $this->loadGeneralMessages();
+ return $this->mGeneralMessages;
+ }
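+
+	/**
+	 * Sketch of typical use of this class (mirrors checkLanguage.php; the
+	 * language code is an example only):
+	 *
+	 *     $wgLanguages = new languages();
+	 *     $untranslated = $wgLanguages->getUntranslatedMessages( 'de' );
+	 *     $wgLanguages->outputMessagesList( $untranslated, 'de', 'Untranslated messages:' );
+	 */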
+
+ /**
+ * Get the untranslated messages for a specific language.
+ *
+	 * @param $code The language code.
+ *
+ * @return The untranslated messages for this language.
+ */
+ public function getUntranslatedMessages( $code ) {
+ $this->loadGeneralMessages();
+ $this->loadMessages( $code );
+ $requiredGeneralMessages = array_keys( $this->mGeneralMessages['required'] );
+ $requiredMessages = array_keys( $this->mMessages[$code]['required'] );
+ $untranslatedMessages = array();
+ foreach ( array_diff( $requiredGeneralMessages, $requiredMessages ) as $key ) {
+ $untranslatedMessages[$key] = $this->mGeneralMessages['required'][$key];
+ }
+ return $untranslatedMessages;
+ }
+
+ /**
+ * Get the duplicate messages for a specific language.
+ *
+	 * @param $code The language code.
+ *
+ * @return The duplicate messages for this language.
+ */
+ public function getDuplicateMessages( $code ) {
+ $this->loadGeneralMessages();
+ $this->loadMessages( $code );
+ $duplicateMessages = array();
+ foreach ( $this->mMessages[$code]['translated'] as $key => $value ) {
+ if ( $this->mGeneralMessages['translatable'][$key] == $value ) {
+ $duplicateMessages[$key] = $value;
+ }
+ }
+ return $duplicateMessages;
+ }
+
+ /**
+ * Get the messages which do not use some variables.
+ *
+	 * @param $code The language code.
+ *
+ * @return The messages which do not use some variables in this language.
+ */
+ public function getMessagesWithoutVariables( $code ) {
+ $this->loadGeneralMessages();
+ $this->loadMessages( $code );
+ $variables = array( '\$1', '\$2', '\$3', '\$4', '\$5', '\$6', '\$7', '\$8', '\$9' );
+ $messagesWithoutVariables = array();
+ foreach ( $this->mMessages[$code]['translated'] as $key => $value ) {
+ $missing = false;
+ foreach ( $variables as $var ) {
+ if ( preg_match( "/$var/sU", $this->mGeneralMessages['translatable'][$key] ) &&
+ !preg_match( "/$var/sU", $value ) ) {
+ $missing = true;
+ }
+ }
+ if ( $missing ) {
+ $messagesWithoutVariables[$key] = $value;
+ }
+ }
+ return $messagesWithoutVariables;
+ }
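+
+	/**
+	 * Example of what the check above catches (hypothetical messages): if en has
+	 *     'somekey' => 'Show $1 of $2',
+	 * and the translation is
+	 *     'somekey' => 'Show $1',
+	 * the missing $2 marks the message as lacking a variable.
+	 */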
+
+ /**
+ * Get the empty messages.
+ *
+	 * @param $code The language code.
+ *
+ * @return The empty messages for this language.
+ */
+ public function getEmptyMessages( $code ) {
+ $this->loadGeneralMessages();
+ $this->loadMessages( $code );
+ $emptyMessages = array();
+ foreach ( $this->mMessages[$code]['translated'] as $key => $value ) {
+ if ( $value === '' || $value === '-' ) {
+ $emptyMessages[$key] = $value;
+ }
+ }
+ return $emptyMessages;
+ }
+
+ /**
+ * Get the messages with trailing whitespace.
+ *
+ * @param $code The language code.
+ *
+ * @return The messages with trailing whitespace in this language.
+ */
+ public function getMessagesWithWhitespace( $code ) {
+ $this->loadGeneralMessages();
+ $this->loadMessages( $code );
+ $messagesWithWhitespace = array();
+ foreach ( $this->mMessages[$code]['translated'] as $key => $value ) {
+ if ( $this->mGeneralMessages['translatable'][$key] !== '' && $value !== rtrim( $value ) ) {
+ $messagesWithWhitespace[$key] = $value;
+ }
+ }
+ return $messagesWithWhitespace;
+ }
+
+ /**
+ * Get the non-XHTML messages.
+ *
+ * @param $code The language code.
+ *
+ * @return The non-XHTML messages for this language.
+ */
+ public function getNonXHTMLMessages( $code ) {
+ $this->loadGeneralMessages();
+ $this->loadMessages( $code );
+ $wrongPhrases = array(
+ '<hr *\\?>',
+ '<br *\\?>',
+ '<hr/>',
+ '<br/>',
+ );
+ $wrongPhrases = '~(' . implode( '|', $wrongPhrases ) . ')~sDu';
+ $nonXHTMLMessages = array();
+ foreach ( $this->mMessages[$code]['translated'] as $key => $value ) {
+ if ( preg_match( $wrongPhrases, $value ) ) {
+ $nonXHTMLMessages[$key] = $value;
+ }
+ }
+ return $nonXHTMLMessages;
+ }
+
+ /**
+ * Get the messages which contain hidden or problematic characters (BiDi control characters, BOM, NBSP, etc.).
+ *
+ * @param $code The language code.
+ *
+ * @return The messages in this language which contain such characters, with each hidden character replaced by a visible marker.
+ */
+ public function getMessagesWithWrongChars( $code ) {
+ $this->loadGeneralMessages();
+ $this->loadMessages( $code );
+ $wrongChars = array(
+ '[LRM]' => "\xE2\x80\x8E",
+ '[RLM]' => "\xE2\x80\x8F",
+ '[LRE]' => "\xE2\x80\xAA",
+ '[RLE]' => "\xE2\x80\xAB",
+ '[POP]' => "\xE2\x80\xAC",
+ '[LRO]' => "\xE2\x80\xAD",
+ '[RLO]' => "\xE2\x80\xAE",
+ '[ZWSP]'=> "\xE2\x80\x8B",
+ '[NBSP]'=> "\xC2\xA0",
+ '[WJ]' => "\xE2\x81\xA0",
+ '[BOM]' => "\xEF\xBB\xBF",
+ '[FFFD]'=> "\xEF\xBF\xBD",
+ );
+ $wrongRegExp = '/(' . implode( '|', array_values( $wrongChars ) ) . ')/sDu';
+ $wrongCharsMessages = array();
+ foreach ( $this->mMessages[$code]['translated'] as $key => $value ) {
+ if ( preg_match( $wrongRegExp, $value ) ) {
+ foreach ( $wrongChars as $viewableChar => $hiddenChar ) {
+ $value = str_replace( $hiddenChar, $viewableChar, $value );
+ }
+ $wrongCharsMessages[$key] = $value;
+ }
+ }
+ return $wrongCharsMessages;
+ }
+
+ /**
+ * Output a messages list
+ *
+ * @param $messages The messages list
+ * @param $code The language code
+ * @param $text The text to show before the list (optional)
+ * @param $level The display level (optional)
+ * @param $links Show links (optional)
+ * @param $wikilang The language of the wiki to display the list in, for the links (optional)
+ */
+ public function outputMessagesList( $messages, $code, $text = '', $level = 2, $links = false, $wikilang = null ) {
+ if ( count( $messages ) == 0 ) {
+ return;
+ }
+ if ( $text ) {
+ echo "$text\n";
+ }
+ if ( $level == 1 ) {
+ echo "[messages are hidden]\n";
+ } else {
+ foreach ( $messages as $key => $value ) {
+ if ( $links ) {
+ $displayKey = ucfirst( $key );
+ if ( !isset( $wikilang ) ) {
+ global $wgContLang;
+ $wikilang = $wgContLang->getCode();
+ }
+ if ( $code == $wikilang ) {
+ $displayKey = "[[MediaWiki:$displayKey|$key]]";
+ } else {
+ $displayKey = "[[MediaWiki:$displayKey/$code|$key]]";
+ }
+ } else {
+ $displayKey = $key;
+ }
+ if ( $level == 2 ) {
+ echo "* $displayKey\n";
+ } else {
+ echo "* $displayKey: '$value'\n";
+ }
+ }
+ }
+ }
+}
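+
+// Example (assumed usage, shown only for illustration; it mirrors how
+// transstat.php below drives these checks and is not part of the class):
+//
+//   $wgLanguages = new languages();
+//   foreach ( $wgLanguages->getLanguages() as $code ) {
+//       $empty = $wgLanguages->getEmptyMessages( $code );
+//       $wgLanguages->outputMessagesList( $empty, $code, "Empty messages for $code:" );
+//   }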
+
+?>
diff --git a/maintenance/language/splitLanguageFiles.inc b/maintenance/language/splitLanguageFiles.inc
new file mode 100644
index 00000000..c2500778
--- /dev/null
+++ b/maintenance/language/splitLanguageFiles.inc
@@ -0,0 +1,1168 @@
+<?php
+/**
+ * This is an experimental list. It will later be used by a script to split
+ * the language files into several parts; the message system will then only
+ * load into memory the parts which are actually needed.
+ *
+ * Generated using: grep -r foobar *
+ *
+ * $commonMsg is the default array. Other arrays will only be loaded if needed.
+ */
+$installerMsg = array (
+'mainpagetext',
+'mainpagedocfooter',
+);
+
+$ActionMsg = array (
+'delete' => array(
+ 'delete',
+ 'deletethispage',
+ 'undelete_short1',
+ 'undelete_short',
+ 'undelete',
+ 'undeletepage',
+ 'undeletepagetext',
+ 'undeletearticle',
+ 'undeleterevisions',
+ 'undeletehistory',
+ 'undeleterevision',
+ 'undeletebtn',
+ 'undeletedarticle',
+ 'undeletedrevisions',
+ 'undeletedtext',
+ ),
+'move' => array(
+ 'move',
+ 'movethispage',
+),
+'revert' => array(
+
+),
+'protect' => array(
+ 'confirmprotect',
+ 'confirmprotecttext',
+ 'confirmunprotect',
+ 'confirmunprotecttext',
+ 'protect',
+ 'protectcomment',
+ 'protectmoveonly',
+ 'protectpage',
+ 'protectsub',
+ 'protectthispage',
+ 'unprotect',
+ 'unprotectthispage',
+ 'unprotectsub',
+ 'unprotectcomment',
+),
+);
+
+$CreditsMsg = array(
+'anonymous',
+'siteuser',
+'lastmodifiedby',
+'and',
+'othercontribs',
+'others',
+'siteusers',
+'creditspage',
+'nocredits',
+);
+
+// When showing differences
+$DifferenceMsg = array(
+'previousdiff',
+'nextdiff',
+);
+
+// used on page edition
+$EditMsg = array(
+'bold_sample',
+'bold_tip',
+'italic_sample',
+'italic_tip',
+'link_sample',
+'link_tip',
+'extlink_sample',
+'extlink_tip',
+'headline_sample',
+'headline_tip',
+'math_sample',
+'math_tip',
+'nowiki_sample',
+'nowiki_tip',
+'image_sample',
+'image_tip',
+'media_sample',
+'media_tip',
+'sig_tip',
+'hr_tip',
+
+'accesskey-search',
+'accesskey-minoredit',
+'accesskey-save',
+'accesskey-preview',
+'accesskey-diff',
+'accesskey-compareselectedversions',
+'tooltip-search',
+'tooltip-minoredit',
+'tooltip-save',
+'tooltip-preview',
+'tooltip-diff',
+'tooltip-compareselectedversions',
+'tooltip-watch',
+
+'copyrightwarning',
+'copyrightwarning2',
+'editconflict',
+'editing',
+'editingcomment',
+'editingold',
+'editingsection',
+'explainconflict',
+'infobox',
+'infobox_alert',
+'longpagewarning',
+'nonunicodebrowser',
+'previewconflict',
+'previewnote',
+'protectedpagewarning',
+'readonlywarning',
+'spamprotectiontitle',
+'spamprotectiontext',
+'spamprotectionmatch',
+'templatesused',
+'yourdiff',
+'yourtext',
+);
+
+// Per namespace
+$NamespaceCategory = array (
+'category_header',
+'categoryarticlecount',
+'categoryarticlecount1',
+'listingcontinuesabbrev',
+'subcategories',
+'subcategorycount',
+'subcategorycount1',
+'usenewcategorypage',
+);
+
+$NamespaceImage = array (
+'deletedrevision',
+'edit-externally',
+'edit-externally-help',
+'showbigimage',
+);
+
+$NamespaceSpecialMsg = array(
+'nosuchspecialpage',
+'nospecialpagetext',
+);
+
+
+
+// per special pages
+$SpecialAllMessages = array(
+'allmessages',
+'allmessagesname',
+'allmessagesdefault',
+'allmessagescurrent',
+'allmessagestext',
+'allmessagesnotsupportedUI',
+'allmessagesnotsupportedDB',
+);
+
+
+$SpecialAllPages = array(
+'articlenamespace',
+'allpagesformtext1',
+'allpagesformtext2',
+'allarticles',
+'allpagesprev',
+'allpagesnext',
+'allpagesnamespace',
+'allpagessubmit',
+);
+
+
+$SpecialAskSQLMsg = array(
+'asksql',
+'asksqltext',
+'sqlislogged',
+'sqlquery',
+'querybtn',
+'selectonly',
+'querysuccessful',
+);
+
+$SpecialBlockip = array(
+'blockip',
+'blockiptext',
+'range_block_disabled',
+'ipb_expiry_invalid',
+'ip_range_invalid',
+'ipbexpiry',
+'ipbsubmit',
+);
+
+$SpecialContributions = array(
+'contribsub',
+'contributionsall',
+'newbies',
+'nocontribs',
+'ucnote',
+'uclinks',
+'uctop',
+);
+
+$SpecialExportMsg = array (
+'export',
+'exporttext',
+'exportcuronly',
+);
+
+$SpecialImagelist = array(
+'imagelistall',
+);
+
+$SpecialImportMsg = array (
+'import',
+'importtext',
+'importfailed',
+'importnotext',
+'importsuccess',
+'importhistoryconflict',
+);
+
+$SpecialLockdbMsg = array(
+'lockdb',
+'unlockdb',
+'lockdbtext',
+'unlockdbtext',
+'lockconfirm',
+'unlockconfirm',
+'lockbtn',
+'unlockbtn',
+'locknoconfirm',
+'lockdbsuccesssub',
+'unlockdbsuccesssub',
+'lockdbsuccesstext',
+'unlockdbsuccesstext',
+);
+
+$SpecialLogMsg = array(
+'specialloguserlabel',
+'speciallogtitlelabel',
+);
+
+$SpecialMaintenance = array(
+'maintenance',
+'maintnancepagetext',
+'maintenancebacklink',
+'disambiguations',
+'disambiguationspage',
+'disambiguationstext',
+'doubleredirects',
+'doubleredirectstext',
+'brokenredirects',
+'brokenredirectstext',
+'selflinks',
+'selflinkstext',
+'mispeelings',
+'mispeelingstext',
+'mispeelingspage',
+'missinglanguagelinks',
+'missinglanguagelinksbutton',
+'missinglanguagelinkstext',
+);
+
+$SpecialMakeSysopMsg = array (
+'already_bureaucrat',
+'already_sysop',
+'makesysop',
+'makesysoptitle',
+'makesysoptext',
+'makesysopname',
+'makesysopsubmit',
+'makesysopok',
+'makesysopfail',
+'rights',
+'set_rights_fail',
+'set_user_rights',
+'user_rights_set',
+);
+
+$SpecialMovepageMsg = array(
+'newtitle',
+'movearticle',
+'movenologin',
+'movenologintext',
+'movepage',
+'movepagebtn',
+'movepagetalktext',
+'movepagetext',
+'movetalk',
+'pagemovedsub',
+'pagemovedtext',
+'talkexists',
+'talkpagemoved',
+'talkpagenotmoved',
+
+);
+
+$SpecialPreferencesMsg = array(
+'tog-underline',
+'tog-highlightbroken',
+'tog-justify',
+'tog-hideminor',
+'tog-usenewrc',
+'tog-numberheadings',
+'tog-showtoolbar',
+'tog-editondblclick',
+'tog-editsection',
+'tog-editsectiononrightclick',
+'tog-showtoc',
+'tog-rememberpassword',
+'tog-editwidth',
+'tog-watchdefault',
+'tog-minordefault',
+'tog-previewontop',
+'tog-previewonfirst',
+'tog-nocache',
+'tog-enotifwatchlistpages',
+'tog-enotifusertalkpages',
+'tog-enotifminoredits',
+'tog-enotifrevealaddr',
+'tog-shownumberswatching',
+'tog-rcusemodstyle',
+'tog-showupdated',
+'tog-fancysig',
+'tog-externaleditor',
+
+'imagemaxsize',
+'prefs-help-email',
+'prefs-help-email-enotif',
+'prefs-help-realname',
+'prefs-help-userdata',
+'prefs-misc',
+'prefs-personal',
+'prefs-rc',
+'resetprefs',
+'saveprefs',
+'oldpassword',
+'newpassword',
+'retypenew',
+'textboxsize',
+'rows',
+'columns',
+'searchresultshead',
+'resultsperpage',
+'contextlines',
+'contextchars',
+'stubthreshold',
+'recentchangescount',
+'savedprefs',
+'timezonelegend',
+'timezonetext',
+'localtime',
+'timezoneoffset',
+'servertime',
+'guesstimezone',
+'emailflag',
+'defaultns',
+'default',
+);
+
+$SpecialRecentchangesMsg = array(
+'changes',
+'recentchanges',
+'recentchanges-url',
+'recentchangestext',
+'rcloaderr',
+'rcnote',
+'rcnotefrom',
+'rclistfrom',
+'showhideminor',
+'rclinks',
+'rchide',
+'rcliu',
+'diff',
+'hist',
+'hide',
+'show',
+'tableform',
+'listform',
+'nchanges',
+'minoreditletter',
+'newpageletter',
+'sectionlink',
+'number_of_watching_users_RCview',
+'number_of_watching_users_pageview',
+'recentchangesall',
+);
+
+$SpecialRecentchangeslinkedMsg = array(
+'rclsub',
+);
+
+$SpecialSearchMsg = array(
+'searchresults',
+'searchresulttext',
+'searchquery',
+'badquery',
+'badquerytext',
+'matchtotals',
+'nogomatch',
+'titlematches',
+'notitlematches',
+'textmatches',
+'notextmatches',
+);
+
+$SpecialSitesettingsMsg = array(
+'sitesettings',
+'sitesettings-features',
+'sitesettings-permissions',
+'sitesettings-memcached',
+'sitesettings-debugging',
+'sitesettings-caching',
+'sitesettings-wgShowIPinHeader',
+'sitesettings-wgUseDatabaseMessages',
+'sitesettings-wgUseCategoryMagic',
+'sitesettings-wgUseCategoryBrowser',
+'sitesettings-wgHitcounterUpdateFreq',
+'sitesettings-wgAllowExternalImages',
+'sitesettings-permissions-readonly',
+'sitesettings-permissions-whitelist',
+'sitesettings-permissions-banning',
+'sitesettings-permissions-miser',
+'sitesettings-wgReadOnly',
+'sitesettings-wgReadOnlyFile',
+'sitesettings-wgWhitelistEdit',
+'sitesettings-wgWhitelistRead',
+'sitesettings-wgWhitelistAccount-user',
+'sitesettings-wgWhitelistAccount-sysop',
+'sitesettings-wgWhitelistAccount-developer',
+'sitesettings-wgSysopUserBans',
+'sitesettings-wgSysopRangeBans',
+'sitesettings-wgDefaultBlockExpiry',
+'sitesettings-wgMiserMode',
+'sitesettings-wgDisableQueryPages',
+'sitesettings-wgUseWatchlistCache',
+'sitesettings-wgWLCacheTimeout',
+'sitesettings-cookies',
+'sitesettings-performance',
+'sitesettings-images',
+);
+
+$SpecialStatisticsMsg = array(
+'statistics',
+'sitestats',
+'userstats',
+'sitestatstext',
+'userstatstext',
+);
+
+$SpecialUndelete = array(
+'deletepage',
+);
+
+$SpecialUploadMsg = array(
+'affirmation',
+'badfilename',
+'badfiletype',
+'emptyfile',
+'fileexists',
+'filedesc',
+'filename',
+'filesource',
+'filestatus',
+'fileuploaded',
+'ignorewarning',
+'illegalfilename',
+'largefile',
+'minlength',
+'noaffirmation',
+'reupload',
+'reuploaddesc',
+'savefile',
+'successfulupload',
+'upload',
+'uploadbtn',
+'uploadcorrupt',
+'uploaddisabled',
+'uploadfile',
+'uploadedimage',
+'uploaderror',
+'uploadlink',
+'uploadlog',
+'uploadlogpage',
+'uploadlogpagetext',
+'uploadnologin',
+'uploadnologintext',
+'uploadtext',
+'uploadwarning',
+);
+
+$SpecialUserlevelsMsg = array(
+'saveusergroups',
+'userlevels-editusergroup',
+'userlevels-groupsavailable',
+'userlevels-groupshelp',
+'userlevels-groupsmember',
+);
+
+$SpecialUserloginMsg = array(
+'acct_creation_throttle_hit',
+'loginend',
+'loginsuccesstitle',
+'loginsuccess',
+'nocookiesnew',
+'nocookieslogin',
+'noemail',
+'noname',
+'nosuchuser',
+'mailmypassword',
+'mailmypasswordauthent',
+'passwordremindermailsubject',
+'passwordremindermailbody',
+'passwordsent',
+'passwordsentforemailauthentication',
+'userexists',
+'wrongpassword',
+);
+
+$SpecialValidateMsg = array(
+'val_yes',
+'val_no',
+'val_revision',
+'val_time',
+'val_list_header',
+'val_add',
+'val_del',
+'val_warning',
+'val_rev_for',
+'val_rev_stats_link',
+'val_iamsure',
+'val_clear_old',
+'val_merge_old',
+'val_form_note',
+'val_noop',
+'val_percent',
+'val_percent_single',
+'val_total',
+'val_version',
+'val_tab',
+'val_this_is_current_version',
+'val_version_of',
+'val_table_header',
+'val_stat_link_text',
+'val_view_version',
+'val_validate_version',
+'val_user_validations',
+'val_no_anon_validation',
+'val_validate_article_namespace_only',
+'val_validated',
+'val_article_lists',
+'val_page_validation_statistics',
+);
+
+$SpecialVersionMsg = array(
+'special_version_prefix',
+'special_version_postfix'
+);
+
+$SpecialWatchlistMsg = array(
+'watchlistall1',
+'watchlistall2',
+'wlnote',
+'wlshowlast',
+'wlsaved',
+'wlhideshowown',
+'wlshow',
+'wlhide',
+);
+
+$SpecialWhatlinkshereMsg = array(
+'linklistsub',
+'nolinkshere',
+'isredirect',
+);
+
+
+$commonMsg = array (
+'sunday',
+'monday',
+'tuesday',
+'wednesday',
+'thursday',
+'friday',
+'saturday',
+'january',
+'february',
+'march',
+'april',
+'may_long',
+'june',
+'july',
+'august',
+'september',
+'october',
+'november',
+'december',
+'jan',
+'feb',
+'mar',
+'apr',
+'may',
+'jun',
+'jul',
+'aug',
+'sep',
+'oct',
+'nov',
+'dec',
+'categories',
+'category',
+'linktrail',
+'mainpage',
+'portal',
+'portal-url',
+'about',
+'aboutsite',
+'aboutpage',
+'article',
+'help',
+'helppage',
+'wikititlesuffix',
+'bugreports',
+'bugreportspage',
+'sitesupport',
+'sitesupport-url',
+'faq',
+'faqpage',
+'edithelp',
+'newwindow',
+'edithelppage',
+'cancel',
+'qbfind',
+'qbbrowse',
+'qbedit',
+'qbpageoptions',
+'qbpageinfo',
+'qbmyoptions',
+'qbspecialpages',
+'moredotdotdot',
+'mypage',
+'mytalk',
+'anontalk',
+'navigation',
+'metadata',
+'metadata_page',
+'currentevents',
+'currentevents-url',
+'disclaimers',
+'disclaimerpage',
+'errorpagetitle',
+'returnto',
+'tagline',
+'whatlinkshere',
+'search',
+'go',
+'history',
+'history_short',
+'info_short',
+'printableversion',
+'edit',
+'editthispage',
+'newpage',
+'talkpage',
+'specialpage',
+'personaltools',
+'postcomment',
+'addsection',
+'articlepage',
+'subjectpage',
+'talk',
+'toolbox',
+'userpage',
+'wikipediapage',
+'imagepage',
+'viewtalkpage',
+'otherlanguages',
+'redirectedfrom',
+'lastmodified',
+'viewcount',
+'copyright',
+'poweredby',
+'printsubtitle',
+'protectedpage',
+'administrators',
+'sysoptitle',
+'sysoptext',
+'developertitle',
+'developertext',
+'bureaucrattitle',
+'bureaucrattext',
+'nbytes',
+'ok',
+'sitetitle',
+'pagetitle',
+'sitesubtitle',
+'retrievedfrom',
+'newmessages',
+'newmessageslink',
+'editsection',
+'toc',
+'showtoc',
+'hidetoc',
+'thisisdeleted',
+'restorelink',
+'feedlinks',
+'sitenotice',
+'nstab-main',
+'nstab-user',
+'nstab-media',
+'nstab-special',
+'nstab-wp',
+'nstab-image',
+'nstab-mediawiki',
+'nstab-template',
+'nstab-help',
+'nstab-category',
+'nosuchaction',
+'nosuchactiontext',
+
+
+'error',
+'databaseerror',
+'dberrortext',
+'dberrortextcl',
+'noconnect',
+'nodb',
+'cachederror',
+'laggedslavemode',
+'readonly',
+'enterlockreason',
+'readonlytext',
+'missingarticle',
+'internalerror',
+'filecopyerror',
+'filerenameerror',
+'filedeleteerror',
+'filenotfound',
+'unexpected',
+'formerror',
+'badarticleerror',
+'cannotdelete',
+'badtitle',
+'badtitletext',
+'perfdisabled',
+'perfdisabledsub',
+'perfcached',
+'wrong_wfQuery_params',
+'viewsource',
+'protectedtext',
+'seriousxhtmlerrors',
+'logouttitle',
+'logouttext',
+'welcomecreation',
+
+'loginpagetitle',
+'yourname',
+'yourpassword',
+'yourpasswordagain',
+'newusersonly',
+'remembermypassword',
+'loginproblem',
+'alreadyloggedin',
+'login',
+'loginprompt',
+'userlogin',
+'logout',
+'userlogout',
+'notloggedin',
+'createaccount',
+'createaccountmail',
+'badretype',
+
+'youremail',
+'yourrealname',
+'yourlanguage',
+'yourvariant',
+'yournick',
+'emailforlost',
+'loginerror',
+'nosuchusershort',
+
+'mailerror',
+'emailauthenticated',
+'emailnotauthenticated',
+'invalidemailaddress',
+'disableduntilauthent',
+'disablednoemail',
+
+'summary',
+'subject',
+'minoredit',
+'watchthis',
+'savearticle',
+'preview',
+'showpreview',
+'showdiff',
+'blockedtitle',
+'blockedtext',
+'whitelistedittitle',
+'whitelistedittext',
+'whitelistreadtitle',
+'whitelistreadtext',
+'whitelistacctitle',
+'whitelistacctext',
+'loginreqtitle',
+'loginreqtext',
+'accmailtitle',
+'accmailtext',
+'newarticle',
+'newarticletext',
+'talkpagetext',
+'anontalkpagetext',
+'noarticletext',
+'clearyourcache',
+'usercssjsyoucanpreview',
+'usercsspreview',
+'userjspreview',
+'updated',
+'note',
+'storedversion', // not used ? Editpage ?
+'revhistory',
+'nohistory',
+'revnotfound',
+'revnotfoundtext',
+'loadhist',
+'currentrev',
+'revisionasof',
+'revisionasofwithlink',
+'previousrevision',
+'nextrevision',
+'currentrevisionlink',
+'cur',
+'next',
+'last',
+'orig',
+'histlegend',
+'history_copyright',
+'difference',
+'loadingrev',
+'lineno',
+'editcurrent',
+'selectnewerversionfordiff',
+'selectolderversionfordiff',
+'compareselectedversions',
+
+'prevn',
+'nextn',
+'viewprevnext',
+'showingresults',
+'showingresultsnum',
+'nonefound',
+'powersearch',
+'powersearchtext',
+'searchdisabled',
+'googlesearch',
+'blanknamespace',
+'preferences',
+'prefsnologin',
+'prefsnologintext',
+'prefslogintext',
+'prefsreset',
+'qbsettings',
+'qbsettingsnote',
+'changepassword',
+'skin',
+'math',
+'dateformat',
+
+'math_failure',
+'math_unknown_error',
+'math_unknown_function',
+'math_lexing_error',
+'math_syntax_error',
+'math_image_error',
+'math_bad_tmpdir',
+'math_bad_output',
+'math_notexvc',
+
+
+
+
+
+
+'grouplevels-lookup-group',
+'grouplevels-group-edit',
+'editgroup',
+'addgroup',
+'userlevels-lookup-user',
+'userlevels-user-editname',
+'editusergroup',
+'grouplevels-editgroup',
+'grouplevels-addgroup',
+'grouplevels-editgroup-name',
+'grouplevels-editgroup-description',
+'savegroup',
+
+// common to several pages
+'copyrightpage',
+'copyrightpagename',
+'imagelist',
+'imagelisttext',
+'ilshowmatch',
+'ilsubmit',
+'showlast',
+'byname',
+'bydate',
+'bysize',
+
+
+
+'imgdelete',
+'imgdesc',
+'imglegend',
+'imghistory',
+'revertimg',
+'deleteimg',
+'deleteimgcompletely',
+'imghistlegend',
+'imagelinks',
+'linkstoimage',
+'nolinkstoimage',
+
+// unused ??
+'uploadedfiles',
+'getimagelist',
+
+
+'sharedupload',
+'shareduploadwiki',
+
+// Special pages names
+'orphans',
+'geo',
+'validate',
+'lonelypages',
+'uncategorizedpages',
+'uncategorizedcategories',
+'unusedimages',
+'popularpages',
+'nviews',
+'wantedpages',
+'nlinks',
+'allpages',
+'randompage',
+'randompage-url',
+'shortpages',
+'longpages',
+'deadendpages',
+'listusers',
+'specialpages',
+'spheading',
+'restrictedpheading',
+'recentchangeslinked',
+
+
+'debug',
+'newpages',
+'ancientpages',
+'intl',
+'unusedimagestext',
+'booksources',
+'categoriespagetext',
+'data',
+'userlevels',
+'grouplevels',
+'booksourcetext',
+'isbn',
+'rfcurl',
+'pubmedurl',
+'alphaindexline',
+'version',
+'log',
+'alllogstext',
+'nextpage',
+'mailnologin',
+'mailnologintext',
+'emailuser',
+'emailpage',
+'emailpagetext',
+'usermailererror',
+'defemailsubject',
+'noemailtitle',
+'noemailtext',
+'emailfrom',
+'emailto',
+'emailsubject',
+'emailmessage',
+'emailsend',
+'emailsent',
+'emailsenttext',
+'watchlist',
+'watchlistsub',
+'nowatchlist',
+'watchnologin',
+'watchnologintext',
+'addedwatch',
+'addedwatchtext',
+'removedwatch',
+'removedwatchtext',
+'watch',
+'watchthispage',
+'unwatch',
+'unwatchthispage',
+'notanarticle',
+'watchnochange',
+'watchdetails',
+'watchmethod-recent',
+'watchmethod-list',
+'removechecked',
+'watchlistcontains',
+'watcheditlist',
+'removingchecked',
+'couldntremove',
+'iteminvalidname',
+
+'updatedmarker',
+'email_notification_mailer',
+'email_notification_infotext',
+'email_notification_reset',
+'email_notification_newpagetext',
+'email_notification_to',
+'email_notification_subject',
+'email_notification_lastvisitedrevisiontext',
+'email_notification_body',
+
+'confirm',
+'excontent',
+'exbeforeblank',
+'exblank',
+'confirmdelete',
+'deletesub',
+'historywarning',
+'confirmdeletetext',
+'actioncomplete',
+'deletedtext',
+'deletedarticle',
+'dellogpage',
+'dellogpagetext',
+'deletionlog',
+'reverted',
+'deletecomment',
+'imagereverted',
+'rollback',
+'rollback_short',
+'rollbacklink',
+'rollbackfailed',
+'cantrollback',
+'alreadyrolled',
+'revertpage',
+'editcomment',
+'sessionfailure',
+
+'protectlogpage',
+'protectlogtext',
+
+'protectedarticle',
+'unprotectedarticle',
+
+'contributions',
+'mycontris',
+'notargettitle', // not used ?
+'notargettext', // not used ?
+
+'linkshere',
+
+'ipaddress',
+'ipadressorusername', // not used ?
+'ipbreason',
+
+'badipaddress',
+'noblockreason',
+'blockipsuccesssub',
+'blockipsuccesstext',
+'unblockip',
+'unblockiptext',
+'ipusubmit',
+'ipusuccess',
+'ipblocklist',
+'blocklistline',
+'blocklink',
+'unblocklink',
+'contribslink',
+'autoblocker',
+'blocklogpage',
+'blocklogentry',
+'blocklogtext',
+'unblocklogentry', // not used ?
+
+'proxyblocker',
+'proxyblockreason',
+'proxyblocksuccess',
+'sorbs',
+'sorbsreason',
+
+'setbureaucratflag',
+'bureaucratlog',
+'rightslogtext',
+'bureaucratlogentry',
+
+'articleexists', // not used ?
+
+'movedto',
+'1movedto2',
+'1movedto2_redir',
+'movelogpage',
+'movelogpagetext',
+
+'thumbnail-more',
+'missingimage',
+'filemissing',
+'Monobook.css',
+'nodublincore',
+'nocreativecommons',
+'notacceptable',
+
+// used in Article::
+'infosubtitle',
+'numedits',
+'numtalkedits',
+'numwatchers',
+'numauthors',
+'numtalkauthors',
+
+// not used ?
+'mw_math_png',
+'mw_math_simple',
+'mw_math_html',
+'mw_math_source',
+'mw_math_modern',
+'mw_math_mathml',
+
+// Patrolling
+'markaspatrolleddiff',
+'markaspatrolledlink',
+'markaspatrolledtext',
+'markedaspatrolled',
+'markedaspatrolledtext',
+'rcpatroldisabled', // not used ?
+'rcpatroldisabledtext', // not used ?
+
+'Monobook.js',
+'newimages',
+'noimages',
+'variantname-zh-cn',
+'variantname-zh-tw',
+'variantname-zh-hk',
+'variantname-zh-sg',
+'variantname-zh',
+'zhconversiontable',
+'passwordtooshort', // sp preferences / userlogin
+);
+?>
diff --git a/maintenance/language/splitLanguageFiles.php b/maintenance/language/splitLanguageFiles.php
new file mode 100644
index 00000000..2263e611
--- /dev/null
+++ b/maintenance/language/splitLanguageFiles.php
@@ -0,0 +1,13 @@
+<?php
+/**
+ * splitLanguageFiles
+ * Should read each of the language files and split them into several subparts
+ * under ./languages/XX/ according to the arrays in splitLanguageFiles.inc.
+ *
+ * The wfMsg system / message cache will also need to be rewritten.
+ */
+
+include(dirname(__FILE__).'/../commandLine.inc');
+
+
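+// Rough sketch of the intended behaviour (hypothetical; this script is still
+// a stub): for each language code, load the monolithic messages file, then
+// write each group defined in splitLanguageFiles.inc (e.g. $commonMsg,
+// $EditMsg) out to its own file under languages/XX/, so that the message
+// cache can later require() only the groups a request actually needs.
+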
+?>
diff --git a/maintenance/language/transstat.php b/maintenance/language/transstat.php
new file mode 100644
index 00000000..590da121
--- /dev/null
+++ b/maintenance/language/transstat.php
@@ -0,0 +1,211 @@
+<?php
+/**
+ * Statistics about the localisation.
+ *
+ * @package MediaWiki
+ * @subpackage Maintenance
+ *
+ * @author Ævar Arnfjörð Bjarmason <avarab@gmail.com>
+ * @author Ashar Voultoiz <thoane@altern.org>
+ *
+ * Output is posted from time to time on:
+ * http://meta.wikimedia.org/wiki/Localization_statistics
+ */
+
+require_once( 'maintenance/commandLine.inc' );
+require_once( 'languages.inc' );
+
+if ( isset( $options['help'] ) ) {
+ showUsage();
+}
+
+# Default output is WikiText
+if ( !isset( $options['output'] ) ) {
+ $options['output'] = 'wiki';
+}
+
+/** Print a usage message */
+function showUsage() {
+ print <<<END
+Usage: php transstat.php [--help] [--output=csv|text|wiki]
+ --help : this helpful message
+ --output : select an output engine, one of:
+ * 'csv' : Comma Separated Values.
+ * 'wiki' : MediaWiki syntax (default).
+ * 'metawiki' : MediaWiki syntax used for Meta-Wiki.
+ * 'text' : Text with tabs.
+Example: php maintenance/language/transstat.php --output=text
+
+END;
+ exit();
+}
+
+/** A general output object. Needs to be overridden. */
+class statsOutput {
+ function formatPercent( $subset, $total, $revert = false, $accuracy = 2 ) {
+ return @sprintf( '%.' . $accuracy . 'f%%', 100 * $subset / $total );
+ }
+
+ # Override the following methods
+ function heading() {
+ }
+ function footer() {
+ }
+ function blockstart() {
+ }
+ function blockend() {
+ }
+ function element( $in, $heading = false ) {
+ }
+}
+
+/** Outputs WikiText */
+class wikiStatsOutput extends statsOutput {
+ function heading() {
+ global $IP;
+ $version = SpecialVersion::getVersion( $IP );
+ echo "'''Statistics are based on:''' <code>" . $version . "</code>\n\n";
+ echo "'''Note:''' These statistics can be generated by running <code>php maintenance/language/transstat.php</code>.\n\n";
+ echo "For additional information on specific languages (the message names, the actual problems, etc.), run <code>php maintenance/language/checkLanguage.php --lang=foo</code>.\n\n";
+ echo '{| border="2" cellpadding="4" cellspacing="0" style="background-color: #F9F9F9; border: 1px #AAAAAA solid; border-collapse: collapse;" width="100%"'."\n";
+ }
+ function footer() {
+ echo "|}\n";
+ }
+ function blockstart() {
+ echo "|-\n";
+ }
+ function blockend() {
+ echo '';
+ }
+ function element( $in, $heading = false ) {
+ echo ($heading ? '!' : '|') . " $in\n";
+ }
+ function formatPercent( $subset, $total, $revert = false, $accuracy = 2 ) {
+ $v = @round(255 * $subset / $total);
+ if ( $revert ) {
+ $v = 255 - $v;
+ }
+ if ( $v < 128 ) {
+ # Red to Yellow
+ $red = 'FF';
+ $green = sprintf( '%02X', 2 * $v );
+ } else {
+ # Yellow to Green
+ $red = sprintf('%02X', 2 * ( 255 - $v ) );
+ $green = 'FF';
+ }
+ $blue = '00';
+ $color = $red . $green . $blue;
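+ # Worked example (illustrative): $subset/$total = 0.5 gives $v = 128, so
+ # $red = 'FE' and $green = 'FF', i.e. a yellow #FEFF00 cell; 100% yields
+ # #00FF00 (green) and 0% yields #FF0000 (red).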
+
+ $percent = statsOutput::formatPercent( $subset, $total, $revert, $accuracy );
+ return 'bgcolor="#'. $color .'" | '. $percent;
+ }
+}
+
+/** Outputs WikiText and appends category and text only used for Meta-Wiki */
+class metawikiStatsOutput extends wikiStatsOutput {
+ function heading() {
+ echo "See [[MediaWiki localisation]] to learn how you can help translating MediaWiki.\n\n";
+ parent::heading();
+ }
+ function footer() {
+ parent::footer();
+ echo "\n[[Category:Localisation|Statistics]]\n";
+ }
+}
+
+/** Outputs plain text. To be used on a terminal, for example. */
+class textStatsOutput extends statsOutput {
+ function element( $in, $heading = false ) {
+ echo $in."\t";
+ }
+ function blockend() {
+ echo "\n";
+ }
+}
+
+/** CSV output. Some people love Excel. */
+class csvStatsOutput extends statsOutput {
+ function element( $in, $heading = false ) {
+ echo $in . ";";
+ }
+ function blockend() {
+ echo "\n";
+ }
+}
+
+# Select an output engine
+switch ( $options['output'] ) {
+ case 'wiki':
+ $wgOut = new wikiStatsOutput();
+ break;
+ case 'metawiki':
+ $wgOut = new metawikiStatsOutput();
+ break;
+ case 'text':
+ $wgOut = new textStatsOutput();
+ break;
+ case 'csv':
+ $wgOut = new csvStatsOutput();
+ break;
+ default:
+ showUsage();
+}
+
+# Languages
+$wgLanguages = new languages();
+
+# Header
+$wgOut->heading();
+$wgOut->blockstart();
+$wgOut->element( 'Language', true );
+$wgOut->element( 'Translated', true );
+$wgOut->element( '%', true );
+$wgOut->element( 'Obsolete', true );
+$wgOut->element( '%', true );
+$wgOut->element( 'Problematic', true );
+$wgOut->element( '%', true );
+$wgOut->blockend();
+
+$wgGeneralMessages = $wgLanguages->getGeneralMessages();
+$wgRequiredMessagesNumber = count( $wgGeneralMessages['required'] );
+
+foreach ( $wgLanguages->getLanguages() as $code ) {
+ # Don't check English or RTL English
+ if ( $code == 'en' || $code == 'enRTL' ) {
+ continue;
+ }
+
+ # Calculate the numbers
+ $language = $wgContLang->getLanguageName( $code );
+ $messages = $wgLanguages->getMessages( $code );
+ $messagesNumber = count( $messages['translated'] );
+ $requiredMessagesNumber = count( $messages['required'] );
+ $requiredMessagesPercent = $wgOut->formatPercent( $requiredMessagesNumber, $wgRequiredMessagesNumber );
+ $obsoleteMessagesNumber = count( $messages['obsolete'] );
+ $obsoleteMessagesPercent = $wgOut->formatPercent( $obsoleteMessagesNumber, $messagesNumber, true );
+ $messagesWithoutVariables = $wgLanguages->getMessagesWithoutVariables( $code );
+ $emptyMessages = $wgLanguages->getEmptyMessages( $code );
+ $messagesWithWhitespace = $wgLanguages->getMessagesWithWhitespace( $code );
+ $nonXHTMLMessages = $wgLanguages->getNonXHTMLMessages( $code );
+ $messagesWithWrongChars = $wgLanguages->getMessagesWithWrongChars( $code );
+ $problematicMessagesNumber = count( array_unique( array_merge( $messagesWithoutVariables, $emptyMessages, $messagesWithWhitespace, $nonXHTMLMessages, $messagesWithWrongChars ) ) );
+ $problematicMessagesPercent = $wgOut->formatPercent( $problematicMessagesNumber, $messagesNumber, true );
+
+ # Output them
+ $wgOut->blockstart();
+ $wgOut->element( "$language ($code)" );
+ $wgOut->element( "$requiredMessagesNumber/$wgRequiredMessagesNumber" );
+ $wgOut->element( $requiredMessagesPercent );
+ $wgOut->element( "$obsoleteMessagesNumber/$messagesNumber" );
+ $wgOut->element( $obsoleteMessagesPercent );
+ $wgOut->element( "$problematicMessagesNumber/$messagesNumber" );
+ $wgOut->element( $problematicMessagesPercent );
+ $wgOut->blockend();
+}
+
+# Footer
+$wgOut->footer();
+
+?>
diff --git a/maintenance/language/unusedMessages.php b/maintenance/language/unusedMessages.php
new file mode 100644
index 00000000..8b117eca
--- /dev/null
+++ b/maintenance/language/unusedMessages.php
@@ -0,0 +1,42 @@
+<?php
+/**
+ * Prints out messages in localisation files that are no longer used.
+ *
+ * @package MediaWiki
+ * @subpackage Maintenance
+ */
+
+require_once(dirname(__FILE__).'/../commandLine.inc');
+
+if ( isset( $args[0] ) ) {
+ $code = $args[0];
+} else {
+ $code = $wgLang->getCode();
+}
+
+if ( $code == 'en' ) {
+ print "Current selected language is English. Cannot check translations.\n";
+ exit();
+}
+
+$filename = Language::getMessagesFileName( $code );
+if ( file_exists( $filename ) ) {
+ require( $filename );
+} else {
+ $messages = array();
+}
+
+$count = $total = 0;
+$wgEnglishMessages = Language::getMessagesFor( 'en' );
+$wgLocalMessages = $messages;
+
+foreach ( $wgLocalMessages as $key => $msg ) {
+ ++$total;
+ if ( !isset( $wgEnglishMessages[$key] ) ) {
+ print "* $key\n";
+ ++$count;
+ }
+}
+
+print "{$count} messages of {$total} are unused in the language {$code}\n";
+?>
diff --git a/maintenance/language/validate.php b/maintenance/language/validate.php
new file mode 100644
index 00000000..10d98d37
--- /dev/null
+++ b/maintenance/language/validate.php
@@ -0,0 +1,40 @@
+<?php
+
+if ( !isset( $argv[1] ) ) {
+ print "Usage: php {$argv[0]} <filename>\n";
+ exit( 1 );
+}
+array_shift( $argv );
+
+define( 'MEDIAWIKI', 1 );
+define( 'NOT_REALLY_MEDIAWIKI', 1 );
+
+$IP = dirname( __FILE__ ) . '/../..';
+
+require_once( "$IP/includes/Defines.php" );
+require_once( "$IP/languages/Language.php" );
+
+$files = array();
+foreach ( $argv as $arg ) {
+ $files = array_merge( $files, glob( $arg ) );
+}
+
+foreach ( $files as $filename ) {
+ print "$filename...";
+ $vars = getVars( $filename );
+ $keys = array_keys( $vars );
+ $diff = array_diff( $keys, Language::$mLocalisationKeys );
+ if ( $diff ) {
+ print "\nWarning: unrecognised variable(s): " . implode( ', ', $diff ) ."\n";
+ } else {
+ print " ok\n";
+ }
+}
+
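+// Include the message file in this function's local scope and return every
+// variable it defines (minus our own $filename parameter), so the caller can
+// see exactly which $-variables the file sets.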
+function getVars( $filename ) {
+ require( $filename );
+ $vars = get_defined_vars();
+ unset( $vars['filename'] );
+ return $vars;
+}
+?>
diff --git a/maintenance/mctest.php b/maintenance/mctest.php
index 95249b29..f8f4b965 100644
--- a/maintenance/mctest.php
+++ b/maintenance/mctest.php
@@ -1,10 +1,17 @@
<?php
-/* $Id: mctest.php 12896 2006-01-28 08:22:24Z timstarling $ */
+/* $Id: mctest.php 16738 2006-10-02 17:04:13Z brion $ */
$optionsWithArgs = array( 'i' );
require_once('commandLine.inc');
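+// Combine the "msec sec" parts returned by microtime() into a single float
+// timestamp (the same value microtime( true ) would return).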
+function microtime_float()
+{
+ list($usec, $sec) = explode(" ", microtime());
+ return ((float)$usec + (float)$sec);
+}
+
+
#$wgDebugLogFile = '/dev/stdout';
if ( isset( $args[0] ) ) {
@@ -49,11 +56,5 @@ foreach ( $wgMemCachedServers as $server ) {
print "set: $set incr: $incr get: $get time: $exectime\n";
}
-function microtime_float()
-{
- list($usec, $sec) = explode(" ", microtime());
- return ((float)$usec + (float)$sec);
-}
-
?>
diff --git a/maintenance/mysql5/tables.sql b/maintenance/mysql5/tables.sql
index cc6818d3..81a4690a 100644
--- a/maintenance/mysql5/tables.sql
+++ b/maintenance/mysql5/tables.sql
@@ -583,8 +583,14 @@ CREATE TABLE /*$wgDBprefix*/ipblocks (
-- Indicates that the IP address was banned because a banned
-- user accessed a page through it. If this is 1, ipb_address
-- will be hidden, and the block identified by block ID number.
- ipb_auto tinyint(1) NOT NULL default '0',
+ ipb_auto bool NOT NULL default '0',
+ -- If set to 1, block applies only to logged-out users
+ ipb_anon_only bool NOT NULL default 0,
+
+ -- Block prevents account creation from matching IP addresses
+ ipb_create_account bool NOT NULL default 1,
+
-- Time at which the block will expire.
ipb_expiry char(14) binary NOT NULL default '',
@@ -594,9 +600,15 @@ CREATE TABLE /*$wgDBprefix*/ipblocks (
ipb_range_end varchar(32) NOT NULL default '',
PRIMARY KEY ipb_id (ipb_id),
- INDEX ipb_address (ipb_address),
+
+ -- Unique index to support "user already blocked" messages
+ -- Any new options which prevent collisions should be included
+ UNIQUE INDEX ipb_address (ipb_address(255), ipb_user, ipb_auto, ipb_anon_only),
+
INDEX ipb_user (ipb_user),
- INDEX ipb_range (ipb_range_start(8), ipb_range_end(8))
+ INDEX ipb_range (ipb_range_start(8), ipb_range_end(8)),
+ INDEX ipb_timestamp (ipb_timestamp),
+ INDEX ipb_expiry (ipb_expiry)
) TYPE=InnoDB, DEFAULT CHARSET=utf8;
@@ -797,7 +809,8 @@ CREATE TABLE /*$wgDBprefix*/recentchanges (
INDEX rc_namespace_title (rc_namespace, rc_title),
INDEX rc_cur_id (rc_cur_id),
INDEX new_name_timestamp(rc_new,rc_namespace,rc_timestamp),
- INDEX rc_ip (rc_ip)
+ INDEX rc_ip (rc_ip),
+ INDEX rc_ns_usertext ( rc_namespace, rc_user_text )
) TYPE=InnoDB, DEFAULT CHARSET=utf8;
@@ -1006,4 +1019,4 @@ CREATE TABLE /*$wgDBprefix*/querycache_info (
UNIQUE KEY ( qci_type )
-) TYPE=InnoDB; \ No newline at end of file
+) TYPE=InnoDB;
diff --git a/maintenance/namespaceDupes.php b/maintenance/namespaceDupes.php
index ad56eee7..acd3a708 100644
--- a/maintenance/namespaceDupes.php
+++ b/maintenance/namespaceDupes.php
@@ -111,12 +111,12 @@ class NamespaceConflictChecker {
}
function reportConflict( $row, $suffix ) {
- $newTitle = Title::makeTitle( $row->namespace, $row->title );
+ $newTitle = Title::makeTitleSafe( $row->namespace, $row->title );
printf( "... %d (0,\"%s\") -> (%d,\"%s\") [[%s]]\n",
$row->id,
$row->oldtitle,
- $row->namespace,
- $row->title,
+ $newTitle->getNamespace(),
+ $newTitle->getDbKey(),
$newTitle->getPrefixedText() );
$id = $newTitle->getArticleId();
@@ -131,7 +131,7 @@ class NamespaceConflictChecker {
function resolveConflict( $row, $resolvable, $suffix ) {
if( !$resolvable ) {
$row->title .= $suffix;
- $title = Title::makeTitle( $row->namespace, $row->title );
+ $title = Title::makeTitleSafe( $row->namespace, $row->title );
echo "... *** using suffixed form [[" . $title->getPrefixedText() . "]] ***\n";
}
$tables = $this->newSchema()
@@ -146,10 +146,11 @@ class NamespaceConflictChecker {
function resolveConflictOn( $row, $table ) {
$fname = 'NamespaceConflictChecker::resolveConflictOn';
echo "... resolving on $table... ";
+ $newTitle = Title::makeTitleSafe( $row->namespace, $row->title );
$this->db->update( $table,
array(
- "{$table}_namespace" => $row->namespace,
- "{$table}_title" => $row->title,
+ "{$table}_namespace" => $newTitle->getNamespace(),
+ "{$table}_title" => $newTitle->getDbKey(),
),
array(
"{$table}_namespace" => 0,
diff --git a/maintenance/ourusers.php b/maintenance/ourusers.php
index 0d625571..b50519d2 100644
--- a/maintenance/ourusers.php
+++ b/maintenance/ourusers.php
@@ -39,50 +39,22 @@ if ( @$argv[1] == 'yaseo' ) {
} else {
$hosts = array(
'localhost',
- '207.142.131.194',
- '207.142.131.195',
- '207.142.131.196',
- '207.142.131.197',
- '207.142.131.198',
- '207.142.131.199',
- '207.142.131.221',
- '207.142.131.226',
- '207.142.131.227',
- '207.142.131.228',
- '207.142.131.229',
- '207.142.131.230',
- '207.142.131.231',
- '207.142.131.232',
- '207.142.131.233',
- '207.142.131.234',
- '207.142.131.237',
- '207.142.131.238',
- '207.142.131.239',
- '207.142.131.243',
- '207.142.131.244',
- '207.142.131.249',
- '207.142.131.250',
- '207.142.131.216',
'10.0.%',
+ '66.230.200.%',
);
}
$databases = array(
- '%wikibooks',
- '%wiki',
- '%wikiquote',
- '%wiktionary',
- '%wikisource',
- '%wikinews',
- '%wikiversity',
- '%wikimedia',
+ '%wik%',
);
+print "/*!40100 set old_passwords=1 */;";
+print "/*!40100 set global old_passwords=1 */;";
+
foreach( $hosts as $host ) {
print "--\n-- $host\n--\n\n-- wikiuser\n\n";
print "GRANT REPLICATION CLIENT,PROCESS ON *.* TO 'wikiuser'@'$host' IDENTIFIED BY '$wikiuser_pass';\n";
- print "GRANT ALL PRIVILEGES ON `boardvote`.* TO 'wikiuser'@'$host' IDENTIFIED BY '$wikiuser_pass';\n";
- print "GRANT ALL PRIVILEGES ON `boardvote2005`.* TO 'wikiuser'@'$host' IDENTIFIED BY '$wikiuser_pass';\n";
+ print "GRANT ALL PRIVILEGES ON `boardvote%`.* TO 'wikiuser'@'$host' IDENTIFIED BY '$wikiuser_pass';\n";
foreach( $databases as $db ) {
print "GRANT SELECT, INSERT, UPDATE, DELETE ON `$db`.* TO 'wikiuser'@'$host' IDENTIFIED BY '$wikiuser_pass';\n";
}
@@ -111,8 +83,7 @@ EOS;
print "\n-- wikiadmin\n\n";
print "GRANT PROCESS, REPLICATION CLIENT ON *.* TO 'wikiadmin'@'$host' IDENTIFIED BY '$wikiadmin_pass';\n";
- print "GRANT ALL PRIVILEGES ON `boardvote`.* TO wikiadmin@'$host' IDENTIFIED BY '$wikiadmin_pass';\n";
- print "GRANT ALL PRIVILEGES ON `boardvote2005`.* TO wikiadmin@'$host' IDENTIFIED BY '$wikiadmin_pass';\n";
+ print "GRANT ALL PRIVILEGES ON `boardvote%`.* TO wikiadmin@'$host' IDENTIFIED BY '$wikiadmin_pass';\n";
foreach ( $databases as $db ) {
print "GRANT ALL PRIVILEGES ON `$db`.* TO wikiadmin@'$host' IDENTIFIED BY '$wikiadmin_pass';\n";
}
diff --git a/maintenance/parserTests.inc b/maintenance/parserTests.inc
index 9f93c4ac..0aabd27b 100644
--- a/maintenance/parserTests.inc
+++ b/maintenance/parserTests.inc
@@ -31,7 +31,6 @@ $optionsWithArgs = array( 'regex' );
require_once( 'commandLine.inc' );
require_once( "$IP/includes/ObjectCache.php" );
require_once( "$IP/includes/BagOStuff.php" );
-require_once( "$IP/languages/LanguageUtf8.php" );
require_once( "$IP/includes/Hooks.php" );
require_once( "$IP/maintenance/parserTestsParserHook.php" );
require_once( "$IP/maintenance/parserTestsStaticParserHook.php" );
@@ -238,7 +237,7 @@ class ParserTest {
$this->setupGlobals($opts);
- $user =& new User();
+ $user = new User();
$options = ParserOptions::newFromUser( $user );
if (preg_match('/\\bmath\\b/i', $opts)) {
@@ -255,7 +254,7 @@ class ParserTest {
$noxml = (bool)preg_match( '~\\b noxml \\b~x', $opts );
- $parser =& new Parser();
+ $parser = new Parser();
foreach( $this->hooks as $tag => $callback ) {
$parser->setHook( $tag, $callback );
}
@@ -335,14 +334,12 @@ class ParserTest {
'wgLanguageCode' => $lang,
'wgContLanguageCode' => $lang,
'wgDBprefix' => 'parsertest_',
- 'wgDefaultUserOptions' => array(),
'wgLang' => null,
'wgContLang' => null,
'wgNamespacesWithSubpages' => array( 0 => preg_match('/\\bsubpage\\b/i', $opts)),
'wgMaxTocLevel' => 999,
'wgCapitalLinks' => true,
- 'wgDefaultUserOptions' => array(),
'wgNoFollowLinks' => true,
'wgThumbnailScriptPath' => false,
'wgUseTeX' => false,
@@ -354,13 +351,12 @@ class ParserTest {
$this->savedGlobals[$var] = $GLOBALS[$var];
$GLOBALS[$var] = $val;
}
- $langClass = 'Language' . str_replace( '-', '_', ucfirst( $lang ) );
- $langObj = setupLangObj( $langClass );
+ $langObj = Language::factory( $lang );
$GLOBALS['wgLang'] = $langObj;
$GLOBALS['wgContLang'] = $langObj;
$GLOBALS['wgLoadBalancer']->loadMasterPos();
- $GLOBALS['wgMessageCache'] = new MessageCache( new BagOStuff(), false, 0, $GLOBALS['wgDBname'] );
+ //$GLOBALS['wgMessageCache'] = new MessageCache( new BagOStuff(), false, 0, $GLOBALS['wgDBname'] );
$this->setupDatabase();
global $wgUser;
diff --git a/maintenance/parserTests.php b/maintenance/parserTests.php
index eac7adb0..309bf2e0 100644
--- a/maintenance/parserTests.php
+++ b/maintenance/parserTests.php
@@ -49,7 +49,7 @@ END;
# refer to $wgTitle directly, but instead use the title
# passed to it.
$wgTitle = Title::newFromText( 'Parser test script do not use' );
-$tester =& new ParserTest();
+$tester = new ParserTest();
if( isset( $options['file'] ) ) {
$file = $options['file'];
diff --git a/maintenance/parserTests.txt b/maintenance/parserTests.txt
index 0238051c..66b46a53 100644
--- a/maintenance/parserTests.txt
+++ b/maintenance/parserTests.txt
@@ -714,6 +714,24 @@ External links: [encoded equals] (bug 6102)
!! end
!! test
+External links: [IDN ignored character reference in hostname; strip it right off]
+!! input
+[http://e&zwnj;xample.com/]
+!! result
+<p><a href="http://example.com/" class="external autonumber" title="http://example.com/" rel="nofollow">[1]</a>
+</p>
+!! end
+
+!! test
+External links: IDN ignored character reference in hostname; strip it right off
+!! input
+http://e&zwnj;xample.com/
+!! result
+<p><a href="http://example.com/" class="external free" title="http://example.com/" rel="nofollow">http://example.com/</a>
+</p>
+!! end
+
+!! test
External links: www.jpeg.org (bug 554)
!! input
http://www.jpeg.org
@@ -1192,7 +1210,7 @@ Invalid attributes in table cell (bug 1830)
# FIXME: this one has incorrect tag nesting still.
!! test
-Table security: embedded pipes (http://mail.wikipedia.org/pipermail/wikitech-l/2006-April/034637.html)
+TODO: Table security: embedded pipes (http://mail.wikipedia.org/pipermail/wikitech-l/2006-April/034637.html)
!! input
{|
| |[ftp://|x||]" onmouseover="alert(document.cookie)">test
@@ -1365,7 +1383,7 @@ Link containing "<#" and ">#" as a hex sequences
!! end
!! test
-Link containing double-single-quotes '' (bug 4598)
+TODO: Link containing double-single-quotes '' (bug 4598)
!! input
[[Lista d''e paise d''o munno]]
!! result
@@ -2152,7 +2170,7 @@ Template with complex template as argument
!! end
!! test
-Template with thumb image (wiht link in description)
+TODO: Template with thumb image (with link in description)
!! input
{{paramtest|
param =[[Image:noimage.png|thumb|[[no link|link]] [[no link|caption]]]]}}
@@ -2624,6 +2642,146 @@ pst
Foo
!! end
+!! test
+pre-save transform: context links ("pipe trick")
+!! options
+pst
+!! input
+[[Article (context)|]]
+[[Bar:Article|]]
+[[:Bar:Article|]]
+[[Bar:Article (context)|]]
+[[:Bar:Article (context)|]]
+[[|Article]]
+[[|Article (context)]]
+[[Bar:X (Y) Z|]]
+[[:Bar:X (Y) Z|]]
+!! result
+[[Article (context)|Article]]
+[[Bar:Article|Article]]
+[[:Bar:Article|Article]]
+[[Bar:Article (context)|Article]]
+[[:Bar:Article (context)|Article]]
+[[Article]]
+[[Article (context)]]
+[[Bar:X (Y) Z|X (Y) Z]]
+[[:Bar:X (Y) Z|X (Y) Z]]
+!! end
+
+!! test
+pre-save transform: context links ("pipe trick") with interwiki prefix
+!! options
+pst
+!! input
+[[interwiki:Article|]]
+[[:interwiki:Article|]]
+[[interwiki:Bar:Article|]]
+[[:interwiki:Bar:Article|]]
+!! result
+[[interwiki:Article|Article]]
+[[:interwiki:Article|Article]]
+[[interwiki:Bar:Article|Bar:Article]]
+[[:interwiki:Bar:Article|Bar:Article]]
+!! end
+
+!! test
+pre-save transform: context links ("pipe trick") with parens in title
+!! options
+pst title=[[Somearticle (context)]]
+!! input
+[[|Article]]
+!! result
+[[Article (context)|Article]]
+!! end
+
+!! test
+pre-save transform: context links ("pipe trick") with comma in title
+!! options
+pst title=[[Someplace, Somewhere]]
+!! input
+[[|Otherplace]]
+[[Otherplace, Elsewhere|]]
+[[Otherplace, Elsewhere, Anywhere|]]
+!! result
+[[Otherplace, Somewhere|Otherplace]]
+[[Otherplace, Elsewhere|Otherplace]]
+[[Otherplace, Elsewhere, Anywhere|Otherplace]]
+!! end
+
+!! test
+pre-save transform: context links ("pipe trick") with parens and comma
+!! options
+pst title=[[Someplace (IGNORED), Somewhere]]
+!! input
+[[|Otherplace]]
+[[Otherplace (place), Elsewhere|]]
+!! result
+[[Otherplace, Somewhere|Otherplace]]
+[[Otherplace (place), Elsewhere|Otherplace]]
+!! end
+
+!! test
+pre-save transform: context links ("pipe trick") with comma and parens
+!! options
+pst title=[[Who, me? (context)]]
+!! input
+[[|Yes, you.]]
+[[Me, Myself, and I (1937 song)|]]
+!! result
+[[Yes, you. (context)|Yes, you.]]
+[[Me, Myself, and I (1937 song)|Me, Myself, and I]]
+!! end
+
+!! test
+pre-save transform: context links ("pipe trick") with namespace
+!! options
+pst title=[[Ns:Somearticle]]
+!! input
+[[|Article]]
+!! result
+[[Ns:Article|Article]]
+!! end
+
+!! test
+pre-save transform: context links ("pipe trick") with namespace and parens
+!! options
+pst title=[[Ns:Somearticle (context)]]
+!! input
+[[|Article]]
+!! result
+[[Ns:Article (context)|Article]]
+!! end
+
+!! test
+pre-save transform: context links ("pipe trick") with namespace and comma
+!! options
+pst title=[[Ns:Somearticle, Context, Whatever]]
+!! input
+[[|Article]]
+!! result
+[[Ns:Article, Context, Whatever|Article]]
+!! end
+
+!! test
+pre-save transform: context links ("pipe trick") with namespace, comma and parens
+!! options
+pst title=[[Ns:Somearticle, Context (context)]]
+!! input
+[[|Article]]
+!! result
+[[Ns:Article (context)|Article]]
+!! end
+
+!! test
+pre-save transform: context links ("pipe trick") with namespace, parens and comma
+!! options
+pst title=[[Ns:Somearticle (IGNORED), Context]]
+!! input
+[[|Article]]
+!! result
+[[Ns:Article, Context|Article]]
+!! end
+
###
### Message transform tests
@@ -2649,7 +2807,7 @@ msg
!! end
!! test
-message transform: <noinclude> in transcluded template (bug 4926)
+TODO: message transform: <noinclude> in transcluded template (bug 4926)
!! options
msg
!! input
@@ -2659,7 +2817,7 @@ Foobar
!! end
!! test
-message transform: <onlyinclude> in transcluded template (bug 4926)
+TODO: message transform: <onlyinclude> in transcluded template (bug 4926)
!! options
msg
!! input
@@ -4043,6 +4201,16 @@ Something, but defenetly not <br id="9" />...
!! end
!! test
+Sanitizer: Validating id attribute uniqueness (bug 4515, bug 6301)
+!! options
+disabled
+!! input
+<br id="foo" /><br id="foo" />
+!! result
+Something need to be done. foo-2 ?
+!! end
+
+!! test
Language converter: output gets cut off unexpectedly (bug 5757)
!! options
language=zh
@@ -4094,7 +4262,7 @@ HTML bullet list, closed tags (bug 5497)
!! end
!! test
-HTML bullet list, unclosed tags (bug 5497)
+TODO: HTML bullet list, unclosed tags (bug 5497)
!! input
<ul>
<li>One
@@ -4124,7 +4292,7 @@ HTML ordered list, closed tags (bug 5497)
!! end
!! test
-HTML ordered list, unclosed tags (bug 5497)
+TODO: HTML ordered list, unclosed tags (bug 5497)
!! input
<ol>
<li>One
@@ -4164,7 +4332,7 @@ HTML nested bullet list, closed tags (bug 5497)
!! end
!! test
-HTML nested bullet list, open tags (bug 5497)
+TODO: HTML nested bullet list, open tags (bug 5497)
!! input
<ul>
<li>One
@@ -4212,7 +4380,7 @@ HTML nested ordered list, closed tags (bug 5497)
!! end
!! test
-HTML nested ordered list, open tags (bug 5497)
+TODO: HTML nested ordered list, open tags (bug 5497)
!! input
<ol>
<li>One
@@ -4458,7 +4626,7 @@ Fuzz testing: encoded newline in generated HTML replacements (bug 6577)
!! end
!! test
-Parsing optional HTML elements (Bug 6171)
+TODO: Parsing optional HTML elements (Bug 6171)
!! options
!! input
<table>
@@ -4524,7 +4692,7 @@ New wiki paragraph
!! end
!! test
-Inline HTML vs wiki block nesting
+TODO: Inline HTML vs wiki block nesting
!! input
<b>Bold paragraph
@@ -4537,7 +4705,7 @@ New wiki paragraph
!!test
-Mixing markup for italics and bold
+TODO: Mixing markup for italics and bold
!! options
!! input
'''bold''''''bold''bolditalics'''''
@@ -5463,6 +5631,226 @@ Handling of &#x0A; in URLs
</li></ul>
!!end
+
+!! test
+TODO: 5 quotes, code coverage +1 line
+!! input
+'''''
+!! result
+!! end
+
+!! test
+Special:Search page linking.
+!! input
+{{Special:search}}
+!! result
+<p><a href="/wiki/Special:Search" title="Special:Search">Special:Search</a>
+</p>
+!! end
+
+!! test
+Say the magic word
+!! input
+* {{PAGENAME}}
+* {{BASEPAGENAME}}
+* {{SUBPAGENAME}}
+* {{SUBPAGENAMEE}}
+* {{BASEPAGENAME}}
+* {{BASEPAGENAMEE}}
+* {{TALKPAGENAME}}
+* {{TALKPAGENAMEE}}
+* {{SUBJECTPAGENAME}}
+* {{SUBJECTPAGENAMEE}}
+* {{NAMESPACEE}}
+* {{NAMESPACE}}
+* {{TALKSPACE}}
+* {{TALKSPACEE}}
+* {{SUBJECTSPACE}}
+* {{SUBJECTSPACEE}}
+* {{Dynamic|{{NUMBEROFUSERS}}|{{NUMBEROFPAGES}}|{{CURRENTVERSION}}|{{CONTENTLANGUAGE}}|{{DIRECTIONMARK}}|{{CURRENTTIMESTAMP}}|{{NUMBEROFARTICLES}}}}
+!! result
+<ul><li> Parser test
+</li><li> Parser test
+</li><li> Parser test
+</li><li> Parser_test
+</li><li> Parser test
+</li><li> Parser_test
+</li><li> Talk:Parser test
+</li><li> Talk:Parser_test
+</li><li> Parser test
+</li><li> Parser_test
+</li><li>
+</li><li>
+</li><li> Talk
+</li><li> Talk
+</li><li>
+</li><li>
+</li><li> <a href="/index.php?title=Template:Dynamic&amp;action=edit" class="new" title="Template:Dynamic">Template:Dynamic</a>
+</li></ul>
+
+!! end
+### Note: The above test excludes the "{{NUMBEROFADMINS}}" magic word because it generates a MySQL error when included.
+
+!! test
+Gallery
+!! input
+<gallery>
+image1.png |
+image2.gif|||||
+
+image3|
+image4 |300px| centre
+ image5.svg| http://///////
+[[x|xx]]]]
+* image6
+</gallery>
+!! result
+<table class="gallery" cellspacing="0" cellpadding="0"><tr><td><div class="gallerybox"><div style="height: 152px;">Image1.png</div><div class="gallerytext">
+</div></div></td>
+<td><div class="gallerybox"><div style="height: 152px;">Image2.gif</div><div class="gallerytext">
+||||</div></div></td>
+<td><div class="gallerybox"><div style="height: 152px;">Image3</div><div class="gallerytext">
+</div></div></td>
+<td><div class="gallerybox"><div style="height: 152px;">Image4</div><div class="gallerytext">
+300px| centre</div></div></td>
+</tr><tr><td><div class="gallerybox"><div style="height: 152px;">Image5.svg</div><div class="gallerytext">
+ <a href="http://///////" class="external free" title="http://///////" rel="nofollow">http://///////</a></div></div></td>
+<td><div class="gallerybox"><div style="height: 152px;">* image6</div><div class="gallerytext">
+</div></div></td>
+</tr>
+</table>
+
+!! end
+
+!! test
+TODO: HTML Hex character encoding.
+!! input
+&#x4A;&#x061;&#x0076;&#x00061;&#x000053;&#x0000063;&#114;&#x0000069;&#00000112;&#x0000000074;
+!! result
+<p>JavaScript
+</p>
+!! end
+
+!! test
+__FORCETOC__ override
+!! input
+__NEWSECTIONLINK__
+__FORCETOC__
+!! result
+<p><br />
+</p>
+!! end
+
+!! test
+ISBN code coverage
+!! input
+ISBN 983&#x20;987
+!! result
+<p><a href="/index.php?title=Special:Booksources&amp;isbn=983" class="internal">ISBN 983</a>&#x20;987
+</p>
+!! end
+
+!! test
+ISBN followed by 5 spaces
+!! input
+ISBN
+!! result
+<p>ISBN
+</p>
+!! end
+
+!! test
+Double ISBN
+!! options
+disabled # Disabled until Bug 6560 resolved
+!! input
+ISBN ISBN 1234
+!! result
+<p>ISBN <a href="/wiki/index.php?title=Special:Booksources&amp;isbn=1234" class="internal">ISBN 1234</a>
+</p>
+!! end
+
+!! test
+Double RFC
+!! input
+RFC RFC 1234
+!! result
+<p>RFC <a href="http://www.ietf.org/rfc/rfc1234.txt" class="external" title="http://www.ietf.org/rfc/rfc1234.txt">RFC 1234</a>
+</p>
+!! end
+
+!! test
+Double RFC with a wiki link
+!! input
+RFC [[RFC 1234]]
+!! result
+<p>RFC <a href="/index.php?title=RFC_1234&amp;action=edit" class="new" title="RFC 1234">RFC 1234</a>
+</p>
+!! end
+
+!! test
+RFC code coverage
+!! input
+RFC 983&#x20;987
+!! result
+<p><a href="http://www.ietf.org/rfc/rfc983.txt" class="external" title="http://www.ietf.org/rfc/rfc983.txt">RFC 983</a>&#x20;987
+</p>
+!! end
+
+!! test
+Centre-aligned image
+!! input
+[[Image:foobar.jpg|centre]]
+!! result
+<div class="center"><div class="floatnone"><span><a href="/wiki/Image:Foobar.jpg" class="image" title=""><img src="http://example.com/images/3/3a/Foobar.jpg" alt="" width="1941" height="220" longdesc="/wiki/Image:Foobar.jpg" /></a></span></div></div>
+
+!!end
+
+!! test
+None-aligned image
+!! input
+[[Image:foobar.jpg|none]]
+!! result
+<div class="floatnone"><span><a href="/wiki/Image:Foobar.jpg" class="image" title=""><img src="http://example.com/images/3/3a/Foobar.jpg" alt="" width="1941" height="220" longdesc="/wiki/Image:Foobar.jpg" /></a></span></div>
+
+!!end
+
+!! test
+Width + Height sized image (using px) (height is ignored)
+!! input
+[[Image:foobar.jpg|640x480px]]
+!! result
+<p><a href="/wiki/Image:Foobar.jpg" class="image" title=""><img src="http://example.com/images/thumb/3/3a/Foobar.jpg/640px-Foobar.jpg" alt="" width="640" height="73" longdesc="/wiki/Image:Foobar.jpg" /></a>
+</p>
+!!end
+
+!! test
+Another italics / bold test
+!! input
+ ''' ''x'
+!! result
+<pre>'<i> </i>x'
+</pre>
+!!end
+
+# Note the results may be incorrect, as parserTest output included this:
+# XML error: Mismatched tag at byte 6120:
+# ...<dd> </dt></dl> </dd...
+!! test
+TODO: dt/dd/dl test
+!! input
+:;;;::
+!! result
+<dl><dd><dl><dt><dl><dt><dl><dt><dl><dd><dl><dd>
+</dt></dl>
+</dd></dl>
+</dd></dl>
+</dd></dl>
+</dd></dl>
+</dd></dl>
+
+!!end
+
#
#
#
@@ -5473,3 +5861,4 @@ more tables
math
character entities
and much more
+Try for 100% code coverage
diff --git a/maintenance/postgres/compare_schemas.pl b/maintenance/postgres/compare_schemas.pl
new file mode 100644
index 00000000..4a76b270
--- /dev/null
+++ b/maintenance/postgres/compare_schemas.pl
@@ -0,0 +1,181 @@
+#!/usr/bin/perl
+
+## Rough check that the base and postgres "tables.sql" are in sync
+## Should be run from maintenance/postgres
+
+use strict;
+use warnings;
+use Data::Dumper;
+
+my @old = ("../tables.sql");
+my $new = "tables.sql";
+
+## Read in exceptions and other metadata
+my %ok;
+while (<DATA>) {
+ next unless /^(\w+)\s*:\s*([^#]+)/;
+ my ($name,$val) = ($1,$2);
+ chomp $val;
+ if ($name eq 'RENAME') {
+ die "Invalid rename\n" unless $val =~ /(\w+)\s+(\w+)/;
+ $ok{OLD}{$1} = $2;
+ $ok{NEW}{$2} = $1;
+ next;
+ }
+ if ($name eq 'XFILE') {
+ push @old, $val;
+ next;
+ }
+ for (split(/\s+/ => $val)) {
+ $ok{$name}{$_} = 0;
+ }
+}
+
+open my $newfh, "<", $new or die qq{Could not open $new: $!\n};
+
+my $datatype = join '|' => qw(
+bool
+tinyint int bigint real float
+tinytext mediumtext text char varchar
+timestamp datetime
+tinyblob mediumblob blob
+);
+$datatype .= q{|ENUM\([\"\w, ]+\)};
+$datatype = qr{($datatype)};
+
+my $typeval = qr{(\(\d+\))?};
+
+my $typeval2 = qr{ unsigned| binary| NOT NULL| NULL| auto_increment| default ['\-\d\w"]+| REFERENCES .+CASCADE};
+
+my $indextype = join '|' => qw(INDEX KEY FULLTEXT), "PRIMARY KEY", "UNIQUE INDEX", "UNIQUE KEY";
+$indextype = qr{$indextype};
+
+my $tabletype = qr{InnoDB|MyISAM|HEAP|HEAP MAX_ROWS=\d+};
+
+my ($table,%old);
+for my $old (@old) {
+ open my $oldfh, "<", $old or die qq{Could not open $old: $!\n};
+
+ while (<$oldfh>) {
+ next if /^\s*\-\-/ or /^\s+$/;
+ s/\s*\-\- [\w ]+$//;
+ chomp;
+
+ if (/CREATE\s*TABLE/i) {
+ m{^CREATE TABLE /\*\$wgDBprefix\*/(\w+) \($}
+ or die qq{Invalid CREATE TABLE at line $. of $old\n};
+ $table = $1;
+ $old{$table}{name}=$table;
+ }
+ elsif (/^\) TYPE=($tabletype);$/) {
+ $old{$table}{type}=$1;
+ }
+ elsif (/^ (\w+) $datatype$typeval$typeval2{0,3},?$/) {
+ $old{$table}{column}{$1} = $2;
+ }
+ elsif (/^ ($indextype)(?: (\w+))? \(([\w, \(\)]+)\),?$/) {
+ $old{$table}{lc $1."_name"} = $2 ? $2 : "";
+ $old{$table}{lc $1."pk_target"} = $3;
+ }
+ else {
+ die "Cannot parse line $. of $old:\n$_\n";
+ }
+ }
+ close $oldfh;
+}
+
+$datatype = join '|' => qw(
+SMALLINT INTEGER BIGINT NUMERIC SERIAL
+TEXT CHAR VARCHAR
+BYTEA
+TIMESTAMPTZ
+CIDR
+);
+$datatype = qr{($datatype)};
+my %new;
+my ($infunction,$inview,$inrule) = (0,0,0);
+while (<$newfh>) {
+ next if /^\s*\-\-/ or /^\s*$/;
+ s/\s*\-\- [\w ']+$//;
+ next if /^BEGIN;/ or /^SET / or /^COMMIT;/;
+ next if /^CREATE SEQUENCE/;
+ next if /^CREATE(?: UNIQUE)? INDEX/;
+ next if /^CREATE FUNCTION/;
+ next if /^CREATE TRIGGER/ or /^ FOR EACH ROW/;
+ next if /^INSERT INTO/ or /^ VALUES \(/;
+ next if /^ALTER TABLE/;
+ chomp;
+
+ if (/^\$mw\$;?$/) {
+ $infunction = $infunction ? 0 : 1;
+ next;
+ }
+ next if $infunction;
+
+ next if /^CREATE VIEW/ and $inview = 1;
+ if ($inview) {
+ /;$/ and $inview = 0;
+ next;
+ }
+
+ next if /^CREATE RULE/ and $inrule = 1;
+ if ($inrule) {
+ /;$/ and $inrule = 0;
+ next;
+ }
+
+ if (/^CREATE TABLE "?(\w+)"? \($/) {
+ $table = $1;
+ $new{$table}{name}=$table;
+ }
+ elsif (/^\);$/) {
+ }
+ elsif (/^ (\w+) +$datatype/) {
+ $new{$table}{column}{$1} = $2;
+ }
+ else {
+ die "Cannot parse line $. of $new:\n$_\n";
+ }
+}
+close $newfh;
+
+## Old but not new
+for my $t (sort keys %old) {
+ if (!exists $new{$t} and !exists $ok{OLD}{$t}) {
+ print "Table not in $new: $t\n";
+ next;
+ }
+ next if exists $ok{OLD}{$t} and !$ok{OLD}{$t};
+ my $newt = exists $ok{OLD}{$t} ? $ok{OLD}{$t} : $t;
+ my $oldcol = $old{$t}{column};
+ my $newcol = $new{$newt}{column};
+ for my $c (keys %$oldcol) {
+ if (!exists $newcol->{$c}) {
+ print "Column $t.$c not in new\n";
+ next;
+ }
+ }
+ for my $c (keys %$newcol) {
+ if (!exists $oldcol->{$c}) {
+ print "Column $t.$c not in old\n";
+ next;
+ }
+ }
+}
+## New but not old:
+for (sort keys %new) {
+ if (!exists $old{$_} and !exists $ok{NEW}{$_}) {
+ print "Not in old: $_\n";
+ next;
+ }
+}
+
+__DATA__
+## Known exceptions
+OLD: searchindex ## We use tsearch2 directly on the page table instead
+OLD: archive ## This is a view due to the char(14) timestamp hack
+RENAME: user mwuser ## Reserved word causing lots of problems
+RENAME: text pagecontent ## Reserved word
+NEW: archive2 ## The real archive table
+NEW: mediawiki_version ## Just us, for now
+XFILE: ../archives/patch-profiling.sql
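
To make the comparison concrete, here is a minimal sketch (invented, heavily abridged column lists rather than the real definitions) of a MySQL fragment from ../tables.sql and its Postgres counterpart that this script would treat as being in sync: the RENAME exception above maps user to mwuser, and only column names are compared, so the differing types are ignored.

CREATE TABLE /*$wgDBprefix*/user (
 user_id int unsigned NOT NULL auto_increment,
 user_name varchar(255) binary NOT NULL default ''
) TYPE=InnoDB;

-- Postgres side (tables.sql): same column names, so nothing is reported
CREATE TABLE mwuser (
 user_id INTEGER NOT NULL,
 user_name TEXT NOT NULL
);

A column present on only one side would be printed as "Column user.user_x not in new" (or "not in old"); whole-table mismatches are suppressed by the OLD, NEW and RENAME entries above.
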
diff --git a/maintenance/postgres/tables.sql b/maintenance/postgres/tables.sql
index 5481a394..9ac329d8 100644
--- a/maintenance/postgres/tables.sql
+++ b/maintenance/postgres/tables.sql
@@ -11,7 +11,7 @@ BEGIN;
SET client_min_messages = 'ERROR';
CREATE SEQUENCE user_user_id_seq MINVALUE 0 START WITH 0;
-CREATE TABLE "user" (
+CREATE TABLE mwuser ( -- replace reserved word 'user'
user_id INTEGER NOT NULL PRIMARY KEY DEFAULT nextval('user_user_id_seq'),
user_name TEXT NOT NULL UNIQUE,
user_real_name TEXT,
@@ -26,20 +26,20 @@ CREATE TABLE "user" (
user_touched TIMESTAMPTZ,
user_registration TIMESTAMPTZ
);
-CREATE INDEX user_email_token_idx ON "user" (user_email_token);
+CREATE INDEX user_email_token_idx ON mwuser (user_email_token);
-- Create a dummy user to satisfy fk constraints especially with revisions
-INSERT INTO "user" VALUES
- (DEFAULT,'Anonymous','',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,now(),now());
+INSERT INTO mwuser
+ VALUES (DEFAULT,'Anonymous','',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,now(),now());
CREATE TABLE user_groups (
- ug_user INTEGER NULL REFERENCES "user"(user_id) ON DELETE CASCADE,
+ ug_user INTEGER NULL REFERENCES mwuser(user_id) ON DELETE CASCADE,
ug_group TEXT NOT NULL
);
CREATE UNIQUE INDEX user_groups_unique ON user_groups (ug_user, ug_group);
CREATE TABLE user_newtalk (
- user_id INTEGER NOT NULL REFERENCES "user"(user_id) ON DELETE CASCADE,
+ user_id INTEGER NOT NULL REFERENCES mwuser(user_id) ON DELETE CASCADE,
user_ip CIDR NULL
);
CREATE INDEX user_newtalk_id_idx ON user_newtalk (user_id);
@@ -69,18 +69,24 @@ CREATE INDEX page_project_title ON page (page_title) WHERE page_namespace =
CREATE INDEX page_random_idx ON page (page_random);
CREATE INDEX page_len_idx ON page (page_len);
--- Create a dummy page to satisfy fk contraints where a page_id of "0" is added
-INSERT INTO page (page_id,page_namespace,page_title,page_random,page_latest,page_len)
-VALUES (0,0,'',0.0,0,0);
+CREATE FUNCTION page_deleted() RETURNS TRIGGER LANGUAGE plpgsql AS
+$mw$
+BEGIN
+DELETE FROM recentchanges WHERE rc_namespace = OLD.page_namespace AND rc_title = OLD.page_title;
+RETURN NULL;
+END;
+$mw$;
+CREATE TRIGGER page_deleted AFTER DELETE ON page
+ FOR EACH ROW EXECUTE PROCEDURE page_deleted();
CREATE SEQUENCE rev_rev_id_val;
CREATE TABLE revision (
rev_id INTEGER NOT NULL UNIQUE DEFAULT nextval('rev_rev_id_val'),
- rev_page INTEGER NULL REFERENCES page (page_id) ON DELETE SET NULL,
+ rev_page INTEGER NULL REFERENCES page (page_id) ON DELETE CASCADE,
rev_text_id INTEGER NULL, -- FK
rev_comment TEXT,
- rev_user INTEGER NOT NULL REFERENCES "user"(user_id),
+ rev_user INTEGER NOT NULL REFERENCES mwuser(user_id),
rev_user_text TEXT NOT NULL,
rev_timestamp TIMESTAMPTZ NOT NULL,
rev_minor_edit CHAR NOT NULL DEFAULT '0',
@@ -93,19 +99,19 @@ CREATE INDEX rev_user_text_idx ON revision (rev_user_text);
CREATE SEQUENCE text_old_id_val;
-CREATE TABLE "text" (
+CREATE TABLE pagecontent ( -- replaces reserved word 'text'
old_id INTEGER NOT NULL PRIMARY KEY DEFAULT nextval('text_old_id_val'),
old_text TEXT,
old_flags TEXT
);
-CREATE TABLE archive (
+CREATE TABLE archive2 (
ar_namespace SMALLINT NOT NULL,
ar_title TEXT NOT NULL,
ar_text TEXT,
ar_comment TEXT,
- ar_user INTEGER NULL REFERENCES "user"(user_id) ON DELETE SET NULL,
+ ar_user INTEGER NULL REFERENCES mwuser(user_id) ON DELETE SET NULL,
ar_user_text TEXT NOT NULL,
ar_timestamp TIMESTAMPTZ NOT NULL,
ar_minor_edit CHAR NOT NULL DEFAULT '0',
@@ -113,7 +119,22 @@ CREATE TABLE archive (
ar_rev_id INTEGER,
ar_text_id INTEGER
);
-CREATE INDEX archive_name_title_timestamp ON archive (ar_namespace,ar_title,ar_timestamp);
+CREATE INDEX archive_name_title_timestamp ON archive2 (ar_namespace,ar_title,ar_timestamp);
+
+-- This is the easiest way to work around the char(14) timestamp hack without modifying PHP code
+CREATE VIEW archive AS
+SELECT
+ ar_namespace, ar_title, ar_text, ar_comment, ar_user, ar_user_text,
+ ar_minor_edit, ar_flags, ar_rev_id, ar_text_id,
+ TO_CHAR(ar_timestamp, 'YYYYMMDDHH24MISS') AS ar_timestamp
+FROM archive2;
+
+CREATE RULE archive_insert AS ON INSERT TO archive
+DO INSTEAD INSERT INTO archive2 VALUES (
+ NEW.ar_namespace, NEW.ar_title, NEW.ar_text, NEW.ar_comment, NEW.ar_user, NEW.ar_user_text,
+ TO_TIMESTAMP(NEW.ar_timestamp, 'YYYYMMDDHH24MISS'),
+ NEW.ar_minor_edit, NEW.ar_flags, NEW.ar_rev_id, NEW.ar_text_id
+);
CREATE TABLE pagelinks (
@@ -121,7 +142,7 @@ CREATE TABLE pagelinks (
pl_namespace SMALLINT NOT NULL,
pl_title TEXT NOT NULL
);
-CREATE UNIQUE INDEX pagelink_unique ON pagelinks (pl_namespace,pl_title,pl_from);
+CREATE UNIQUE INDEX pagelink_unique ON pagelinks (pl_from,pl_namespace,pl_title);
CREATE TABLE templatelinks (
tl_from INTEGER NOT NULL REFERENCES page(page_id) ON DELETE CASCADE,
@@ -180,16 +201,18 @@ CREATE TABLE hitcounter (
CREATE SEQUENCE ipblocks_ipb_id_val;
CREATE TABLE ipblocks (
- ipb_id INTEGER NOT NULL PRIMARY KEY DEFAULT nextval('ipblocks_ipb_id_val'),
- ipb_address CIDR NULL,
- ipb_user INTEGER NULL REFERENCES "user"(user_id) ON DELETE SET NULL,
- ipb_by INTEGER NOT NULL REFERENCES "user"(user_id) ON DELETE CASCADE,
- ipb_reason TEXT NOT NULL,
- ipb_timestamp TIMESTAMPTZ NOT NULL,
- ipb_auto CHAR NOT NULL DEFAULT '0',
- ipb_expiry TIMESTAMPTZ NOT NULL,
- ipb_range_start TEXT,
- ipb_range_end TEXT
+ ipb_id INTEGER NOT NULL PRIMARY KEY DEFAULT nextval('ipblocks_ipb_id_val'),
+ ipb_address CIDR NULL,
+ ipb_user INTEGER NULL REFERENCES mwuser(user_id) ON DELETE SET NULL,
+ ipb_by INTEGER NOT NULL REFERENCES mwuser(user_id) ON DELETE CASCADE,
+ ipb_reason TEXT NOT NULL,
+ ipb_timestamp TIMESTAMPTZ NOT NULL,
+ ipb_auto CHAR NOT NULL DEFAULT '0',
+ ipb_anon_only CHAR NOT NULL DEFAULT '0',
+ ipb_create_account CHAR NOT NULL DEFAULT '1',
+ ipb_expiry TIMESTAMPTZ NOT NULL,
+ ipb_range_start TEXT,
+ ipb_range_end TEXT
);
CREATE INDEX ipb_address ON ipblocks (ipb_address);
CREATE INDEX ipb_user ON ipblocks (ipb_user);
@@ -198,16 +221,16 @@ CREATE INDEX ipb_range ON ipblocks (ipb_range_start,ipb_range_end);
CREATE TABLE image (
img_name TEXT NOT NULL PRIMARY KEY,
- img_size SMALLINT NOT NULL,
- img_width SMALLINT NOT NULL,
- img_height SMALLINT NOT NULL,
+ img_size INTEGER NOT NULL,
+ img_width INTEGER NOT NULL,
+ img_height INTEGER NOT NULL,
img_metadata TEXT,
img_bits SMALLINT,
img_media_type TEXT,
img_major_mime TEXT DEFAULT 'unknown',
img_minor_mime TEXT DEFAULT 'unknown',
img_description TEXT NOT NULL,
- img_user INTEGER NULL REFERENCES "user"(user_id) ON DELETE SET NULL,
+ img_user INTEGER NULL REFERENCES mwuser(user_id) ON DELETE SET NULL,
img_user_text TEXT NOT NULL,
img_timestamp TIMESTAMPTZ
);
@@ -217,12 +240,12 @@ CREATE INDEX img_timestamp_idx ON image (img_timestamp);
CREATE TABLE oldimage (
oi_name TEXT NOT NULL REFERENCES image(img_name),
oi_archive_name TEXT NOT NULL,
- oi_size SMALLINT NOT NULL,
- oi_width SMALLINT NOT NULL,
- oi_height SMALLINT NOT NULL,
+ oi_size INTEGER NOT NULL,
+ oi_width INTEGER NOT NULL,
+ oi_height INTEGER NOT NULL,
oi_bits SMALLINT NOT NULL,
oi_description TEXT,
- oi_user INTEGER NULL REFERENCES "user"(user_id) ON DELETE SET NULL,
+ oi_user INTEGER NULL REFERENCES mwuser(user_id) ON DELETE SET NULL,
oi_user_text TEXT NOT NULL,
oi_timestamp TIMESTAMPTZ NOT NULL
);
@@ -235,7 +258,7 @@ CREATE TABLE filearchive (
fa_archive_name TEXT,
fa_storage_group VARCHAR(16),
fa_storage_key CHAR(64),
- fa_deleted_user INTEGER NULL REFERENCES "user"(user_id) ON DELETE SET NULL,
+ fa_deleted_user INTEGER NULL REFERENCES mwuser(user_id) ON DELETE SET NULL,
fa_deleted_timestamp TIMESTAMPTZ NOT NULL,
fa_deleted_reason TEXT,
fa_size SMALLINT NOT NULL,
@@ -247,7 +270,7 @@ CREATE TABLE filearchive (
fa_major_mime TEXT DEFAULT 'unknown',
fa_minor_mime TEXT DEFAULT 'unknown',
fa_description TEXT NOT NULL,
- fa_user INTEGER NULL REFERENCES "user"(user_id) ON DELETE SET NULL,
+ fa_user INTEGER NULL REFERENCES mwuser(user_id) ON DELETE SET NULL,
fa_user_text TEXT NOT NULL,
fa_timestamp TIMESTAMPTZ
);
@@ -262,7 +285,7 @@ CREATE TABLE recentchanges (
rc_id INTEGER NOT NULL PRIMARY KEY DEFAULT nextval('rc_rc_id_seq'),
rc_timestamp TIMESTAMPTZ NOT NULL,
rc_cur_time TIMESTAMPTZ NOT NULL,
- rc_user INTEGER NULL REFERENCES "user"(user_id) ON DELETE SET NULL,
+ rc_user INTEGER NULL REFERENCES mwuser(user_id) ON DELETE SET NULL,
rc_user_text TEXT NOT NULL,
rc_namespace SMALLINT NOT NULL,
rc_title TEXT NOT NULL,
@@ -270,7 +293,7 @@ CREATE TABLE recentchanges (
rc_minor CHAR NOT NULL DEFAULT '0',
rc_bot CHAR NOT NULL DEFAULT '0',
rc_new CHAR NOT NULL DEFAULT '0',
- rc_cur_id INTEGER NOT NULL REFERENCES page(page_id),
+ rc_cur_id INTEGER NULL REFERENCES page(page_id) ON DELETE SET NULL,
rc_this_oldid INTEGER NOT NULL,
rc_last_oldid INTEGER NOT NULL,
rc_type CHAR NOT NULL DEFAULT '0',
@@ -287,7 +310,7 @@ CREATE INDEX rc_ip ON recentchanges (rc_ip);
CREATE TABLE watchlist (
- wl_user INTEGER NOT NULL REFERENCES "user"(user_id) ON DELETE CASCADE,
+ wl_user INTEGER NOT NULL REFERENCES mwuser(user_id) ON DELETE CASCADE,
wl_namespace SMALLINT NOT NULL DEFAULT 0,
wl_title TEXT NOT NULL,
wl_notificationtimestamp TIMESTAMPTZ
@@ -343,7 +366,7 @@ CREATE TABLE logging (
log_type TEXT NOT NULL,
log_action TEXT NOT NULL,
log_timestamp TIMESTAMPTZ NOT NULL,
- log_user INTEGER REFERENCES "user"(user_id) ON DELETE SET NULL,
+ log_user INTEGER REFERENCES mwuser(user_id) ON DELETE SET NULL,
log_namespace SMALLINT NOT NULL,
log_title TEXT NOT NULL,
log_comment TEXT,
@@ -383,38 +406,71 @@ CREATE FUNCTION ts2_page_title() RETURNS TRIGGER LANGUAGE plpgsql AS
$mw$
BEGIN
IF TG_OP = 'INSERT' THEN
- NEW.titlevector = to_tsvector(NEW.page_title);
+ NEW.titlevector = to_tsvector('default',NEW.page_title);
ELSIF NEW.page_title != OLD.page_title THEN
- NEW.titlevector := to_tsvector(NEW.page_title);
+ NEW.titlevector := to_tsvector('default',NEW.page_title);
END IF;
RETURN NEW;
END;
$mw$;
CREATE TRIGGER ts2_page_title BEFORE INSERT OR UPDATE ON page
-FOR EACH ROW EXECUTE PROCEDURE ts2_page_title();
+ FOR EACH ROW EXECUTE PROCEDURE ts2_page_title();
-ALTER TABLE text ADD textvector tsvector;
-CREATE INDEX ts2_page_text ON text USING gist(textvector);
+ALTER TABLE pagecontent ADD textvector tsvector;
+CREATE INDEX ts2_page_text ON pagecontent USING gist(textvector);
CREATE FUNCTION ts2_page_text() RETURNS TRIGGER LANGUAGE plpgsql AS
$mw$
BEGIN
IF TG_OP = 'INSERT' THEN
- NEW.textvector = to_tsvector(NEW.old_text);
+ NEW.textvector = to_tsvector('default',NEW.old_text);
ELSIF NEW.old_text != OLD.old_text THEN
- NEW.textvector := to_tsvector(NEW.old_text);
+ NEW.textvector := to_tsvector('default',NEW.old_text);
END IF;
RETURN NEW;
END;
$mw$;
-CREATE TRIGGER ts2_page_text BEFORE INSERT OR UPDATE ON text
-FOR EACH ROW EXECUTE PROCEDURE ts2_page_text();
+CREATE TRIGGER ts2_page_text BEFORE INSERT OR UPDATE ON pagecontent
+ FOR EACH ROW EXECUTE PROCEDURE ts2_page_text();
-CREATE OR REPLACE FUNCTION add_interwiki (TEXT,INT,CHAR) RETURNS INT LANGUAGE SQL AS
+CREATE FUNCTION add_interwiki (TEXT,INT,CHAR) RETURNS INT LANGUAGE SQL AS
$mw$
INSERT INTO interwiki (iw_prefix, iw_url, iw_local) VALUES ($1,$2,$3);
SELECT 1;
$mw$;
+
+-- This table is not used unless profiling is turned on
+CREATE TABLE profiling (
+ pf_count INTEGER NOT NULL DEFAULT 0,
+ pf_time NUMERIC(18,10) NOT NULL DEFAULT 0,
+ pf_name TEXT NOT NULL,
+ pf_server TEXT NULL
+);
+CREATE UNIQUE INDEX pf_name_server ON profiling (pf_name, pf_server);
+
+
+CREATE TABLE mediawiki_version (
+ type TEXT NOT NULL,
+ mw_version TEXT NOT NULL,
+ notes TEXT NULL,
+
+ pg_version TEXT NULL,
+ pg_dbname TEXT NULL,
+ pg_user TEXT NULL,
+ pg_port TEXT NULL,
+ mw_schema TEXT NULL,
+ ts2_schema TEXT NULL,
+ ctype TEXT NULL,
+
+ sql_version TEXT NULL,
+ sql_date TEXT NULL,
+ cdate TIMESTAMPTZ NOT NULL DEFAULT now()
+);
+
+INSERT INTO mediawiki_version (type,mw_version,sql_version,sql_date)
+ VALUES ('Creation','??','$LastChangedRevision: 16747 $','$LastChangedDate: 2006-10-02 17:55:26 -0700 (Mon, 02 Oct 2006) $');
+
+
COMMIT;
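
As an illustration of the char(14) timestamp workaround above (the values are invented), an INSERT against the archive view is rewritten by the archive_insert rule into archive2, storing a real TIMESTAMPTZ, and reading the row back through the view re-serializes it into MediaWiki's 14-character string format:

INSERT INTO archive (ar_namespace, ar_title, ar_text, ar_comment, ar_user, ar_user_text,
                     ar_timestamp, ar_minor_edit, ar_flags, ar_rev_id, ar_text_id)
  VALUES (0, 'Deleted_page', 'old wikitext', 'test entry', NULL, 'Anonymous',
          '20061002175526', '0', 'utf-8', NULL, NULL);

SELECT ar_title, ar_timestamp FROM archive;
--   ar_title   | ar_timestamp
-- Deleted_page | 20061002175526

The PHP layer keeps reading and writing its usual timestamp strings while the underlying archive2 column remains a proper TIMESTAMPTZ.
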
diff --git a/maintenance/postgres/wp_mysql2postgres.pl b/maintenance/postgres/wp_mysql2postgres.pl
new file mode 100644
index 00000000..788d9e0b
--- /dev/null
+++ b/maintenance/postgres/wp_mysql2postgres.pl
@@ -0,0 +1,400 @@
+#!/usr/bin/perl
+
+## Convert data from a MySQL mediawiki database into a Postgres mediawiki database
+## svn: $Id: wp_mysql2postgres.pl 16088 2006-08-16 01:12:20Z greg $
+
+use strict;
+use warnings;
+use Data::Dumper;
+use Getopt::Long;
+
+use vars qw(%table %tz %special @torder $COM);
+my $VERSION = "1.0";
+
+## The following options can be changed via command line arguments:
+my $MYSQLDB = 'wikidb';
+my $MYSQLUSER = 'wikiuser';
+
+## If the following are zero-length, we omit their arguments entirely:
+my $MYSQLHOST = '';
+my $MYSQLPASSWORD = '';
+my $MYSQLSOCKET = '';
+
+## Name of the dump file created
+my $MYSQLDUMPFILE = "mediawiki_upgrade.pg";
+
+## How verbose should this script be (0, 1, or 2)
+my $verbose = 0;
+
+my $USAGE = "
+Usage: $0 [OPTION]...
+Convert a MediaWiki schema from MySQL to Postgres
+Example: $0 --db=wikidb --user=wikiuser --pass=sushi
+Options:
+ db Name of the MySQL database
+ user MySQL database username
+ pass MySQL database password
+ host MySQL database host
+ socket MySQL database socket
+ verbose Verbosity, increases with multiple uses
+";
+
+GetOptions
+ (
+ "db=s" => \$MYSQLDB,
+ "user=s" => \$MYSQLUSER,
+ "pass=s" => \$MYSQLPASSWORD,
+ "host=s" => \$MYSQLHOST,
+ "socket=s" => \$MYSQLSOCKET,
+ "verbose+" => \$verbose
+ );
+
+## The Postgres schema file: should not be changed
+my $PG_SCHEMA = "tables.sql";
+
+## What version we default to when we can't parse the old schema
+my $MW_DEFAULT_VERSION = '1.8';
+
+## Try and find a working version of mysqldump
+$verbose and warn "Locating the mysqldump executable\n";
+my @MYSQLDUMP = ("/usr/local/bin/mysqldump", "/usr/bin/mysqldump");
+my $MYSQLDUMP;
+for my $mytry (@MYSQLDUMP) {
+ next if ! -e $mytry;
+ -x $mytry or die qq{Not an executable file: "$mytry"\n};
+ my $version = qx{$mytry -V};
+ $version =~ /^mysqldump\s+Ver\s+\d+/ or die qq{Program at "$mytry" does not act like mysqldump\n};
+ $MYSQLDUMP = $mytry;
+}
+$MYSQLDUMP or die qq{Could not find the mysqldump program\n};
+
+## Flags we use for mysqldump
+my @MYSQLDUMPARGS = qw(
+--skip-lock-tables
+--complete-insert
+--skip-extended-insert
+--skip-add-drop-table
+--skip-add-locks
+--skip-disable-keys
+--skip-set-charset
+--skip-comments
+--skip-quote-names
+);
+
+
+$verbose and warn "Checking that mysqldump can handle our flags\n";
+## Make sure this version can handle all the flags we want.
+## Combine with user dump below
+my $MYSQLDUMPARGS = join " " => @MYSQLDUMPARGS;
+## Argh. Any way to make this work on Win32?
+my $version = qx{$MYSQLDUMP $MYSQLDUMPARGS 2>&1};
+if ($version =~ /unknown option/) {
+ die qq{Sorry, you need to use a newer version of the mysqldump program than the one at "$MYSQLDUMP"\n};
+}
+
+push @MYSQLDUMPARGS, "--user=$MYSQLUSER";
+length $MYSQLPASSWORD and push @MYSQLDUMPARGS, "--password=$MYSQLPASSWORD";
+length $MYSQLHOST and push @MYSQLDUMPARGS, "--host=$MYSQLHOST";
+
+## Open the dump file to hold the mysqldump output
+open my $mdump, "+>", $MYSQLDUMPFILE or die qq{Could not open "$MYSQLDUMPFILE": $!\n};
+$verbose and warn qq{Writing file "$MYSQLDUMPFILE"\n};
+
+open my $mfork2, "-|" or exec $MYSQLDUMP, @MYSQLDUMPARGS, "--no-data", $MYSQLDB;
+my $oldselect = select $mdump;
+
+print while <$mfork2>;
+
+## Slurp in the current schema
+my $current_schema;
+seek $mdump, 0, 0;
+{
+ local $/;
+ $current_schema = <$mdump>;
+}
+seek $mdump, 0, 0;
+truncate $mdump, 0;
+
+warn qq{Trying to determine database version...\n} if $verbose;
+
+my $current_version = 0;
+if ($current_schema =~ /CREATE TABLE \S+cur /) {
+ $current_version = '1.3';
+}
+elsif ($current_schema =~ /CREATE TABLE \S+brokenlinks /) {
+ $current_version = '1.4';
+}
+elsif ($current_schema !~ /CREATE TABLE \S+templatelinks /) {
+ $current_version = '1.5';
+}
+elsif ($current_schema !~ /CREATE TABLE \S+validate /) {
+ $current_version = '1.6';
+}
+elsif ($current_schema !~ /ipb_auto tinyint/) {
+ $current_version = '1.7';
+}
+else {
+ $current_version = '1.8';
+}
+
+if (!$current_version) {
+ warn qq{WARNING! Could not figure out the old version, assuming MediaWiki $MW_DEFAULT_VERSION\n};
+ $current_version = $MW_DEFAULT_VERSION;
+}
+
+## Check for a table prefix:
+my $table_prefix = '';
+if ($current_schema =~ /CREATE TABLE (\S+)archive /) {
+ $table_prefix = $1;
+}
+
+warn qq{Old schema is from MediaWiki version $current_version\n} if $verbose;
+warn qq{Table prefix is "$table_prefix"\n} if $verbose and length $table_prefix;
+
+$verbose and warn qq{Writing file "$MYSQLDUMPFILE"\n};
+my $now = scalar localtime();
+my $conninfo = '';
+$MYSQLHOST and $conninfo .= "\n-- host $MYSQLHOST";
+$MYSQLSOCKET and $conninfo .= "\n-- socket $MYSQLSOCKET";
+
+print qq{
+-- Dump of MySQL Mediawiki tables for import into a Postgres Mediawiki schema
+-- Performed by the program: $0
+-- Version: $VERSION (subversion }.q{$LastChangedRevision: 16088 $}.qq{)
+-- Author: Greg Sabino Mullane <greg\@turnstep.com> Comments welcome
+--
+-- This file was created: $now
+-- Executable used: $MYSQLDUMP
+-- Connection information:
+-- database: $MYSQLDB
+-- user: $MYSQLUSER$conninfo
+
+-- This file can be imported manually with psql like so:
+-- psql -p port# -h hostname -U username -f $MYSQLDUMPFILE databasename
+-- This will overwrite any existing MediaWiki information, so be careful
+
+
+};
+
+warn qq{Reading in the Postgres schema information\n} if $verbose;
+open my $schema, "<", $PG_SCHEMA
+ or die qq{Could not open "$PG_SCHEMA": make sure this script is run from maintenance/postgres/\n};
+my $t;
+while (<$schema>) {
+ if (/CREATE TABLE\s+(\S+)/) {
+ $t = $1;
+ $table{$t}={};
+ }
+ elsif (/^ +(\w+)\s+TIMESTAMP/) {
+ $tz{$t}{$1}++;
+ }
+ elsif (/REFERENCES\s*([^( ]+)/) {
+ my $ref = $1;
+ exists $table{$ref} or die qq{No parent table $ref found for $t\n};
+ $table{$t}{$ref}++;
+ }
+}
+close $schema;
+
+## Read in special cases and table/version information
+$verbose and warn qq{Reading in schema exception information\n};
+my %version_tables;
+while (<DATA>) {
+ if (/^VERSION\s+(\d+\.\d+):\s+(.+)/) {
+ my $list = join '|' => split /\s+/ => $2;
+ $version_tables{$1} = qr{\b$list\b};
+ next;
+ }
+ next unless /^(\w+)\s*(.*)/;
+ $special{$1} = $2||'';
+ $special{$2} = $1 if length $2;
+}
+
+## Determine the order of tables based on foreign key constraints
+$verbose and warn qq{Figuring out order of tables to dump\n};
+my %dumped;
+my $bail = 0;
+{
+ my $found=0;
+ T: for my $t (sort keys %table) {
+ next if exists $dumped{$t} and $dumped{$t} >= 1;
+ $found=1;
+ for my $dep (sort keys %{$table{$t}}) {
+ next T if ! exists $dumped{$dep} or $dumped{$dep} < 0;
+ }
+ $dumped{$t} = -1 if ! exists $dumped{$t};
+ ## Skip certain tables that are not imported
+ next if exists $special{$t} and !$special{$t};
+ push @torder, $special{$t} || $t;
+ }
+ last if !$found;
+ push @torder, "---";
+ for (values %dumped) { $_+=2; }
+ die "Too many loops!\n" if $bail++ > 1000;
+ redo;
+}
+
+## Prepare the Postgres database for the move
+$verbose and warn qq{Writing Postgres transformation information\n};
+
+print "\n-- Empty out all existing tables\n";
+$verbose and warn qq{Writing truncates to empty existing tables\n};
+for my $t (@torder) {
+ next if $t eq '---';
+ my $tname = $special{$t}||$t;
+ printf qq{TRUNCATE TABLE %-18s CASCADE;\n}, qq{"$tname"};
+}
+print "\n\n";
+
+print qq{-- Rename the "text" table\n};
+print qq{ALTER TABLE pagecontent RENAME TO "text";\n\n};
+
+print qq{-- Allow rc_ip to contain empty string, will convert at end\n};
+print qq{ALTER TABLE recentchanges ALTER rc_ip TYPE text USING host(rc_ip);\n\n};
+
+print "-- Changing all timestamp fields to handle raw integers\n";
+for my $t (sort keys %tz) {
+ next if $t eq "archive2";
+ for my $c (sort keys %{$tz{$t}}) {
+ printf "ALTER TABLE %-18s ALTER %-25s TYPE TEXT;\n", $t, $c;
+ }
+}
+print "\n";
+
+print qq{
+INSERT INTO page VALUES (0,-1,'Dummy Page','',0,0,0,default,now(),0,10);
+};
+
+## If we have a table_prefix, we need to temporarily rename all of our Postgres
+## tables for the import. Perhaps consider making this an auto-schema
+## thing in the future.
+if (length $table_prefix) {
+ print qq{\n\n-- Temporarily renaming tables to accommodate the table_prefix "$table_prefix"\n\n};
+ for my $t (@torder) {
+ next if $t eq '---';
+ my $tname = $special{$t}||$t;
+ printf qq{ALTER TABLE %-18s RENAME TO "${table_prefix}$tname";\n}, qq{"$tname"};
+ }
+}
+
+
+## Try and dump the ill-named "user" table:
+## We do this table alone because "user" is a reserved word.
+print qq{
+
+SET escape_string_warning TO 'off';
+\\o /dev/null
+
+-- Postgres uses a table name of "mwuser" instead of "user"
+
+-- Create a dummy user to satisfy fk constraints especially with revisions
+SELECT setval('user_user_id_seq',0,'false');
+INSERT INTO mwuser
+ VALUES (DEFAULT,'Anonymous','',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,now(),now());
+
+};
+
+push @MYSQLDUMPARGS, "--no-create-info";
+
+$verbose and warn qq{Dumping "user" table\n};
+$verbose > 2 and warn Dumper \@MYSQLDUMPARGS;
+my $usertable = "${table_prefix}user";
+open my $mfork, "-|" or exec $MYSQLDUMP, @MYSQLDUMPARGS, $MYSQLDB, $usertable;
+## Unfortunately, there is no easy way to catch errors
+my $numusers = 0;
+while (<$mfork>) {
+ ++$numusers and print if s/INSERT INTO $usertable/INSERT INTO mwuser/;
+}
+close $mfork;
+if ($numusers < 1) {
+ warn qq{No users found, probably a connection error.\n};
+ print qq{ERROR: No users found, connection failed, or table "$usertable" does not exist. Dump aborted.\n};
+ close $mdump;
+ exit;
+}
+print "\n-- Users loaded: $numusers\n\n-- Loading rest of the mediawiki schema:\n";
+
+warn qq{Dumping all other tables from the MySQL schema\n} if $verbose;
+
+## Dump the rest of the tables, in chunks based on constraints
+## We do not need the user table:
+my @dumplist = grep { $_ ne 'user'} @torder;
+my @alist;
+{
+ undef @alist;
+ PICKATABLE: {
+ my $tname = shift @dumplist;
+ ## XXX Make this dynamic below
+ for my $ver (sort {$b <=> $a } keys %version_tables) {
+ redo PICKATABLE if $tname =~ $version_tables{$ver};
+ }
+ $tname = "${table_prefix}$tname" if length $table_prefix;
+ push @alist, $tname;
+ pop @alist and last if index($alist[-1],'---') >= 0;
+ redo if @dumplist;
+ }
+
+ ## Dump everything else
+ open my $mfork2, "-|" or exec $MYSQLDUMP, @MYSQLDUMPARGS, $MYSQLDB, @alist;
+ print while <$mfork2>;
+ close $mfork2;
+ warn qq{Finished dumping from MySQL\n} if $verbose;
+
+ redo if @dumplist;
+}
+
+warn qq{Writing information to return Postgres database to normal\n} if $verbose;
+print qq{ALTER TABLE "${table_prefix}text" RENAME TO pagecontent;\n};
+print qq{ALTER TABLE ${table_prefix}recentchanges ALTER rc_ip TYPE cidr USING\n};
+print qq{ CASE WHEN rc_ip = '' THEN NULL ELSE rc_ip::cidr END;\n};
+
+## Return tables to their original names if a table prefix was used.
+if (length $table_prefix) {
+ print qq{\n\n-- Renaming tables by removing table prefix "$table_prefix"\n\n};
+ my $maxsize = 18;
+ for (@torder) {
+ $maxsize = length "$_$table_prefix" if length "$_$table_prefix" > $maxsize;
+ }
+ for my $t (@torder) {
+ next if $t eq '---' or $t eq 'text';
+ my $tname = $special{$t}||$t;
+ printf qq{ALTER TABLE %*s RENAME TO "$tname";\n}, $maxsize+1, qq{"${table_prefix}$tname"};
+ }
+}
+
+print qq{\n\n-- Returning timestamps to normal\n};
+for my $t (sort keys %tz) {
+ next if $t eq "archive2";
+ for my $c (sort keys %{$tz{$t}}) {
+ printf "ALTER TABLE %-18s ALTER %-25s TYPE timestamptz\n".
+ " USING TO_TIMESTAMP($c,'YYYYMMDDHHMISS');\n", $t, $c;
+ }
+}
+
+## Finally, make a record in the mediawiki_version table about this import
+print qq{
+INSERT INTO mediawiki_version (type,mw_version,notes) VALUES ('MySQL import','??',
+'Imported from file created on $now. Old version: $current_version');
+};
+
+
+print "\\o\n\n-- End of dump\n\n";
+select $oldselect;
+close $mdump;
+exit;
+
+
+__DATA__
+## Known remappings: either indicate the MySQL name,
+## or leave blank if it should be skipped
+pagecontent text
+mwuser user
+mediawiki_version
+archive2
+profiling
+objectcache
+
+## Which tables to ignore depending on the version
+VERSION 1.5: trackback
+VERSION 1.6: externallinks job templatelinks transcache
+VERSION 1.7: filearchive langlinks querycache_info
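
For orientation (an abridged sketch only; the actual table names, prefix handling and column list depend on the source wiki and its MediaWiki version, and page_touched is just one example of a converted timestamp column), the mediawiki_upgrade.pg file written by this script is plain SQL of roughly this shape, later fed to psql as described in the header it generates:

-- Empty out all existing tables
TRUNCATE TABLE "mwuser"           CASCADE;
TRUNCATE TABLE "page"             CASCADE;

-- Rename the "text" table
ALTER TABLE pagecontent RENAME TO "text";

-- Changing all timestamp fields to handle raw integers
ALTER TABLE page               ALTER page_touched             TYPE TEXT;

-- Dummy user, then the mysqldump INSERT statements for every table
SELECT setval('user_user_id_seq',0,'false');
INSERT INTO mwuser
 VALUES (DEFAULT,'Anonymous','',NULL,NULL,NULL,NULL,NULL,NULL,NULL,NULL,now(),now());
-- ... INSERT INTO mwuser ...; INSERT INTO page ...; and so on ...

-- Undo the temporary changes and convert the timestamps back
ALTER TABLE "text" RENAME TO pagecontent;
ALTER TABLE page               ALTER page_touched             TYPE timestamptz
 USING TO_TIMESTAMP(page_touched,'YYYYMMDDHH24MISS');
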
diff --git a/maintenance/rebuildImages.php b/maintenance/rebuildImages.php
index 45477097..38b89a48 100644
--- a/maintenance/rebuildImages.php
+++ b/maintenance/rebuildImages.php
@@ -125,8 +125,8 @@ class ImageBuilder extends FiveUpgrade {
// Fill in the new image info fields
$info = $this->imageInfo( $row->img_name );
- global $wgMemc, $wgDBname;
- $key = $wgDBname . ":Image:" . md5( $row->img_name );
+ global $wgMemc;
+ $key = wfMemcKey( "Image", md5( $row->img_name ) );
$wgMemc->delete( $key );
return array(
diff --git a/maintenance/refreshImageCount.php b/maintenance/refreshImageCount.php
index 15ce2b91..88ac3c52 100644
--- a/maintenance/refreshImageCount.php
+++ b/maintenance/refreshImageCount.php
@@ -10,7 +10,7 @@ $dbw =& wfGetDB( DB_MASTER );
// Load the current value from the master
$count = $dbw->selectField( 'site_stats', 'ss_images' );
-echo "$wgDBname: forcing ss_images to $count\n";
+echo wfWikiID().": forcing ss_images to $count\n";
// First set to NULL so that it changes on the master
$dbw->update( 'site_stats',
@@ -22,4 +22,4 @@ $dbw->update( 'site_stats',
array( 'ss_images' => $count ),
array( 'ss_row_id' => 1 ) );
-?> \ No newline at end of file
+?>
diff --git a/maintenance/runJobs.php b/maintenance/runJobs.php
index d72addc7..343cda8a 100644
--- a/maintenance/runJobs.php
+++ b/maintenance/runJobs.php
@@ -1,13 +1,22 @@
<?php
+$optionsWithArgs = array( 'maxjobs' );
+$wgUseNormalUser = true;
require_once( 'commandLine.inc' );
require_once( "$IP/includes/JobQueue.php" );
require_once( "$IP/includes/FakeTitle.php" );
+if ( isset( $options['maxjobs'] ) ) {
+ $maxJobs = $options['maxjobs'];
+} else {
+ $maxJobs = 10000;
+}
+
// Trigger errors on inappropriate use of $wgTitle
$wgTitle = new FakeTitle;
$dbw =& wfGetDB( DB_MASTER );
+$n = 0;
while ( $dbw->selectField( 'job', 'count(*)', '', 'runJobs.php' ) ) {
while ( false != ($job = Job::pop()) ) {
wfWaitForSlaves( 5 );
@@ -15,6 +24,9 @@ while ( $dbw->selectField( 'job', 'count(*)', '', 'runJobs.php' ) ) {
if ( !$job->run() ) {
print "Error: {$job->error}\n";
}
+ if ( $maxJobs && ++$n > $maxJobs ) {
+ break 2;
+ }
}
}
?>
diff --git a/maintenance/stats.php b/maintenance/stats.php
index 8ebc3823..bb19e671 100644
--- a/maintenance/stats.php
+++ b/maintenance/stats.php
@@ -2,8 +2,8 @@
require_once('commandLine.inc');
print "Requests\n";
-$session = intval($wgMemc->get("$wgDBname:stats:request_with_session"));
-$noSession = intval($wgMemc->get("$wgDBname:stats:request_without_session"));
+$session = intval($wgMemc->get(wfMemcKey('stats','request_with_session')));
+$noSession = intval($wgMemc->get(wfMemcKey('stats','request_without_session')));
$total = $session + $noSession;
printf( "with session: %-10d %6.2f%%\n", $session, $session/$total*100 );
printf( "without session: %-10d %6.2f%%\n", $noSession, $noSession/$total*100 );
@@ -11,11 +11,11 @@ printf( "total: %-10d %6.2f%%\n", $total, 100 );
print "\nParser cache\n";
-$hits = intval($wgMemc->get("$wgDBname:stats:pcache_hit"));
-$invalid = intval($wgMemc->get("$wgDBname:stats:pcache_miss_invalid"));
-$expired = intval($wgMemc->get("$wgDBname:stats:pcache_miss_expired"));
-$absent = intval($wgMemc->get("$wgDBname:stats:pcache_miss_absent"));
-$stub = intval($wgMemc->get("$wgDBname:stats:pcache_miss_stub"));
+$hits = intval($wgMemc->get(wfMemcKey('stats','pcache_hit')));
+$invalid = intval($wgMemc->get(wfMemcKey('stats','pcache_miss_invalid')));
+$expired = intval($wgMemc->get(wfMemcKey('stats','pcache_miss_expired')));
+$absent = intval($wgMemc->get(wfMemcKey('stats','pcache_miss_absent')));
+$stub = intval($wgMemc->get(wfMemcKey('stats','pcache_miss_stub')));
$total = $hits + $invalid + $expired + $absent + $stub;
printf( "hits: %-10d %6.2f%%\n", $hits, $hits/$total*100 );
printf( "invalid: %-10d %6.2f%%\n", $invalid, $invalid/$total*100 );
@@ -24,18 +24,18 @@ printf( "absent: %-10d %6.2f%%\n", $absent, $absent/$total*100 );
printf( "stub threshold: %-10d %6.2f%%\n", $stub, $stub/$total*100 );
printf( "total: %-10d %6.2f%%\n", $total, 100 );
-$hits = intval($wgMemc->get("$wgDBname:stats:image_cache_hit"));
-$misses = intval($wgMemc->get("$wgDBname:stats:image_cache_miss"));
-$updates = intval($wgMemc->get("$wgDBname:stats:image_cache_update"));
+$hits = intval($wgMemc->get(wfMemcKey('stats','image_cache_hit')));
+$misses = intval($wgMemc->get(wfMemcKey('stats','image_cache_miss')));
+$updates = intval($wgMemc->get(wfMemcKey('stats','image_cache_update')));
$total = $hits + $misses;
print("\nImage cache\n");
printf( "hits: %-10d %6.2f%%\n", $hits, $hits/$total*100 );
printf( "misses: %-10d %6.2f%%\n", $misses, $misses/$total*100 );
printf( "updates: %-10d\n", $updates );
-$hits = intval($wgMemc->get("$wgDBname:stats:diff_cache_hit"));
-$misses = intval($wgMemc->get("$wgDBname:stats:diff_cache_miss"));
-$uncacheable = intval($wgMemc->get("$wgDBname:stats:diff_uncacheable"));
+$hits = intval($wgMemc->get(wfMemcKey('stats','diff_cache_hit')));
+$misses = intval($wgMemc->get(wfMemcKey('stats','diff_cache_miss')));
+$uncacheable = intval($wgMemc->get(wfMemcKey('stats','diff_uncacheable')));
$total = $hits + $misses + $uncacheable;
print("\nDiff cache\n");
printf( "hits: %-10d %6.2f%%\n", $hits, $hits/$total*100 );
diff --git a/maintenance/storage/checkStorage.php b/maintenance/storage/checkStorage.php
index a83d2744..579954d5 100644
--- a/maintenance/storage/checkStorage.php
+++ b/maintenance/storage/checkStorage.php
@@ -1,468 +1,468 @@
-<?php
-
-/**
- * Fsck for MediaWiki
- */
-
-define( 'CONCAT_HEADER', 'O:27:"concatenatedgziphistoryblob"' );
-
-if ( !defined( 'MEDIAWIKI' ) ) {
- require_once( dirname(__FILE__) . '/../commandLine.inc' );
- require_once( 'ExternalStore.php' );
- require_once( 'ExternalStoreDB.php' );
- require_once( 'SpecialImport.php' );
-
- $cs = new CheckStorage;
- $fix = isset( $options['fix'] );
- if ( isset( $args[0] ) ) {
- $xml = $args[0];
- } else {
- $xml = false;
- }
- $cs->check( $fix, $xml );
-}
-
-
-//----------------------------------------------------------------------------------
-
-class CheckStorage
-{
- var $oldIdMap, $errors;
- var $dbStore = null;
-
- var $errorDescriptions = array(
- 'restore text' => 'Damaged text, need to be restored from a backup',
- 'restore revision' => 'Damaged revision row, need to be restored from a backup',
- 'unfixable' => 'Unexpected errors with no automated fixing method',
- 'fixed' => 'Errors already fixed',
- 'fixable' => 'Errors which would already be fixed if --fix was specified',
- );
-
- function check( $fix = false, $xml = '' ) {
- $fname = 'checkStorage';
- $dbr =& wfGetDB( DB_SLAVE );
- if ( $fix ) {
- $dbw =& wfGetDB( DB_MASTER );
- print "Checking, will fix errors if possible...\n";
- } else {
- print "Checking...\n";
- }
- $maxRevId = $dbr->selectField( 'revision', 'MAX(rev_id)', false, $fname );
- $chunkSize = 1000;
- $flagStats = array();
- $objectStats = array();
- $knownFlags = array( 'external', 'gzip', 'object', 'utf-8' );
- $this->errors = array(
- 'restore text' => array(),
- 'restore revision' => array(),
- 'unfixable' => array(),
- 'fixed' => array(),
- 'fixable' => array(),
- );
-
- for ( $chunkStart = 1 ; $chunkStart < $maxRevId; $chunkStart += $chunkSize ) {
- $chunkEnd = $chunkStart + $chunkSize - 1;
- //print "$chunkStart of $maxRevId\n";
-
- // Fetch revision rows
- $this->oldIdMap = array();
- $dbr->ping();
- $res = $dbr->select( 'revision', array( 'rev_id', 'rev_text_id' ),
- array( "rev_id BETWEEN $chunkStart AND $chunkEnd" ), $fname );
- while ( $row = $dbr->fetchObject( $res ) ) {
- $this->oldIdMap[$row->rev_id] = $row->rev_text_id;
- }
- $dbr->freeResult( $res );
-
- if ( !count( $this->oldIdMap ) ) {
- continue;
- }
-
- // Fetch old_flags
- $missingTextRows = array_flip( $this->oldIdMap );
- $externalRevs = array();
- $objectRevs = array();
- $res = $dbr->select( 'text', array( 'old_id', 'old_flags' ),
- 'old_id IN (' . implode( ',', $this->oldIdMap ) . ')', $fname );
- while ( $row = $dbr->fetchObject( $res ) ) {
- $flags = $row->old_flags;
- $id = $row->old_id;
-
- // Create flagStats row if it doesn't exist
- $flagStats = $flagStats + array( $flags => 0 );
- // Increment counter
- $flagStats[$flags]++;
-
- // Not missing
- unset( $missingTextRows[$row->old_id] );
-
- // Check for external or object
- if ( $flags == '' ) {
- $flagArray = array();
- } else {
- $flagArray = explode( ',', $flags );
- }
- if ( in_array( 'external', $flagArray ) ) {
- $externalRevs[] = $id;
- } elseif ( in_array( 'object', $flagArray ) ) {
- $objectRevs[] = $id;
- }
-
- // Check for unrecognised flags
- if ( $flags == '0' ) {
- // This is a known bug from 2004
- // It's safe to just erase the old_flags field
- if ( $fix ) {
- $this->error( 'fixed', "Warning: old_flags set to 0", $id );
- $dbw->ping();
- $dbw->update( 'text', array( 'old_flags' => '' ),
- array( 'old_id' => $id ), $fname );
- echo "Fixed\n";
- } else {
- $this->error( 'fixable', "Warning: old_flags set to 0", $id );
- }
- } elseif ( count( array_diff( $flagArray, $knownFlags ) ) ) {
- $this->error( 'unfixable', "Error: invalid flags field \"$flags\"", $id );
- }
- }
- $dbr->freeResult( $res );
-
- // Output errors for any missing text rows
- foreach ( $missingTextRows as $oldId => $revId ) {
- $this->error( 'restore revision', "Error: missing text row", $oldId );
- }
-
- // Verify external revisions
- $externalConcatBlobs = array();
- $externalNormalBlobs = array();
- if ( count( $externalRevs ) ) {
- $res = $dbr->select( 'text', array( 'old_id', 'old_flags', 'old_text' ),
- array( 'old_id IN (' . implode( ',', $externalRevs ) . ')' ), $fname );
- while ( $row = $dbr->fetchObject( $res ) ) {
- $urlParts = explode( '://', $row->old_text, 2 );
- if ( count( $urlParts ) !== 2 || $urlParts[1] == '' ) {
- $this->error( 'restore text', "Error: invalid URL \"{$row->old_text}\"", $row->old_id );
- continue;
- }
- list( $proto, $path ) = $urlParts;
- if ( $proto != 'DB' ) {
- $this->error( 'restore text', "Error: invalid external protocol \"$proto\"", $row->old_id );
- continue;
- }
- $path = explode( '/', $row->old_text );
- $cluster = $path[2];
- $id = $path[3];
- if ( isset( $path[4] ) ) {
- $externalConcatBlobs[$cluster][$id][] = $row->old_id;
- } else {
- $externalNormalBlobs[$cluster][$id][] = $row->old_id;
- }
- }
- $dbr->freeResult( $res );
- }
-
- // Check external concat blobs for the right header
- $this->checkExternalConcatBlobs( $externalConcatBlobs );
-
- // Check external normal blobs for existence
- if ( count( $externalNormalBlobs ) ) {
- if ( is_null( $this->dbStore ) ) {
- $this->dbStore = new ExternalStoreDB;
- }
- foreach ( $externalConcatBlobs as $cluster => $xBlobIds ) {
- $blobIds = array_keys( $xBlobIds );
- $extDb =& $this->dbStore->getSlave( $cluster );
- $blobsTable = $this->dbStore->getTable( $extDb );
- $res = $extDb->select( $blobsTable,
- array( 'blob_id' ),
- array( 'blob_id IN( ' . implode( ',', $blobIds ) . ')' ), $fname );
- while ( $row = $extDb->fetchObject( $res ) ) {
- unset( $xBlobIds[$row->blob_id] );
- }
- $extDb->freeResult( $res );
- // Print errors for missing blobs rows
- foreach ( $xBlobIds as $blobId => $oldId ) {
- $this->error( 'restore text', "Error: missing target $blobId for one-part ES URL", $oldId );
- }
- }
- }
-
- // Check local objects
- $dbr->ping();
- $concatBlobs = array();
- $curIds = array();
- if ( count( $objectRevs ) ) {
- $headerLength = 300;
- $res = $dbr->select( 'text', array( 'old_id', 'old_flags', "LEFT(old_text, $headerLength) AS header" ),
- array( 'old_id IN (' . implode( ',', $objectRevs ) . ')' ), $fname );
- while ( $row = $dbr->fetchObject( $res ) ) {
- $oldId = $row->old_id;
- if ( !preg_match( '/^O:(\d+):"(\w+)"/', $row->header, $matches ) ) {
- $this->error( 'restore text', "Error: invalid object header", $oldId );
- continue;
- }
-
- $className = strtolower( $matches[2] );
- if ( strlen( $className ) != $matches[1] ) {
- $this->error( 'restore text', "Error: invalid object header, wrong class name length", $oldId );
- continue;
- }
-
- $objectStats = $objectStats + array( $className => 0 );
- $objectStats[$className]++;
-
- switch ( $className ) {
- case 'concatenatedgziphistoryblob':
- // Good
- break;
- case 'historyblobstub':
- case 'historyblobcurstub':
- if ( strlen( $row->header ) == $headerLength ) {
- $this->error( 'unfixable', "Error: overlong stub header", $oldId );
- continue;
- }
- $stubObj = unserialize( $row->header );
- if ( !is_object( $stubObj ) ) {
- $this->error( 'restore text', "Error: unable to unserialize stub object", $oldId );
- continue;
- }
- if ( $className == 'historyblobstub' ) {
- $concatBlobs[$stubObj->mOldId][] = $oldId;
- } else {
- $curIds[$stubObj->mCurId][] = $oldId;
- }
- break;
- default:
- $this->error( 'unfixable', "Error: unrecognised object class \"$className\"", $oldId );
- }
- }
- $dbr->freeResult( $res );
- }
-
- // Check local concat blob validity
- $externalConcatBlobs = array();
- if ( count( $concatBlobs ) ) {
- $headerLength = 300;
- $res = $dbr->select( 'text', array( 'old_id', 'old_flags', "LEFT(old_text, $headerLength) AS header" ),
- array( 'old_id IN (' . implode( ',', array_keys( $concatBlobs ) ) . ')' ), $fname );
- while ( $row = $dbr->fetchObject( $res ) ) {
- $flags = explode( ',', $row->old_flags );
- if ( in_array( 'external', $flags ) ) {
- // Concat blob is in external storage?
- if ( in_array( 'object', $flags ) ) {
- $urlParts = explode( '/', $row->header );
- if ( $urlParts[0] != 'DB:' ) {
- $this->error( 'unfixable', "Error: unrecognised external storage type \"{$urlParts[0]}", $row->old_id );
- } else {
- $cluster = $urlParts[2];
- $id = $urlParts[3];
- if ( !isset( $externalConcatBlobs[$cluster][$id] ) ) {
- $externalConcatBlobs[$cluster][$id] = array();
- }
- $externalConcatBlobs[$cluster][$id] = array_merge(
- $externalConcatBlobs[$cluster][$id], $concatBlobs[$row->old_id]
- );
- }
- } else {
- $this->error( 'unfixable', "Error: invalid flags \"{$row->old_flags}\" on concat bulk row {$row->old_id}",
- $concatBlobs[$row->old_id] );
- }
- } elseif ( strcasecmp( substr( $row->header, 0, strlen( CONCAT_HEADER ) ), CONCAT_HEADER ) ) {
- $this->error( 'restore text', "Error: Incorrect object header for concat bulk row {$row->old_id}",
- $concatBlobs[$row->old_id] );
- } # else good
-
- unset( $concatBlobs[$row->old_id] );
- }
- $dbr->freeResult( $res );
- }
-
- // Check targets of unresolved stubs
- $this->checkExternalConcatBlobs( $externalConcatBlobs );
-
- // next chunk
- }
-
- print "\n\nErrors:\n";
- foreach( $this->errors as $name => $errors ) {
- if ( count( $errors ) ) {
- $description = $this->errorDescriptions[$name];
- echo "$description: " . implode( ',', array_keys( $errors ) ) . "\n";
- }
- }
-
- if ( count( $this->errors['restore text'] ) && $fix ) {
- if ( (string)$xml !== '' ) {
- $this->restoreText( array_keys( $this->errors['restore text'] ), $xml );
- } else {
- echo "Can't fix text, no XML backup specified\n";
- }
- }
-
- print "\nFlag statistics:\n";
- $total = array_sum( $flagStats );
- foreach ( $flagStats as $flag => $count ) {
- printf( "%-30s %10d %5.2f%%\n", $flag, $count, $count / $total * 100 );
- }
- print "\nLocal object statistics:\n";
- $total = array_sum( $objectStats );
- foreach ( $objectStats as $className => $count ) {
- printf( "%-30s %10d %5.2f%%\n", $className, $count, $count / $total * 100 );
- }
- }
-
-
- function error( $type, $msg, $ids ) {
- if ( is_array( $ids ) && count( $ids ) == 1 ) {
- $ids = reset( $ids );
- }
- if ( is_array( $ids ) ) {
- $revIds = array();
- foreach ( $ids as $id ) {
- $revIds = array_merge( $revIds, array_keys( $this->oldIdMap, $id ) );
- }
- print "$msg in text rows " . implode( ', ', $ids ) .
- ", revisions " . implode( ', ', $revIds ) . "\n";
- } else {
- $id = $ids;
- $revIds = array_keys( $this->oldIdMap, $id );
- if ( count( $revIds ) == 1 ) {
- print "$msg in old_id $id, rev_id {$revIds[0]}\n";
- } else {
- print "$msg in old_id $id, revisions " . implode( ', ', $revIds ) . "\n";
- }
- }
- $this->errors[$type] = $this->errors[$type] + array_flip( $revIds );
- }
-
- function checkExternalConcatBlobs( $externalConcatBlobs ) {
- $fname = 'CheckStorage::checkExternalConcatBlobs';
- if ( !count( $externalConcatBlobs ) ) {
- return;
- }
-
- if ( is_null( $this->dbStore ) ) {
- $this->dbStore = new ExternalStoreDB;
- }
-
- foreach ( $externalConcatBlobs as $cluster => $oldIds ) {
- $blobIds = array_keys( $oldIds );
- $extDb =& $this->dbStore->getSlave( $cluster );
- $blobsTable = $this->dbStore->getTable( $extDb );
- $headerLength = strlen( CONCAT_HEADER );
- $res = $extDb->select( $blobsTable,
- array( 'blob_id', "LEFT(blob_text, $headerLength) AS header" ),
- array( 'blob_id IN( ' . implode( ',', $blobIds ) . ')' ), $fname );
- while ( $row = $extDb->fetchObject( $res ) ) {
- if ( strcasecmp( $row->header, CONCAT_HEADER ) ) {
- $this->error( 'restore text', "Error: invalid header on target $cluster/{$row->blob_id} of two-part ES URL",
- $oldIds[$row->blob_id] );
- }
- unset( $oldIds[$row->blob_id] );
-
- }
- $extDb->freeResult( $res );
-
- // Print errors for missing blobs rows
- foreach ( $oldIds as $blobId => $oldIds ) {
- $this->error( 'restore text', "Error: missing target $cluster/$blobId for two-part ES URL", $oldIds );
- }
- }
- }
-
- function restoreText( $revIds, $xml ) {
- global $wgTmpDirectory, $wgDBname;
-
- if ( !count( $revIds ) ) {
- return;
- }
-
- print "Restoring text from XML backup...\n";
-
- $revFileName = "$wgTmpDirectory/broken-revlist-$wgDBname";
- $filteredXmlFileName = "$wgTmpDirectory/filtered-$wgDBname.xml";
-
- // Write revision list
- if ( !file_put_contents( $revFileName, implode( "\n", $revIds ) ) ) {
- echo "Error writing revision list, can't restore text\n";
- return;
- }
-
- // Run mwdumper
- echo "Filtering XML dump...\n";
- $exitStatus = 0;
- passthru( 'mwdumper ' .
- wfEscapeShellArg(
- "--output=file:$filteredXmlFileName",
- "--filter=revlist:$revFileName",
- $xml
- ), $exitStatus
- );
-
- if ( $exitStatus ) {
- echo "mwdumper died with exit status $exitStatus\n";
- return;
- }
-
- $file = fopen( $filteredXmlFileName, 'r' );
- if ( !$file ) {
- echo "Unable to open filtered XML file\n";
- return;
- }
-
- $dbr =& wfGetDB( DB_SLAVE );
- $dbw =& wfGetDB( DB_MASTER );
- $dbr->ping();
- $dbw->ping();
-
- $source = new ImportStreamSource( $file );
- $importer = new WikiImporter( $source );
- $importer->setRevisionCallback( array( &$this, 'importRevision' ) );
- $importer->doImport();
- }
-
- function importRevision( &$revision, &$importer ) {
- $fname = 'CheckStorage::importRevision';
-
- $id = $revision->getID();
- $text = $revision->getText();
- if ( $text === '' ) {
- // This is what happens if the revision was broken at the time the
- // dump was made. Unfortunately, it also happens if the revision was
- // legitimately blank, so there's no way to tell the difference. To
- // be safe, we'll skip it and leave it broken
- $id = $id ? $id : '';
- echo "Revision $id is blank in the dump, may have been broken before export\n";
- return;
- }
-
- if ( !$id ) {
- // No ID, can't import
- echo "No id tag in revision, can't import\n";
- return;
- }
-
- // Find text row again
- $dbr =& wfGetDB( DB_SLAVE );
- $oldId = $dbr->selectField( 'revision', 'rev_text_id', array( 'rev_id' => $id ), $fname );
- if ( !$oldId ) {
- echo "Missing revision row for rev_id $id\n";
- return;
- }
-
- // Compress the text
- $flags = Revision::compressRevisionText( $text );
-
- // Update the text row
- $dbw->update( 'text',
- array( 'old_flags' => $flags, 'old_text' => $text ),
- array( 'old_id' => $oldId ),
- $fname, array( 'LIMIT' => 1 )
- );
-
- // Remove it from the unfixed list and add it to the fixed list
- unset( $this->errors['restore text'][$id] );
- $this->errors['fixed'][$id] = true;
- }
-}
-?>
+<?php
+
+/**
+ * Fsck for MediaWiki
+ */
+
+define( 'CONCAT_HEADER', 'O:27:"concatenatedgziphistoryblob"' );
+
+if ( !defined( 'MEDIAWIKI' ) ) {
+ require_once( dirname(__FILE__) . '/../commandLine.inc' );
+ require_once( 'ExternalStore.php' );
+ require_once( 'ExternalStoreDB.php' );
+ require_once( 'SpecialImport.php' );
+
+ $cs = new CheckStorage;
+ $fix = isset( $options['fix'] );
+ if ( isset( $args[0] ) ) {
+ $xml = $args[0];
+ } else {
+ $xml = false;
+ }
+ $cs->check( $fix, $xml );
+}
+
+
+//----------------------------------------------------------------------------------
+
+class CheckStorage
+{
+ var $oldIdMap, $errors;
+ var $dbStore = null;
+
+ var $errorDescriptions = array(
+ 'restore text' => 'Damaged text, need to be restored from a backup',
+ 'restore revision' => 'Damaged revision row, need to be restored from a backup',
+ 'unfixable' => 'Unexpected errors with no automated fixing method',
+ 'fixed' => 'Errors already fixed',
+ 'fixable' => 'Errors which would already be fixed if --fix was specified',
+ );
+
+ function check( $fix = false, $xml = '' ) {
+ $fname = 'checkStorage';
+ $dbr =& wfGetDB( DB_SLAVE );
+ if ( $fix ) {
+ $dbw =& wfGetDB( DB_MASTER );
+ print "Checking, will fix errors if possible...\n";
+ } else {
+ print "Checking...\n";
+ }
+ $maxRevId = $dbr->selectField( 'revision', 'MAX(rev_id)', false, $fname );
+ $chunkSize = 1000;
+ $flagStats = array();
+ $objectStats = array();
+ $knownFlags = array( 'external', 'gzip', 'object', 'utf-8' );
+ $this->errors = array(
+ 'restore text' => array(),
+ 'restore revision' => array(),
+ 'unfixable' => array(),
+ 'fixed' => array(),
+ 'fixable' => array(),
+ );
+
+ for ( $chunkStart = 1 ; $chunkStart < $maxRevId; $chunkStart += $chunkSize ) {
+ $chunkEnd = $chunkStart + $chunkSize - 1;
+ //print "$chunkStart of $maxRevId\n";
+
+ // Fetch revision rows
+ $this->oldIdMap = array();
+ $dbr->ping();
+ $res = $dbr->select( 'revision', array( 'rev_id', 'rev_text_id' ),
+ array( "rev_id BETWEEN $chunkStart AND $chunkEnd" ), $fname );
+ while ( $row = $dbr->fetchObject( $res ) ) {
+ $this->oldIdMap[$row->rev_id] = $row->rev_text_id;
+ }
+ $dbr->freeResult( $res );
+
+ if ( !count( $this->oldIdMap ) ) {
+ continue;
+ }
+
+ // Fetch old_flags
+ $missingTextRows = array_flip( $this->oldIdMap );
+ $externalRevs = array();
+ $objectRevs = array();
+ $res = $dbr->select( 'text', array( 'old_id', 'old_flags' ),
+ 'old_id IN (' . implode( ',', $this->oldIdMap ) . ')', $fname );
+ while ( $row = $dbr->fetchObject( $res ) ) {
+ $flags = $row->old_flags;
+ $id = $row->old_id;
+
+ // Create flagStats row if it doesn't exist
+ $flagStats = $flagStats + array( $flags => 0 );
+ // Increment counter
+ $flagStats[$flags]++;
+
+ // Not missing
+ unset( $missingTextRows[$row->old_id] );
+
+ // Check for external or object
+ if ( $flags == '' ) {
+ $flagArray = array();
+ } else {
+ $flagArray = explode( ',', $flags );
+ }
+ if ( in_array( 'external', $flagArray ) ) {
+ $externalRevs[] = $id;
+ } elseif ( in_array( 'object', $flagArray ) ) {
+ $objectRevs[] = $id;
+ }
+
+ // Check for unrecognised flags
+ if ( $flags == '0' ) {
+ // This is a known bug from 2004
+ // It's safe to just erase the old_flags field
+ if ( $fix ) {
+ $this->error( 'fixed', "Warning: old_flags set to 0", $id );
+ $dbw->ping();
+ $dbw->update( 'text', array( 'old_flags' => '' ),
+ array( 'old_id' => $id ), $fname );
+ echo "Fixed\n";
+ } else {
+ $this->error( 'fixable', "Warning: old_flags set to 0", $id );
+ }
+ } elseif ( count( array_diff( $flagArray, $knownFlags ) ) ) {
+ $this->error( 'unfixable', "Error: invalid flags field \"$flags\"", $id );
+ }
+ }
+ $dbr->freeResult( $res );
+
+ // Output errors for any missing text rows
+ foreach ( $missingTextRows as $oldId => $revId ) {
+ $this->error( 'restore revision', "Error: missing text row", $oldId );
+ }
+
+ // Verify external revisions
+ $externalConcatBlobs = array();
+ $externalNormalBlobs = array();
+ if ( count( $externalRevs ) ) {
+ $res = $dbr->select( 'text', array( 'old_id', 'old_flags', 'old_text' ),
+ array( 'old_id IN (' . implode( ',', $externalRevs ) . ')' ), $fname );
+ while ( $row = $dbr->fetchObject( $res ) ) {
+ $urlParts = explode( '://', $row->old_text, 2 );
+ if ( count( $urlParts ) !== 2 || $urlParts[1] == '' ) {
+ $this->error( 'restore text', "Error: invalid URL \"{$row->old_text}\"", $row->old_id );
+ continue;
+ }
+ list( $proto, $path ) = $urlParts;
+ if ( $proto != 'DB' ) {
+ $this->error( 'restore text', "Error: invalid external protocol \"$proto\"", $row->old_id );
+ continue;
+ }
+ $path = explode( '/', $row->old_text );
+ $cluster = $path[2];
+ $id = $path[3];
+ if ( isset( $path[4] ) ) {
+ $externalConcatBlobs[$cluster][$id][] = $row->old_id;
+ } else {
+ $externalNormalBlobs[$cluster][$id][] = $row->old_id;
+ }
+ }
+ $dbr->freeResult( $res );
+ }
+
+ // Check external concat blobs for the right header
+ $this->checkExternalConcatBlobs( $externalConcatBlobs );
+
+ // Check external normal blobs for existence
+ if ( count( $externalNormalBlobs ) ) {
+ if ( is_null( $this->dbStore ) ) {
+ $this->dbStore = new ExternalStoreDB;
+ }
+ foreach ( $externalNormalBlobs as $cluster => $xBlobIds ) {
+ $blobIds = array_keys( $xBlobIds );
+ $extDb =& $this->dbStore->getSlave( $cluster );
+ $blobsTable = $this->dbStore->getTable( $extDb );
+ $res = $extDb->select( $blobsTable,
+ array( 'blob_id' ),
+ array( 'blob_id IN( ' . implode( ',', $blobIds ) . ')' ), $fname );
+ while ( $row = $extDb->fetchObject( $res ) ) {
+ unset( $xBlobIds[$row->blob_id] );
+ }
+ $extDb->freeResult( $res );
+ // Print errors for missing blobs rows
+ foreach ( $xBlobIds as $blobId => $oldId ) {
+ $this->error( 'restore text', "Error: missing target $blobId for one-part ES URL", $oldId );
+ }
+ }
+ }
+
+ // Check local objects
+ $dbr->ping();
+ $concatBlobs = array();
+ $curIds = array();
+ if ( count( $objectRevs ) ) {
+ $headerLength = 300;
+ $res = $dbr->select( 'text', array( 'old_id', 'old_flags', "LEFT(old_text, $headerLength) AS header" ),
+ array( 'old_id IN (' . implode( ',', $objectRevs ) . ')' ), $fname );
+ while ( $row = $dbr->fetchObject( $res ) ) {
+ $oldId = $row->old_id;
+ if ( !preg_match( '/^O:(\d+):"(\w+)"/', $row->header, $matches ) ) {
+ $this->error( 'restore text', "Error: invalid object header", $oldId );
+ continue;
+ }
+
+ $className = strtolower( $matches[2] );
+ if ( strlen( $className ) != $matches[1] ) {
+ $this->error( 'restore text', "Error: invalid object header, wrong class name length", $oldId );
+ continue;
+ }
+
+ $objectStats = $objectStats + array( $className => 0 );
+ $objectStats[$className]++;
+
+ switch ( $className ) {
+ case 'concatenatedgziphistoryblob':
+ // Good
+ break;
+ case 'historyblobstub':
+ case 'historyblobcurstub':
+ if ( strlen( $row->header ) == $headerLength ) {
+ $this->error( 'unfixable', "Error: overlong stub header", $oldId );
+ continue;
+ }
+ $stubObj = unserialize( $row->header );
+ if ( !is_object( $stubObj ) ) {
+ $this->error( 'restore text', "Error: unable to unserialize stub object", $oldId );
+ continue;
+ }
+ if ( $className == 'historyblobstub' ) {
+ $concatBlobs[$stubObj->mOldId][] = $oldId;
+ } else {
+ $curIds[$stubObj->mCurId][] = $oldId;
+ }
+ break;
+ default:
+ $this->error( 'unfixable', "Error: unrecognised object class \"$className\"", $oldId );
+ }
+ }
+ $dbr->freeResult( $res );
+ }
+
+ // Check local concat blob validity
+ $externalConcatBlobs = array();
+ if ( count( $concatBlobs ) ) {
+ $headerLength = 300;
+ $res = $dbr->select( 'text', array( 'old_id', 'old_flags', "LEFT(old_text, $headerLength) AS header" ),
+ array( 'old_id IN (' . implode( ',', array_keys( $concatBlobs ) ) . ')' ), $fname );
+ while ( $row = $dbr->fetchObject( $res ) ) {
+ $flags = explode( ',', $row->old_flags );
+ if ( in_array( 'external', $flags ) ) {
+ // Concat blob is in external storage?
+ if ( in_array( 'object', $flags ) ) {
+ $urlParts = explode( '/', $row->header );
+ if ( $urlParts[0] != 'DB:' ) {
+ $this->error( 'unfixable', "Error: unrecognised external storage type \"{$urlParts[0]}", $row->old_id );
+ } else {
+ $cluster = $urlParts[2];
+ $id = $urlParts[3];
+ if ( !isset( $externalConcatBlobs[$cluster][$id] ) ) {
+ $externalConcatBlobs[$cluster][$id] = array();
+ }
+ $externalConcatBlobs[$cluster][$id] = array_merge(
+ $externalConcatBlobs[$cluster][$id], $concatBlobs[$row->old_id]
+ );
+ }
+ } else {
+ $this->error( 'unfixable', "Error: invalid flags \"{$row->old_flags}\" on concat bulk row {$row->old_id}",
+ $concatBlobs[$row->old_id] );
+ }
+ } elseif ( strcasecmp( substr( $row->header, 0, strlen( CONCAT_HEADER ) ), CONCAT_HEADER ) ) {
+ $this->error( 'restore text', "Error: Incorrect object header for concat bulk row {$row->old_id}",
+ $concatBlobs[$row->old_id] );
+ } # else good
+
+ unset( $concatBlobs[$row->old_id] );
+ }
+ $dbr->freeResult( $res );
+ }
+
+ // Check targets of unresolved stubs
+ $this->checkExternalConcatBlobs( $externalConcatBlobs );
+
+ // next chunk
+ }
+
+ print "\n\nErrors:\n";
+ foreach( $this->errors as $name => $errors ) {
+ if ( count( $errors ) ) {
+ $description = $this->errorDescriptions[$name];
+ echo "$description: " . implode( ',', array_keys( $errors ) ) . "\n";
+ }
+ }
+
+ if ( count( $this->errors['restore text'] ) && $fix ) {
+ if ( (string)$xml !== '' ) {
+ $this->restoreText( array_keys( $this->errors['restore text'] ), $xml );
+ } else {
+ echo "Can't fix text, no XML backup specified\n";
+ }
+ }
+
+ print "\nFlag statistics:\n";
+ $total = array_sum( $flagStats );
+ foreach ( $flagStats as $flag => $count ) {
+ printf( "%-30s %10d %5.2f%%\n", $flag, $count, $count / $total * 100 );
+ }
+ print "\nLocal object statistics:\n";
+ $total = array_sum( $objectStats );
+ foreach ( $objectStats as $className => $count ) {
+ printf( "%-30s %10d %5.2f%%\n", $className, $count, $count / $total * 100 );
+ }
+ }
+
+
+ function error( $type, $msg, $ids ) {
+ if ( is_array( $ids ) && count( $ids ) == 1 ) {
+ $ids = reset( $ids );
+ }
+ if ( is_array( $ids ) ) {
+ $revIds = array();
+ foreach ( $ids as $id ) {
+ $revIds = array_merge( $revIds, array_keys( $this->oldIdMap, $id ) );
+ }
+ print "$msg in text rows " . implode( ', ', $ids ) .
+ ", revisions " . implode( ', ', $revIds ) . "\n";
+ } else {
+ $id = $ids;
+ $revIds = array_keys( $this->oldIdMap, $id );
+ if ( count( $revIds ) == 1 ) {
+ print "$msg in old_id $id, rev_id {$revIds[0]}\n";
+ } else {
+ print "$msg in old_id $id, revisions " . implode( ', ', $revIds ) . "\n";
+ }
+ }
+ $this->errors[$type] = $this->errors[$type] + array_flip( $revIds );
+ }
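+ // Example: error( 'restore text', "Error: ...", 1001 ) resolves text row 1001
+ // back to its revisions via $this->oldIdMap and flags those rev_ids under
+ // $this->errors['restore text']; restoreText() later pulls them out again with
+ // array_keys().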
+
+ function checkExternalConcatBlobs( $externalConcatBlobs ) {
+ $fname = 'CheckStorage::checkExternalConcatBlobs';
+ if ( !count( $externalConcatBlobs ) ) {
+ return;
+ }
+
+ if ( is_null( $this->dbStore ) ) {
+ $this->dbStore = new ExternalStoreDB;
+ }
+
+ foreach ( $externalConcatBlobs as $cluster => $oldIds ) {
+ $blobIds = array_keys( $oldIds );
+ $extDb =& $this->dbStore->getSlave( $cluster );
+ $blobsTable = $this->dbStore->getTable( $extDb );
+ $headerLength = strlen( CONCAT_HEADER );
+ $res = $extDb->select( $blobsTable,
+ array( 'blob_id', "LEFT(blob_text, $headerLength) AS header" ),
+ array( 'blob_id IN( ' . implode( ',', $blobIds ) . ')' ), $fname );
+ while ( $row = $extDb->fetchObject( $res ) ) {
+ if ( strcasecmp( $row->header, CONCAT_HEADER ) ) {
+ $this->error( 'restore text', "Error: invalid header on target $cluster/{$row->blob_id} of two-part ES URL",
+ $oldIds[$row->blob_id] );
+ }
+ unset( $oldIds[$row->blob_id] );
+
+ }
+ $extDb->freeResult( $res );
+
+ // Print errors for missing blob rows
+ foreach ( $oldIds as $blobId => $blobOldIds ) {
+ $this->error( 'restore text', "Error: missing target $cluster/$blobId for two-part ES URL", $blobOldIds );
+ }
+ }
+ }
+
+ function restoreText( $revIds, $xml ) {
+ global $wgTmpDirectory, $wgDBname;
+
+ if ( !count( $revIds ) ) {
+ return;
+ }
+
+ print "Restoring text from XML backup...\n";
+
+ $revFileName = "$wgTmpDirectory/broken-revlist-$wgDBname";
+ $filteredXmlFileName = "$wgTmpDirectory/filtered-$wgDBname.xml";
+
+ // Write revision list
+ if ( !file_put_contents( $revFileName, implode( "\n", $revIds ) ) ) {
+ echo "Error writing revision list, can't restore text\n";
+ return;
+ }
+
+ // Run mwdumper
+ echo "Filtering XML dump...\n";
+ $exitStatus = 0;
+ passthru( 'mwdumper ' .
+ wfEscapeShellArg(
+ "--output=file:$filteredXmlFileName",
+ "--filter=revlist:$revFileName",
+ $xml
+ ), $exitStatus
+ );
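+ // wfEscapeShellArg() escapes each argument and joins them with spaces, so with
+ // an assumed $wgTmpDirectory of /tmp and database name "wikidb" the command
+ // executed above expands to roughly:
+ //   mwdumper '--output=file:/tmp/filtered-wikidb.xml' '--filter=revlist:/tmp/broken-revlist-wikidb' 'dump.xml'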
+
+ if ( $exitStatus ) {
+ echo "mwdumper died with exit status $exitStatus\n";
+ return;
+ }
+
+ $file = fopen( $filteredXmlFileName, 'r' );
+ if ( !$file ) {
+ echo "Unable to open filtered XML file\n";
+ return;
+ }
+
+ $dbr =& wfGetDB( DB_SLAVE );
+ $dbw =& wfGetDB( DB_MASTER );
+ $dbr->ping();
+ $dbw->ping();
+
+ $source = new ImportStreamSource( $file );
+ $importer = new WikiImporter( $source );
+ $importer->setRevisionCallback( array( &$this, 'importRevision' ) );
+ $importer->doImport();
+ }
+
+ function importRevision( &$revision, &$importer ) {
+ $fname = 'CheckStorage::importRevision';
+
+ $id = $revision->getID();
+ $text = $revision->getText();
+ if ( $text === '' ) {
+ // This is what happens if the revision was broken at the time the
+ // dump was made. Unfortunately, it also happens if the revision was
+ // legitimately blank, so there's no way to tell the difference. To
+ // be safe, we'll skip it and leave it broken
+ $id = $id ? $id : '';
+ echo "Revision $id is blank in the dump, may have been broken before export\n";
+ return;
+ }
+
+ if ( !$id ) {
+ // No ID, can't import
+ echo "No id tag in revision, can't import\n";
+ return;
+ }
+
+ // Find text row again
+ $dbr =& wfGetDB( DB_SLAVE );
+ $oldId = $dbr->selectField( 'revision', 'rev_text_id', array( 'rev_id' => $id ), $fname );
+ if ( !$oldId ) {
+ echo "Missing revision row for rev_id $id\n";
+ return;
+ }
+
+ // Compress the text
+ $flags = Revision::compressRevisionText( $text );
+
+ // Update the text row
+ $dbw =& wfGetDB( DB_MASTER );
+ $dbw->update( 'text',
+ array( 'old_flags' => $flags, 'old_text' => $text ),
+ array( 'old_id' => $oldId ),
+ $fname, array( 'LIMIT' => 1 )
+ );
+
+ // Remove it from the unfixed list and add it to the fixed list
+ unset( $this->errors['restore text'][$id] );
+ $this->errors['fixed'][$id] = true;
+ }
+}
+?>
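One detail worth noting about the importRevision() fix-up above: Revision::compressRevisionText() takes the text by reference and returns the comma-separated value to store in old_flags. A minimal sketch, assuming the helper's usual behaviour at this point in the codebase (the utf-8 flag is always added; gzip is added and the text deflated only when $wgCompressRevisions is on):

	$text  = 'Restored wikitext';
	$flags = Revision::compressRevisionText( $text );
	// $flags is now e.g. "utf-8" (or "utf-8,gzip"), and in the gzip case $text
	// has been gzdeflate()d in place, ready for the old_text column.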
diff --git a/maintenance/storage/compressOld.inc b/maintenance/storage/compressOld.inc
index b7d7094f..3c426841 100644
--- a/maintenance/storage/compressOld.inc
+++ b/maintenance/storage/compressOld.inc
@@ -155,6 +155,17 @@ function compressWithConcat( $startId, $maxChunkSize, $maxChunkFactor, $factorTh
$titleObj = Title::makeTitle( $pageRow->page_namespace, $pageRow->page_title );
print "$pageId\t" . $titleObj->getPrefixedDBkey() . " ";
+ print_r(
+ array(
+ 'rev_page' => $pageRow->page_id,
+ # Don't operate on the current revision
+ # Use < instead of <> in case the current revision has changed
+ # since the page select, which wasn't locking
+ 'rev_id < ' . $pageRow->page_latest
+ ) + $conds
+ );
+ exit;
+
# Load revisions
$revRes = $dbw->select( $tables, $fields,
array(
diff --git a/maintenance/tables.sql b/maintenance/tables.sql
index 288d4a06..3ffa5e5f 100644
--- a/maintenance/tables.sql
+++ b/maintenance/tables.sql
@@ -23,7 +23,7 @@
-- in early 2002 after a lot of trouble with the fields
-- auto-updating.
--
--- The PostgreSQL backend uses DATETIME fields for timestamps,
+-- The Postgres backend uses DATETIME fields for timestamps,
-- and we will migrate the MySQL definitions at some point as
-- well.
--
@@ -97,18 +97,18 @@ CREATE TABLE /*$wgDBprefix*/user (
-- Initially NULL; when a user's e-mail address has been
-- validated by returning with a mailed token, this is
-- set to the current timestamp.
- user_email_authenticated CHAR(14) BINARY,
+ user_email_authenticated char(14) binary,
-- Randomly generated token created when the e-mail address
-- is set and a confirmation test mail sent.
- user_email_token CHAR(32) BINARY,
+ user_email_token char(32) binary,
-- Expiration date for the user_email_token
- user_email_token_expires CHAR(14) BINARY,
+ user_email_token_expires char(14) binary,
-- Timestamp of account registration.
-- Accounts predating this schema addition may contain NULL.
- user_registration CHAR(14) BINARY,
+ user_registration char(14) binary,
PRIMARY KEY user_id (user_id),
UNIQUE INDEX user_name (user_name),
@@ -152,7 +152,8 @@ CREATE TABLE /*$wgDBprefix*/user_newtalk (
user_ip varchar(40) NOT NULL default '',
INDEX user_id (user_id),
INDEX user_ip (user_ip)
-);
+
+) TYPE=InnoDB;
--
@@ -365,7 +366,7 @@ CREATE TABLE /*$wgDBprefix*/pagelinks (
pl_namespace int NOT NULL default '0',
pl_title varchar(255) binary NOT NULL default '',
- UNIQUE KEY pl_from(pl_from,pl_namespace,pl_title),
+ UNIQUE KEY pl_from (pl_from,pl_namespace,pl_title),
KEY (pl_namespace,pl_title)
) TYPE=InnoDB;
@@ -385,7 +386,7 @@ CREATE TABLE /*$wgDBprefix*/templatelinks (
tl_namespace int NOT NULL default '0',
tl_title varchar(255) binary NOT NULL default '',
- UNIQUE KEY tl_from(tl_from,tl_namespace,tl_title),
+ UNIQUE KEY tl_from (tl_from,tl_namespace,tl_title),
KEY (tl_namespace,tl_title)
) TYPE=InnoDB;
@@ -404,7 +405,7 @@ CREATE TABLE /*$wgDBprefix*/imagelinks (
-- all such pages are in namespace 6 (NS_IMAGE).
il_to varchar(255) binary NOT NULL default '',
- UNIQUE KEY il_from(il_from,il_to),
+ UNIQUE KEY il_from (il_from,il_to),
KEY (il_to)
) TYPE=InnoDB;
@@ -439,13 +440,13 @@ CREATE TABLE /*$wgDBprefix*/categorylinks (
-- sorting method by approximate addition time.
cl_timestamp timestamp NOT NULL,
- UNIQUE KEY cl_from(cl_from,cl_to),
+ UNIQUE KEY cl_from (cl_from,cl_to),
-- We always sort within a given category...
- KEY cl_sortkey(cl_to,cl_sortkey),
+ KEY cl_sortkey (cl_to,cl_sortkey),
-- Not really used?
- KEY cl_timestamp(cl_to,cl_timestamp)
+ KEY cl_timestamp (cl_to,cl_timestamp)
) TYPE=InnoDB;
@@ -539,7 +540,7 @@ CREATE TABLE /*$wgDBprefix*/site_stats (
-- that have been visited.)
--
CREATE TABLE /*$wgDBprefix*/hitcounter (
- hc_id INTEGER UNSIGNED NOT NULL
+ hc_id int unsigned NOT NULL
) TYPE=HEAP MAX_ROWS=25000;
@@ -552,7 +553,7 @@ CREATE TABLE /*$wgDBprefix*/ipblocks (
ipb_id int(8) NOT NULL auto_increment,
-- Blocked IP address in dotted-quad form or user name.
- ipb_address varchar(40) binary NOT NULL default '',
+ ipb_address tinyblob NOT NULL default '',
-- Blocked user ID or 0 for IP blocks.
ipb_user int(8) unsigned NOT NULL default '0',
@@ -570,20 +571,32 @@ CREATE TABLE /*$wgDBprefix*/ipblocks (
-- Indicates that the IP address was banned because a banned
-- user accessed a page through it. If this is 1, ipb_address
-- will be hidden, and the block identified by block ID number.
- ipb_auto tinyint(1) NOT NULL default '0',
+ ipb_auto bool NOT NULL default 0,
+
+ -- If set to 1, block applies only to logged-out users
+ ipb_anon_only bool NOT NULL default 0,
+
+ -- Block prevents account creation from matching IP addresses
+ ipb_create_account bool NOT NULL default 1,
-- Time at which the block will expire.
ipb_expiry char(14) binary NOT NULL default '',
-- Start and end of an address range, in hexadecimal
-- Size chosen to allow IPv6
- ipb_range_start varchar(32) NOT NULL default '',
- ipb_range_end varchar(32) NOT NULL default '',
+ ipb_range_start tinyblob NOT NULL default '',
+ ipb_range_end tinyblob NOT NULL default '',
PRIMARY KEY ipb_id (ipb_id),
- INDEX ipb_address (ipb_address),
+
+ -- Unique index to support "user already blocked" messages
+ -- Any new options which prevent collisions should be included
+ UNIQUE INDEX ipb_address (ipb_address(255), ipb_user, ipb_auto, ipb_anon_only),
+
INDEX ipb_user (ipb_user),
- INDEX ipb_range (ipb_range_start(8), ipb_range_end(8))
+ INDEX ipb_range (ipb_range_start(8), ipb_range_end(8)),
+ INDEX ipb_timestamp (ipb_timestamp),
+ INDEX ipb_expiry (ipb_expiry)
) TYPE=InnoDB;
@@ -601,14 +614,14 @@ CREATE TABLE /*$wgDBprefix*/image (
img_size int(8) unsigned NOT NULL default '0',
-- For images, size in pixels.
- img_width int(5) NOT NULL default '0',
- img_height int(5) NOT NULL default '0',
+ img_width int(5) NOT NULL default '0',
+ img_height int(5) NOT NULL default '0',
-- Extracted EXIF metadata stored as a serialized PHP array.
img_metadata mediumblob NOT NULL,
-- For images, bits per pixel if known.
- img_bits int(3) NOT NULL default '0',
+ img_bits int(3) NOT NULL default '0',
-- Media type as defined by the MEDIATYPE_xxx constants
img_media_type ENUM("UNKNOWN", "BITMAP", "DRAWING", "AUDIO", "VIDEO", "MULTIMEDIA", "OFFICE", "TEXT", "EXECUTABLE", "ARCHIVE") default NULL,
@@ -676,7 +689,7 @@ CREATE TABLE /*$wgDBprefix*/oldimage (
--
CREATE TABLE /*$wgDBprefix*/filearchive (
-- Unique row id
- fa_id int not null auto_increment,
+ fa_id int NOT NULL auto_increment,
-- Original base filename; key to image.img_name, page.page_title, etc
fa_name varchar(255) binary NOT NULL default '',
@@ -703,10 +716,10 @@ CREATE TABLE /*$wgDBprefix*/filearchive (
-- Duped fields from image
fa_size int(8) unsigned default '0',
- fa_width int(5) default '0',
- fa_height int(5) default '0',
+ fa_width int(5) default '0',
+ fa_height int(5) default '0',
fa_metadata mediumblob,
- fa_bits int(3) default '0',
+ fa_bits int(3) default '0',
fa_media_type ENUM("UNKNOWN", "BITMAP", "DRAWING", "AUDIO", "VIDEO", "MULTIMEDIA", "OFFICE", "TEXT", "EXECUTABLE", "ARCHIVE") default NULL,
fa_major_mime ENUM("unknown", "application", "audio", "image", "text", "video", "message", "model", "multipart") default "unknown",
fa_minor_mime varchar(32) default "unknown",
@@ -782,8 +795,9 @@ CREATE TABLE /*$wgDBprefix*/recentchanges (
INDEX rc_timestamp (rc_timestamp),
INDEX rc_namespace_title (rc_namespace, rc_title),
INDEX rc_cur_id (rc_cur_id),
- INDEX new_name_timestamp(rc_new,rc_namespace,rc_timestamp),
- INDEX rc_ip (rc_ip)
+ INDEX new_name_timestamp (rc_new,rc_namespace,rc_timestamp),
+ INDEX rc_ip (rc_ip),
+ INDEX rc_ns_usertext (rc_namespace, rc_user_text)
) TYPE=InnoDB;
@@ -802,7 +816,7 @@ CREATE TABLE /*$wgDBprefix*/watchlist (
wl_notificationtimestamp varchar(14) binary,
UNIQUE KEY (wl_user, wl_namespace, wl_title),
- KEY namespace_title (wl_namespace,wl_title)
+ KEY namespace_title (wl_namespace, wl_title)
) TYPE=InnoDB;
@@ -870,10 +884,10 @@ CREATE TABLE /*$wgDBprefix*/interwiki (
-- A boolean value indicating whether the wiki is in this project
-- (used, for example, to detect redirect loops)
- iw_local BOOL NOT NULL,
+ iw_local bool NOT NULL,
-- Boolean value indicating whether interwiki transclusions are allowed.
- iw_trans TINYINT(1) NOT NULL DEFAULT 0,
+ iw_trans tinyint(1) NOT NULL default 0,
UNIQUE KEY iw_prefix (iw_prefix)
@@ -901,11 +915,11 @@ CREATE TABLE /*$wgDBprefix*/querycache (
-- For a few generic cache operations if not using Memcached
--
CREATE TABLE /*$wgDBprefix*/objectcache (
- keyname char(255) binary not null default '',
+ keyname char(255) binary NOT NULL default '',
value mediumblob,
exptime datetime,
- unique key (keyname),
- key (exptime)
+ UNIQUE KEY (keyname),
+ KEY (exptime)
) TYPE=InnoDB;
@@ -913,10 +927,10 @@ CREATE TABLE /*$wgDBprefix*/objectcache (
-- Cache of interwiki transclusion
--
CREATE TABLE /*$wgDBprefix*/transcache (
- tc_url VARCHAR(255) NOT NULL,
- tc_contents TEXT,
- tc_time INT NOT NULL,
- UNIQUE INDEX tc_url_idx(tc_url)
+ tc_url varchar(255) NOT NULL,
+ tc_contents text,
+ tc_time int NOT NULL,
+ UNIQUE INDEX tc_url_idx (tc_url)
) TYPE=InnoDB;
CREATE TABLE /*$wgDBprefix*/logging (
@@ -951,14 +965,15 @@ CREATE TABLE /*$wgDBprefix*/logging (
) TYPE=InnoDB;
CREATE TABLE /*$wgDBprefix*/trackbacks (
- tb_id integer AUTO_INCREMENT PRIMARY KEY,
- tb_page integer REFERENCES page(page_id) ON DELETE CASCADE,
- tb_title varchar(255) NOT NULL,
- tb_url varchar(255) NOT NULL,
- tb_ex text,
- tb_name varchar(255),
-
- INDEX (tb_page)
+ tb_id int auto_increment,
+ tb_page int REFERENCES page(page_id) ON DELETE CASCADE,
+ tb_title varchar(255) NOT NULL,
+ tb_url varchar(255) NOT NULL,
+ tb_ex text,
+ tb_name varchar(255),
+
+ PRIMARY KEY (tb_id),
+ INDEX (tb_page)
) TYPE=InnoDB;
@@ -986,13 +1001,15 @@ CREATE TABLE /*$wgDBprefix*/job (
-- Details of updates to cached special pages
CREATE TABLE /*$wgDBprefix*/querycache_info (
- -- Special page name
- -- Corresponds to a qc_type value
- qci_type varchar(32) NOT NULL default '',
+ -- Special page name
+ -- Corresponds to a qc_type value
+ qci_type varchar(32) NOT NULL default '',
- -- Timestamp of last update
- qci_timestamp char(14) NOT NULL default '19700101000000',
+ -- Timestamp of last update
+ qci_timestamp char(14) NOT NULL default '19700101000000',
- UNIQUE KEY ( qci_type )
+ UNIQUE KEY ( qci_type )
) TYPE=InnoDB;
+
+-- vim: sw=2 sts=2 et
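The ipblocks changes above introduce ipb_anon_only and ipb_create_account and replace the old ipb_address index with a unique one covering the collision-relevant options. As a hedged sketch of how a caller might honour the anon-only flag (only the column names come from the schema; the query shape, variables and field list are assumptions):

	$dbr =& wfGetDB( DB_SLAVE );
	$block = $dbr->selectRow( 'ipblocks',
		array( 'ipb_id', 'ipb_expiry', 'ipb_create_account' ),
		array(
			'ipb_address' => $ip,
			// logged-in users are exempt from blocks flagged ipb_anon_only = 1
			'ipb_anon_only' => $user->isAnon() ? array( 0, 1 ) : 0,
		),
		'exampleBlockLookup'
	);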
diff --git a/maintenance/update.php b/maintenance/update.php
index 8643aa79..d2dcbf92 100644
--- a/maintenance/update.php
+++ b/maintenance/update.php
@@ -15,8 +15,6 @@ require_once( "commandLine.inc" );
require_once( "updaters.inc" );
$wgTitle = Title::newFromText( "MediaWiki database updater" );
$dbclass = 'Database' . ucfirst( $wgDBtype ) ;
-require_once("$dbclass.php");
-$dbc = new $dbclass;
echo( "MediaWiki {$wgVersion} Updater\n\n" );
@@ -32,20 +30,16 @@ if( !isset( $wgDBadminuser ) || !isset( $wgDBadminpassword ) ) {
# Attempt to connect to the database as a privileged user
# This will vomit up an error if there are permissions problems
-$wgDatabase = $dbc->newFromParams( $wgDBserver, $wgDBadminuser, $wgDBadminpassword, $wgDBname, 1 );
+$wgDatabase = new $dbclass( $wgDBserver, $wgDBadminuser, $wgDBadminpassword, $wgDBname, 1 );
if( !$wgDatabase->isOpen() ) {
# Appears to have failed
echo( "A connection to the database could not be established. Check the\n" );
- # Let's be a bit clever and guess at what's wrong
- if( isset( $wgDBadminuser ) && isset( $wgDBadminpassword ) ) {
- # Tell the user the value(s) are wrong
- echo( 'values of $wgDBadminuser and $wgDBadminpassword.' . "\n" );
- }
+ echo( "values of \$wgDBadminuser and \$wgDBadminpassword.\n" );
exit();
}
-print "Going to run database updates for $wgDBname\n";
+print "Going to run database updates for ".wfWikiID()."\n";
print "Depending on the size of your database this may take a while!\n";
if( !isset( $options['quick'] ) ) {
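The updater now reports wfWikiID() instead of the bare $wgDBname. A brief illustration, assuming the usual definition of that helper (database name, plus the table prefix when one is configured):

	// with $wgDBname = 'wikidb' and $wgDBprefix = 'mw_' (illustrative values)
	echo wfWikiID();   // prints "wikidb-mw_"
	// with no prefix configured it prints just "wikidb"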
diff --git a/maintenance/updateSpecialPages.php b/maintenance/updateSpecialPages.php
index 71c688fc..a7a72b58 100644
--- a/maintenance/updateSpecialPages.php
+++ b/maintenance/updateSpecialPages.php
@@ -33,8 +33,8 @@ foreach ( $wgQueryPages as $page ) {
print "No such special page: $special\n";
exit;
}
- $file = $specialObj->getFile();
- if ( $file ) {
+ if ( !class_exists( $class ) ) {
+ $file = $specialObj->getFile();
require_once( $file );
}
$queryPage = new $class;
diff --git a/maintenance/updaters.inc b/maintenance/updaters.inc
index 164a00cf..d334660e 100644
--- a/maintenance/updaters.inc
+++ b/maintenance/updaters.inc
@@ -56,6 +56,7 @@ $wgNewFields = array(
array( 'interwiki', 'iw_trans', 'patch-interwiki-trans.sql' ),
array( 'ipblocks', 'ipb_range_start', 'patch-ipb_range_start.sql' ),
array( 'site_stats', 'ss_images', 'patch-ss_images.sql' ),
+ array( 'ipblocks', 'ipb_anon_only', 'patch-ipb_anon_only.sql' ),
);
function rename_table( $from, $to, $patch ) {
@@ -761,11 +762,33 @@ function do_templatelinks_update() {
echo "Done. Please run maintenance/refreshLinks.php for a more thorough templatelinks update.\n";
}
+# July 2006
+# Add ( rc_namespace, rc_user_text ) index [R. Church]
+function do_rc_indices_update() {
+ global $wgDatabase;
+ echo( "Checking for additional recent changes indices...\n" );
+ # See if we can find the index we want
+ $info = $wgDatabase->indexInfo( 'recentchanges', 'rc_ns_usertext', __METHOD__ );
+ if( !$info ) {
+ # None, so create
+ echo( "...index on ( rc_namespace, rc_user_text ) not found; creating\n" );
+ dbsource( archive( 'patch-recentchanges-utindex.sql' ) );
+ } else {
+ # Index seems to exist
+ echo( "...seems to be ok\n" );
+ }
+}
+
function do_all_updates( $doShared = false ) {
- global $wgNewTables, $wgNewFields, $wgRenamedTables, $wgSharedDB, $wgDatabase;
+ global $wgNewTables, $wgNewFields, $wgRenamedTables, $wgSharedDB, $wgDatabase, $wgDBtype;
$doUser = !$wgSharedDB || $doShared;
+ if ($wgDBtype === 'postgres') {
+ do_postgres_updates();
+ return;
+ }
+
# Rename tables
foreach ( $wgRenamedTables as $tableRecord ) {
rename_table( $tableRecord[0], $tableRecord[1], $tableRecord[2] );
@@ -819,6 +842,8 @@ function do_all_updates( $doShared = false ) {
do_logging_timestamp_index(); flush();
do_page_random_update(); flush();
+
+ do_rc_indices_update(); flush();
initialiseMessages(); flush();
}
@@ -832,4 +857,121 @@ function archive($name) {
return "$IP/maintenance/archives/$name";
}
}
+
+function do_postgres_updates() {
+ global $wgDatabase, $wgVersion, $wgDBmwschema;
+
+ $version = "1.7.1";
+
+ # Just in case their LocalSettings.php does not have this:
+ if ( !isset( $wgDBmwschema ) )
+ $wgDBmwschema = 'mediawiki';
+
+ if ($wgDatabase->tableExists("mediawiki_version")) {
+ $version = "1.8";
+ }
+
+ if ($version == '1.7.1') {
+ $upgrade = <<<PGEND
+
+BEGIN;
+
+-- Type tweaking:
+ALTER TABLE oldimage ALTER oi_size TYPE INTEGER;
+ALTER TABLE oldimage ALTER oi_width TYPE INTEGER;
+ALTER TABLE oldimage ALTER oi_height TYPE INTEGER;
+
+ALTER TABLE image ALTER img_size TYPE INTEGER;
+ALTER TABLE image ALTER img_width TYPE INTEGER;
+ALTER TABLE image ALTER img_height TYPE INTEGER;
+
+-- Constraint tweaking:
+ALTER TABLE recentchanges ALTER rc_cur_id DROP NOT NULL;
+
+-- New columns:
+ALTER TABLE ipblocks ADD ipb_anon_only CHAR NOT NULL DEFAULT '0';
+ALTER TABLE ipblocks ADD ipb_create_account CHAR NOT NULL DEFAULT '1';
+
+-- Index order rearrangements:
+DROP INDEX pagelink_unique;
+CREATE UNIQUE INDEX pagelink_unique ON pagelinks (pl_from,pl_namespace,pl_title);
+
+-- Rename tables
+ALTER TABLE "user" RENAME TO mwuser;
+ALTER TABLE "text" RENAME to pagecontent;
+
+-- New tables:
+CREATE TABLE profiling (
+ pf_count INTEGER NOT NULL DEFAULT 0,
+ pf_time NUMERIC(18,10) NOT NULL DEFAULT 0,
+ pf_name TEXT NOT NULL,
+ pf_server TEXT NULL
+);
+CREATE UNIQUE INDEX pf_name_server ON profiling (pf_name, pf_server);
+
+CREATE TABLE mediawiki_version (
+ type TEXT NOT NULL,
+ mw_version TEXT NOT NULL,
+ notes TEXT NULL,
+
+ pg_version TEXT NULL,
+ pg_dbname TEXT NULL,
+ pg_user TEXT NULL,
+ pg_port TEXT NULL,
+ mw_schema TEXT NULL,
+ ts2_schema TEXT NULL,
+ ctype TEXT NULL,
+
+ sql_version TEXT NULL,
+ sql_date TEXT NULL,
+ cdate TIMESTAMPTZ NOT NULL DEFAULT now()
+);
+
+INSERT INTO mediawiki_version (type,mw_version,notes)
+VALUES ('Upgrade','MWVERSION','Upgrade from older version 1.7.1');
+
+-- Special modifications
+ALTER TABLE archive RENAME to archive2;
+CREATE VIEW archive AS
+SELECT
+ ar_namespace, ar_title, ar_text, ar_comment, ar_user, ar_user_text,
+ ar_minor_edit, ar_flags, ar_rev_id, ar_text_id,
+ TO_CHAR(ar_timestamp, 'YYYYMMDDHH24MISS') AS ar_timestamp
+FROM archive2;
+
+CREATE RULE archive_insert AS ON INSERT TO archive
+DO INSTEAD INSERT INTO archive2 VALUES (
+ NEW.ar_namespace, NEW.ar_title, NEW.ar_text, NEW.ar_comment, NEW.ar_user, NEW.ar_user_text,
+ TO_DATE(NEW.ar_timestamp, 'YYYYMMDDHH24MISS'),
+ NEW.ar_minor_edit, NEW.ar_flags, NEW.ar_rev_id, NEW.ar_text_id
+);
+
+CREATE FUNCTION page_deleted() RETURNS TRIGGER LANGUAGE plpgsql AS
+\$mw\$
+BEGIN
+DELETE FROM recentchanges WHERE rc_namespace = OLD.page_namespace AND rc_title = OLD.page_title;
+RETURN NULL;
+END;
+\$mw\$;
+
+CREATE TRIGGER page_deleted AFTER DELETE ON page
+ FOR EACH ROW EXECUTE PROCEDURE page_deleted();
+
+COMMIT;
+
+PGEND;
+
+ $upgrade = str_replace( 'MWVERSION', $wgVersion, $upgrade );
+
+ $res = $wgDatabase->query($upgrade);
+
+ } ## end version 1.7.1 upgrade
+
+ else {
+ print "No updates needed\n";
+ }
+
+ return;
+}
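+ # Note on the archive view created above: SELECTs see ar_timestamp re-formatted
+ # by TO_CHAR() as the usual 14-character YYYYMMDDHHMMSS string, while the
+ # archive_insert rule converts it back with TO_DATE() and writes the row into
+ # archive2, so callers written against the MySQL-style schema keep working.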
+
?>
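The schema updaters stay declarative: each $wgNewFields entry above names a table, a column and the archives/ patch that adds it, and the update run applies whichever are missing via dbsource( archive( ... ) ), much as do_rc_indices_update() does for its index. A hypothetical entry (table, column and file name are made up for illustration) would be registered like this:

	$wgNewFields[] = array( 'ipblocks', 'ipb_example_flag', 'patch-ipb_example_flag.sql' );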
diff --git a/maintenance/userDupes.inc b/maintenance/userDupes.inc
index f66051d4..e632f737 100644
--- a/maintenance/userDupes.inc
+++ b/maintenance/userDupes.inc
@@ -79,10 +79,8 @@ class UserDupes {
* @return bool
*/
function checkDupes( $doDelete = false ) {
- global $wgDBname;
-
if( $this->hasUniqueIndex() ) {
- echo "$wgDBname already has a unique index on its user table.\n";
+ echo wfWikiID()." already has a unique index on its user table.\n";
return true;
}
@@ -92,7 +90,7 @@ class UserDupes {
$dupes = $this->getDupes();
$count = count( $dupes );
- echo "Found $count accounts with duplicate records on $wgDBname.\n";
+ echo "Found $count accounts with duplicate records on ".wfWikiID().".\n";
$this->trimmed = 0;
$this->reassigned = 0;
$this->failed = 0;
@@ -114,9 +112,9 @@ class UserDupes {
if( $this->trimmed > 0 ) {
if( $doDelete ) {
- echo "$this->trimmed duplicate user records were deleted from $wgDBname.\n";
+ echo "$this->trimmed duplicate user records were deleted from ".wfWikiID().".\n";
} else {
- echo "$this->trimmed duplicate user accounts were found on $wgDBname which can be removed safely.\n";
+ echo "$this->trimmed duplicate user accounts were found on ".wfWikiID()." which can be removed safely.\n";
}
}
@@ -325,4 +323,4 @@ class UserDupes {
}
-?>
\ No newline at end of file
+?>