path: root/maintenance
author    Pierre Schmitz <pierre@archlinux.de>    2011-06-22 11:28:20 +0200
committer Pierre Schmitz <pierre@archlinux.de>    2011-06-22 11:28:20 +0200
commit    9db190c7e736ec8d063187d4241b59feaf7dc2d1 (patch)
tree      46d1a0dee7febef5c2d57a9f7b972be16a163b3d /maintenance
parent    78677c7bbdcc9739f6c10c75935898a20e1acd9e (diff)
update to MediaWiki 1.17.0
Diffstat (limited to 'maintenance')
-rw-r--r--maintenance/7zip.inc41
-rw-r--r--maintenance/Doxyfile18
-rw-r--r--maintenance/FiveUpgrade.inc1190
-rw-r--r--maintenance/Maintenance.php634
-rw-r--r--maintenance/Makefile2
-rw-r--r--maintenance/addwiki.php46
-rw-r--r--maintenance/archives/patch-archive_ar_revid.sql4
-rw-r--r--maintenance/archives/patch-archive_kill_ar_page_revid.sql4
-rw-r--r--maintenance/archives/patch-categorylinks-better-collation.sql19
-rw-r--r--maintenance/archives/patch-categorylinks-better-collation2.sql12
-rw-r--r--maintenance/archives/patch-iw_api_and_wikiid.sql9
-rw-r--r--maintenance/archives/patch-iwlinks.sql16
-rw-r--r--maintenance/archives/patch-kill-iwl_pft.sql7
-rw-r--r--maintenance/archives/patch-kill-iwl_prefix.sql7
-rw-r--r--maintenance/archives/patch-langlinks-ll_lang-20.sql3
-rw-r--r--maintenance/archives/patch-mime_minor_length.sql2
-rw-r--r--maintenance/archives/patch-module_deps.sql12
-rw-r--r--maintenance/archives/patch-msg_resource.sql20
-rw-r--r--maintenance/archives/patch-profiling.sql7
-rw-r--r--maintenance/archives/patch-rename-iwl_prefix.sql4
-rw-r--r--maintenance/archives/patch-tc-timestamp.sql2
-rw-r--r--maintenance/archives/patch-testrun.sql (renamed from maintenance/testRunner.sql)0
-rw-r--r--maintenance/archives/patch-ul_value.sql4
-rw-r--r--maintenance/archives/upgradeLogging.php4
-rw-r--r--maintenance/attachLatest.php30
-rw-r--r--maintenance/backup.inc97
-rw-r--r--maintenance/backupPrefetch.inc106
-rw-r--r--maintenance/benchmarkPurge.php47
-rw-r--r--maintenance/benchmarks/Benchmarker.php72
-rw-r--r--maintenance/benchmarks/bench_HTTP_HTTPS.php38
-rw-r--r--maintenance/benchmarks/bench_wfIsWindows.php42
-rw-r--r--maintenance/changePassword.php21
-rw-r--r--maintenance/checkAutoLoader.php11
-rw-r--r--maintenance/checkBadRedirects.php19
-rw-r--r--maintenance/checkImages.php23
-rw-r--r--maintenance/checkSyntax.php53
-rw-r--r--maintenance/checkUsernames.php7
-rw-r--r--maintenance/cleanupCaps.php25
-rw-r--r--maintenance/cleanupDupes.inc128
-rw-r--r--maintenance/cleanupImages.php61
-rw-r--r--maintenance/cleanupRemovedModules.php89
-rw-r--r--maintenance/cleanupSpam.php27
-rw-r--r--maintenance/cleanupTable.inc15
-rw-r--r--maintenance/cleanupTitles.php47
-rw-r--r--maintenance/cleanupWatchlist.php11
-rw-r--r--maintenance/clear_interwiki_cache.php7
-rw-r--r--maintenance/clear_stats.php33
-rw-r--r--maintenance/commandLine.inc7
-rw-r--r--maintenance/convertLinks.inc218
-rw-r--r--maintenance/convertLinks.php199
-rw-r--r--maintenance/convertUserOptions.php25
-rw-r--r--maintenance/createAndPromote.php28
-rw-r--r--maintenance/cssjanus/COPYING13
-rw-r--r--maintenance/cssjanus/LICENSE202
-rw-r--r--maintenance/cssjanus/README91
-rw-r--r--maintenance/cssjanus/cssjanus.py574
-rw-r--r--maintenance/cssjanus/csslex.py114
-rw-r--r--maintenance/deleteArchivedFiles.inc62
-rw-r--r--maintenance/deleteArchivedFiles.php48
-rw-r--r--maintenance/deleteArchivedRevisions.inc57
-rw-r--r--maintenance/deleteArchivedRevisions.php33
-rw-r--r--maintenance/deleteBatch.php31
-rw-r--r--maintenance/deleteDefaultMessages.php32
-rw-r--r--maintenance/deleteImageMemcached.php14
-rw-r--r--maintenance/deleteOldRevisions.php32
-rw-r--r--maintenance/deleteOrphanedRevisions.php27
-rw-r--r--maintenance/deleteRevision.php14
-rw-r--r--maintenance/deleteSelfExternals.php18
-rw-r--r--maintenance/doMaintenance.php40
-rw-r--r--maintenance/dumpBackup.php31
-rw-r--r--maintenance/dumpInterwiki.inc209
-rw-r--r--maintenance/dumpInterwiki.php238
-rw-r--r--maintenance/dumpLinks.php14
-rw-r--r--maintenance/dumpSisterSites.php9
-rw-r--r--maintenance/dumpTextPass.php448
-rw-r--r--maintenance/dumpUploads.php50
-rw-r--r--maintenance/edit.php22
-rw-r--r--maintenance/eval.php15
-rw-r--r--maintenance/fetchText.php39
-rw-r--r--maintenance/findhooks.php103
-rw-r--r--maintenance/fixSlaveDesync.php52
-rw-r--r--maintenance/fixTimestamps.php51
-rw-r--r--maintenance/fixUserRegistration.php4
-rw-r--r--maintenance/fuzz-tester.php3881
-rw-r--r--maintenance/gearman/gearman.inc2
-rw-r--r--maintenance/gearman/gearmanRefreshLinks.php12
-rw-r--r--maintenance/gearman/gearmanWorker.php6
-rw-r--r--maintenance/generateSitemap.php116
-rw-r--r--maintenance/getLagTimes.php12
-rw-r--r--maintenance/getSlaveServer.php12
-rw-r--r--maintenance/getText.php6
-rw-r--r--maintenance/httpSessionDownload.php12
-rw-r--r--maintenance/ibm_db2/README3
-rw-r--r--maintenance/ibm_db2/foreignkeys.sql107
-rw-r--r--maintenance/ibm_db2/tables.sql103
-rw-r--r--maintenance/importDump.php76
-rw-r--r--maintenance/importImages.inc45
-rw-r--r--maintenance/importImages.php184
-rw-r--r--maintenance/importTextFile.php12
-rw-r--r--maintenance/importUseModWiki.php90
-rw-r--r--maintenance/initEditCount.php30
-rw-r--r--maintenance/initStats.php13
-rw-r--r--maintenance/install-utils.inc208
-rw-r--r--maintenance/install.php95
-rw-r--r--maintenance/installExtension.php683
-rw-r--r--maintenance/interwiki.list97
-rw-r--r--maintenance/interwiki.sql2
-rw-r--r--maintenance/lag.php12
-rw-r--r--maintenance/language/StatOutputs.php23
-rw-r--r--maintenance/language/alltrans.php8
-rw-r--r--maintenance/language/checkDupeMessages.php82
-rw-r--r--maintenance/language/checkExtensions.php6
-rw-r--r--maintenance/language/checkLanguage.inc44
-rw-r--r--maintenance/language/checkLanguage.php4
-rw-r--r--maintenance/language/countMessages.php10
-rw-r--r--maintenance/language/date-formats.php6
-rw-r--r--maintenance/language/diffLanguage.php159
-rw-r--r--maintenance/language/digit2html.php20
-rw-r--r--maintenance/language/dumpMessages.php6
-rw-r--r--maintenance/language/function-list.php6
-rw-r--r--maintenance/language/generateCollationData.php381
-rw-r--r--maintenance/language/generateNormalizerData.php6
-rw-r--r--maintenance/language/lang2po.php75
-rw-r--r--maintenance/language/langmemusage.php10
-rw-r--r--maintenance/language/languages.inc4
-rw-r--r--maintenance/language/messageTypes.inc16
-rw-r--r--maintenance/language/messages.inc144
-rw-r--r--maintenance/language/rebuildLanguage.php37
-rw-r--r--maintenance/language/transstat.php8
-rw-r--r--maintenance/language/validate.php2
-rw-r--r--maintenance/language/writeMessagesArray.inc31
-rw-r--r--maintenance/mcc.php35
-rw-r--r--maintenance/mctest.php26
-rw-r--r--maintenance/mergeMessageFileList.php18
-rw-r--r--maintenance/migrateUserGroup.php16
-rw-r--r--maintenance/minify.php37
-rw-r--r--maintenance/moveBatch.php20
-rw-r--r--maintenance/mssql/README78
-rw-r--r--maintenance/mssql/tables.sql983
-rw-r--r--maintenance/mwdocgen.php69
-rw-r--r--maintenance/namespaceDupes.php136
-rw-r--r--maintenance/nextJobDB.php14
-rw-r--r--maintenance/nukeNS.php30
-rw-r--r--maintenance/nukePage.php14
-rw-r--r--maintenance/oracle/archives/patch-testrun.sql (renamed from maintenance/testRunner.ora.sql)0
-rw-r--r--maintenance/oracle/archives/patch_16_17_schema_changes.sql98
-rw-r--r--maintenance/oracle/archives/patch_create_17_functions.sql125
-rw-r--r--maintenance/oracle/archives/patch_fk_rename_deferred.sql41
-rw-r--r--maintenance/oracle/archives/patch_namespace_defaults.sql17
-rw-r--r--maintenance/oracle/patch_seq_names_pre1.16.sql (renamed from maintenance/ora/patch_seq_names_pre1.16.sql)0
-rw-r--r--maintenance/oracle/tables.sql (renamed from maintenance/ora/tables.sql)236
-rw-r--r--maintenance/oracle/user.sql (renamed from maintenance/ora/user.sql)4
-rw-r--r--maintenance/orphans.php83
-rw-r--r--maintenance/ourusers.php4
-rw-r--r--maintenance/parserTests.inc1719
-rw-r--r--maintenance/parserTestsParserHook.php34
-rw-r--r--maintenance/parserTestsParserTime.php26
-rw-r--r--maintenance/parserTestsStaticParserHook.php47
-rw-r--r--maintenance/patchSql.php14
-rw-r--r--maintenance/populateCategory.inc85
-rw-r--r--maintenance/populateCategory.php58
-rw-r--r--maintenance/populateLogSearch.inc80
-rw-r--r--maintenance/populateLogSearch.php81
-rw-r--r--maintenance/populateLogUsertext.php22
-rw-r--r--maintenance/populateParentId.inc83
-rw-r--r--maintenance/populateParentId.php41
-rw-r--r--maintenance/populateRevisionLength.php98
-rw-r--r--maintenance/populateSha1.php22
-rw-r--r--maintenance/postgres/archives/patch-categorylinks-better-collation.sql8
-rw-r--r--maintenance/postgres/archives/patch-change_tag.sql27
-rw-r--r--maintenance/postgres/archives/patch-iwlinks.sql8
-rw-r--r--maintenance/postgres/archives/patch-kill-iwl_pft.sql7
-rw-r--r--maintenance/postgres/archives/patch-kill-iwl_prefix.sql7
-rw-r--r--maintenance/postgres/archives/patch-mediawiki_version.sql18
-rw-r--r--maintenance/postgres/archives/patch-module_deps.sql7
-rw-r--r--maintenance/postgres/archives/patch-msg_resource.sql8
-rw-r--r--maintenance/postgres/archives/patch-msg_resource_links.sql6
-rw-r--r--maintenance/postgres/archives/patch-mwuser.sql1
-rw-r--r--maintenance/postgres/archives/patch-page.sql24
-rw-r--r--maintenance/postgres/archives/patch-pagecontent.sql1
-rw-r--r--maintenance/postgres/archives/patch-rename-iwl_prefix.sql2
-rw-r--r--maintenance/postgres/archives/patch-tag_summary.sql9
-rw-r--r--maintenance/postgres/archives/patch-testrun.sql (renamed from maintenance/testRunner.postgres.sql)0
-rw-r--r--maintenance/postgres/archives/patch-update_sequences.sql14
-rw-r--r--maintenance/postgres/archives/patch-valid_tag.sql3
-rw-r--r--maintenance/postgres/compare_schemas.pl63
-rw-r--r--maintenance/postgres/mediawiki_mysql2postgres.pl11
-rw-r--r--maintenance/postgres/tables.sql134
-rw-r--r--maintenance/preprocessorFuzzTest.php26
-rw-r--r--maintenance/protect.php19
-rw-r--r--maintenance/purgeList.php13
-rw-r--r--maintenance/purgeOldText.inc26
-rw-r--r--maintenance/purgeOldText.php8
-rw-r--r--maintenance/reassignEdits.php60
-rw-r--r--maintenance/rebuildFileCache.php51
-rw-r--r--maintenance/rebuildImages.php106
-rw-r--r--maintenance/rebuildInterwiki.inc259
-rw-r--r--maintenance/rebuildInterwiki.php278
-rw-r--r--maintenance/rebuildLocalisationCache.php28
-rw-r--r--maintenance/rebuildall.php6
-rw-r--r--maintenance/rebuildmessages.php12
-rw-r--r--maintenance/rebuildrecentchanges.php139
-rw-r--r--maintenance/rebuildtextindex.php40
-rw-r--r--maintenance/refreshImageCount.php8
-rw-r--r--maintenance/refreshLinks.php88
-rw-r--r--maintenance/removeUnusedAccounts.php32
-rw-r--r--maintenance/renameDbPrefix.php34
-rw-r--r--maintenance/renamewiki.php16
-rw-r--r--maintenance/renderDump.php41
-rw-r--r--maintenance/rollbackEdits.php20
-rw-r--r--maintenance/runBatchedQuery.php8
-rw-r--r--maintenance/runJobs.php28
-rw-r--r--maintenance/showJobs.php8
-rw-r--r--maintenance/showStats.php19
-rw-r--r--maintenance/sql.php8
-rw-r--r--maintenance/sqlite.inc67
-rw-r--r--maintenance/sqlite.php30
-rw-r--r--maintenance/sqlite/README12
-rw-r--r--maintenance/sqlite/archives/initial-indexes.sql2
-rw-r--r--maintenance/sqlite/archives/patch-categorylinks-better-collation.sql7
-rw-r--r--maintenance/sqlite/archives/patch-iw_api_and_wikiid.sql19
-rw-r--r--maintenance/sqlite/archives/patch-kill-iwl_pft.sql7
-rw-r--r--maintenance/sqlite/archives/patch-kill-iwl_prefix.sql7
-rw-r--r--maintenance/sqlite/archives/patch-log_search-rename-index.sql1
-rw-r--r--maintenance/sqlite/archives/patch-rename-iwl_prefix.sql5
-rw-r--r--maintenance/sqlite/archives/patch-tc-timestamp.sql2
-rw-r--r--maintenance/sqlite/archives/searchindex-fts3.sql2
-rw-r--r--maintenance/stats.php81
-rw-r--r--maintenance/storage/checkStorage.php74
-rw-r--r--maintenance/storage/compressOld.inc21
-rw-r--r--maintenance/storage/compressOld.php20
-rw-r--r--maintenance/storage/dumpRev.php16
-rw-r--r--maintenance/storage/fixBug20757.php43
-rw-r--r--maintenance/storage/moveToExternal.php30
-rw-r--r--maintenance/storage/orphanStats.php13
-rw-r--r--maintenance/storage/recompressTracked.php70
-rw-r--r--maintenance/storage/resolveStubs.php21
-rw-r--r--maintenance/storage/storageTypeStats.php7
-rw-r--r--maintenance/storage/testCompression.php12
-rw-r--r--maintenance/storage/trackBlobs.php56
-rw-r--r--maintenance/tables.sql375
-rw-r--r--maintenance/tests/ApiSetup.php39
-rw-r--r--maintenance/tests/ApiTest.php164
-rw-r--r--maintenance/tests/CdbTest.php79
-rw-r--r--maintenance/tests/DatabaseSqliteTest.php57
-rw-r--r--maintenance/tests/DatabaseTest.php92
-rw-r--r--maintenance/tests/GlobalTest.php212
-rw-r--r--maintenance/tests/HttpTest.php567
-rw-r--r--maintenance/tests/IPTest.php52
-rw-r--r--maintenance/tests/ImageFunctionsTest.php48
-rw-r--r--maintenance/tests/LanguageConverterTest.php148
-rw-r--r--maintenance/tests/LicensesTest.php17
-rw-r--r--maintenance/tests/LocalFileTest.php97
-rw-r--r--maintenance/tests/Makefile23
-rw-r--r--maintenance/tests/MediaWikiParserTest.php283
-rw-r--r--maintenance/tests/MediaWiki_Setup.php28
-rw-r--r--maintenance/tests/README24
-rw-r--r--maintenance/tests/RevisionTest.php114
-rw-r--r--maintenance/tests/RunSeleniumTests.php220
-rw-r--r--maintenance/tests/SanitizerTest.php73
-rw-r--r--maintenance/tests/SearchEngineTest.php138
-rw-r--r--maintenance/tests/SearchMySQLTest.php26
-rw-r--r--maintenance/tests/SearchUpdateTest.php103
-rw-r--r--maintenance/tests/SiteConfigurationTest.php311
-rw-r--r--maintenance/tests/TimeAdjustTest.php40
-rw-r--r--maintenance/tests/TitleTest.php17
-rw-r--r--maintenance/tests/XmlTest.php115
-rw-r--r--maintenance/tests/bootstrap.php15
-rw-r--r--maintenance/tests/parser/ExtraParserTests.txt    bin 0 -> 1261 bytes
-rw-r--r--maintenance/tests/parser/parserTest.inc1305
-rw-r--r--maintenance/tests/parser/parserTests.txt (renamed from maintenance/parserTests.txt)798
-rw-r--r--maintenance/tests/parser/parserTestsParserHook.php46
-rw-r--r--maintenance/tests/parser/parserTestsStaticParserHook.php58
-rw-r--r--maintenance/tests/parserTests.php (renamed from maintenance/parserTests.php)60
-rw-r--r--maintenance/tests/phpunit.xml17
-rw-r--r--maintenance/tests/selenium/Selenium.php190
-rw-r--r--maintenance/tests/selenium/SeleniumConfig.php88
-rw-r--r--maintenance/tests/selenium/SeleniumLoader.php9
-rw-r--r--maintenance/tests/selenium/SeleniumServerManager.php239
-rw-r--r--maintenance/tests/selenium/SeleniumTestCase.php103
-rw-r--r--maintenance/tests/selenium/SeleniumTestConsoleLogger.php25
-rw-r--r--maintenance/tests/selenium/SeleniumTestHTMLLogger.php36
-rw-r--r--maintenance/tests/selenium/SeleniumTestListener.php68
-rw-r--r--maintenance/tests/selenium/SeleniumTestSuite.php46
-rw-r--r--maintenance/tests/selenium/data/Wikipedia-logo-v2-de.png    bin 0 -> 21479 bytes
-rw-r--r--maintenance/tests/selenium/selenium_settings.ini.php52.sample23
-rw-r--r--maintenance/tests/selenium/selenium_settings.ini.sample32
-rw-r--r--maintenance/tests/selenium/selenium_settings_grid.ini.sample14
-rw-r--r--maintenance/tests/selenium/suites/AddContentToNewPageTestCase.php182
-rw-r--r--maintenance/tests/selenium/suites/AddNewPageTestCase.php65
-rw-r--r--maintenance/tests/selenium/suites/CreateAccountTestCase.php114
-rw-r--r--maintenance/tests/selenium/suites/DeletePageAdminTestCase.php89
-rw-r--r--maintenance/tests/selenium/suites/EmailPasswordTestCase.php81
-rw-r--r--maintenance/tests/selenium/suites/MediaWikExtraTestSuite.php20
-rw-r--r--maintenance/tests/selenium/suites/MediaWikiEditorConfig.php47
-rw-r--r--maintenance/tests/selenium/suites/MediaWikiEditorTestSuite.php18
-rw-r--r--maintenance/tests/selenium/suites/MediawikiCoreSmokeTestCase.php69
-rw-r--r--maintenance/tests/selenium/suites/MediawikiCoreSmokeTestSuite.php19
-rw-r--r--maintenance/tests/selenium/suites/MovePageTestCase.php117
-rw-r--r--maintenance/tests/selenium/suites/MyContributionsTestCase.php76
-rw-r--r--maintenance/tests/selenium/suites/MyWatchListTestCase.php73
-rw-r--r--maintenance/tests/selenium/suites/PageDeleteTestSuite.php16
-rw-r--r--maintenance/tests/selenium/suites/PageSearchTestCase.php105
-rw-r--r--maintenance/tests/selenium/suites/PreviewPageTestCase.php53
-rw-r--r--maintenance/tests/selenium/suites/SavePageTestCase.php58
-rw-r--r--maintenance/tests/selenium/suites/SimpleSeleniumConfig.php15
-rw-r--r--maintenance/tests/selenium/suites/SimpleSeleniumTestCase.php30
-rw-r--r--maintenance/tests/selenium/suites/SimpleSeleniumTestSuite.php26
-rw-r--r--maintenance/tests/selenium/suites/UserPreferencesTestCase.php179
-rw-r--r--maintenance/tests/test-prefetch-current.xml75
-rw-r--r--maintenance/tests/test-prefetch-previous.xml57
-rw-r--r--maintenance/tests/test-prefetch-stub.xml75
-rw-r--r--maintenance/tests/testHelpers.inc652
-rw-r--r--maintenance/undelete.php4
-rw-r--r--maintenance/update.php135
-rw-r--r--maintenance/updateArticleCount.php14
-rw-r--r--maintenance/updateCollation.php145
-rw-r--r--maintenance/updateDoubleWidthSearch.php71
-rw-r--r--maintenance/updateRestrictions.php35
-rw-r--r--maintenance/updateSearchIndex.php115
-rw-r--r--maintenance/updateSpecialPages.php25
-rw-r--r--maintenance/updaters.inc1979
-rw-r--r--maintenance/upgrade1_5.php1302
-rw-r--r--maintenance/userDupes.inc127
-rw-r--r--maintenance/userOptions.inc90
-rw-r--r--maintenance/userOptions.php2
-rw-r--r--maintenance/waitForSlave.php5
327 files changed, 16738 insertions, 16261 deletions
diff --git a/maintenance/7zip.inc b/maintenance/7zip.inc
index 617083bf..4ac480ed 100644
--- a/maintenance/7zip.inc
+++ b/maintenance/7zip.inc
@@ -1,5 +1,12 @@
<?php
/**
+ * 7z stream wrapper
+ *
+ * @file
+ * @ingroup Maintenance
+ */
+
+/**
* Stream wrapper around 7za filter program.
* Required since we can't pass an open file resource to XMLReader->open()
* which is used for the text prefetch.
@@ -8,62 +15,62 @@
*/
class SevenZipStream {
var $stream;
-
+
private function stripPath( $path ) {
$prefix = 'mediawiki.compress.7z://';
return substr( $path, strlen( $prefix ) );
}
-
+
function stream_open( $path, $mode, $options, &$opened_path ) {
- if( $mode[0] == 'r' ) {
+ if ( $mode[0] == 'r' ) {
$options = 'e -bd -so';
- } elseif( $mode[0] == 'w' ) {
+ } elseif ( $mode[0] == 'w' ) {
$options = 'a -bd -si';
} else {
return false;
}
$arg = wfEscapeShellArg( $this->stripPath( $path ) );
$command = "7za $options $arg";
- if( !wfIsWindows() ) {
+ if ( !wfIsWindows() ) {
// Suppress the stupid messages on stderr
$command .= ' 2>/dev/null';
}
- $this->stream = popen( $command, $mode );
- return ($this->stream !== false);
+ $this->stream = popen( $command, $mode[0] ); // popen() doesn't like two-letter modes
+ return ( $this->stream !== false );
}
-
+
function url_stat( $path, $flags ) {
return stat( $this->stripPath( $path ) );
}
-
+
// This is all so lame; there should be a default class we can extend
-
+
function stream_close() {
return fclose( $this->stream );
}
-
+
function stream_flush() {
return fflush( $this->stream );
}
-
+
function stream_read( $count ) {
return fread( $this->stream, $count );
}
-
+
function stream_write( $data ) {
return fwrite( $this->stream, $data );
}
-
+
function stream_tell() {
return ftell( $this->stream );
}
-
+
function stream_eof() {
return feof( $this->stream );
}
-
+
function stream_seek( $offset, $whence ) {
return fseek( $this->stream, $offset, $whence );
}
}
-stream_wrapper_register( 'mediawiki.compress.7z', 'SevenZipStream' ); \ No newline at end of file
+stream_wrapper_register( 'mediawiki.compress.7z', 'SevenZipStream' );
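
The hunk above registers the SevenZipStream wrapper under the mediawiki.compress.7z:// scheme so that XMLReader->open() and ordinary PHP stream functions can read and write dumps through the external 7za binary; the popen() change passes only the first mode character because popen() rejects two-letter modes. A minimal usage sketch follows (the dump file names are illustrative, and since the wrapper calls MediaWiki's wfEscapeShellArg()/wfIsWindows(), it is assumed to run inside a MediaWiki maintenance context):

<?php
// Illustrative sketch only: file names are hypothetical, and 7zip.inc depends
// on MediaWiki globals (wfEscapeShellArg, wfIsWindows), so this is assumed to
// run from a MediaWiki maintenance script.
require_once( dirname( __FILE__ ) . '/7zip.inc' ); // registers the wrapper

// Reading: XMLReader->open() accepts a URL, which is the reason the wrapper
// exists (an already-open file resource cannot be handed to it).
$reader = new XMLReader();
$reader->open( 'mediawiki.compress.7z://pages-dump.xml.7z' );

// Writing: mode 'w' maps to "7za a -bd -si", so plain stream calls work too.
$out = fopen( 'mediawiki.compress.7z://new-dump.xml.7z', 'w' );
fwrite( $out, "<mediawiki>...</mediawiki>\n" );
fclose( $out );
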
diff --git a/maintenance/Doxyfile b/maintenance/Doxyfile
index db737bff..3d037651 100644
--- a/maintenance/Doxyfile
+++ b/maintenance/Doxyfile
@@ -9,6 +9,7 @@
# {{INPUT}}
#
# A number of MediaWiki-specific aliases are near the end of this file.
+# To generate documentation run: php mwdocgen.php --no-extensions
#---------------------------------------------------------------------------
# Project related configuration options
@@ -39,7 +40,6 @@ STRIP_FROM_INC_PATH =
SHORT_NAMES = NO
JAVADOC_AUTOBRIEF = YES
MULTILINE_CPP_IS_BRIEF = NO
-DETAILS_AT_TOP = NO
INHERIT_DOCS = YES
SEPARATE_MEMBER_PAGES = NO
TAB_SIZE = 8
@@ -76,6 +76,7 @@ ENABLED_SECTIONS =
MAX_INITIALIZER_LINES = 30
SHOW_USED_FILES = YES
SHOW_DIRECTORIES = NO
+SHOW_NAMESPACES = NO
FILE_VERSION_FILTER = {{SVNSTAT}}
#---------------------------------------------------------------------------
# configuration options related to warning and progress messages
@@ -135,7 +136,7 @@ FILE_PATTERNS = *.c \
RECURSIVE = YES
EXCLUDE =
EXCLUDE_SYMLINKS = YES
-EXCLUDE_PATTERNS = LocalSettings.php AdminSettings.php .svn {{EXCLUDE}}
+EXCLUDE_PATTERNS = LocalSettings.php AdminSettings.php StartProfiler.php .svn */.git/* {{EXCLUDE}}
EXAMPLE_PATH =
EXAMPLE_PATTERNS = *
EXAMPLE_RECURSIVE = NO
@@ -245,7 +246,7 @@ SKIP_FUNCTION_MACROS = YES
# Configuration::additions related to external references
#---------------------------------------------------------------------------
TAGFILES =
-GENERATE_TAGFILE =
+GENERATE_TAGFILE = {{OUTPUT_DIRECTORY}}/html/tagfile.xml
ALLEXTERNALS = NO
EXTERNAL_GROUPS = YES
PERL_PATH = /usr/bin/perl
@@ -284,13 +285,16 @@ ALIASES = "type{1}=<b> \1 </b>:" \
"arrayof{2}=<b> Array </b> of \2" \
"null=\type{Null}" \
"boolean=\type{Boolean}" \
- "bool=\boolean" \
+ "bool=\type{Boolean}" \
"integer=\type{Integer}" \
- "int=\integer" \
+ "int=\type{Integer}" \
"string=\type{String}" \
- "str=\string" \
+ "str=\type{String}" \
"mixed=\type{Mixed}" \
"access=\par Access:\n" \
"private=\access private" \
"protected=\access protected" \
- "public=\access public" \ No newline at end of file
+ "public=\access public" \
+ "copyright=\note" \
+ "license=\note"
+
diff --git a/maintenance/FiveUpgrade.inc b/maintenance/FiveUpgrade.inc
deleted file mode 100644
index be0112e9..00000000
--- a/maintenance/FiveUpgrade.inc
+++ /dev/null
@@ -1,1190 +0,0 @@
-<?php
-/**
- * @file
- * @ingroup Maintenance
- */
-
-require_once( 'cleanupDupes.inc' );
-require_once( 'userDupes.inc' );
-require_once( 'updaters.inc' );
-
-define( 'MW_UPGRADE_COPY', false );
-define( 'MW_UPGRADE_ENCODE', true );
-define( 'MW_UPGRADE_NULL', null );
-define( 'MW_UPGRADE_CALLBACK', null ); // for self-documentation only
-
-/**
- * @ingroup Maintenance
- */
-class FiveUpgrade {
- function FiveUpgrade() {
- $this->conversionTables = $this->prepareWindows1252();
-
- $this->loadBalancers = array();
- $this->dbw = wfGetDB( DB_MASTER );
- $this->dbr = $this->streamConnection();
-
- $this->cleanupSwaps = array();
- $this->emailAuth = false; # don't preauthenticate emails
- $this->maxLag = 10; # if slaves are lagged more than 10 secs, wait
- }
-
- function doing( $step ) {
- return is_null( $this->step ) || $step == $this->step;
- }
-
- function upgrade( $step ) {
- $this->step = $step;
-
- $tables = array(
- 'page',
- 'links',
- 'user',
- 'image',
- 'oldimage',
- 'watchlist',
- 'logging',
- 'archive',
- 'imagelinks',
- 'categorylinks',
- 'ipblocks',
- 'recentchanges',
- 'querycache' );
- foreach( $tables as $table ) {
- if( $this->doing( $table ) ) {
- $method = 'upgrade' . ucfirst( $table );
- $this->$method();
- }
- }
-
- if( $this->doing( 'cleanup' ) ) {
- $this->upgradeCleanup();
- }
- }
-
-
- /**
- * Open a connection to the master server with the admin rights.
- * @return Database
- * @access private
- */
- function newConnection() {
- $lb = wfGetLBFactory()->newMainLB();
- $db = $lb->getConnection( DB_MASTER );
-
- $this->loadBalancers[] = $lb;
- return $db;
- }
-
- /**
- * Commit transactions and close the connections when we're done...
- */
- function close() {
- foreach( $this->loadBalancers as $lb ) {
- $lb->commitMasterChanges();
- $lb->closeAll();
- }
- }
-
- /**
- * Open a second connection to the master server, with buffering off.
- * This will let us stream large datasets in and write in chunks on the
- * other end.
- * @return Database
- * @access private
- */
- function streamConnection() {
- global $wgDBtype;
-
- $timeout = 3600 * 24;
- $db = $this->newConnection();
- $db->bufferResults( false );
- if ($wgDBtype == 'mysql') {
- $db->query( "SET net_read_timeout=$timeout" );
- $db->query( "SET net_write_timeout=$timeout" );
- }
- return $db;
- }
-
- /**
- * Prepare a conversion array for converting Windows Code Page 1252 to
- * UTF-8. This should provide proper conversion of text that was miscoded
- * as Windows-1252 by naughty user-agents, and doesn't rely on an outside
- * iconv library.
- *
- * @return array
- * @access private
- */
- function prepareWindows1252() {
- # Mappings from:
- # http://www.unicode.org/Public/MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1252.TXT
- static $cp1252 = array(
- 0x80 => 0x20AC, #EURO SIGN
- 0x81 => 0xFFFD, #REPLACEMENT CHARACTER (no mapping)
- 0x82 => 0x201A, #SINGLE LOW-9 QUOTATION MARK
- 0x83 => 0x0192, #LATIN SMALL LETTER F WITH HOOK
- 0x84 => 0x201E, #DOUBLE LOW-9 QUOTATION MARK
- 0x85 => 0x2026, #HORIZONTAL ELLIPSIS
- 0x86 => 0x2020, #DAGGER
- 0x87 => 0x2021, #DOUBLE DAGGER
- 0x88 => 0x02C6, #MODIFIER LETTER CIRCUMFLEX ACCENT
- 0x89 => 0x2030, #PER MILLE SIGN
- 0x8A => 0x0160, #LATIN CAPITAL LETTER S WITH CARON
- 0x8B => 0x2039, #SINGLE LEFT-POINTING ANGLE QUOTATION MARK
- 0x8C => 0x0152, #LATIN CAPITAL LIGATURE OE
- 0x8D => 0xFFFD, #REPLACEMENT CHARACTER (no mapping)
- 0x8E => 0x017D, #LATIN CAPITAL LETTER Z WITH CARON
- 0x8F => 0xFFFD, #REPLACEMENT CHARACTER (no mapping)
- 0x90 => 0xFFFD, #REPLACEMENT CHARACTER (no mapping)
- 0x91 => 0x2018, #LEFT SINGLE QUOTATION MARK
- 0x92 => 0x2019, #RIGHT SINGLE QUOTATION MARK
- 0x93 => 0x201C, #LEFT DOUBLE QUOTATION MARK
- 0x94 => 0x201D, #RIGHT DOUBLE QUOTATION MARK
- 0x95 => 0x2022, #BULLET
- 0x96 => 0x2013, #EN DASH
- 0x97 => 0x2014, #EM DASH
- 0x98 => 0x02DC, #SMALL TILDE
- 0x99 => 0x2122, #TRADE MARK SIGN
- 0x9A => 0x0161, #LATIN SMALL LETTER S WITH CARON
- 0x9B => 0x203A, #SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
- 0x9C => 0x0153, #LATIN SMALL LIGATURE OE
- 0x9D => 0xFFFD, #REPLACEMENT CHARACTER (no mapping)
- 0x9E => 0x017E, #LATIN SMALL LETTER Z WITH CARON
- 0x9F => 0x0178, #LATIN CAPITAL LETTER Y WITH DIAERESIS
- );
- $pairs = array();
- for( $i = 0; $i < 0x100; $i++ ) {
- $unicode = isset( $cp1252[$i] ) ? $cp1252[$i] : $i;
- $pairs[chr( $i )] = codepointToUtf8( $unicode );
- }
- return $pairs;
- }
-
- /**
- * Convert from 8-bit Windows-1252 to UTF-8 if necessary.
- * @param string $text
- * @return string
- * @access private
- */
- function conv( $text ) {
- global $wgUseLatin1;
- return is_null( $text )
- ? null
- : ( $wgUseLatin1
- ? strtr( $text, $this->conversionTables )
- : $text );
- }
-
- /**
- * Dump timestamp and message to output
- * @param string $message
- * @access private
- */
- function log( $message ) {
- echo wfWikiID() . ' ' . wfTimestamp( TS_DB ) . ': ' . $message . "\n";
- flush();
- }
-
- /**
- * Initialize the chunked-insert system.
- * Rows will be inserted in chunks of the given number, rather
- * than in a giant INSERT...SELECT query, to keep the serialized
- * MySQL database replication from getting hung up. This way other
- * things can be going on during conversion without waiting for
- * slaves to catch up as badly.
- *
- * @param int $chunksize Number of rows to insert at once
- * @param int $final Total expected number of rows / id of last row,
- * used for progress reports.
- * @param string $table to insert on
- * @param string $fname function name to report in SQL
- * @access private
- */
- function setChunkScale( $chunksize, $final, $table, $fname ) {
- $this->chunkSize = $chunksize;
- $this->chunkFinal = $final;
- $this->chunkCount = 0;
- $this->chunkStartTime = wfTime();
- $this->chunkOptions = array( 'IGNORE' );
- $this->chunkTable = $table;
- $this->chunkFunction = $fname;
- }
-
- /**
- * Chunked inserts: perform an insert if we've reached the chunk limit.
- * Prints a progress report with estimated completion time.
- * @param array &$chunk -- This will be emptied if an insert is done.
- * @param int $key A key identifier to use in progress estimation in
- * place of the number of rows inserted. Use this if
- * you provided a max key number instead of a count
- * as the final chunk number in setChunkScale()
- * @access private
- */
- function addChunk( &$chunk, $key = null ) {
- if( count( $chunk ) >= $this->chunkSize ) {
- $this->insertChunk( $chunk );
-
- $this->chunkCount += count( $chunk );
- $now = wfTime();
- $delta = $now - $this->chunkStartTime;
- $rate = $this->chunkCount / $delta;
-
- if( is_null( $key ) ) {
- $completed = $this->chunkCount;
- } else {
- $completed = $key;
- }
- $portion = $completed / $this->chunkFinal;
-
- $estimatedTotalTime = $delta / $portion;
- $eta = $this->chunkStartTime + $estimatedTotalTime;
-
- printf( "%s: %6.2f%% done on %s; ETA %s [%d/%d] %.2f/sec\n",
- wfTimestamp( TS_DB, intval( $now ) ),
- $portion * 100.0,
- $this->chunkTable,
- wfTimestamp( TS_DB, intval( $eta ) ),
- $completed,
- $this->chunkFinal,
- $rate );
- flush();
-
- $chunk = array();
- }
- }
-
- /**
- * Chunked inserts: perform an insert unconditionally, at the end, and log.
- * @param array &$chunk -- This will be emptied if an insert is done.
- * @access private
- */
- function lastChunk( &$chunk ) {
- $n = count( $chunk );
- if( $n > 0 ) {
- $this->insertChunk( $chunk );
- }
- $this->log( "100.00% done on $this->chunkTable (last chunk $n rows)." );
- }
-
- /**
- * Chunked inserts: perform an insert.
- * @param array &$chunk -- This will be emptied if an insert is done.
- * @access private
- */
- function insertChunk( &$chunk ) {
- // Give slaves a chance to catch up
- wfWaitForSlaves( $this->maxLag );
- $this->dbw->insert( $this->chunkTable, $chunk, $this->chunkFunction, $this->chunkOptions );
- }
-
-
- /**
- * Copy and transcode a table to table_temp.
- * @param string $name Base name of the source table
- * @param string $tabledef CREATE TABLE definition, w/ $1 for the name
- * @param array $fields set of destination fields to these constants:
- * MW_UPGRADE_COPY - straight copy
- * MW_UPGRADE_ENCODE - for old Latin1 wikis, conv to UTF-8
- * MW_UPGRADE_NULL - just put NULL
- * @param callable $callback An optional callback to modify the data
- * or perform other processing. Func should be
- * ( object $row, array $copy ) and return $copy
- * @access private
- */
- function copyTable( $name, $tabledef, $fields, $callback = null ) {
- $fname = 'FiveUpgrade::copyTable';
-
- $name_temp = $name . '_temp';
- $this->log( "Migrating $name table to $name_temp..." );
-
- $table_temp = $this->dbw->tableName( $name_temp );
-
- // Create temporary table; we're going to copy everything in there,
- // then at the end rename the final tables into place.
- $def = str_replace( '$1', $table_temp, $tabledef );
- $this->dbw->query( $def, $fname );
-
- $numRecords = $this->dbw->selectField( $name, 'COUNT(*)', '', $fname );
- $this->setChunkScale( 100, $numRecords, $name_temp, $fname );
-
- // Pull all records from the second, streaming database connection.
- $sourceFields = array_keys( array_filter( $fields,
- create_function( '$x', 'return $x !== MW_UPGRADE_NULL;' ) ) );
- $result = $this->dbr->select( $name,
- $sourceFields,
- '',
- $fname );
-
- $add = array();
- while( $row = $this->dbr->fetchObject( $result ) ) {
- $copy = array();
- foreach( $fields as $field => $source ) {
- if( $source === MW_UPGRADE_COPY ) {
- $copy[$field] = $row->$field;
- } elseif( $source === MW_UPGRADE_ENCODE ) {
- $copy[$field] = $this->conv( $row->$field );
- } elseif( $source === MW_UPGRADE_NULL ) {
- $copy[$field] = null;
- } else {
- $this->log( "Unknown field copy type: $field => $source" );
- }
- }
- if( is_callable( $callback ) ) {
- $copy = call_user_func( $callback, $row, $copy );
- }
- $add[] = $copy;
- $this->addChunk( $add );
- }
- $this->lastChunk( $add );
- $this->dbr->freeResult( $result );
-
- $this->log( "Done converting $name." );
- $this->cleanupSwaps[] = $name;
- }
-
- function upgradePage() {
- $fname = "FiveUpgrade::upgradePage";
- $chunksize = 100;
-
- if( $this->dbw->tableExists( 'page' ) ) {
- $this->log( 'Page table already exists; aborting.' );
- die( -1 );
- }
-
- $this->log( "Checking cur table for unique title index and applying if necessary" );
- checkDupes( true );
-
- $this->log( "...converting from cur/old to page/revision/text DB structure." );
-
- list ($cur, $old, $page, $revision, $text) = $this->dbw->tableNamesN( 'cur', 'old', 'page', 'revision', 'text' );
-
- $this->log( "Creating page and revision tables..." );
- $this->dbw->query("CREATE TABLE $page (
- page_id int(8) unsigned NOT NULL auto_increment,
- page_namespace int NOT NULL,
- page_title varchar(255) binary NOT NULL,
- page_restrictions tinyblob NOT NULL default '',
- page_counter bigint(20) unsigned NOT NULL default '0',
- page_is_redirect tinyint(1) unsigned NOT NULL default '0',
- page_is_new tinyint(1) unsigned NOT NULL default '0',
- page_random real unsigned NOT NULL,
- page_touched char(14) binary NOT NULL default '',
- page_latest int(8) unsigned NOT NULL,
- page_len int(8) unsigned NOT NULL,
-
- PRIMARY KEY page_id (page_id),
- UNIQUE INDEX name_title (page_namespace,page_title),
- INDEX (page_random),
- INDEX (page_len)
- ) TYPE=InnoDB", $fname );
- $this->dbw->query("CREATE TABLE $revision (
- rev_id int(8) unsigned NOT NULL auto_increment,
- rev_page int(8) unsigned NOT NULL,
- rev_text_id int(8) unsigned NOT NULL,
- rev_comment tinyblob NOT NULL default '',
- rev_user int(5) unsigned NOT NULL default '0',
- rev_user_text varchar(255) binary NOT NULL default '',
- rev_timestamp char(14) binary NOT NULL default '',
- rev_minor_edit tinyint(1) unsigned NOT NULL default '0',
- rev_deleted tinyint(1) unsigned NOT NULL default '0',
-
- PRIMARY KEY rev_page_id (rev_page, rev_id),
- UNIQUE INDEX rev_id (rev_id),
- INDEX rev_timestamp (rev_timestamp),
- INDEX page_timestamp (rev_page,rev_timestamp),
- INDEX user_timestamp (rev_user,rev_timestamp),
- INDEX usertext_timestamp (rev_user_text,rev_timestamp)
- ) TYPE=InnoDB", $fname );
-
- $maxold = intval( $this->dbw->selectField( 'old', 'max(old_id)', '', $fname ) );
- $this->log( "Last old record is {$maxold}" );
-
- global $wgLegacySchemaConversion;
- if( $wgLegacySchemaConversion ) {
- // Create HistoryBlobCurStub entries.
- // Text will be pulled from the leftover 'cur' table at runtime.
- echo "......Moving metadata from cur; using blob references to text in cur table.\n";
- $cur_text = "concat('O:18:\"historyblobcurstub\":1:{s:6:\"mCurId\";i:',cur_id,';}')";
- $cur_flags = "'object'";
- } else {
- // Copy all cur text in immediately: this may take longer but avoids
- // having to keep an extra table around.
- echo "......Moving text from cur.\n";
- $cur_text = 'cur_text';
- $cur_flags = "''";
- }
-
- $maxcur = $this->dbw->selectField( 'cur', 'max(cur_id)', '', $fname );
- $this->log( "Last cur entry is $maxcur" );
-
- /**
- * Copy placeholder records for each page's current version into old
- * Don't do any conversion here; text records are converted at runtime
- * based on the flags (and may be originally binary!) while the meta
- * fields will be converted in the old -> rev and cur -> page steps.
- */
- $this->setChunkScale( $chunksize, $maxcur, 'old', $fname );
- $result = $this->dbr->query(
- "SELECT cur_id, cur_namespace, cur_title, $cur_text AS text, cur_comment,
- cur_user, cur_user_text, cur_timestamp, cur_minor_edit, $cur_flags AS flags
- FROM $cur
- ORDER BY cur_id", $fname );
- $add = array();
- while( $row = $this->dbr->fetchObject( $result ) ) {
- $add[] = array(
- 'old_namespace' => $row->cur_namespace,
- 'old_title' => $row->cur_title,
- 'old_text' => $row->text,
- 'old_comment' => $row->cur_comment,
- 'old_user' => $row->cur_user,
- 'old_user_text' => $row->cur_user_text,
- 'old_timestamp' => $row->cur_timestamp,
- 'old_minor_edit' => $row->cur_minor_edit,
- 'old_flags' => $row->flags );
- $this->addChunk( $add, $row->cur_id );
- }
- $this->lastChunk( $add );
- $this->dbr->freeResult( $result );
-
- /**
- * Copy revision metadata from old into revision.
- * We'll also do UTF-8 conversion of usernames and comments.
- */
- #$newmaxold = $this->dbw->selectField( 'old', 'max(old_id)', '', $fname );
- #$this->setChunkScale( $chunksize, $newmaxold, 'revision', $fname );
- #$countold = $this->dbw->selectField( 'old', 'count(old_id)', '', $fname );
- $countold = $this->dbw->selectField( 'old', 'max(old_id)', '', $fname );
- $this->setChunkScale( $chunksize, $countold, 'revision', $fname );
-
- $this->log( "......Setting up revision table." );
- $result = $this->dbr->query(
- "SELECT old_id, cur_id, old_comment, old_user, old_user_text,
- old_timestamp, old_minor_edit
- FROM $old,$cur WHERE old_namespace=cur_namespace AND old_title=cur_title",
- $fname );
-
- $add = array();
- while( $row = $this->dbr->fetchObject( $result ) ) {
- $add[] = array(
- 'rev_id' => $row->old_id,
- 'rev_page' => $row->cur_id,
- 'rev_text_id' => $row->old_id,
- 'rev_comment' => $this->conv( $row->old_comment ),
- 'rev_user' => $row->old_user,
- 'rev_user_text' => $this->conv( $row->old_user_text ),
- 'rev_timestamp' => $row->old_timestamp,
- 'rev_minor_edit' => $row->old_minor_edit );
- $this->addChunk( $add );
- }
- $this->lastChunk( $add );
- $this->dbr->freeResult( $result );
-
-
- /**
- * Copy page metadata from cur into page.
- * We'll also do UTF-8 conversion of titles.
- */
- $this->log( "......Setting up page table." );
- $this->setChunkScale( $chunksize, $maxcur, 'page', $fname );
- $result = $this->dbr->query( "
- SELECT cur_id, cur_namespace, cur_title, cur_restrictions, cur_counter, cur_is_redirect, cur_is_new,
- cur_random, cur_touched, rev_id, LENGTH(cur_text) AS len
- FROM $cur,$revision
- WHERE cur_id=rev_page AND rev_timestamp=cur_timestamp AND rev_id > {$maxold}
- ORDER BY cur_id", $fname );
- $add = array();
- while( $row = $this->dbr->fetchObject( $result ) ) {
- $add[] = array(
- 'page_id' => $row->cur_id,
- 'page_namespace' => $row->cur_namespace,
- 'page_title' => $this->conv( $row->cur_title ),
- 'page_restrictions' => $row->cur_restrictions,
- 'page_counter' => $row->cur_counter,
- 'page_is_redirect' => $row->cur_is_redirect,
- 'page_is_new' => $row->cur_is_new,
- 'page_random' => $row->cur_random,
- 'page_touched' => $this->dbw->timestamp(),
- 'page_latest' => $row->rev_id,
- 'page_len' => $row->len );
- #$this->addChunk( $add, $row->cur_id );
- $this->addChunk( $add );
- }
- $this->lastChunk( $add );
- $this->dbr->freeResult( $result );
-
- $this->log( "...done with cur/old -> page/revision." );
- }
-
- function upgradeLinks() {
- $fname = 'FiveUpgrade::upgradeLinks';
- $chunksize = 200;
- list ($links, $brokenlinks, $pagelinks, $cur) = $this->dbw->tableNamesN( 'links', 'brokenlinks', 'pagelinks', 'cur' );
-
- $this->log( 'Checking for interwiki table change in case of bogus items...' );
- if( $this->dbw->fieldExists( 'interwiki', 'iw_trans' ) ) {
- $this->log( 'interwiki has iw_trans.' );
- } else {
- global $IP;
- $this->log( 'adding iw_trans...' );
- $this->dbw->sourceFile( $IP . '/maintenance/archives/patch-interwiki-trans.sql' );
- $this->log( 'added iw_trans.' );
- }
-
- $this->log( 'Creating pagelinks table...' );
- $this->dbw->query( "
-CREATE TABLE $pagelinks (
- -- Key to the page_id of the page containing the link.
- pl_from int(8) unsigned NOT NULL default '0',
-
- -- Key to page_namespace/page_title of the target page.
- -- The target page may or may not exist, and due to renames
- -- and deletions may refer to different page records as time
- -- goes by.
- pl_namespace int NOT NULL default '0',
- pl_title varchar(255) binary NOT NULL default '',
-
- UNIQUE KEY pl_from(pl_from,pl_namespace,pl_title),
- KEY (pl_namespace,pl_title)
-
-) TYPE=InnoDB" );
-
- $this->log( 'Importing live links -> pagelinks' );
- $nlinks = $this->dbw->selectField( 'links', 'count(*)', '', $fname );
- if( $nlinks ) {
- $this->setChunkScale( $chunksize, $nlinks, 'pagelinks', $fname );
- $result = $this->dbr->query( "
- SELECT l_from,cur_namespace,cur_title
- FROM $links, $cur
- WHERE l_to=cur_id", $fname );
- $add = array();
- while( $row = $this->dbr->fetchObject( $result ) ) {
- $add[] = array(
- 'pl_from' => $row->l_from,
- 'pl_namespace' => $row->cur_namespace,
- 'pl_title' => $this->conv( $row->cur_title ) );
- $this->addChunk( $add );
- }
- $this->lastChunk( $add );
- } else {
- $this->log( 'no links!' );
- }
-
- $this->log( 'Importing brokenlinks -> pagelinks' );
- $nbrokenlinks = $this->dbw->selectField( 'brokenlinks', 'count(*)', '', $fname );
- if( $nbrokenlinks ) {
- $this->setChunkScale( $chunksize, $nbrokenlinks, 'pagelinks', $fname );
- $result = $this->dbr->query(
- "SELECT bl_from, bl_to FROM $brokenlinks",
- $fname );
- $add = array();
- while( $row = $this->dbr->fetchObject( $result ) ) {
- $pagename = $this->conv( $row->bl_to );
- $title = Title::newFromText( $pagename );
- if( is_null( $title ) ) {
- $this->log( "** invalid brokenlink: $row->bl_from -> '$pagename' (converted from '$row->bl_to')" );
- } else {
- $add[] = array(
- 'pl_from' => $row->bl_from,
- 'pl_namespace' => $title->getNamespace(),
- 'pl_title' => $title->getDBkey() );
- $this->addChunk( $add );
- }
- }
- $this->lastChunk( $add );
- } else {
- $this->log( 'no brokenlinks!' );
- }
-
- $this->log( 'Done with links.' );
- }
-
- function upgradeUser() {
- // Apply unique index, if necessary:
- $duper = new UserDupes( $this->dbw );
- if( $duper->hasUniqueIndex() ) {
- $this->log( "Already have unique user_name index." );
- } else {
- $this->log( "Clearing user duplicates..." );
- if( !$duper->clearDupes() ) {
- $this->log( "WARNING: Duplicate user accounts, may explode!" );
- }
- }
-
- $tabledef = <<<END
-CREATE TABLE $1 (
- user_id int(5) unsigned NOT NULL auto_increment,
- user_name varchar(255) binary NOT NULL default '',
- user_real_name varchar(255) binary NOT NULL default '',
- user_password tinyblob NOT NULL default '',
- user_newpassword tinyblob NOT NULL default '',
- user_email tinytext NOT NULL default '',
- user_options blob NOT NULL default '',
- user_touched char(14) binary NOT NULL default '',
- user_token char(32) binary NOT NULL default '',
- user_email_authenticated CHAR(14) BINARY,
- user_email_token CHAR(32) BINARY,
- user_email_token_expires CHAR(14) BINARY,
-
- PRIMARY KEY user_id (user_id),
- UNIQUE INDEX user_name (user_name),
- INDEX (user_email_token)
-
-) TYPE=InnoDB
-END;
- $fields = array(
- 'user_id' => MW_UPGRADE_COPY,
- 'user_name' => MW_UPGRADE_ENCODE,
- 'user_real_name' => MW_UPGRADE_ENCODE,
- 'user_password' => MW_UPGRADE_COPY,
- 'user_newpassword' => MW_UPGRADE_COPY,
- 'user_email' => MW_UPGRADE_ENCODE,
- 'user_options' => MW_UPGRADE_ENCODE,
- 'user_touched' => MW_UPGRADE_CALLBACK,
- 'user_token' => MW_UPGRADE_COPY,
- 'user_email_authenticated' => MW_UPGRADE_CALLBACK,
- 'user_email_token' => MW_UPGRADE_NULL,
- 'user_email_token_expires' => MW_UPGRADE_NULL );
- $this->copyTable( 'user', $tabledef, $fields,
- array( &$this, 'userCallback' ) );
- }
-
- function userCallback( $row, $copy ) {
- $now = $this->dbw->timestamp();
- $copy['user_touched'] = $now;
- $copy['user_email_authenticated'] = $this->emailAuth ? $now : null;
- return $copy;
- }
-
- function upgradeImage() {
- $tabledef = <<<END
-CREATE TABLE $1 (
- img_name varchar(255) binary NOT NULL default '',
- img_size int(8) unsigned NOT NULL default '0',
- img_width int(5) NOT NULL default '0',
- img_height int(5) NOT NULL default '0',
- img_metadata mediumblob NOT NULL,
- img_bits int(3) NOT NULL default '0',
- img_media_type ENUM("UNKNOWN", "BITMAP", "DRAWING", "AUDIO", "VIDEO", "MULTIMEDIA", "OFFICE", "TEXT", "EXECUTABLE", "ARCHIVE") default NULL,
- img_major_mime ENUM("unknown", "application", "audio", "image", "text", "video", "message", "model", "multipart") NOT NULL default "unknown",
- img_minor_mime varchar(32) NOT NULL default "unknown",
- img_description tinyblob NOT NULL default '',
- img_user int(5) unsigned NOT NULL default '0',
- img_user_text varchar(255) binary NOT NULL default '',
- img_timestamp char(14) binary NOT NULL default '',
-
- PRIMARY KEY img_name (img_name),
- INDEX img_size (img_size),
- INDEX img_timestamp (img_timestamp)
-) TYPE=InnoDB
-END;
- $fields = array(
- 'img_name' => MW_UPGRADE_ENCODE,
- 'img_size' => MW_UPGRADE_COPY,
- 'img_width' => MW_UPGRADE_CALLBACK,
- 'img_height' => MW_UPGRADE_CALLBACK,
- 'img_metadata' => MW_UPGRADE_CALLBACK,
- 'img_bits' => MW_UPGRADE_CALLBACK,
- 'img_media_type' => MW_UPGRADE_CALLBACK,
- 'img_major_mime' => MW_UPGRADE_CALLBACK,
- 'img_minor_mime' => MW_UPGRADE_CALLBACK,
- 'img_description' => MW_UPGRADE_ENCODE,
- 'img_user' => MW_UPGRADE_COPY,
- 'img_user_text' => MW_UPGRADE_ENCODE,
- 'img_timestamp' => MW_UPGRADE_COPY );
- $this->copyTable( 'image', $tabledef, $fields,
- array( &$this, 'imageCallback' ) );
- }
-
- function imageCallback( $row, $copy ) {
- global $options;
- if( !isset( $options['noimage'] ) ) {
- // Fill in the new image info fields
- $info = $this->imageInfo( $row->img_name );
-
- $copy['img_width' ] = $info['width'];
- $copy['img_height' ] = $info['height'];
- $copy['img_metadata' ] = ""; // loaded on-demand
- $copy['img_bits' ] = $info['bits'];
- $copy['img_media_type'] = $info['media'];
- $copy['img_major_mime'] = $info['major'];
- $copy['img_minor_mime'] = $info['minor'];
- }
-
- // If doing UTF8 conversion the file must be renamed
- $this->renameFile( $row->img_name, 'wfImageDir' );
-
- return $copy;
- }
-
- function imageInfo( $filename ) {
- $info = array(
- 'width' => 0,
- 'height' => 0,
- 'bits' => 0,
- 'media' => '',
- 'major' => '',
- 'minor' => '' );
-
- $magic = MimeMagic::singleton();
- $mime = $magic->guessMimeType( $filename, true );
- list( $info['major'], $info['minor'] ) = explode( '/', $mime );
-
- $info['media'] = $magic->getMediaType( $filename, $mime );
-
- $image = UnregisteredLocalFile::newFromPath( $filename, $mime );
-
- $info['width'] = $image->getWidth();
- $info['height'] = $image->getHeight();
-
- $gis = $image->getImageSize();
- if ( isset( $gis['bits'] ) ) {
- $info['bits'] = $gis['bits'];
- }
-
- return $info;
- }
-
-
- /**
- * Truncate a table.
- * @param string $table The table name to be truncated
- */
- function clearTable( $table ) {
- print "Clearing $table...\n";
- $tableName = $this->db->tableName( $table );
- $this->db->query( "TRUNCATE $tableName" );
- }
-
- /**
- * Rename a given image or archived image file to the converted filename,
- * leaving a symlink for URL compatibility.
- *
- * @param string $oldname pre-conversion filename
- * @param string $basename pre-conversion base filename for dir hashing, if an archive
- * @access private
- */
- function renameFile( $oldname, $subdirCallback='wfImageDir', $basename=null ) {
- $newname = $this->conv( $oldname );
- if( $newname == $oldname ) {
- // No need to rename; another field triggered this row.
- return false;
- }
-
- if( is_null( $basename ) ) $basename = $oldname;
- $ubasename = $this->conv( $basename );
- $oldpath = call_user_func( $subdirCallback, $basename ) . '/' . $oldname;
- $newpath = call_user_func( $subdirCallback, $ubasename ) . '/' . $newname;
-
- $this->log( "$oldpath -> $newpath" );
- if( rename( $oldpath, $newpath ) ) {
- $relpath = wfRelativePath( $newpath, dirname( $oldpath ) );
- if( !symlink( $relpath, $oldpath ) ) {
- $this->log( "... symlink failed!" );
- }
- return $newname;
- } else {
- $this->log( "... rename failed!" );
- return false;
- }
- }
-
- function upgradeOldImage() {
- $tabledef = <<<END
-CREATE TABLE $1 (
- -- Base filename: key to image.img_name
- oi_name varchar(255) binary NOT NULL default '',
-
- -- Filename of the archived file.
- -- This is generally a timestamp and '!' prepended to the base name.
- oi_archive_name varchar(255) binary NOT NULL default '',
-
- -- Other fields as in image...
- oi_size int(8) unsigned NOT NULL default 0,
- oi_width int(5) NOT NULL default 0,
- oi_height int(5) NOT NULL default 0,
- oi_bits int(3) NOT NULL default 0,
- oi_description tinyblob NOT NULL default '',
- oi_user int(5) unsigned NOT NULL default '0',
- oi_user_text varchar(255) binary NOT NULL default '',
- oi_timestamp char(14) binary NOT NULL default '',
-
- INDEX oi_name (oi_name(10))
-
-) TYPE=InnoDB;
-END;
- $fields = array(
- 'oi_name' => MW_UPGRADE_ENCODE,
- 'oi_archive_name' => MW_UPGRADE_ENCODE,
- 'oi_size' => MW_UPGRADE_COPY,
- 'oi_width' => MW_UPGRADE_CALLBACK,
- 'oi_height' => MW_UPGRADE_CALLBACK,
- 'oi_bits' => MW_UPGRADE_CALLBACK,
- 'oi_description' => MW_UPGRADE_ENCODE,
- 'oi_user' => MW_UPGRADE_COPY,
- 'oi_user_text' => MW_UPGRADE_ENCODE,
- 'oi_timestamp' => MW_UPGRADE_COPY );
- $this->copyTable( 'oldimage', $tabledef, $fields,
- array( &$this, 'oldimageCallback' ) );
- }
-
- function oldimageCallback( $row, $copy ) {
- global $options;
- if( !isset( $options['noimage'] ) ) {
- // Fill in the new image info fields
- $info = $this->imageInfo( $row->oi_archive_name, 'wfImageArchiveDir', $row->oi_name );
- $copy['oi_width' ] = $info['width' ];
- $copy['oi_height'] = $info['height'];
- $copy['oi_bits' ] = $info['bits' ];
- }
-
- // If doing UTF8 conversion the file must be renamed
- $this->renameFile( $row->oi_archive_name, 'wfImageArchiveDir', $row->oi_name );
-
- return $copy;
- }
-
-
- function upgradeWatchlist() {
- $fname = 'FiveUpgrade::upgradeWatchlist';
- $chunksize = 100;
-
- list ($watchlist, $watchlist_temp) = $this->dbw->tableNamesN( 'watchlist', 'watchlist_temp' );
-
- $this->log( 'Migrating watchlist table to watchlist_temp...' );
- $this->dbw->query(
-"CREATE TABLE $watchlist_temp (
- -- Key to user_id
- wl_user int(5) unsigned NOT NULL,
-
- -- Key to page_namespace/page_title
- -- Note that users may watch patches which do not exist yet,
- -- or existed in the past but have been deleted.
- wl_namespace int NOT NULL default '0',
- wl_title varchar(255) binary NOT NULL default '',
-
- -- Timestamp when user was last sent a notification e-mail;
- -- cleared when the user visits the page.
- -- FIXME: add proper null support etc
- wl_notificationtimestamp varchar(14) binary NOT NULL default '0',
-
- UNIQUE KEY (wl_user, wl_namespace, wl_title),
- KEY namespace_title (wl_namespace,wl_title)
-
-) TYPE=InnoDB;", $fname );
-
- // Fix encoding for Latin-1 upgrades, add some fields,
- // and double article to article+talk pairs
- $numwatched = $this->dbw->selectField( 'watchlist', 'count(*)', '', $fname );
-
- $this->setChunkScale( $chunksize, $numwatched * 2, 'watchlist_temp', $fname );
- $result = $this->dbr->select( 'watchlist',
- array(
- 'wl_user',
- 'wl_namespace',
- 'wl_title' ),
- '',
- $fname );
-
- $add = array();
- while( $row = $this->dbr->fetchObject( $result ) ) {
- $add[] = array(
- 'wl_user' => $row->wl_user,
- 'wl_namespace' => MWNamespace::getSubject( $row->wl_namespace ),
- 'wl_title' => $this->conv( $row->wl_title ),
- 'wl_notificationtimestamp' => '0' );
- $this->addChunk( $add );
-
- $add[] = array(
- 'wl_user' => $row->wl_user,
- 'wl_namespace' => MWNamespace::getTalk( $row->wl_namespace ),
- 'wl_title' => $this->conv( $row->wl_title ),
- 'wl_notificationtimestamp' => '0' );
- $this->addChunk( $add );
- }
- $this->lastChunk( $add );
- $this->dbr->freeResult( $result );
-
- $this->log( 'Done converting watchlist.' );
- $this->cleanupSwaps[] = 'watchlist';
- }
-
- function upgradeLogging() {
- $tabledef = <<<ENDS
-CREATE TABLE $1 (
- -- Symbolic keys for the general log type and the action type
- -- within the log. The output format will be controlled by the
- -- action field, but only the type controls categorization.
- log_type char(10) NOT NULL default '',
- log_action char(10) NOT NULL default '',
-
- -- Timestamp. Duh.
- log_timestamp char(14) NOT NULL default '19700101000000',
-
- -- The user who performed this action; key to user_id
- log_user int unsigned NOT NULL default 0,
-
- -- Key to the page affected. Where a user is the target,
- -- this will point to the user page.
- log_namespace int NOT NULL default 0,
- log_title varchar(255) binary NOT NULL default '',
-
- -- Freeform text. Interpreted as edit history comments.
- log_comment varchar(255) NOT NULL default '',
-
- -- LF separated list of miscellaneous parameters
- log_params blob NOT NULL default '',
-
- KEY type_time (log_type, log_timestamp),
- KEY user_time (log_user, log_timestamp),
- KEY page_time (log_namespace, log_title, log_timestamp)
-
-) TYPE=InnoDB
-ENDS;
- $fields = array(
- 'log_type' => MW_UPGRADE_COPY,
- 'log_action' => MW_UPGRADE_COPY,
- 'log_timestamp' => MW_UPGRADE_COPY,
- 'log_user' => MW_UPGRADE_COPY,
- 'log_namespace' => MW_UPGRADE_COPY,
- 'log_title' => MW_UPGRADE_ENCODE,
- 'log_comment' => MW_UPGRADE_ENCODE,
- 'log_params' => MW_UPGRADE_ENCODE );
- $this->copyTable( 'logging', $tabledef, $fields );
- }
-
- function upgradeArchive() {
- $tabledef = <<<ENDS
-CREATE TABLE $1 (
- ar_namespace int NOT NULL default '0',
- ar_title varchar(255) binary NOT NULL default '',
- ar_text mediumblob NOT NULL default '',
-
- ar_comment tinyblob NOT NULL default '',
- ar_user int(5) unsigned NOT NULL default '0',
- ar_user_text varchar(255) binary NOT NULL,
- ar_timestamp char(14) binary NOT NULL default '',
- ar_minor_edit tinyint(1) NOT NULL default '0',
-
- ar_flags tinyblob NOT NULL default '',
-
- ar_rev_id int(8) unsigned,
- ar_text_id int(8) unsigned,
-
- KEY name_title_timestamp (ar_namespace,ar_title,ar_timestamp)
-
-) TYPE=InnoDB
-ENDS;
- $fields = array(
- 'ar_namespace' => MW_UPGRADE_COPY,
- 'ar_title' => MW_UPGRADE_ENCODE,
- 'ar_text' => MW_UPGRADE_COPY,
- 'ar_comment' => MW_UPGRADE_ENCODE,
- 'ar_user' => MW_UPGRADE_COPY,
- 'ar_user_text' => MW_UPGRADE_ENCODE,
- 'ar_timestamp' => MW_UPGRADE_COPY,
- 'ar_minor_edit' => MW_UPGRADE_COPY,
- 'ar_flags' => MW_UPGRADE_COPY,
- 'ar_rev_id' => MW_UPGRADE_NULL,
- 'ar_text_id' => MW_UPGRADE_NULL );
- $this->copyTable( 'archive', $tabledef, $fields );
- }
-
- function upgradeImagelinks() {
- global $wgUseLatin1;
- if( $wgUseLatin1 ) {
- $tabledef = <<<ENDS
-CREATE TABLE $1 (
- -- Key to page_id of the page containing the image / media link.
- il_from int(8) unsigned NOT NULL default '0',
-
- -- Filename of target image.
- -- This is also the page_title of the file's description page;
- -- all such pages are in namespace 6 (NS_FILE).
- il_to varchar(255) binary NOT NULL default '',
-
- UNIQUE KEY il_from(il_from,il_to),
- KEY (il_to)
-
-) TYPE=InnoDB
-ENDS;
- $fields = array(
- 'il_from' => MW_UPGRADE_COPY,
- 'il_to' => MW_UPGRADE_ENCODE );
- $this->copyTable( 'imagelinks', $tabledef, $fields );
- }
- }
-
- function upgradeCategorylinks() {
- global $wgUseLatin1;
- if( $wgUseLatin1 ) {
- $tabledef = <<<ENDS
-CREATE TABLE $1 (
- cl_from int(8) unsigned NOT NULL default '0',
- cl_to varchar(255) binary NOT NULL default '',
- cl_sortkey varchar(86) binary NOT NULL default '',
- cl_timestamp timestamp NOT NULL,
-
- UNIQUE KEY cl_from(cl_from,cl_to),
- KEY cl_sortkey(cl_to,cl_sortkey),
- KEY cl_timestamp(cl_to,cl_timestamp)
-) TYPE=InnoDB
-ENDS;
- $fields = array(
- 'cl_from' => MW_UPGRADE_COPY,
- 'cl_to' => MW_UPGRADE_ENCODE,
- 'cl_sortkey' => MW_UPGRADE_ENCODE,
- 'cl_timestamp' => MW_UPGRADE_COPY );
- $this->copyTable( 'categorylinks', $tabledef, $fields );
- }
- }
-
- function upgradeIpblocks() {
- global $wgUseLatin1;
- if( $wgUseLatin1 ) {
- $tabledef = <<<ENDS
-CREATE TABLE $1 (
- ipb_id int(8) NOT NULL auto_increment,
- ipb_address varchar(40) binary NOT NULL default '',
- ipb_user int(8) unsigned NOT NULL default '0',
- ipb_by int(8) unsigned NOT NULL default '0',
- ipb_reason tinyblob NOT NULL default '',
- ipb_timestamp char(14) binary NOT NULL default '',
- ipb_auto tinyint(1) NOT NULL default '0',
- ipb_expiry char(14) binary NOT NULL default '',
-
- PRIMARY KEY ipb_id (ipb_id),
- INDEX ipb_address (ipb_address),
- INDEX ipb_user (ipb_user)
-
-) TYPE=InnoDB
-ENDS;
- $fields = array(
- 'ipb_id' => MW_UPGRADE_COPY,
- 'ipb_address' => MW_UPGRADE_COPY,
- 'ipb_user' => MW_UPGRADE_COPY,
- 'ipb_by' => MW_UPGRADE_COPY,
- 'ipb_reason' => MW_UPGRADE_ENCODE,
- 'ipb_timestamp' => MW_UPGRADE_COPY,
- 'ipb_auto' => MW_UPGRADE_COPY,
- 'ipb_expiry' => MW_UPGRADE_COPY );
- $this->copyTable( 'ipblocks', $tabledef, $fields );
- }
- }
-
- function upgradeRecentchanges() {
- // There's a format change in the namespace field
- $tabledef = <<<ENDS
-CREATE TABLE $1 (
- rc_id int(8) NOT NULL auto_increment,
- rc_timestamp varchar(14) binary NOT NULL default '',
- rc_cur_time varchar(14) binary NOT NULL default '',
-
- rc_user int(10) unsigned NOT NULL default '0',
- rc_user_text varchar(255) binary NOT NULL default '',
-
- rc_namespace int NOT NULL default '0',
- rc_title varchar(255) binary NOT NULL default '',
-
- rc_comment varchar(255) binary NOT NULL default '',
- rc_minor tinyint(3) unsigned NOT NULL default '0',
-
- rc_bot tinyint(3) unsigned NOT NULL default '0',
- rc_new tinyint(3) unsigned NOT NULL default '0',
-
- rc_cur_id int(10) unsigned NOT NULL default '0',
- rc_this_oldid int(10) unsigned NOT NULL default '0',
- rc_last_oldid int(10) unsigned NOT NULL default '0',
-
- rc_type tinyint(3) unsigned NOT NULL default '0',
- rc_moved_to_ns tinyint(3) unsigned NOT NULL default '0',
- rc_moved_to_title varchar(255) binary NOT NULL default '',
-
- rc_patrolled tinyint(3) unsigned NOT NULL default '0',
-
- rc_ip char(15) NOT NULL default '',
-
- PRIMARY KEY rc_id (rc_id),
- INDEX rc_timestamp (rc_timestamp),
- INDEX rc_namespace_title (rc_namespace, rc_title),
- INDEX rc_cur_id (rc_cur_id),
- INDEX new_name_timestamp(rc_new,rc_namespace,rc_timestamp),
- INDEX rc_ip (rc_ip)
-
-) TYPE=InnoDB
-ENDS;
- $fields = array(
- 'rc_id' => MW_UPGRADE_COPY,
- 'rc_timestamp' => MW_UPGRADE_COPY,
- 'rc_cur_time' => MW_UPGRADE_COPY,
- 'rc_user' => MW_UPGRADE_COPY,
- 'rc_user_text' => MW_UPGRADE_ENCODE,
- 'rc_namespace' => MW_UPGRADE_COPY,
- 'rc_title' => MW_UPGRADE_ENCODE,
- 'rc_comment' => MW_UPGRADE_ENCODE,
- 'rc_minor' => MW_UPGRADE_COPY,
- 'rc_bot' => MW_UPGRADE_COPY,
- 'rc_new' => MW_UPGRADE_COPY,
- 'rc_cur_id' => MW_UPGRADE_COPY,
- 'rc_this_oldid' => MW_UPGRADE_COPY,
- 'rc_last_oldid' => MW_UPGRADE_COPY,
- 'rc_type' => MW_UPGRADE_COPY,
- 'rc_moved_to_ns' => MW_UPGRADE_COPY,
- 'rc_moved_to_title' => MW_UPGRADE_ENCODE,
- 'rc_patrolled' => MW_UPGRADE_COPY,
- 'rc_ip' => MW_UPGRADE_COPY );
- $this->copyTable( 'recentchanges', $tabledef, $fields );
- }
-
- function upgradeQuerycache() {
- // There's a format change in the namespace field
- $tabledef = <<<ENDS
-CREATE TABLE $1 (
- -- A key name, generally the base name of of the special page.
- qc_type char(32) NOT NULL,
-
- -- Some sort of stored value. Sizes, counts...
- qc_value int(5) unsigned NOT NULL default '0',
-
- -- Target namespace+title
- qc_namespace int NOT NULL default '0',
- qc_title char(255) binary NOT NULL default '',
-
- KEY (qc_type,qc_value)
-
-) TYPE=InnoDB
-ENDS;
- $fields = array(
- 'qc_type' => MW_UPGRADE_COPY,
- 'qc_value' => MW_UPGRADE_COPY,
- 'qc_namespace' => MW_UPGRADE_COPY,
- 'qc_title' => MW_UPGRADE_ENCODE );
- $this->copyTable( 'querycache', $tabledef, $fields );
- }
-
- /**
- * Rename all our temporary tables into final place.
- * We've left things in place so a read-only wiki can continue running
- * on the old code during all this.
- */
- function upgradeCleanup() {
- $this->renameTable( 'old', 'text' );
-
- foreach( $this->cleanupSwaps as $table ) {
- $this->swap( $table );
- }
- }
-
- function renameTable( $from, $to ) {
- $this->log( "Renaming $from to $to..." );
-
- $fromtable = $this->dbw->tableName( $from );
- $totable = $this->dbw->tableName( $to );
- $this->dbw->query( "ALTER TABLE $fromtable RENAME TO $totable" );
- }
-
- function swap( $base ) {
- $this->renameTable( $base, "{$base}_old" );
- $this->renameTable( "{$base}_temp", $base );
- }
-
-}
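The removed FiveUpgrade helpers above all follow the same shape: emit a 1.17-era CREATE TABLE statement, describe every column as MW_UPGRADE_COPY (keep as-is), MW_UPGRADE_ENCODE (re-encode Latin-1 text to UTF-8) or MW_UPGRADE_NULL (new column, no source value), and hand both to copyTable(). A minimal sketch of the per-row conversion that field map implies, with plain strings standing in for the MW_UPGRADE_* constants and iconv() standing in for the real encoder (copyTable() itself is not shown in this hunk):

    // Hypothetical sketch only; not the FiveUpgrade.inc implementation.
    function convertRow( array $row, array $fields ) {
        $out = array();
        foreach ( $fields as $column => $mode ) {
            if ( $mode === 'null' ) {
                $out[$column] = null; // new column with no source value
            } elseif ( $mode === 'encode' ) {
                $out[$column] = iconv( 'ISO-8859-1', 'UTF-8', $row[$column] );
            } else { // 'copy'
                $out[$column] = $row[$column];
            }
        }
        return $out;
    }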
diff --git a/maintenance/Maintenance.php b/maintenance/Maintenance.php
index ee35df7c..d7297e98 100644
--- a/maintenance/Maintenance.php
+++ b/maintenance/Maintenance.php
@@ -6,16 +6,27 @@
*/
// Define this so scripts can easily find doMaintenance.php
-define( 'DO_MAINTENANCE', dirname( __FILE__ ) . '/doMaintenance.php' );
+define( 'RUN_MAINTENANCE_IF_MAIN', dirname( __FILE__ ) . '/doMaintenance.php' );
+define( 'DO_MAINTENANCE', RUN_MAINTENANCE_IF_MAIN ); // original name, harmless
+
$maintClass = false;
// Make sure we're on PHP5 or better
-if( version_compare( PHP_VERSION, '5.0.0' ) < 0 ) {
- echo( "Sorry! This version of MediaWiki requires PHP 5; you are running " .
+if ( version_compare( PHP_VERSION, '5.2.3' ) < 0 ) {
+ die ( "Sorry! This version of MediaWiki requires PHP 5.2.3; you are running " .
PHP_VERSION . ".\n\n" .
- "If you are sure you already have PHP 5 installed, it may be installed\n" .
- "in a different path from PHP 4. Check with your system administrator.\n" );
- die();
+ "If you are sure you already have PHP 5.2.3 or higher installed, it may be\n" .
+ "installed in a different path from PHP " . PHP_VERSION . ". Check with your system\n" .
+ "administrator.\n" );
+}
+
+// Wrapper for posix_isatty()
+if ( !function_exists( 'posix_isatty' ) ) {
+	# We default to considering stdin a tty (for nice readline methods)
+	# but treat stdout as not a tty to avoid color codes
+ function posix_isatty( $fd ) {
+ return !$fd;
+ }
}
/**
@@ -93,15 +104,38 @@ abstract class Maintenance {
protected static $mCoreScripts = null;
/**
- * Default constructor. Children should call this if implementing
+ * Default constructor. Children should call this *first* if implementing
* their own constructors
*/
public function __construct() {
+ // Setup $IP, using MW_INSTALL_PATH if it exists
+ global $IP;
+ $IP = strval( getenv( 'MW_INSTALL_PATH' ) ) !== ''
+ ? getenv( 'MW_INSTALL_PATH' )
+ : realpath( dirname( __FILE__ ) . '/..' );
+
$this->addDefaultParams();
register_shutdown_function( array( $this, 'outputChanneled' ), false );
}
/**
+ * Should we execute the maintenance script, or just allow it to be included
+ * as a standalone class? It checks that the call stack only includes this
+	 * function and a require (meaning it was called from the file scope)
+ *
+ * @return Boolean
+ */
+ public static function shouldExecute() {
+ $bt = debug_backtrace();
+ if( count( $bt ) !== 2 ) {
+ return false;
+ }
+ return $bt[1]['function'] == 'require_once' &&
+ $bt[0]['class'] == 'Maintenance' &&
+ $bt[0]['function'] == 'shouldExecute';
+ }
+
+ /**
* Do the actual work. All child classes will need to implement this
*/
abstract public function execute();
@@ -110,10 +144,10 @@ abstract class Maintenance {
* Add a parameter to the script. Will be displayed on --help
* with the associated description
*
- * @param $name String The name of the param (help, version, etc)
- * @param $description String The description of the param to show on --help
- * @param $required boolean Is the param required?
- * @param $withArg Boolean Is an argument required with this option?
+ * @param $name String: the name of the param (help, version, etc)
+ * @param $description String: the description of the param to show on --help
+ * @param $required Boolean: is the param required?
+ * @param $withArg Boolean: is an argument required with this option?
*/
protected function addOption( $name, $description, $required = false, $withArg = false ) {
$this->mParams[$name] = array( 'desc' => $description, 'require' => $required, 'withArg' => $withArg );
@@ -121,8 +155,8 @@ abstract class Maintenance {
/**
* Checks to see if a particular param exists.
- * @param $name String The name of the param
- * @return boolean
+ * @param $name String: the name of the param
+ * @return Boolean
*/
protected function hasOption( $name ) {
return isset( $this->mOptions[$name] );
@@ -130,12 +164,12 @@ abstract class Maintenance {
/**
* Get an option, or return the default
- * @param $name String The name of the param
- * @param $default mixed Anything you want, default null
- * @return mixed
+ * @param $name String: the name of the param
+ * @param $default Mixed: anything you want, default null
+ * @return Mixed
*/
protected function getOption( $name, $default = null ) {
- if( $this->hasOption( $name ) ) {
+ if ( $this->hasOption( $name ) ) {
return $this->mOptions[$name];
} else {
// Set it so we don't have to provide the default again
@@ -146,22 +180,38 @@ abstract class Maintenance {
/**
* Add some args that are needed
- * @param $arg String Name of the arg, like 'start'
- * @param $description String Short description of the arg
- * @param $required Boolean Is this required?
+ * @param $arg String: name of the arg, like 'start'
+ * @param $description String: short description of the arg
+ * @param $required Boolean: is this required?
*/
protected function addArg( $arg, $description, $required = true ) {
- $this->mArgList[] = array(
+ $this->mArgList[] = array(
'name' => $arg,
- 'desc' => $description,
- 'require' => $required
+ 'desc' => $description,
+ 'require' => $required
);
}
/**
+ * Remove an option. Useful for removing options that won't be used in your script.
+ * @param $name String: the option to remove.
+ */
+ protected function deleteOption( $name ) {
+ unset( $this->mParams[$name] );
+ }
+
+ /**
+ * Set the description text.
+ * @param $text String: the text of the description
+ */
+ protected function addDescription( $text ) {
+ $this->mDescription = $text;
+ }
+
+ /**
* Does a given argument exist?
- * @param $argId int The integer value (from zero) for the arg
- * @return boolean
+ * @param $argId Integer: the integer value (from zero) for the arg
+ * @return Boolean
*/
protected function hasArg( $argId = 0 ) {
return isset( $this->mArgs[$argId] );
@@ -169,8 +219,8 @@ abstract class Maintenance {
/**
* Get an argument.
- * @param $argId int The integer value (from zero) for the arg
- * @param $default mixed The default if it doesn't exist
+ * @param $argId Integer: the integer value (from zero) for the arg
+ * @param $default Mixed: the default if it doesn't exist
* @return mixed
*/
protected function getArg( $argId = 0, $default = null ) {
@@ -179,7 +229,7 @@ abstract class Maintenance {
/**
* Set the batch size.
- * @param $s int The number of operations to do in a batch
+ * @param $s Integer: the number of operations to do in a batch
*/
protected function setBatchSize( $s = 0 ) {
$this->mBatchSize = $s;
@@ -195,33 +245,42 @@ abstract class Maintenance {
/**
* Return input from stdin.
- * @param $length int The number of bytes to read. If null,
+ * @param $len Integer: the number of bytes to read. If null,
* just return the handle. Maintenance::STDIN_ALL returns
* the full length
- * @return mixed
+ * @return Mixed
*/
protected function getStdin( $len = null ) {
- if ( $len == Maintenance::STDIN_ALL )
+ if ( $len == Maintenance::STDIN_ALL ) {
return file_get_contents( 'php://stdin' );
+ }
$f = fopen( 'php://stdin', 'rt' );
- if( !$len )
+ if ( !$len ) {
return $f;
+ }
$input = fgets( $f, $len );
fclose( $f );
return rtrim( $input );
}
+ public function isQuiet() {
+ return $this->mQuiet;
+ }
+
/**
* Throw some output to the user. Scripts can call this with no fears,
* as we handle all --quiet stuff here
- * @param $out String The text to show to the user
- * @param $channel Mixed Unique identifier for the channel. See function outputChanneled.
+ * @param $out String: the text to show to the user
+ * @param $channel Mixed: unique identifier for the channel. See
+ * function outputChanneled.
*/
protected function output( $out, $channel = null ) {
- if( $this->mQuiet ) {
+ if ( $this->mQuiet ) {
return;
}
if ( $channel === null ) {
+ $this->cleanupChanneled();
+
$f = fopen( 'php://stdout', 'w' );
fwrite( $f, $out );
fclose( $f );
@@ -235,41 +294,54 @@ abstract class Maintenance {
/**
* Throw an error to the user. Doesn't respect --quiet, so don't use
* this for non-error output
- * @param $err String The error to display
- * @param $die boolean If true, go ahead and die out.
+ * @param $err String: the error to display
+ * @param $die Boolean: If true, go ahead and die out.
*/
protected function error( $err, $die = false ) {
$this->outputChanneled( false );
if ( php_sapi_name() == 'cli' ) {
fwrite( STDERR, $err . "\n" );
} else {
- $f = fopen( 'php://stderr', 'w' );
+ $f = fopen( 'php://stderr', 'w' );
fwrite( $f, $err . "\n" );
fclose( $f );
}
- if( $die ) die();
+ if ( $die ) {
+ die();
+ }
}
private $atLineStart = true;
private $lastChannel = null;
-
+
+ /**
+ * Clean up channeled output. Output a newline if necessary.
+ */
+ public function cleanupChanneled() {
+ if ( !$this->atLineStart ) {
+ $handle = fopen( 'php://stdout', 'w' );
+ fwrite( $handle, "\n" );
+ fclose( $handle );
+ $this->atLineStart = true;
+ }
+ }
+
/**
* Message outputter with channeled message support. Messages on the
* same channel are concatenated, but any intervening messages in another
* channel start a new line.
- * @param $msg String The message without trailing newline
- * @param $channel Channel identifier or null for no channel. Channel comparison uses ===.
+ * @param $msg String: the message without trailing newline
+ * @param $channel Channel identifier or null for no
+ * channel. Channel comparison uses ===.
*/
public function outputChanneled( $msg, $channel = null ) {
- $handle = fopen( 'php://stdout', 'w' );
-
if ( $msg === false ) {
- // For cleanup
- if ( !$this->atLineStart ) fwrite( $handle, "\n" );
- fclose( $handle );
+ $this->cleanupChanneled();
return;
}
+ $handle = fopen( 'php://stdout', 'w' );
+
// End the current line if necessary
if ( !$this->atLineStart && $channel !== $this->lastChannel ) {
fwrite( $handle, "\n" );
@@ -292,12 +364,12 @@ abstract class Maintenance {
/**
* Does the script need different DB access? By default, we give Maintenance
* scripts normal rights to the DB. Sometimes, a script needs admin rights
- * access for a reason and sometimes they want no access. Subclasses should
+ * access for a reason and sometimes they want no access. Subclasses should
* override and return one of the following values, as needed:
* Maintenance::DB_NONE - For no DB access at all
* Maintenance::DB_STD - For normal DB access, default
* Maintenance::DB_ADMIN - For admin DB access
- * @return int
+ * @return Integer
*/
public function getDbType() {
return Maintenance::DB_STD;
@@ -307,44 +379,41 @@ abstract class Maintenance {
* Add the default parameters to the scripts
*/
protected function addDefaultParams() {
- $this->addOption( 'help', "Display this help message" );
- $this->addOption( 'quiet', "Whether to supress non-error output" );
- $this->addOption( 'conf', "Location of LocalSettings.php, if not default", false, true );
- $this->addOption( 'wiki', "For specifying the wiki ID", false, true );
- $this->addOption( 'globals', "Output globals at the end of processing for debugging" );
- $this->addOption( 'server', "The protocol and server name to use in URLs, e.g.\n" .
- "\t\thttp://en.wikipedia.org. This is sometimes necessary because\n" .
- "\t\tserver name detection may fail in command line scripts.", false, true );
+ $this->addOption( 'help', 'Display this help message' );
+		$this->addOption( 'quiet', 'Whether to suppress non-error output' );
+ $this->addOption( 'conf', 'Location of LocalSettings.php, if not default', false, true );
+ $this->addOption( 'wiki', 'For specifying the wiki ID', false, true );
+ $this->addOption( 'globals', 'Output globals at the end of processing for debugging' );
+ $this->addOption( 'memory-limit', 'Set a specific memory limit for the script, "max" for no limit or "default" to avoid changing it' );
+ $this->addOption( 'server', "The protocol and server name to use in URLs, e.g. " .
+ "http://en.wikipedia.org. This is sometimes necessary because " .
+ "server name detection may fail in command line scripts.", false, true );
// If we support a DB, show the options
- if( $this->getDbType() > 0 ) {
- $this->addOption( 'dbuser', "The DB user to use for this script", false, true );
- $this->addOption( 'dbpass', "The password to use for this script", false, true );
+ if ( $this->getDbType() > 0 ) {
+ $this->addOption( 'dbuser', 'The DB user to use for this script', false, true );
+ $this->addOption( 'dbpass', 'The password to use for this script', false, true );
}
// If we support $mBatchSize, show the option
- if( $this->mBatchSize ) {
+ if ( $this->mBatchSize ) {
$this->addOption( 'batch-size', 'Run this many operations ' .
- 'per batch, default: ' . $this->mBatchSize , false, true );
+ 'per batch, default: ' . $this->mBatchSize, false, true );
}
}
/**
* Run a child maintenance script. Pass all of the current arguments
* to it.
- * @param $maintClass String A name of a child maintenance class
- * @param $classFile String Full path of where the child is
+ * @param $maintClass String: a name of a child maintenance class
+ * @param $classFile String: full path of where the child is
* @return Maintenance child
*/
- protected function runChild( $maintClass, $classFile = null ) {
- // If we haven't already specified, kill setup procedures
- // for child scripts, we've already got a sane environment
- self::disableSetup();
-
+ public function runChild( $maintClass, $classFile = null ) {
// Make sure the class is loaded first
- if( !class_exists( $maintClass ) ) {
- if( $classFile ) {
+ if ( !class_exists( $maintClass ) ) {
+ if ( $classFile ) {
require_once( $classFile );
}
- if( !class_exists( $maintClass ) ) {
+ if ( !class_exists( $maintClass ) ) {
$this->error( "Cannot spawn child: $maintClass" );
}
}
@@ -355,34 +424,26 @@ abstract class Maintenance {
}
/**
- * Disable Setup.php mostly
- */
- protected static function disableSetup() {
- if( !defined( 'MW_NO_SETUP' ) )
- define( 'MW_NO_SETUP', true );
- }
-
- /**
* Do some sanity checking and basic setup
*/
public function setup() {
- global $IP, $wgCommandLineMode, $wgRequestTime;
+ global $wgCommandLineMode, $wgRequestTime;
# Abort if called from a web server
- if ( isset( $_SERVER ) && array_key_exists( 'REQUEST_METHOD', $_SERVER ) ) {
- $this->error( "This script must be run from the command line", true );
+ if ( isset( $_SERVER ) && isset( $_SERVER['REQUEST_METHOD'] ) ) {
+ $this->error( 'This script must be run from the command line', true );
}
# Make sure we can handle script parameters
- if( !ini_get( 'register_argc_argv' ) ) {
- $this->error( "Cannot get command line arguments, register_argc_argv is set to false", true );
+ if ( !ini_get( 'register_argc_argv' ) ) {
+ $this->error( 'Cannot get command line arguments, register_argc_argv is set to false', true );
}
- if( version_compare( phpversion(), '5.2.4' ) >= 0 ) {
+ if ( version_compare( phpversion(), '5.2.4' ) >= 0 ) {
// Send PHP warnings and errors to stderr instead of stdout.
// This aids in diagnosing problems, while keeping messages
// out of redirected output.
- if( ini_get( 'display_errors' ) ) {
+ if ( ini_get( 'display_errors' ) ) {
ini_set( 'display_errors', 'stderr' );
}
@@ -393,9 +454,12 @@ abstract class Maintenance {
// command-line mode is on, regardless of PHP version.
}
+ $this->loadParamsAndArgs();
+ $this->maybeHelp();
+
# Set the memory limit
	# Note we need to set it again later in case LocalSettings changed it
- ini_set( 'memory_limit', $this->memoryLimit() );
+ $this->adjustMemoryLimit();
# Set max execution time to 0 (no limit). PHP.net says that
# "When running PHP from the command line the default setting is 0."
@@ -407,27 +471,38 @@ abstract class Maintenance {
# Define us as being in MediaWiki
define( 'MEDIAWIKI', true );
- # Setup $IP, using MW_INSTALL_PATH if it exists
- $IP = strval( getenv( 'MW_INSTALL_PATH' ) ) !== ''
- ? getenv( 'MW_INSTALL_PATH' )
- : realpath( dirname( __FILE__ ) . '/..' );
-
$wgCommandLineMode = true;
# Turn off output buffering if it's on
@ob_end_flush();
- $this->loadParamsAndArgs();
- $this->maybeHelp();
$this->validateParamsAndArgs();
}
/**
* Normally we disable the memory_limit when running admin scripts.
* Some scripts may wish to actually set a limit, however, to avoid
- * blowing up unexpectedly.
+ * blowing up unexpectedly. We also support a --memory-limit option,
+ * to allow sysadmins to explicitly set one if they'd prefer to override
+ * defaults (or for people using Suhosin which yells at you for trying
+ * to disable the limits)
*/
public function memoryLimit() {
- return -1;
+ $limit = $this->getOption( 'memory-limit', 'max' );
+ $limit = trim( $limit, "\" '" ); // trim quotes in case someone misunderstood
+ return $limit;
+ }
+
+ /**
+ * Adjusts PHP's memory limit to better suit our needs, if needed.
+ */
+ protected function adjustMemoryLimit() {
+ $limit = $this->memoryLimit();
+ if ( $limit == 'max' ) {
+ $limit = -1; // no memory limit
+ }
+ if ( $limit != 'default' ) {
+ ini_set( 'memory_limit', $limit );
+ }
}
/**
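The hunk above replaces the old unconditional memory_limit of -1 with memoryLimit()/adjustMemoryLimit() and a --memory-limit option that accepts "max", "default" or an explicit value. A script that prefers a cap of its own can override memoryLimit(); the subclass below is a hypothetical sketch, not code from this commit, and assumes the usual Maintenance.php boilerplate around it.

    // Hypothetical: cap memory at 256M unless the operator passes an explicit
    // --memory-limit on the command line.
    class BoundedScript extends Maintenance {
        public function memoryLimit() {
            if ( $this->hasOption( 'memory-limit' ) ) {
                return parent::memoryLimit(); // respect the operator's choice
            }
            return '256M';
        }
        public function execute() {
            $this->output( 'memory_limit is ' . ini_get( 'memory_limit' ) . "\n" );
        }
    }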
@@ -450,15 +525,15 @@ abstract class Maintenance {
*/
public function loadParamsAndArgs( $self = null, $opts = null, $args = null ) {
# If we were given opts or args, set those and return early
- if( $self ) {
+ if ( $self ) {
$this->mSelf = $self;
$this->mInputLoaded = true;
}
- if( $opts ) {
+ if ( $opts ) {
$this->mOptions = $opts;
$this->mInputLoaded = true;
}
- if( $args ) {
+ if ( $args ) {
$this->mArgs = $args;
$this->mInputLoaded = true;
}
@@ -466,7 +541,7 @@ abstract class Maintenance {
# If we've already loaded input (either by user values or from $argv)
# skip on loading it again. The array_shift() will corrupt values if
# it's run again and again
- if( $this->mInputLoaded ) {
+ if ( $this->mInputLoaded ) {
$this->loadSpecialVars();
return;
}
@@ -478,11 +553,11 @@ abstract class Maintenance {
$args = array();
# Parse arguments
- for( $arg = reset( $argv ); $arg !== false; $arg = next( $argv ) ) {
+ for ( $arg = reset( $argv ); $arg !== false; $arg = next( $argv ) ) {
if ( $arg == '--' ) {
# End of options, remainder should be considered arguments
$arg = next( $argv );
- while( $arg !== false ) {
+ while ( $arg !== false ) {
$args[] = $arg;
$arg = next( $argv );
}
@@ -499,7 +574,7 @@ abstract class Maintenance {
$options[$option] = $param;
} else {
$bits = explode( '=', $option, 2 );
- if( count( $bits ) > 1 ) {
+ if ( count( $bits ) > 1 ) {
$option = $bits[0];
$param = $bits[1];
} else {
@@ -509,8 +584,8 @@ abstract class Maintenance {
}
} elseif ( substr( $arg, 0, 1 ) == '-' ) {
# Short options
- for ( $p=1; $p<strlen( $arg ); $p++ ) {
- $option = $arg{$p};
+ for ( $p = 1; $p < strlen( $arg ); $p++ ) {
+				$option = $arg{$p};
if ( isset( $this->mParams[$option]['withArg'] ) && $this->mParams[$option]['withArg'] ) {
$param = next( $argv );
if ( $param === false ) {
@@ -539,35 +614,41 @@ abstract class Maintenance {
protected function validateParamsAndArgs() {
$die = false;
# Check to make sure we've got all the required options
- foreach( $this->mParams as $opt => $info ) {
- if( $info['require'] && !$this->hasOption( $opt ) ) {
+ foreach ( $this->mParams as $opt => $info ) {
+ if ( $info['require'] && !$this->hasOption( $opt ) ) {
$this->error( "Param $opt required!" );
$die = true;
}
}
# Check arg list too
- foreach( $this->mArgList as $k => $info ) {
- if( $info['require'] && !$this->hasArg($k) ) {
- $this->error( "Argument <" . $info['name'] . "> required!" );
+ foreach ( $this->mArgList as $k => $info ) {
+ if ( $info['require'] && !$this->hasArg( $k ) ) {
+ $this->error( 'Argument <' . $info['name'] . '> required!' );
$die = true;
}
}
-
- if( $die ) $this->maybeHelp( true );
+
+ if ( $die ) {
+ $this->maybeHelp( true );
+ }
}
/**
* Handle the special variables that are global to all scripts
*/
protected function loadSpecialVars() {
- if( $this->hasOption( 'dbuser' ) )
+ if ( $this->hasOption( 'dbuser' ) ) {
$this->mDbUser = $this->getOption( 'dbuser' );
- if( $this->hasOption( 'dbpass' ) )
+ }
+ if ( $this->hasOption( 'dbpass' ) ) {
$this->mDbPass = $this->getOption( 'dbpass' );
- if( $this->hasOption( 'quiet' ) )
+ }
+ if ( $this->hasOption( 'quiet' ) ) {
$this->mQuiet = true;
- if( $this->hasOption( 'batch-size' ) )
+ }
+ if ( $this->hasOption( 'batch-size' ) ) {
$this->mBatchSize = $this->getOption( 'batch-size' );
+ }
}
/**
@@ -575,40 +656,62 @@ abstract class Maintenance {
* @param $force boolean Whether to force the help to show, default false
*/
protected function maybeHelp( $force = false ) {
- $screenWidth = 80; // TODO: Caculate this!
+ if( !$force && !$this->hasOption( 'help' ) ) {
+ return;
+ }
+
+		$screenWidth = 80; // TODO: Calculate this!
$tab = " ";
$descWidth = $screenWidth - ( 2 * strlen( $tab ) );
-
+
ksort( $this->mParams );
- if( $this->hasOption( 'help' ) || $force ) {
- $this->mQuiet = false;
+ $this->mQuiet = false;
- if( $this->mDescription ) {
- $this->output( "\n" . $this->mDescription . "\n" );
- }
- $output = "\nUsage: php " . basename( $this->mSelf );
- if( $this->mParams ) {
- $output .= " [--" . implode( array_keys( $this->mParams ), "|--" ) . "]";
- }
- if( $this->mArgList ) {
- $output .= " <";
- foreach( $this->mArgList as $k => $arg ) {
- $output .= $arg['name'] . ">";
- if( $k < count( $this->mArgList ) - 1 )
- $output .= " <";
+ // Description ...
+ if ( $this->mDescription ) {
+ $this->output( "\n" . $this->mDescription . "\n" );
+ }
+ $output = "\nUsage: php " . basename( $this->mSelf );
+
+ // ... append parameters ...
+ if ( $this->mParams ) {
+ $output .= " [--" . implode( array_keys( $this->mParams ), "|--" ) . "]";
+ }
+
+ // ... and append arguments.
+ if ( $this->mArgList ) {
+ $output .= ' ';
+ foreach ( $this->mArgList as $k => $arg ) {
+ if ( $arg['require'] ) {
+ $output .= '<' . $arg['name'] . '>';
+ } else {
+ $output .= '[' . $arg['name'] . ']';
}
+ if ( $k < count( $this->mArgList ) - 1 )
+ $output .= ' ';
}
- $this->output( "$output\n" );
- foreach( $this->mParams as $par => $info ) {
- $this->output( wordwrap( "$tab$par : " . $info['desc'], $descWidth,
- "\n$tab$tab" ) . "\n" );
- }
- foreach( $this->mArgList as $info ) {
- $this->output( wordwrap( "$tab<" . $info['name'] . "> : " .
- $info['desc'], $descWidth, "\n$tab$tab" ) . "\n" );
- }
- die( 1 );
}
+ $this->output( "$output\n\n" );
+
+ // Parameters description
+ foreach ( $this->mParams as $par => $info ) {
+ $this->output(
+ wordwrap( "$tab--$par: " . $info['desc'], $descWidth,
+ "\n$tab$tab" ) . "\n"
+ );
+ }
+
+ // Arguments description
+ foreach ( $this->mArgList as $info ) {
+ $openChar = $info['require'] ? '<' : '[';
+ $closeChar = $info['require'] ? '>' : ']';
+ $this->output(
+ wordwrap( "$tab$openChar" . $info['name'] . "$closeChar: " .
+ $info['desc'], $descWidth, "\n$tab$tab" ) . "\n"
+ );
+ }
+
+ die( 1 );
}
/**
@@ -616,11 +719,11 @@ abstract class Maintenance {
*/
public function finalSetup() {
global $wgCommandLineMode, $wgShowSQLErrors, $wgServer;
- global $wgTitle, $wgProfiling, $IP, $wgDBadminuser, $wgDBadminpassword;
+ global $wgProfiling, $wgDBadminuser, $wgDBadminpassword;
global $wgDBuser, $wgDBpassword, $wgDBservers, $wgLBFactoryConf;
# Turn off output buffering again, it might have been turned on in the settings files
- if( ob_get_level() ) {
+ if ( ob_get_level() ) {
ob_end_flush();
}
# Same with these
@@ -632,45 +735,54 @@ abstract class Maintenance {
}
# If these were passed, use them
- if( $this->mDbUser )
+ if ( $this->mDbUser ) {
$wgDBadminuser = $this->mDbUser;
- if( $this->mDbPass )
+ }
+ if ( $this->mDbPass ) {
$wgDBadminpassword = $this->mDbPass;
+ }
if ( $this->getDbType() == self::DB_ADMIN && isset( $wgDBadminuser ) ) {
$wgDBuser = $wgDBadminuser;
$wgDBpassword = $wgDBadminpassword;
- if( $wgDBservers ) {
+ if ( $wgDBservers ) {
foreach ( $wgDBservers as $i => $server ) {
$wgDBservers[$i]['user'] = $wgDBuser;
$wgDBservers[$i]['password'] = $wgDBpassword;
}
}
- if( isset( $wgLBFactoryConf['serverTemplate'] ) ) {
+ if ( isset( $wgLBFactoryConf['serverTemplate'] ) ) {
$wgLBFactoryConf['serverTemplate']['user'] = $wgDBuser;
$wgLBFactoryConf['serverTemplate']['password'] = $wgDBpassword;
}
+ LBFactory::destroyInstance();
}
- if ( defined( 'MW_CMDLINE_CALLBACK' ) ) {
- $fn = MW_CMDLINE_CALLBACK;
- $fn();
- }
+ $this->afterFinalSetup();
$wgShowSQLErrors = true;
@set_time_limit( 0 );
- ini_set( 'memory_limit', $this->memoryLimit() );
+ $this->adjustMemoryLimit();
$wgProfiling = false; // only for Profiler.php mode; avoids OOM errors
}
/**
+ * Execute a callback function at the end of initialisation
+ */
+ protected function afterFinalSetup() {
+ if ( defined( 'MW_CMDLINE_CALLBACK' ) ) {
+ call_user_func( MW_CMDLINE_CALLBACK );
+ }
+ }
+
+ /**
* Potentially debug globals. Originally a feature only
* for refreshLinks
*/
public function globals() {
- if( $this->hasOption( 'globals' ) ) {
+ if ( $this->hasOption( 'globals' ) ) {
print_r( $GLOBALS );
}
}
@@ -711,13 +823,12 @@ abstract class Maintenance {
# This is for the IRC scripts, which now run as the apache user
# The apache user doesn't have access to the wikiadmin_pass command
if ( $_ENV['USER'] == 'apache' ) {
- #if ( posix_geteuid() == 48 ) {
+ # if ( posix_geteuid() == 48 ) {
$wgUseNormalUser = true;
}
putenv( 'wikilang=' . $lang );
- $DP = $IP;
ini_set( 'include_path', ".:$IP:$IP/includes:$IP/languages:$IP/maintenance" );
if ( $lang == 'test' && $site == 'wikipedia' ) {
@@ -730,11 +841,13 @@ abstract class Maintenance {
* @return String
*/
public function loadSettings() {
- global $wgWikiFarm, $wgCommandLineMode, $IP, $DP;
+ global $wgWikiFarm, $wgCommandLineMode, $IP;
$wgWikiFarm = false;
if ( isset( $this->mOptions['conf'] ) ) {
$settingsFile = $this->mOptions['conf'];
+		} else if ( defined( 'MW_CONFIG_FILE' ) ) {
+ $settingsFile = MW_CONFIG_FILE;
} else {
$settingsFile = "$IP/LocalSettings.php";
}
@@ -749,19 +862,19 @@ abstract class Maintenance {
if ( !is_readable( $settingsFile ) ) {
$this->error( "A copy of your installation's LocalSettings.php\n" .
- "must exist and be readable in the source directory.", true );
+ "must exist and be readable in the source directory.\n" .
+ "Use --conf to specify it." , true );
}
$wgCommandLineMode = true;
- $DP = $IP;
return $settingsFile;
}
/**
* Support function for cleaning up redundant text records
- * @param $delete boolean Whether or not to actually delete the records
+ * @param $delete Boolean: whether or not to actually delete the records
* @author Rob Church <robchur@gmail.com>
*/
- protected function purgeRedundantText( $delete = true ) {
+ public function purgeRedundantText( $delete = true ) {
# Data should come off the master, wrapped in a transaction
$dbw = wfGetDB( DB_MASTER );
$dbw->begin();
@@ -771,27 +884,27 @@ abstract class Maintenance {
$tbl_txt = $dbw->tableName( 'text' );
# Get "active" text records from the revisions table
- $this->output( "Searching for active text records in revisions table..." );
+ $this->output( 'Searching for active text records in revisions table...' );
$res = $dbw->query( "SELECT DISTINCT rev_text_id FROM $tbl_rev" );
- foreach( $res as $row ) {
+ foreach ( $res as $row ) {
$cur[] = $row->rev_text_id;
}
$this->output( "done.\n" );
# Get "active" text records from the archive table
- $this->output( "Searching for active text records in archive table..." );
+ $this->output( 'Searching for active text records in archive table...' );
$res = $dbw->query( "SELECT DISTINCT ar_text_id FROM $tbl_arc" );
- foreach( $res as $row ) {
+ foreach ( $res as $row ) {
$cur[] = $row->ar_text_id;
}
$this->output( "done.\n" );
# Get the IDs of all text records not in these sets
- $this->output( "Searching for inactive text records..." );
+ $this->output( 'Searching for inactive text records...' );
$set = implode( ', ', $cur );
$res = $dbw->query( "SELECT old_id FROM $tbl_txt WHERE old_id NOT IN ( $set )" );
$old = array();
- foreach( $res as $row ) {
+ foreach ( $res as $row ) {
$old[] = $row->old_id;
}
$this->output( "done.\n" );
@@ -801,8 +914,8 @@ abstract class Maintenance {
$this->output( "$count inactive items found.\n" );
# Delete as appropriate
- if( $delete && $count ) {
- $this->output( "Deleting..." );
+ if ( $delete && $count ) {
+ $this->output( 'Deleting...' );
$set = implode( ', ', $old );
$dbw->query( "DELETE FROM $tbl_txt WHERE old_id IN ( $set )" );
$this->output( "done.\n" );
@@ -823,7 +936,7 @@ abstract class Maintenance {
* Get the list of available maintenance scripts. Note
* that if you call this _before_ calling doMaintenance
* you won't have any extensions in it yet
- * @return array
+ * @return Array
*/
public static function getMaintenanceScripts() {
global $wgMaintenanceScripts;
@@ -835,8 +948,7 @@ abstract class Maintenance {
* @return array
*/
protected static function getCoreScripts() {
- if( !self::$mCoreScripts ) {
- self::disableSetup();
+ if ( !self::$mCoreScripts ) {
$paths = array(
dirname( __FILE__ ),
dirname( __FILE__ ) . '/gearman',
@@ -844,19 +956,20 @@ abstract class Maintenance {
dirname( __FILE__ ) . '/storage',
);
self::$mCoreScripts = array();
- foreach( $paths as $p ) {
+ foreach ( $paths as $p ) {
$handle = opendir( $p );
- while( ( $file = readdir( $handle ) ) !== false ) {
- if( $file == 'Maintenance.php' )
+ while ( ( $file = readdir( $handle ) ) !== false ) {
+ if ( $file == 'Maintenance.php' ) {
continue;
+ }
$file = $p . '/' . $file;
- if( is_dir( $file ) || !strpos( $file, '.php' ) ||
+ if ( is_dir( $file ) || !strpos( $file, '.php' ) ||
( strpos( file_get_contents( $file ), '$maintClass' ) === false ) ) {
continue;
}
require( $file );
$vars = get_defined_vars();
- if( array_key_exists( 'maintClass', $vars ) ) {
+ if ( array_key_exists( 'maintClass', $vars ) ) {
self::$mCoreScripts[$vars['maintClass']] = $file;
}
}
@@ -865,4 +978,167 @@ abstract class Maintenance {
}
return self::$mCoreScripts;
}
+
+ /**
+ * Lock the search index
+ * @param &$db Database object
+ */
+ private function lockSearchindex( &$db ) {
+ $write = array( 'searchindex' );
+ $read = array( 'page', 'revision', 'text', 'interwiki', 'l10n_cache' );
+		$db->lockTables( $read, $write, __METHOD__ );
+ }
+
+ /**
+ * Unlock the tables
+ * @param &$db Database object
+ */
+ private function unlockSearchindex( &$db ) {
+		$db->unlockTables( __METHOD__ );
+ }
+
+ /**
+ * Unlock and lock again
+ * Since the lock is low-priority, queued reads will be able to complete
+ * @param &$db Database object
+ */
+ private function relockSearchindex( &$db ) {
+ $this->unlockSearchindex( $db );
+ $this->lockSearchindex( $db );
+ }
+
+ /**
+ * Perform a search index update with locking
+ * @param $maxLockTime Integer: the maximum time to keep the search index locked.
+	 * @param $callback Callback: the function that will perform the update
+	 * @param $dbw Database: a database write handle
+	 * @param $results Result wrapper containing the rows to update
+ */
+ public function updateSearchIndex( $maxLockTime, $callback, $dbw, $results ) {
+ $lockTime = time();
+
+ # Lock searchindex
+ if ( $maxLockTime ) {
+ $this->output( " --- Waiting for lock ---" );
+ $this->lockSearchindex( $dbw );
+ $lockTime = time();
+ $this->output( "\n" );
+ }
+
+ # Loop through the results and do a search update
+ foreach ( $results as $row ) {
+ # Allow reads to be processed
+ if ( $maxLockTime && time() > $lockTime + $maxLockTime ) {
+ $this->output( " --- Relocking ---" );
+ $this->relockSearchindex( $dbw );
+ $lockTime = time();
+ $this->output( "\n" );
+ }
+ call_user_func( $callback, $dbw, $row );
+ }
+
+ # Unlock searchindex
+ if ( $maxLockTime ) {
+ $this->output( " --- Unlocking --" );
+ $this->unlockSearchindex( $dbw );
+ $this->output( "\n" );
+ }
+
+ }
+
+ /**
+ * Update the searchindex table for a given pageid
+ * @param $dbw Database: a database write handle
+ * @param $pageId Integer: the page ID to update.
+ */
+ public function updateSearchIndexForPage( $dbw, $pageId ) {
+ // Get current revision
+ $rev = Revision::loadFromPageId( $dbw, $pageId );
+ $title = null;
+ if ( $rev ) {
+ $titleObj = $rev->getTitle();
+ $title = $titleObj->getPrefixedDBkey();
+ $this->output( "$title..." );
+ # Update searchindex
+ $u = new SearchUpdate( $pageId, $titleObj->getText(), $rev->getText() );
+ $u->doUpdate();
+ $this->output( "\n" );
+ }
+ return $title;
+ }
+
+ /**
+ * Prompt the console for input
+	 * @param $prompt String: what to begin the line with, like '> '
+ * @return String response
+ */
+ public static function readconsole( $prompt = '> ' ) {
+ static $isatty = null;
+ if ( is_null( $isatty ) ) {
+ if ( posix_isatty( 0 /*STDIN*/ ) ) {
+ $isatty = true;
+ } else {
+ $isatty = false;
+ }
+ }
+
+ if ( $isatty && function_exists( 'readline' ) ) {
+ return readline( $prompt );
+ } else {
+ if ( $isatty ) {
+ $st = self::readlineEmulation( $prompt );
+ } else {
+ if ( feof( STDIN ) ) {
+ $st = false;
+ } else {
+ $st = fgets( STDIN, 1024 );
+ }
+ }
+ if ( $st === false ) return false;
+ $resp = trim( $st );
+ return $resp;
+ }
+ }
+
+ /**
+ * Emulate readline()
+	 * @param $prompt String: what to begin the line with, like '> '
+ * @return String
+ */
+ private static function readlineEmulation( $prompt ) {
+ $bash = Installer::locateExecutableInDefaultPaths( array( 'bash' ) );
+ if ( !wfIsWindows() && $bash ) {
+ $retval = false;
+ $encPrompt = wfEscapeShellArg( $prompt );
+ $command = "read -er -p $encPrompt && echo \"\$REPLY\"";
+ $encCommand = wfEscapeShellArg( $command );
+ $line = wfShellExec( "$bash -c $encCommand", $retval );
+
+ if ( $retval == 0 ) {
+ return $line;
+ } elseif ( $retval == 127 ) {
+ // Couldn't execute bash even though we thought we saw it.
+ // Shell probably spit out an error message, sorry :(
+ // Fall through to fgets()...
+ } else {
+ // EOF/ctrl+D
+ return false;
+ }
+ }
+
+ // Fallback... we'll have no editing controls, EWWW
+ if ( feof( STDIN ) ) {
+ return false;
+ }
+ print $prompt;
+ return fgets( STDIN, 1024 );
+ }
}
+
+class FakeMaintenance extends Maintenance {
+ protected $mSelf = "FakeMaintenanceScript";
+ public function execute() {
+ return;
+ }
+}
+
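The Maintenance.php hunks above change the script entry point (RUN_MAINTENANCE_IF_MAIN plus Maintenance::shouldExecute() replace the old DO_MAINTENANCE constant), move $IP setup into the constructor, and add addDescription(), deleteOption() and the --memory-limit handling. A minimal 1.17-style script built on that API might look like the sketch below; the class name, option and query are invented for illustration, but the boilerplate mirrors addwiki.php later in this diff.

    <?php
    // Hypothetical example script; only the surrounding boilerplate is the
    // pattern introduced above (see addwiki.php below for a real instance).
    require_once( dirname( __FILE__ ) . '/Maintenance.php' );

    class CountPages extends Maintenance {
        public function __construct() {
            parent::__construct(); // call first: sets $IP and the default params
            $this->addDescription( 'Report how many rows the page table has' );
            $this->addOption( 'namespace', 'Only count this namespace', false, true );
        }

        public function execute() {
            $dbr = wfGetDB( DB_SLAVE );
            $conds = array();
            if ( $this->hasOption( 'namespace' ) ) {
                $conds['page_namespace'] = intval( $this->getOption( 'namespace' ) );
            }
            $count = $dbr->selectField( 'page', 'COUNT(*)', $conds, __METHOD__ );
            $this->output( "$count pages\n" );
        }
    }

    $maintClass = 'CountPages';
    require_once( RUN_MAINTENANCE_IF_MAIN );

Running such a script with --help shows the options the base class registers automatically (--conf, --wiki, --memory-limit and so on).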
diff --git a/maintenance/Makefile b/maintenance/Makefile
index 82476139..a92751c9 100644
--- a/maintenance/Makefile
+++ b/maintenance/Makefile
@@ -3,7 +3,7 @@ help:
@echo "Run 'make doc' to run the doxygen generation."
test:
- php parserTests.php --quiet
+ php tests/parserTests.php --quiet
doc:
php mwdocgen.php --all
diff --git a/maintenance/addwiki.php b/maintenance/addwiki.php
index 0cb4d74a..e86a8c5d 100644
--- a/maintenance/addwiki.php
+++ b/maintenance/addwiki.php
@@ -27,15 +27,16 @@
* @ingroup Wikimedia
*/
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class AddWiki extends Maintenance {
public function __construct() {
parent::__construct();
$this->mDescription = "Add a new wiki to the family. Wikimedia specific!";
- $this->addArg( 'language', 'Language code of new site' );
- $this->addArg( 'site', 'Type of site' );
- $this->addArg( 'dbname', 'Name of database to create' );
+ $this->addArg( 'language', 'Language code of new site, e.g. en' );
+ $this->addArg( 'site', 'Type of site, e.g. wikipedia' );
+ $this->addArg( 'dbname', 'Name of database to create, e.g. enwiki' );
+ $this->addArg( 'domain', 'Domain name of the wiki, e.g. en.wikipedia.org' );
}
public function getDbType() {
@@ -43,17 +44,19 @@ class AddWiki extends Maintenance {
}
public function execute() {
- global $IP, $wgLanguageNames, $wgDefaultExternalStore, $wgNoDBParam;
+ global $IP, $wgDefaultExternalStore, $wgNoDBParam;
$wgNoDBParam = true;
- $lang = $this->getArg(0);
- $site = $this->getArg(1);
- $dbName = $this->getArg(2);
+ $lang = $this->getArg( 0 );
+ $site = $this->getArg( 1 );
+ $dbName = $this->getArg( 2 );
+ $domain = $this->getArg( 3 );
+ $languageNames = Language::getLanguageNames();
- if ( !isset( $wgLanguageNames[$lang] ) ) {
+ if ( !isset( $languageNames[$lang] ) ) {
$this->error( "Language $lang not found in \$wgLanguageNames", true );
}
- $name = $wgLanguageNames[$lang];
+ $name = $languageNames[$lang];
$dbw = wfGetDB( DB_MASTER );
$common = "/home/wikipedia/common";
@@ -80,13 +83,14 @@ class AddWiki extends Maintenance {
$dbw->sourceFile( "$IP/extensions/UsabilityInitiative/ClickTracking/ClickTrackingEvents.sql" );
$dbw->sourceFile( "$IP/extensions/UsabilityInitiative/ClickTracking/ClickTracking.sql" );
$dbw->sourceFile( "$IP/extensions/UsabilityInitiative/UserDailyContribs/UserDailyContribs.sql" );
+ $dbw->sourceFile( "$IP/extensions/UsabilityInitiative/OptIn/OptIn.sql" );
$dbw->query( "INSERT INTO site_stats(ss_row_id) VALUES (1)" );
# Initialise external storage
if ( is_array( $wgDefaultExternalStore ) ) {
$stores = $wgDefaultExternalStore;
- } elseif ( $stores ) {
+ } elseif ( $wgDefaultExternalStore ) {
$stores = array( $wgDefaultExternalStore );
} else {
$stores = array();
@@ -138,11 +142,21 @@ class AddWiki extends Maintenance {
fclose( $file );
# Update the sublists
- shell_exec("cd $common && ./refresh-dblist");
+ shell_exec( "cd $common && ./refresh-dblist" );
- #print "Constructing interwiki SQL\n";
+ # print "Constructing interwiki SQL\n";
# Rebuild interwiki tables
- #passthru( '/home/wikipedia/conf/interwiki/update' );
+ # passthru( '/home/wikipedia/conf/interwiki/update' );
+
+ $time = wfTimestamp( TS_RFC2822 );
+ // These arguments need to be escaped twice: once for echo and once for at
+ $escDbName = wfEscapeShellArg( wfEscapeShellArg( $dbName ) );
+ $escTime = wfEscapeShellArg( wfEscapeShellArg( $time ) );
+ $escUcsite = wfEscapeShellArg( wfEscapeShellArg( $ucsite ) );
+ $escName = wfEscapeShellArg( wfEscapeShellArg( $name ) );
+ $escLang = wfEscapeShellArg( wfEscapeShellArg( $lang ) );
+ $escDomain = wfEscapeShellArg( wfEscapeShellArg( $domain ) );
+ shell_exec( "echo notifyNewProjects $escDbName $escTime $escUcsite $escName $escLang $escDomain | at now + 15 minutes" );
$this->output( "Script ended. You still have to:
* Add any required settings in InitialiseSettings.php
@@ -150,7 +164,7 @@ class AddWiki extends Maintenance {
* Run /home/wikipedia/conf/interwiki/update
" );
}
-
+
private function getFirstArticle( $ucsite, $name ) {
return <<<EOT
==This subdomain is reserved for the creation of a [[wikimedia:Our projects|$ucsite]] in '''[[w:en:{$name}|{$name}]]''' language==
@@ -451,4 +465,4 @@ EOT;
}
$maintClass = "AddWiki";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
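The notifyNewProjects block added above escapes every argument twice because the values pass through two shells: the one that runs echo now, and the one that at spawns later to execute the queued line. A stripped-down sketch of the same pattern, with only the command name and two of the arguments taken from the diff (everything else is illustrative):

    $dbName = 'enwiki'; // example value
    // Each value is quoted twice: the outer layer survives the shell running
    // echo now, the inner layer survives the shell that `at` runs later.
    $escDbName = wfEscapeShellArg( wfEscapeShellArg( $dbName ) );
    $escTime = wfEscapeShellArg( wfEscapeShellArg( wfTimestamp( TS_RFC2822 ) ) );
    shell_exec( "echo notifyNewProjects $escDbName $escTime | at now + 15 minutes" );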
diff --git a/maintenance/archives/patch-archive_ar_revid.sql b/maintenance/archives/patch-archive_ar_revid.sql
new file mode 100644
index 00000000..67ee97b1
--- /dev/null
+++ b/maintenance/archives/patch-archive_ar_revid.sql
@@ -0,0 +1,4 @@
+-- Hopefully temporary index.
+-- For https://bugzilla.wikimedia.org/show_bug.cgi?id=21279
+ALTER TABLE /*$wgDBprefix*/archive
+  ADD INDEX ar_revid ( ar_rev_id );
\ No newline at end of file
diff --git a/maintenance/archives/patch-archive_kill_ar_page_revid.sql b/maintenance/archives/patch-archive_kill_ar_page_revid.sql
new file mode 100644
index 00000000..2e6fe453
--- /dev/null
+++ b/maintenance/archives/patch-archive_kill_ar_page_revid.sql
@@ -0,0 +1,4 @@
+-- Used for killing the wrong index added during 1.17 development (SVN)
+-- Won't affect most people, but it doesn't need to exist
+ALTER TABLE /*$wgDBprefix*/archive
+  DROP INDEX ar_page_revid;
\ No newline at end of file
diff --git a/maintenance/archives/patch-categorylinks-better-collation.sql b/maintenance/archives/patch-categorylinks-better-collation.sql
new file mode 100644
index 00000000..c1499c15
--- /dev/null
+++ b/maintenance/archives/patch-categorylinks-better-collation.sql
@@ -0,0 +1,19 @@
+--
+-- patch-categorylinks-better-collation.sql
+--
+-- Bugs 164, 1211, 23682. This is the second version of this patch; the
+-- changes are also incorporated into patch-categorylinks-better-collation2.sql,
+-- for the benefit of trunk users who applied the original.
+--
+-- Due to bug 25254, the length limit of 255 bytes for cl_sortkey_prefix
+-- is also enforced in php. If you change the length of that field, make
+-- sure to also change the check in LinksUpdate.php.
+ALTER TABLE /*$wgDBprefix*/categorylinks
+ CHANGE COLUMN cl_sortkey cl_sortkey varbinary(230) NOT NULL default '',
+ ADD COLUMN cl_sortkey_prefix varchar(255) binary NOT NULL default '',
+ ADD COLUMN cl_collation varbinary(32) NOT NULL default '',
+ ADD COLUMN cl_type ENUM('page', 'subcat', 'file') NOT NULL default 'page',
+ ADD INDEX (cl_collation),
+ DROP INDEX cl_sortkey,
+ ADD INDEX cl_sortkey (cl_to, cl_type, cl_sortkey, cl_from);
+INSERT IGNORE INTO /*$wgDBprefix*/updatelog (ul_key) VALUES ('cl_fields_update');
diff --git a/maintenance/archives/patch-categorylinks-better-collation2.sql b/maintenance/archives/patch-categorylinks-better-collation2.sql
new file mode 100644
index 00000000..e9574693
--- /dev/null
+++ b/maintenance/archives/patch-categorylinks-better-collation2.sql
@@ -0,0 +1,12 @@
+--
+-- patch-categorylinks-better-collation2.sql
+--
+-- Bugs 164, 1211, 23682. This patch exists for trunk users who already
+-- applied the first patch in its original version. The first patch was
+-- updated to incorporate the changes as well, so as not to do two alters on a
+-- large table unnecessarily for people upgrading from 1.16, so this will be
+-- skipped if unneeded.
+ALTER TABLE /*$wgDBprefix*/categorylinks
+ CHANGE COLUMN cl_sortkey cl_sortkey varbinary(230) NOT NULL default '',
+ CHANGE COLUMN cl_collation cl_collation varbinary(32) NOT NULL default '';
+INSERT IGNORE INTO /*$wgDBprefix*/updatelog (ul_key) VALUES ('cl_fields_update');
diff --git a/maintenance/archives/patch-iw_api_and_wikiid.sql b/maintenance/archives/patch-iw_api_and_wikiid.sql
new file mode 100644
index 00000000..4384a715
--- /dev/null
+++ b/maintenance/archives/patch-iw_api_and_wikiid.sql
@@ -0,0 +1,9 @@
+--
+-- Add iw_api and iw_wikiid to interwiki table
+--
+
+ALTER TABLE /*_*/interwiki
+ ADD iw_api BLOB NOT NULL;
+ALTER TABLE /*_*/interwiki
+ ADD iw_wikiid varchar(64) NOT NULL;
+
diff --git a/maintenance/archives/patch-iwlinks.sql b/maintenance/archives/patch-iwlinks.sql
new file mode 100644
index 00000000..89b34cb1
--- /dev/null
+++ b/maintenance/archives/patch-iwlinks.sql
@@ -0,0 +1,16 @@
+--
+-- Track inline interwiki links
+--
+CREATE TABLE /*_*/iwlinks (
+ -- page_id of the referring page
+ iwl_from int unsigned NOT NULL default 0,
+
+ -- Interwiki prefix code of the target
+ iwl_prefix varbinary(20) NOT NULL default '',
+
+ -- Title of the target, including namespace
+ iwl_title varchar(255) binary NOT NULL default ''
+) /*$wgDBTableOptions*/;
+
+CREATE UNIQUE INDEX /*i*/iwl_from ON /*_*/iwlinks (iwl_from, iwl_prefix, iwl_title);
+CREATE UNIQUE INDEX /*i*/iwl_prefix_title_from ON /*_*/iwlinks (iwl_prefix, iwl_title, iwl_from);
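patch-iwlinks.sql above gives inline interwiki links the same dual indexing as pagelinks: one unique index keyed by the source page (iwl_from) and one keyed by the target (iwl_prefix, iwl_title). As a hedged illustration (not part of this commit), a maintenance script could read the table through the DB layer like this:

    // Hypothetical lookup: which pages link to wikt:Foo via the new table?
    $dbr = wfGetDB( DB_SLAVE );
    $res = $dbr->select(
        'iwlinks',
        array( 'iwl_from' ),
        array( 'iwl_prefix' => 'wikt', 'iwl_title' => 'Foo' ),
        __METHOD__
    );
    foreach ( $res as $row ) {
        print $row->iwl_from . "\n";
    }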
diff --git a/maintenance/archives/patch-kill-iwl_pft.sql b/maintenance/archives/patch-kill-iwl_pft.sql
new file mode 100644
index 00000000..96e14356
--- /dev/null
+++ b/maintenance/archives/patch-kill-iwl_pft.sql
@@ -0,0 +1,7 @@
+--
+-- Kill the old iwl_prefix_from_title index, which may be present on some
+-- installs if they ran update.php between it being added and being renamed
+--
+
+DROP INDEX /*i*/iwl_prefix_from_title ON /*_*/iwlinks;
+
diff --git a/maintenance/archives/patch-kill-iwl_prefix.sql b/maintenance/archives/patch-kill-iwl_prefix.sql
new file mode 100644
index 00000000..1cd9b454
--- /dev/null
+++ b/maintenance/archives/patch-kill-iwl_prefix.sql
@@ -0,0 +1,7 @@
+--
+-- Kill the old iwl_prefix index, which may be present on some
+-- installs if they ran update.php between it being added and being renamed
+--
+
+DROP INDEX /*i*/iwl_prefix ON /*_*/iwlinks;
+
diff --git a/maintenance/archives/patch-langlinks-ll_lang-20.sql b/maintenance/archives/patch-langlinks-ll_lang-20.sql
new file mode 100644
index 00000000..ce026382
--- /dev/null
+++ b/maintenance/archives/patch-langlinks-ll_lang-20.sql
@@ -0,0 +1,3 @@
+ALTER TABLE /*$wgDBprefix*/langlinks
+ MODIFY `ll_lang`
+  VARBINARY(20) NOT NULL DEFAULT '';
\ No newline at end of file
diff --git a/maintenance/archives/patch-mime_minor_length.sql b/maintenance/archives/patch-mime_minor_length.sql
index 3a3c5c4f..8b63d1f0 100644
--- a/maintenance/archives/patch-mime_minor_length.sql
+++ b/maintenance/archives/patch-mime_minor_length.sql
@@ -7,4 +7,4 @@ ALTER TABLE /*_*/image
ALTER TABLE /*_*/oldimage
MODIFY COLUMN oi_minor_mime varbinary(100) NOT NULL default "unknown";
-INSERT INTO /*_*/updatelog VALUES ('mime_minor_length');
\ No newline at end of file
+INSERT INTO /*_*/updatelog(ul_key) VALUES ('mime_minor_length');
diff --git a/maintenance/archives/patch-module_deps.sql b/maintenance/archives/patch-module_deps.sql
new file mode 100644
index 00000000..ffc94829
--- /dev/null
+++ b/maintenance/archives/patch-module_deps.sql
@@ -0,0 +1,12 @@
+-- Table for tracking which local files a module depends on that aren't
+-- registered directly.
+-- Currently only used for tracking images that CSS depends on
+CREATE TABLE /*_*/module_deps (
+ -- Module name
+ md_module varbinary(255) NOT NULL,
+ -- Skin name
+ md_skin varbinary(32) NOT NULL,
+ -- JSON blob with file dependencies
+ md_deps mediumblob NOT NULL
+) /*$wgDBTableOptions*/;
+CREATE UNIQUE INDEX /*i*/md_module_skin ON /*_*/module_deps (md_module, md_skin);
diff --git a/maintenance/archives/patch-msg_resource.sql b/maintenance/archives/patch-msg_resource.sql
new file mode 100644
index 00000000..f4f35339
--- /dev/null
+++ b/maintenance/archives/patch-msg_resource.sql
@@ -0,0 +1,20 @@
+-- Table for storing JSON message blobs for the resource loader
+CREATE TABLE /*_*/msg_resource (
+ -- Resource name
+ mr_resource varbinary(255) NOT NULL,
+ -- Language code
+ mr_lang varbinary(32) NOT NULL,
+ -- JSON blob. This is an incomplete JSON object, i.e. without the wrapping {}
+ mr_blob mediumblob NOT NULL,
+ -- Timestamp of last update
+ mr_timestamp binary(14) NOT NULL
+) /*$wgDBTableOptions*/;
+CREATE UNIQUE INDEX /*i*/mr_resource_lang ON /*_*/msg_resource(mr_resource, mr_lang);
+
+-- Table for administering which message is contained in which resource
+CREATE TABLE /*_*/msg_resource_links (
+ mrl_resource varbinary(255) NOT NULL,
+ -- Message key
+ mrl_message varbinary(255) NOT NULL
+) /*$wgDBTableOptions*/;
+CREATE UNIQUE INDEX /*i*/mrl_message_resource ON /*_*/msg_resource_links (mrl_message, mrl_resource);
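As the comment in the patch notes, mr_blob stores a JSON object without its wrapping braces, presumably so individual key/value fragments can be spliced in without re-encoding the whole blob. A hypothetical sketch of reading one blob back (the resource name is invented; the decoding step is an assumption based on that comment, not code from this commit):

    // Hypothetical: fetch one message blob and decode it into an array.
    $dbr = wfGetDB( DB_SLAVE );
    $blob = $dbr->selectField(
        'msg_resource',
        'mr_blob',
        array( 'mr_resource' => 'jquery.ui', 'mr_lang' => 'en' ),
        __METHOD__
    );
    if ( $blob !== false ) {
        $messages = json_decode( '{' . $blob . '}', true ); // re-add the braces
        var_dump( $messages );
    }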
diff --git a/maintenance/archives/patch-profiling.sql b/maintenance/archives/patch-profiling.sql
index e748ca31..29663341 100644
--- a/maintenance/archives/patch-profiling.sql
+++ b/maintenance/archives/patch-profiling.sql
@@ -1,11 +1,12 @@
-- profiling table
-- This is optional
-CREATE TABLE /*$wgDBprefix*/profiling (
+CREATE TABLE /*_*/profiling (
pf_count int NOT NULL default 0,
pf_time float NOT NULL default 0,
pf_memory float NOT NULL default 0,
pf_name varchar(255) NOT NULL default '',
- pf_server varchar(30) NOT NULL default '',
- UNIQUE KEY pf_name_server (pf_name, pf_server)
+ pf_server varchar(30) NOT NULL default ''
) ENGINE=HEAP;
+
+CREATE UNIQUE INDEX /*i*/pf_name_server ON /*_*/profiling (pf_name, pf_server)
\ No newline at end of file
diff --git a/maintenance/archives/patch-rename-iwl_prefix.sql b/maintenance/archives/patch-rename-iwl_prefix.sql
new file mode 100644
index 00000000..4b11b36b
--- /dev/null
+++ b/maintenance/archives/patch-rename-iwl_prefix.sql
@@ -0,0 +1,4 @@
+--
+-- Recreates the iwl_prefix index for the iwlinks table
+--
+CREATE UNIQUE INDEX /*i*/iwl_prefix_title_from ON /*_*/iwlinks (iwl_prefix, iwl_title, iwl_from);
diff --git a/maintenance/archives/patch-tc-timestamp.sql b/maintenance/archives/patch-tc-timestamp.sql
index 4d90cf34..3f7dde41 100644
--- a/maintenance/archives/patch-tc-timestamp.sql
+++ b/maintenance/archives/patch-tc-timestamp.sql
@@ -1,4 +1,4 @@
ALTER TABLE /*_*/transcache MODIFY tc_time binary(14);
UPDATE /*_*/transcache SET tc_time = DATE_FORMAT(FROM_UNIXTIME(tc_time), "%Y%c%d%H%i%s");
-INSERT INTO /*_*/updatelog VALUES ('convert transcache field');
+INSERT INTO /*_*/updatelog(ul_key) VALUES ('convert transcache field');
diff --git a/maintenance/testRunner.sql b/maintenance/archives/patch-testrun.sql
index 8591d81d..8591d81d 100644
--- a/maintenance/testRunner.sql
+++ b/maintenance/archives/patch-testrun.sql
diff --git a/maintenance/archives/patch-ul_value.sql b/maintenance/archives/patch-ul_value.sql
new file mode 100644
index 00000000..50f4e9a8
--- /dev/null
+++ b/maintenance/archives/patch-ul_value.sql
@@ -0,0 +1,4 @@
+-- Add the ul_value column to updatelog
+
+ALTER TABLE /*_*/updatelog
+ add ul_value blob;
diff --git a/maintenance/archives/upgradeLogging.php b/maintenance/archives/upgradeLogging.php
index f79bbabc..54a82c09 100644
--- a/maintenance/archives/upgradeLogging.php
+++ b/maintenance/archives/upgradeLogging.php
@@ -6,7 +6,7 @@
* @ingroup MaintenanceArchive
*/
-require( dirname(__FILE__).'/../commandLine.inc' );
+require( dirname( __FILE__ ) . '/../commandLine.inc' );
class UpdateLogging {
var $dbw;
@@ -130,7 +130,7 @@ EOT;
} else {
$conds = array( 'log_timestamp > ' . $this->dbw->addQuotes( $copyPos ) );
}
- $srcRes = $this->dbw->select( $srcTable, '*', $conds, __METHOD__,
+ $srcRes = $this->dbw->select( $srcTable, '*', $conds, __METHOD__,
array( 'LIMIT' => $batchSize, 'ORDER BY' => 'log_timestamp' ) );
if ( ! $srcRes->numRows() ) {
diff --git a/maintenance/attachLatest.php b/maintenance/attachLatest.php
index 67f3088b..e6287f43 100644
--- a/maintenance/attachLatest.php
+++ b/maintenance/attachLatest.php
@@ -3,7 +3,7 @@
 * quick hackjob to fix damaged imports on wikisource
* page records have page_latest wrong
*
- * Copyright (C) 2005 Brion Vibber <brion@pobox.com>
+ * Copyright © 2005 Brion Vibber <brion@pobox.com>
* http://www.mediawiki.org/
*
* This program is free software; you can redistribute it and/or modify
@@ -21,19 +21,20 @@
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/gpl.html
*
+ * @file
* @ingroup Maintenance
*/
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class AttachLatest extends Maintenance {
-
+
public function __construct() {
parent::__construct();
$this->addOption( "fix", "Actually fix the entries, will dry run otherwise" );
$this->mDescription = "Fix page_latest entries in the page table";
}
-
+
public function execute() {
$this->output( "Looking for pages with page_latest set to 0...\n" );
$dbw = wfGetDB( DB_MASTER );
@@ -43,7 +44,7 @@ class AttachLatest extends Maintenance {
__METHOD__ );
$n = 0;
- foreach( $result as $row ) {
+ foreach ( $result as $row ) {
$pageId = intval( $row->page_id );
$title = Title::makeTitle( $row->page_namespace, $row->page_title );
$name = $title->getPrefixedText();
@@ -51,31 +52,30 @@ class AttachLatest extends Maintenance {
'MAX(rev_timestamp)',
array( 'rev_page' => $pageId ),
__METHOD__ );
- if( !$latestTime ) {
- $this->output( wfWikiID()." $pageId [[$name]] can't find latest rev time?!\n" );
+ if ( !$latestTime ) {
+ $this->output( wfWikiID() . " $pageId [[$name]] can't find latest rev time?!\n" );
continue;
}
-
+
$revision = Revision::loadFromTimestamp( $dbw, $title, $latestTime );
- if( is_null( $revision ) ) {
- $this->output( wfWikiID()." $pageId [[$name]] latest time $latestTime, can't find revision id\n" );
+ if ( is_null( $revision ) ) {
+ $this->output( wfWikiID() . " $pageId [[$name]] latest time $latestTime, can't find revision id\n" );
continue;
}
$id = $revision->getId();
- $this->output( wfWikiID()." $pageId [[$name]] latest time $latestTime, rev id $id\n" );
- if( $this->hasOption('fix') ) {
+ $this->output( wfWikiID() . " $pageId [[$name]] latest time $latestTime, rev id $id\n" );
+ if ( $this->hasOption( 'fix' ) ) {
$article = new Article( $title );
$article->updateRevisionOn( $dbw, $revision );
}
$n++;
}
- $dbw->freeResult( $result );
$this->output( "Done! Processed $n pages.\n" );
- if( !$this->hasOption('fix') ) {
+ if ( !$this->hasOption( 'fix' ) ) {
$this->output( "This was a dry run; rerun with --fix to update page_latest.\n" );
}
}
}
$maintClass = "AttachLatest";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
diff --git a/maintenance/backup.inc b/maintenance/backup.inc
index 30bd0d88..9ed463c9 100644
--- a/maintenance/backup.inc
+++ b/maintenance/backup.inc
@@ -1,6 +1,8 @@
<?php
/**
- * Copyright (C) 2005 Brion Vibber <brion@pobox.com>
+ * Base classes for database dumpers
+ *
+ * Copyright © 2005 Brion Vibber <brion@pobox.com>
* http://www.mediawiki.org/
*
* This program is free software; you can redistribute it and/or modify
@@ -27,7 +29,7 @@
*/
class DumpDBZip2Output extends DumpPipeOutput {
function DumpDBZip2Output( $file ) {
- parent::DumpPipeOutput( "dbzip2", $file );
+ parent::__construct( "dbzip2", $file );
}
}
@@ -67,16 +69,16 @@ class BackupDumper {
}
/**
- * @param string $name
- * @param string $class name of output filter plugin class
+ * @param $name String
+ * @param $class String: name of output filter plugin class
*/
function registerOutput( $name, $class ) {
$this->outputTypes[$name] = $class;
}
/**
- * @param string $name
- * @param string $class name of filter plugin class
+ * @param $name String
+ * @param $class String: name of filter plugin class
*/
function registerFilter( $name, $class ) {
$this->filterTypes[$name] = $class;
@@ -84,12 +86,13 @@ class BackupDumper {
/**
* Load a plugin and register it
- * @param string $class Name of plugin class; must have a static 'register'
- * method that takes a BackupDumper as a parameter.
- * @param string $file Full or relative path to the PHP file to load, or empty
+ *
+ * @param $class String: name of plugin class; must have a static 'register'
+ * method that takes a BackupDumper as a parameter.
+ * @param $file String: full or relative path to the PHP file to load, or empty
*/
function loadPlugin( $class, $file ) {
- if( $file != '' ) {
+ if ( $file != '' ) {
require_once( $file );
}
$register = array( $class, 'register' );
@@ -97,37 +100,36 @@ class BackupDumper {
}
/**
- * @param array $args
- * @return array
- * @static
+ * @param $args Array
+ * @return Array
*/
function processArgs( $args ) {
$sink = null;
$sinks = array();
- foreach( $args as $arg ) {
+ foreach ( $args as $arg ) {
$matches = array();
- if( preg_match( '/^--(.+?)(?:=(.+?)(?::(.+?))?)?$/', $arg, $matches ) ) {
+ if ( preg_match( '/^--(.+?)(?:=(.+?)(?::(.+?))?)?$/', $arg, $matches ) ) {
@list( /* $full */ , $opt, $val, $param ) = $matches;
switch( $opt ) {
case "plugin":
$this->loadPlugin( $val, $param );
break;
case "output":
- if( !is_null( $sink ) ) {
+ if ( !is_null( $sink ) ) {
$sinks[] = $sink;
}
- if( !isset( $this->outputTypes[$val] ) ) {
+ if ( !isset( $this->outputTypes[$val] ) ) {
wfDie( "Unrecognized output sink type '$val'\n" );
}
$type = $this->outputTypes[$val];
$sink = new $type( $param );
break;
case "filter":
- if( is_null( $sink ) ) {
+ if ( is_null( $sink ) ) {
$this->progress( "Warning: assuming stdout for filter output\n" );
$sink = new DumpOutput();
}
- if( !isset( $this->filterTypes[$val] ) ) {
+ if ( !isset( $this->filterTypes[$val] ) ) {
wfDie( "Unrecognized filter type '$val'\n" );
}
$type = $this->filterTypes[$val];
@@ -145,9 +147,9 @@ class BackupDumper {
$this->server = $val;
break;
case "force-normal":
- if( !function_exists( 'utf8_normalize' ) ) {
- dl( "php_utfnormal.so" );
- if( !function_exists( 'utf8_normalize' ) ) {
+ if ( !function_exists( 'utf8_normalize' ) ) {
+ wfDl( "php_utfnormal.so" );
+ if ( !function_exists( 'utf8_normalize' ) ) {
wfDie( "Failed to load UTF-8 normalization extension. " .
"Install or remove --force-normal parameter to use slower code.\n" );
}
@@ -159,12 +161,12 @@ class BackupDumper {
}
}
- if( is_null( $sink ) ) {
+ if ( is_null( $sink ) ) {
$sink = new DumpOutput();
}
$sinks[] = $sink;
- if( count( $sinks ) > 1 ) {
+ if ( count( $sinks ) > 1 ) {
return new DumpMultiWriter( $sinks );
} else {
return $sink;
@@ -178,7 +180,7 @@ class BackupDumper {
function dump( $history, $text = WikiExporter::TEXT ) {
# Notice messages will foul up your XML output even if they're
# relatively harmless.
- if( ini_get( 'display_errors' ) )
+ if ( ini_get( 'display_errors' ) )
ini_set( 'display_errors', 'stderr' );
$this->initProgress( $history );
@@ -190,18 +192,18 @@ class BackupDumper {
$wrapper = new ExportProgressFilter( $this->sink, $this );
$exporter->setOutputSink( $wrapper );
- if( !$this->skipHeader )
+ if ( !$this->skipHeader )
$exporter->openStream();
# Log item dumps: all or by range
- if( $history & WikiExporter::LOGS ) {
- if( $this->startId || $this->endId ) {
+ if ( $history & WikiExporter::LOGS ) {
+ if ( $this->startId || $this->endId ) {
$exporter->logsByRange( $this->startId, $this->endId );
} else {
$exporter->allLogs();
}
# Page dumps: all or by page ID range
- } else if( is_null( $this->pages ) ) {
- if( $this->startId || $this->endId ) {
+ } else if ( is_null( $this->pages ) ) {
+ if ( $this->startId || $this->endId ) {
$exporter->pagesByRange( $this->startId, $this->endId );
} else {
$exporter->allPages();
@@ -211,44 +213,45 @@ class BackupDumper {
$exporter->pagesByName( $this->pages );
}
- if( !$this->skipFooter )
+ if ( !$this->skipFooter )
$exporter->closeStream();
$this->report( true );
}
-
+
/**
* Initialise starting time and maximum revision count.
 	 * We'll make ETA calculations based on progress, assuming a relatively
 	 * constant per-revision rate.
- * @param int $history WikiExporter::CURRENT or WikiExporter::FULL
+ * @param $history Integer: WikiExporter::CURRENT or WikiExporter::FULL
*/
function initProgress( $history = WikiExporter::FULL ) {
- $table = ($history == WikiExporter::CURRENT) ? 'page' : 'revision';
- $field = ($history == WikiExporter::CURRENT) ? 'page_id' : 'rev_id';
-
+ $table = ( $history == WikiExporter::CURRENT ) ? 'page' : 'revision';
+ $field = ( $history == WikiExporter::CURRENT ) ? 'page_id' : 'rev_id';
+
$dbr = wfGetDB( DB_SLAVE );
- $this->maxCount = $dbr->selectField( $table, "MAX($field)", '', 'BackupDumper::dump' );
+ $this->maxCount = $dbr->selectField( $table, "MAX($field)", '', __METHOD__ );
$this->startTime = wfTime();
}
/**
- * @fixme the --server parameter is currently not respected, as it doesn't seem
- * terribly easy to ask the load balancer for a particular connection by name.
+ * @todo Fixme: the --server parameter is currently not respected, as it
+ * doesn't seem terribly easy to ask the load balancer for a particular
+ * connection by name.
*/
function backupDb() {
$this->lb = wfGetLBFactory()->newMainLB();
$db = $this->lb->getConnection( DB_SLAVE, 'backup' );
-
+
// Discourage the server from disconnecting us if it takes a long time
// to read out the big ol' batch query.
$db->setTimeout( 3600 * 24 );
-
+
return $db;
}
-
+
function __destruct() {
- if( isset( $this->lb ) ) {
+ if ( isset( $this->lb ) ) {
$this->lb->closeAll();
}
}
@@ -270,16 +273,16 @@ class BackupDumper {
}
function report( $final = false ) {
- if( $final xor ( $this->revCount % $this->reportingInterval == 0 ) ) {
+ if ( $final xor ( $this->revCount % $this->reportingInterval == 0 ) ) {
$this->showReport();
}
}
function showReport() {
- if( $this->reporting ) {
+ if ( $this->reporting ) {
$delta = wfTime() - $this->startTime;
$now = wfTimestamp( TS_DB );
- if( $delta ) {
+ if ( $delta ) {
$rate = $this->pageCount / $delta;
$revrate = $this->revCount / $delta;
$portion = $this->revCount / $this->maxCount;
@@ -302,7 +305,7 @@ class BackupDumper {
class ExportProgressFilter extends DumpFilter {
function ExportProgressFilter( &$sink, &$progress ) {
- parent::DumpFilter( $sink );
+ parent::__construct( $sink );
$this->progress = $progress;
}
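The option handling in BackupDumper::processArgs() above splits arguments of the form --output=type:parameter with a single regular expression. A standalone illustration of that split; the sink type and file name are invented for the example:

<?php
// Demonstrates the regex used in BackupDumper::processArgs();
// prints "output / gzip / dump.xml.gz".
$arg = '--output=gzip:dump.xml.gz';
$matches = array();
if ( preg_match( '/^--(.+?)(?:=(.+?)(?::(.+?))?)?$/', $arg, $matches ) ) {
	@list( /* $full */ , $opt, $val, $param ) = $matches;
	echo "$opt / $val / $param\n";
}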
diff --git a/maintenance/backupPrefetch.inc b/maintenance/backupPrefetch.inc
index 512af1c7..9d743137 100644
--- a/maintenance/backupPrefetch.inc
+++ b/maintenance/backupPrefetch.inc
@@ -1,42 +1,10 @@
<?php
-
-// Some smart guy removed XMLReader's global constants from PHP 5.1
-// and replaced them with class constants. Breaking source compatibility
-// is SUPER awesome, and I love languages which do this constantly!
-$xmlReaderConstants = array(
- "NONE",
- "ELEMENT",
- "ATTRIBUTE",
- "TEXT",
- "CDATA",
- "ENTITY_REF",
- "ENTITY",
- "PI",
- "COMMENT",
- "DOC",
- "DOC_TYPE",
- "DOC_FRAGMENT",
- "NOTATION",
- "WHITESPACE",
- "SIGNIFICANT_WHITESPACE",
- "END_ELEMENT",
- "END_ENTITY",
- "XML_DECLARATION",
- "LOADDTD",
- "DEFAULTATTRS",
- "VALIDATE",
- "SUBST_ENTITIES" );
-foreach( $xmlReaderConstants as $name ) {
- $fullName = "XMLREADER_$name";
- $newName = "XMLReader::$name";
- if( !defined( $fullName ) ) {
- if( defined( $newName ) ) {
- define( $fullName, constant( $newName ) );
- } else {
- // broken or missing the extension...
- }
- }
-}
+/**
+ * Helper class for the --prefetch option of dumpTextPass.php
+ *
+ * @file
+ * @ingroup Maintenance
+ */
/**
* Readahead helper for making large MediaWiki data dumps;
@@ -51,7 +19,6 @@ foreach( $xmlReaderConstants as $name ) {
* - text contents are immutable and should not change once
* recorded, so the previous dump is a reliable source
*
- * Requires PHP 5 and the XMLReader PECL extension.
* @ingroup Maintenance
*/
class BaseDump {
@@ -60,9 +27,12 @@ class BaseDump {
var $atPageEnd = false;
var $lastPage = 0;
var $lastRev = 0;
+ var $infiles = null;
function BaseDump( $infile ) {
+		$this->infiles = explode( ';', $infile );
 		$this->reader = new XMLReader();
+		$infile = array_shift( $this->infiles );
$this->reader->open( $infile );
}
@@ -71,26 +41,26 @@ class BaseDump {
* from the dump stream. May return null if the page is
* unavailable.
*
- * @param int $page ID number of page to read
- * @param int $rev ID number of revision to read
+ * @param $page Integer: ID number of page to read
+ * @param $rev Integer: ID number of revision to read
* @return string or null
*/
function prefetch( $page, $rev ) {
$page = intval( $page );
$rev = intval( $rev );
- while( $this->lastPage < $page && !$this->atEnd ) {
+ while ( $this->lastPage < $page && !$this->atEnd ) {
$this->debug( "BaseDump::prefetch at page $this->lastPage, looking for $page" );
$this->nextPage();
}
- if( $this->lastPage > $page || $this->atEnd ) {
+ if ( $this->lastPage > $page || $this->atEnd ) {
$this->debug( "BaseDump::prefetch already past page $page looking for rev $rev [$this->lastPage, $this->lastRev]" );
return null;
}
- while( $this->lastRev < $rev && !$this->atEnd && !$this->atPageEnd ) {
+ while ( $this->lastRev < $rev && !$this->atEnd && !$this->atPageEnd ) {
$this->debug( "BaseDump::prefetch at page $this->lastPage, rev $this->lastRev, looking for $page, $rev" );
$this->nextRev();
}
- if( $this->lastRev == $rev && !$this->atEnd ) {
+ if ( $this->lastRev == $rev && !$this->atEnd ) {
$this->debug( "BaseDump::prefetch hit on $page, $rev [$this->lastPage, $this->lastRev]" );
return $this->nextText();
} else {
@@ -101,22 +71,27 @@ class BaseDump {
function debug( $str ) {
wfDebug( $str . "\n" );
- //global $dumper;
- //$dumper->progress( $str );
+ // global $dumper;
+ // $dumper->progress( $str );
}
/**
* @access private
*/
function nextPage() {
- if( $this->skipTo( 'page', 'mediawiki' ) ) {
- if( $this->skipTo( 'id' ) ) {
+ if ( $this->skipTo( 'page', 'mediawiki' ) ) {
+ if ( $this->skipTo( 'id' ) ) {
$this->lastPage = intval( $this->nodeContents() );
$this->lastRev = 0;
$this->atPageEnd = false;
}
} else {
- $this->atEnd = true;
+ $this->close();
+			if ( count( $this->infiles ) ) {
+				$infile = array_shift( $this->infiles );
+ $this->reader->open( $infile );
+ $this->atEnd = false;
+ }
}
}
@@ -124,8 +99,8 @@ class BaseDump {
* @access private
*/
function nextRev() {
- if( $this->skipTo( 'revision' ) ) {
- if( $this->skipTo( 'id' ) ) {
+ if ( $this->skipTo( 'revision' ) ) {
+ if ( $this->skipTo( 'id' ) ) {
$this->lastRev = intval( $this->nodeContents() );
}
} else {
@@ -144,16 +119,16 @@ class BaseDump {
/**
* @access private
*/
- function skipTo( $name, $parent='page' ) {
- if( $this->atEnd ) {
+ function skipTo( $name, $parent = 'page' ) {
+ if ( $this->atEnd ) {
return false;
}
- while( $this->reader->read() ) {
- if( $this->reader->nodeType == XMLREADER_ELEMENT &&
+ while ( $this->reader->read() ) {
+ if ( $this->reader->nodeType == XMLReader::ELEMENT &&
$this->reader->name == $name ) {
return true;
}
- if( $this->reader->nodeType == XMLREADER_END_ELEMENT &&
+ if ( $this->reader->nodeType == XMLReader::END_ELEMENT &&
$this->reader->name == $parent ) {
$this->debug( "BaseDump::skipTo found </$parent> searching for <$name>" );
return false;
@@ -166,25 +141,26 @@ class BaseDump {
* Shouldn't something like this be built-in to XMLReader?
* Fetches text contents of the current element, assuming
* no sub-elements or such scary things.
- * @return string
+ *
+ * @return String
* @access private
*/
function nodeContents() {
- if( $this->atEnd ) {
+ if ( $this->atEnd ) {
return null;
}
- if( $this->reader->isEmptyElement ) {
+ if ( $this->reader->isEmptyElement ) {
return "";
}
$buffer = "";
- while( $this->reader->read() ) {
+ while ( $this->reader->read() ) {
switch( $this->reader->nodeType ) {
- case XMLREADER_TEXT:
-// case XMLREADER_WHITESPACE:
- case XMLREADER_SIGNIFICANT_WHITESPACE:
+ case XMLReader::TEXT:
+// case XMLReader::WHITESPACE:
+ case XMLReader::SIGNIFICANT_WHITESPACE:
$buffer .= $this->reader->value;
break;
- case XMLREADER_END_ELEMENT:
+ case XMLReader::END_ELEMENT:
return $buffer;
}
}
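Besides the switch to XMLReader class constants, the BaseDump changes above let the --prefetch source be a semicolon-separated list of dump files, read one after another. A hypothetical usage sketch; the file names and IDs are placeholders:

<?php
// Assumes a MediaWiki maintenance context where BaseDump is loaded.
$prefetch = new BaseDump( 'pages-history-1.xml;pages-history-2.xml' );
$text = $prefetch->prefetch( 12345, 67890 ); // page ID, revision ID
if ( $text === null ) {
	// Not found in any prefetch file; the caller falls back to the database.
}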
diff --git a/maintenance/benchmarkPurge.php b/maintenance/benchmarkPurge.php
index d167cf92..8360ef85 100644
--- a/maintenance/benchmarkPurge.php
+++ b/maintenance/benchmarkPurge.php
@@ -17,39 +17,40 @@
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/gpl.html
*
+ * @file
* @ingroup Maintenance
*/
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class BenchmarkPurge extends Maintenance {
-
+
public function __construct() {
parent::__construct();
$this->addOption( "count", "How many URLs to feed to Squid for purging", false, true );
$this->mDescription = "Benchmark the Squid purge functions.";
}
-
+
public function execute() {
- global $wgUseSquid;
- if( !$wgUseSquid ) {
- $this->error( "Squid purge benchmark doesn't do much without squid support on.". true );
+ global $wgUseSquid, $wgSquidServers;
+ if ( !$wgUseSquid ) {
+ $this->error( "Squid purge benchmark doesn't do much without squid support on.", true );
} else {
$this->output( "There are " . count( $wgSquidServers ) . " defined squid servers:\n" );
- if( $this->hasOption( 'count' ) ) {
- $lengths = array( intval( $this->getOption('count') ) );
+ if ( $this->hasOption( 'count' ) ) {
+ $lengths = array( intval( $this->getOption( 'count' ) ) );
} else {
$lengths = array( 1, 10, 100 );
}
- foreach( $lengths as $length ) {
+ foreach ( $lengths as $length ) {
$urls = $this->randomUrlList( $length );
$trial = $this->benchSquid( $urls );
$this->output( $trial . "\n" );
}
}
}
-
- /**
+
+ /**
* Run a bunch of URLs through SquidUpdate::purge()
* to benchmark Squid response times.
* @param $urls array A bunch of URLs to purge
@@ -57,7 +58,7 @@ class BenchmarkPurge extends Maintenance {
*/
private function benchSquid( $urls, $trials = 1 ) {
$start = wfTime();
- for( $i = 0; $i < $trials; $i++) {
+ for ( $i = 0; $i < $trials; $i++ ) {
SquidUpdate::purge( $urls );
}
$delta = wfTime() - $start;
@@ -66,37 +67,37 @@ class BenchmarkPurge extends Maintenance {
return sprintf( "%4d titles in %6.2fms (%6.2fms each)",
count( $urls ), $pertrial * 1000.0, $pertitle * 1000.0 );
}
-
- /**
+
+ /**
* Get an array of randomUrl()'s.
* @param $length int How many urls to add to the array
*/
private function randomUrlList( $length ) {
$list = array();
- for( $i = 0; $i < $length; $i++ ) {
+ for ( $i = 0; $i < $length; $i++ ) {
$list[] = $this->randomUrl();
}
return $list;
}
-
- /**
+
+ /**
* Return a random URL of the wiki. Not necessarily an actual title in the
- * database, but at least a URL that looks like one.
+ * database, but at least a URL that looks like one.
*/
private function randomUrl() {
global $wgServer, $wgArticlePath;
return $wgServer . str_replace( '$1', $this->randomTitle(), $wgArticlePath );
}
-
- /**
- * Create a random title string (not necessarily a Title object).
+
+ /**
+ * Create a random title string (not necessarily a Title object).
* For use with randomUrl().
*/
private function randomTitle() {
$str = '';
$length = mt_rand( 1, 20 );
- for( $i = 0; $i < $length; $i++ ) {
- $str .= chr( mt_rand( ord('a'), ord('z') ) );
+ for ( $i = 0; $i < $length; $i++ ) {
+ $str .= chr( mt_rand( ord( 'a' ), ord( 'z' ) ) );
}
return ucfirst( $str );
}
diff --git a/maintenance/benchmarks/Benchmarker.php b/maintenance/benchmarks/Benchmarker.php
new file mode 100644
index 00000000..66789ea4
--- /dev/null
+++ b/maintenance/benchmarks/Benchmarker.php
@@ -0,0 +1,72 @@
+<?php
+/**
+ * Create a doxygen subgroup of Maintenance for benchmarks
+ * @defgroup Benchmark Benchmark
+ * @ingroup Maintenance
+ */
+
+/**
+ * TODO: report PHP version, OS ..
+ * @file
+ * @ingroup Benchmark
+ */
+
+require_once( dirname( __FILE__ ) . '/../Maintenance.php' );
+abstract class Benchmarker extends Maintenance {
+ private $results;
+
+ public function __construct() {
+ parent::__construct();
+		$this->addOption( 'count', "How many times to run a benchmark", false, true );
+ }
+
+ public function bench( array $benchs ) {
+ $bench_number = 0;
+ $count = $this->getOption( 'count', 100 );
+
+ foreach( $benchs as $bench ) {
+ // handle empty args
+ if(!array_key_exists( 'args', $bench )) {
+ $bench['args'] = array();
+ }
+
+ $bench_number++;
+ $start = wfTime();
+ for( $i=0; $i<$count; $i++ ) {
+ call_user_func_array( $bench['function'], $bench['args'] );
+ }
+ $delta = wfTime() - $start;
+
+ // function passed as a callback
+ if( is_array( $bench['function'] ) ) {
+ $ret = get_class( $bench['function'][0] ). '->' . $bench['function'][1];
+ $bench['function'] = $ret;
+ }
+
+ $this->results[$bench_number] = array(
+ 'function' => $bench['function'],
+ 'arguments' => $bench['args'],
+ 'count' => $count,
+ 'delta' => $delta,
+ 'average' => $delta / $count,
+ );
+ }
+ }
+
+ public function getFormattedResults( ) {
+ $ret = '';
+ foreach( $this->results as $res ) {
+ // show function with args
+ $ret .= sprintf( "%s times: function %s(%s) :\n",
+ $res['count'],
+ $res['function'],
+ join( ', ', $res['arguments'] )
+ );
+ $ret .= sprintf( " %6.2fms (%6.2fms each)\n",
+ $res['delta'] * 1000,
+ $res['average'] * 1000
+ );
+ }
+ return $ret;
+ }
+}
diff --git a/maintenance/benchmarks/bench_HTTP_HTTPS.php b/maintenance/benchmarks/bench_HTTP_HTTPS.php
new file mode 100644
index 00000000..13d15fce
--- /dev/null
+++ b/maintenance/benchmarks/bench_HTTP_HTTPS.php
@@ -0,0 +1,38 @@
+<?php
+/**
+ * This comes from the r75429 commit message
+ * @author Platonides
+ */
+
+require_once( dirname( __FILE__ ) . '/Benchmarker.php' );
+class bench_HTTP_HTTPS extends Benchmarker {
+
+ public function __construct() {
+ parent::__construct();
+ }
+
+ public function execute() {
+ $this->bench( array(
+ array( 'function' => array( $this, 'getHTTP' ) ),
+ array( 'function' => array( $this, 'getHTTPS' ) ),
+ ));
+ print $this->getFormattedResults();
+ }
+
+ static function doRequest( $proto ) {
+ Http::get( "$proto://localhost/" );
+ }
+
+ // bench function 1
+ function getHTTP() {
+ $this->doRequest( 'http' );
+ }
+
+ // bench function 2
+ function getHTTPS() {
+ $this->doRequest( 'https' );
+ }
+}
+
+$maintClass = 'bench_HTTP_HTTPS';
+require_once( RUN_MAINTENANCE_IF_MAIN );
diff --git a/maintenance/benchmarks/bench_wfIsWindows.php b/maintenance/benchmarks/bench_wfIsWindows.php
new file mode 100644
index 00000000..2f759e07
--- /dev/null
+++ b/maintenance/benchmarks/bench_wfIsWindows.php
@@ -0,0 +1,42 @@
+<?php
+/**
+ * This comes from the r75429 commit message
+ * @author Platonides
+ */
+
+require_once( dirname( __FILE__ ) . '/Benchmarker.php' );
+class bench_wfIsWindows extends Benchmarker {
+
+ public function __construct() {
+ parent::__construct();
+ }
+
+ public function execute() {
+ $this->bench( array(
+ array( 'function' => array( $this, 'wfIsWindows' ) ),
+ array( 'function' => array( $this, 'wfIsWindowsCached' ) ),
+ ));
+ print $this->getFormattedResults();
+ }
+
+ static function is_win() {
+ return substr( php_uname(), 0, 7 ) == 'Windows' ;
+ }
+
+ // bench function 1
+ function wfIsWindows() {
+ return self::is_win();
+ }
+
+ // bench function 2
+ function wfIsWindowsCached() {
+ static $isWindows = null;
+ if( $isWindows == null ) {
+ $isWindows = self::is_win();
+ }
+ return $isWindows;
+ }
+}
+
+$maintClass = 'bench_wfIsWindows';
+require_once( RUN_MAINTENANCE_IF_MAIN );
diff --git a/maintenance/changePassword.php b/maintenance/changePassword.php
index fbc3fa76..568952b9 100644
--- a/maintenance/changePassword.php
+++ b/maintenance/changePassword.php
@@ -2,6 +2,8 @@
/**
* Change the password of a given user
*
+ * Copyright © 2005, Ævar Arnfjörð Bjarmason
+ *
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
@@ -17,13 +19,12 @@
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/gpl.html
*
+ * @file
* @author Ævar Arnfjörð Bjarmason <avarab@gmail.com>
- * @copyright Copyright © 2005, Ævar Arnfjörð Bjarmason
- * @license http://www.gnu.org/copyleft/gpl.html GNU General Public License 2.0 or later
* @ingroup Maintenance
*/
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class ChangePassword extends Maintenance {
public function __construct() {
@@ -32,21 +33,21 @@ class ChangePassword extends Maintenance {
$this->addOption( "password", "The password to use", true, true );
$this->mDescription = "Change a user's password";
}
-
+
public function execute() {
- $user = User::newFromName( $this->getOption('user') );
- if( !$user->getId() ) {
- $this->error( "No such user: " . $this->getOption('user'), true );
+ $user = User::newFromName( $this->getOption( 'user' ) );
+ if ( !$user->getId() ) {
+ $this->error( "No such user: " . $this->getOption( 'user' ), true );
}
try {
- $user->setPassword( $this->getOption('password') );
+ $user->setPassword( $this->getOption( 'password' ) );
$user->saveSettings();
$this->output( "Password set for " . $user->getName() . "\n" );
- } catch( PasswordError $pwe ) {
+ } catch ( PasswordError $pwe ) {
$this->error( $pwe->getText(), true );
}
}
}
$maintClass = "ChangePassword";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
diff --git a/maintenance/checkAutoLoader.php b/maintenance/checkAutoLoader.php
index 9c8f29e3..d199b6fe 100644
--- a/maintenance/checkAutoLoader.php
+++ b/maintenance/checkAutoLoader.php
@@ -17,10 +17,11 @@
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/gpl.html
*
+ * @file
* @ingroup Maintenance
*/
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class CheckAutoLoader extends Maintenance {
public function __construct() {
@@ -31,8 +32,8 @@ class CheckAutoLoader extends Maintenance {
global $wgAutoloadLocalClasses, $IP;
$files = array_unique( $wgAutoloadLocalClasses );
- foreach( $files as $file ) {
- if( function_exists( 'parsekit_compile_file' ) ){
+ foreach ( $files as $file ) {
+ if ( function_exists( 'parsekit_compile_file' ) ) {
$parseInfo = parsekit_compile_file( "$IP/$file" );
$classes = array_keys( $parseInfo['class_table'] );
} else {
@@ -43,7 +44,7 @@ class CheckAutoLoader extends Maintenance {
}
foreach ( $classes as $class ) {
if ( !isset( $wgAutoloadLocalClasses[$class] ) ) {
- //printf( "%-50s Unlisted, in %s\n", $class, $file );
+ // printf( "%-50s Unlisted, in %s\n", $class, $file );
$this->output( "\t'$class' => '$file',\n" );
} elseif ( $wgAutoloadLocalClasses[$class] !== $file ) {
$this->output( "$class: Wrong file: found in $file, listed in " . $wgAutoloadLocalClasses[$class] . "\n" );
@@ -54,4 +55,4 @@ class CheckAutoLoader extends Maintenance {
}
$maintClass = "CheckAutoLoader";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
diff --git a/maintenance/checkBadRedirects.php b/maintenance/checkBadRedirects.php
index 32f04f45..52bfa65a 100644
--- a/maintenance/checkBadRedirects.php
+++ b/maintenance/checkBadRedirects.php
@@ -18,10 +18,11 @@
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/gpl.html
*
+ * @file
* @ingroup Maintenance
*/
-
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class CheckBadRedirects extends Maintenance {
public function __construct() {
@@ -34,19 +35,19 @@ class CheckBadRedirects extends Maintenance {
$dbr = wfGetDB( DB_SLAVE );
$result = $dbr->select(
array( 'page' ),
- array( 'page_namespace','page_title', 'page_latest' ),
+ array( 'page_namespace', 'page_title', 'page_latest' ),
array( 'page_is_redirect' => 1 ) );
-
+
$count = $result->numRows();
$this->output( "Found $count total redirects.\n" .
"Looking for bad redirects:\n\n" );
-
- foreach( $result as $row ) {
+
+ foreach ( $result as $row ) {
$title = Title::makeTitle( $row->page_namespace, $row->page_title );
$rev = Revision::newFromId( $row->page_latest );
- if( $rev ) {
+ if ( $rev ) {
$target = Title::newFromRedirect( $rev->getText() );
- if( !$target ) {
+ if ( !$target ) {
$this->output( $title->getPrefixedText() . "\n" );
}
}
@@ -56,4 +57,4 @@ class CheckBadRedirects extends Maintenance {
}
$maintClass = "CheckBadRedirects";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
diff --git a/maintenance/checkImages.php b/maintenance/checkImages.php
index 5dcaac28..96b93f22 100644
--- a/maintenance/checkImages.php
+++ b/maintenance/checkImages.php
@@ -17,9 +17,10 @@
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/gpl.html
*
+ * @file
* @ingroup Maintenance
*/
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class CheckImages extends Maintenance {
@@ -28,16 +29,16 @@ class CheckImages extends Maintenance {
$this->mDescription = "Check images to see if they exist, are readable, etc";
$this->setBatchSize( 1000 );
}
-
+
public function execute() {
$start = '';
$dbr = wfGetDB( DB_SLAVE );
$numImages = 0;
$numGood = 0;
-
+
do {
- $res = $dbr->select( 'image', '*', array( 'img_name > ' . $dbr->addQuotes( $start ) ),
+ $res = $dbr->select( 'image', '*', array( 'img_name > ' . $dbr->addQuotes( $start ) ),
__METHOD__, array( 'LIMIT' => $this->mBatchSize ) );
foreach ( $res as $row ) {
$numImages++;
@@ -53,30 +54,30 @@ class CheckImages extends Maintenance {
$this->output( "{$row->img_name}: missing\n" );
continue;
}
-
+
if ( $stat['mode'] & 040000 ) {
$this->output( "{$row->img_name}: is a directory\n" );
continue;
}
-
+
if ( $stat['size'] == 0 && $row->img_size != 0 ) {
$this->output( "{$row->img_name}: truncated, was {$row->img_size}\n" );
continue;
}
-
+
if ( $stat['size'] != $row->img_size ) {
$this->output( "{$row->img_name}: size mismatch DB={$row->img_size}, actual={$stat['size']}\n" );
continue;
}
-
+
$numGood++;
}
-
+
} while ( $res->numRows() );
-
+
$this->output( "Good images: $numGood/$numImages\n" );
}
}
$maintClass = "CheckImages";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
diff --git a/maintenance/checkSyntax.php b/maintenance/checkSyntax.php
index 22832dce..396cac5f 100644
--- a/maintenance/checkSyntax.php
+++ b/maintenance/checkSyntax.php
@@ -17,9 +17,10 @@
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/gpl.html
*
+ * @file
* @ingroup Maintenance
*/
-
+
require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class CheckSyntax extends Maintenance {
@@ -33,8 +34,8 @@ class CheckSyntax extends Maintenance {
$this->mDescription = "Check syntax for all PHP files in MediaWiki";
$this->addOption( 'with-extensions', 'Also recurse the extensions folder' );
$this->addOption( 'path', 'Specific path (file or directory) to check, either with absolute path or relative to the root of this MediaWiki installation',
- false, true);
- $this->addOption( 'list-file', 'Text file containing list of files or directories to check', false, true);
+ false, true );
+ $this->addOption( 'list-file', 'Text file containing list of files or directories to check', false, true );
$this->addOption( 'modified', 'Check only files that were modified (requires SVN command-line client)' );
$this->addOption( 'syntax-only', 'Check for syntax validity only, skip code style warnings' );
}
@@ -49,16 +50,16 @@ class CheckSyntax extends Maintenance {
// ParseKit is broken on PHP 5.3+, disabled until this is fixed
$useParseKit = function_exists( 'parsekit_compile_file' ) && version_compare( PHP_VERSION, '5.3', '<' );
- $str = 'Checking syntax (using ' . ( $useParseKit ?
- 'parsekit)' : ' php -l, this can take a long time)' );
+ $str = 'Checking syntax (using ' . ( $useParseKit ?
+ 'parsekit' : ' php -l, this can take a long time' ) . ")\n";
$this->output( $str );
- foreach( $this->mFiles as $f ) {
- if( $useParseKit ) {
+ foreach ( $this->mFiles as $f ) {
+ if ( $useParseKit ) {
$this->checkFileWithParsekit( $f );
} else {
$this->checkFileWithCli( $f );
}
- if( !$this->hasOption( 'syntax-only' ) ) {
+ if ( !$this->hasOption( 'syntax-only' ) ) {
$this->checkForMistakes( $f );
}
}
@@ -76,18 +77,17 @@ class CheckSyntax extends Maintenance {
$this->mIgnorePaths = array(
// Compat stuff, explodes on PHP 5.3
"includes/NamespaceCompat.php$",
- "DiscussionThreading/REV",
);
-
+
$this->mNoStyleCheckPaths = array(
// Third-party code we don't care about
"/activemq_stomp/",
- "EmailPage/phpMailer",
+ "EmailPage/PHPMailer",
"FCKeditor/fckeditor/",
'\bphplot-',
"/svggraph/",
"\bjsmin.php$",
- "OggHandler/PEAR/",
+ "PEAR/File_Ogg/",
"QPoll/Excel/",
"/geshi/",
"/smarty/",
@@ -105,7 +105,8 @@ class CheckSyntax extends Maintenance {
if ( !$f ) {
$this->error( "Can't open file $file\n", true );
}
- while( $path = trim( fgets( $f ) ) ) {
+		// Add each non-empty line of the list file
+		while ( $path = trim( fgets( $f ) ) ) {
$this->addPath( $path );
}
fclose( $f );
@@ -113,6 +114,7 @@ class CheckSyntax extends Maintenance {
} elseif ( $this->hasOption( 'modified' ) ) {
$this->output( "Retrieving list from Subversion... " );
$parentDir = wfEscapeShellArg( dirname( __FILE__ ) . '/..' );
+ $retval = null;
$output = wfShellExec( "svn status --ignore-externals $parentDir", $retval );
if ( $retval ) {
$this->error( "Error retrieving list from Subversion!\n", true );
@@ -122,7 +124,7 @@ class CheckSyntax extends Maintenance {
preg_match_all( '/^\s*[AM].{7}(.*?)\r?$/m', $output, $matches );
foreach ( $matches[1] as $file ) {
- if ( self::isSuitableFile( $file ) && !is_dir( $file ) ) {
+ if ( $this->isSuitableFile( $file ) && !is_dir( $file ) ) {
$this->mFiles[] = $file;
}
}
@@ -131,20 +133,20 @@ class CheckSyntax extends Maintenance {
$this->output( 'Building file list...', 'listfiles' );
- // Only check files in these directories.
+ // Only check files in these directories.
// Don't just put $IP, because the recursive dir thingie goes into all subdirs
- $dirs = array(
+ $dirs = array(
$IP . '/includes',
$IP . '/config',
$IP . '/languages',
$IP . '/maintenance',
$IP . '/skins',
);
- if( $this->hasOption( 'with-extensions' ) ) {
+ if ( $this->hasOption( 'with-extensions' ) ) {
$dirs[] = $IP . '/extensions';
}
- foreach( $dirs as $d ) {
+ foreach ( $dirs as $d ) {
$this->addDirectoryContent( $d );
}
@@ -158,15 +160,16 @@ class CheckSyntax extends Maintenance {
$this->output( 'done.', 'listfiles' );
}
-
+
/**
* Returns true if $file is of a type we can check
*/
private function isSuitableFile( $file ) {
+ $file = str_replace( '\\', '/', $file );
$ext = pathinfo( $file, PATHINFO_EXTENSION );
if ( $ext != 'php' && $ext != 'inc' && $ext != 'php5' )
return false;
- foreach( $this->mIgnorePaths as $regex ) {
+ foreach ( $this->mIgnorePaths as $regex ) {
$m = array();
if ( preg_match( "~{$regex}~", $file, $m ) )
return false;
@@ -203,7 +206,7 @@ class CheckSyntax extends Maintenance {
*/
private function addDirectoryContent( $dir ) {
$iterator = new RecursiveIteratorIterator(
- new RecursiveDirectoryIterator( $dir ),
+ new RecursiveDirectoryIterator( $dir ),
RecursiveIteratorIterator::SELF_FIRST
);
foreach ( $iterator as $file ) {
@@ -248,8 +251,8 @@ class CheckSyntax extends Maintenance {
* @return boolean
*/
private function checkFileWithCli( $file ) {
- $res = exec( 'php -l ' . wfEscapeShellArg( $file ) );
- if( strpos( $res, 'No syntax errors detected' ) === false ) {
+ $res = exec( 'php -l ' . wfEscapeShellArg( $file ) );
+ if ( strpos( $res, 'No syntax errors detected' ) === false ) {
$this->mFailures[$file] = $res;
$this->output( $res . "\n" );
return false;
@@ -265,7 +268,7 @@ class CheckSyntax extends Maintenance {
* @return boolean
*/
private function checkForMistakes( $file ) {
- foreach( $this->mNoStyleCheckPaths as $regex ) {
+ foreach ( $this->mNoStyleCheckPaths as $regex ) {
$m = array();
if ( preg_match( "~{$regex}~", $file, $m ) )
return;
@@ -292,5 +295,5 @@ class CheckSyntax extends Maintenance {
}
$maintClass = "CheckSyntax";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
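When parsekit is unavailable (or PHP is 5.3+), checkSyntax.php falls back to shelling out to php -l per file, as checkFileWithCli() above does. A self-contained sketch of that check, with a placeholder path:

<?php
// Stand-alone version of the `php -l` check; $file is a placeholder path.
$file = '/path/to/SomeFile.php';
$res = exec( 'php -l ' . escapeshellarg( $file ) );
if ( strpos( $res, 'No syntax errors detected' ) === false ) {
	echo "Syntax error: $res\n";
} else {
	echo "OK: $file\n";
}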
diff --git a/maintenance/checkUsernames.php b/maintenance/checkUsernames.php
index 85a3d157..9b98721d 100644
--- a/maintenance/checkUsernames.php
+++ b/maintenance/checkUsernames.php
@@ -19,11 +19,12 @@
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/gpl.html
*
+ * @file
* @ingroup Maintenance
*/
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class CheckUsernames extends Maintenance {
@@ -44,11 +45,11 @@ class CheckUsernames extends Maintenance {
foreach ( $res as $row ) {
if ( ! User::isValidUserName( $row->user_name ) ) {
$this->error( sprintf( "%s: %6d: '%s'\n", wfWikiID(), $row->user_id, $row->user_name ) );
- wfDebugLog( 'checkUsernames', $out );
+ wfDebugLog( 'checkUsernames', $row->user_name );
}
}
}
}
$maintClass = "CheckUsernames";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
diff --git a/maintenance/cleanupCaps.php b/maintenance/cleanupCaps.php
index 6a48ea83..2d945a52 100644
--- a/maintenance/cleanupCaps.php
+++ b/maintenance/cleanupCaps.php
@@ -1,12 +1,12 @@
<?php
-/*
+/**
* Script to clean up broken page links when somebody turns on $wgCapitalLinks.
*
* Usage: php cleanupCaps.php [--dry-run]
* Options:
* --dry-run don't actually try moving them
*
- * Copyright (C) 2005 Brion Vibber <brion@pobox.com>
+ * Copyright © 2005 Brion Vibber <brion@pobox.com>
* http://www.mediawiki.org/
*
* This program is free software; you can redistribute it and/or modify
@@ -24,11 +24,12 @@
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/gpl.html
*
+ * @file
* @author Brion Vibber <brion at pobox.com>
- * @ingroup maintenance
+ * @ingroup Maintenance
*/
-require_once( dirname(__FILE__) . '/cleanupTable.inc' );
+require_once( dirname( __FILE__ ) . '/cleanupTable.inc' );
class CapsCleanup extends TableCleanup {
public function __construct() {
@@ -42,7 +43,7 @@ class CapsCleanup extends TableCleanup {
$this->namespace = intval( $this->getOption( 'namespace', 0 ) );
$this->dryrun = $this->hasOption( 'dry-run' );
$wgUser->setName( 'Conversion script' );
- if( $wgCapitalLinks )
+ if ( $wgCapitalLinks )
$this->error( "\$wgCapitalLinks is on -- no need for caps links cleanup.", true );
$this->runTable( array(
@@ -59,31 +60,31 @@ class CapsCleanup extends TableCleanup {
$display = $current->getPrefixedText();
$upper = $row->page_title;
$lower = $wgContLang->lcfirst( $row->page_title );
- if( $upper == $lower ) {
+ if ( $upper == $lower ) {
$this->output( "\"$display\" already lowercase.\n" );
return $this->progress( 0 );
}
$target = Title::makeTitle( $row->page_namespace, $lower );
$targetDisplay = $target->getPrefixedText();
- if( $target->exists() ) {
+ if ( $target->exists() ) {
$this->output( "\"$display\" skipped; \"$targetDisplay\" already exists\n" );
return $this->progress( 0 );
}
- if( $this->dryrun ) {
+ if ( $this->dryrun ) {
$this->output( "\"$display\" -> \"$targetDisplay\": DRY RUN, NOT MOVED\n" );
$ok = true;
} else {
$ok = $current->moveTo( $target, false, 'Converting page titles to lowercase' );
$this->output( "\"$display\" -> \"$targetDisplay\": $ok\n" );
}
- if( $ok === true ) {
+ if ( $ok === true ) {
$this->progress( 1 );
- if( $row->page_namespace == $this->namespace ) {
+ if ( $row->page_namespace == $this->namespace ) {
$talk = $target->getTalkPage();
$row->page_namespace = $talk->getNamespace();
- if( $talk->exists() ) {
+ if ( $talk->exists() ) {
return $this->processRow( $row );
}
}
@@ -94,4 +95,4 @@ class CapsCleanup extends TableCleanup {
}
$maintClass = "CapsCleanup";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
diff --git a/maintenance/cleanupDupes.inc b/maintenance/cleanupDupes.inc
deleted file mode 100644
index bb408007..00000000
--- a/maintenance/cleanupDupes.inc
+++ /dev/null
@@ -1,128 +0,0 @@
-<?php
-# Copyright (C) 2004 Brion Vibber <brion@pobox.com>
-# http://www.mediawiki.org/
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
-# http://www.gnu.org/copyleft/gpl.html
-
-/**
- * If on the old non-unique indexes, check the cur table for duplicate
- * entries and remove them...
- *
- * @file
- * @ingroup Maintenance
- */
-
-function fixDupes( $fixthem = false) {
- $dbw = wfGetDB( DB_MASTER );
- $cur = $dbw->tableName( 'cur' );
- $old = $dbw->tableName( 'old' );
- $dbw->query( "LOCK TABLES $cur WRITE, $old WRITE" );
- echo "Checking for duplicate cur table entries... (this may take a while on a large wiki)\n";
- $res = $dbw->query( <<<END
-SELECT cur_namespace,cur_title,count(*) as c,min(cur_id) as id
- FROM $cur
- GROUP BY cur_namespace,cur_title
-HAVING c > 1
-END
- );
- $n = $dbw->numRows( $res );
- echo "Found $n titles with duplicate entries.\n";
- if( $n > 0 ) {
- if( $fixthem ) {
- echo "Correcting...\n";
- } else {
- echo "Just a demo...\n";
- }
- while( $row = $dbw->fetchObject( $res ) ) {
- $ns = intval( $row->cur_namespace );
- $title = $dbw->addQuotes( $row->cur_title );
-
- # Get the first responding ID; that'll be the one we keep.
- $id = $dbw->selectField( 'cur', 'cur_id', array(
- 'cur_namespace' => $row->cur_namespace,
- 'cur_title' => $row->cur_title ) );
-
- echo "$ns:$row->cur_title (canonical ID $id)\n";
- if( $id != $row->id ) {
- echo " ** minimum ID $row->id; ";
- $timeMin = $dbw->selectField( 'cur', 'cur_timestamp', array(
- 'cur_id' => $row->id ) );
- $timeFirst = $dbw->selectField( 'cur', 'cur_timestamp', array(
- 'cur_id' => $id ) );
- if( $timeMin == $timeFirst ) {
- echo "timestamps match at $timeFirst; ok\n";
- } else {
- echo "timestamps don't match! min: $timeMin, first: $timeFirst; ";
- if( $timeMin > $timeFirst ) {
- $id = $row->id;
- echo "keeping minimum: $id\n";
- } else {
- echo "keeping first: $id\n";
- }
- }
- }
-
- if( $fixthem ) {
- $dbw->query( <<<END
-INSERT
- INTO $old
- (old_namespace, old_title, old_text,
- old_comment, old_user, old_user_text,
- old_timestamp, old_minor_edit, old_flags,
- inverse_timestamp)
-SELECT cur_namespace, cur_title, cur_text,
- cur_comment, cur_user, cur_user_text,
- cur_timestamp, cur_minor_edit, '',
- inverse_timestamp
- FROM $cur
- WHERE cur_namespace=$ns
- AND cur_title=$title
- AND cur_id != $id
-END
- );
- $dbw->query( <<<END
-DELETE
- FROM $cur
- WHERE cur_namespace=$ns
- AND cur_title=$title
- AND cur_id != $id
-END
- );
- }
- }
- }
- $dbw->query( 'UNLOCK TABLES' );
- if( $fixthem ) {
- echo "Done.\n";
- } else {
- echo "Run again with --fix option to delete the duplicates.\n";
- }
-}
-
-function checkDupes( $fixthem = false, $indexonly = false ) {
- $dbw = wfGetDB( DB_MASTER );
- if( $dbw->indexExists( 'cur', 'name_title' ) &&
- $dbw->indexUnique( 'cur', 'name_title' ) ) {
- echo wfWikiID().": cur table has the current unique index; no duplicate entries.\n";
- } elseif( $dbw->indexExists( 'cur', 'name_title_dup_prevention' ) ) {
- echo wfWikiID().": cur table has a temporary name_title_dup_prevention unique index; no duplicate entries.\n";
- } else {
- echo wfWikiID().": cur table has the old non-unique index and may have duplicate entries.\n";
- if( !$indexonly ) {
- fixDupes( $fixthem );
- }
- }
-}
diff --git a/maintenance/cleanupImages.php b/maintenance/cleanupImages.php
index db13f4c9..b25b9bbe 100644
--- a/maintenance/cleanupImages.php
+++ b/maintenance/cleanupImages.php
@@ -1,5 +1,5 @@
<?php
-/*
+/**
* Script to clean up broken, unparseable upload filenames.
*
* Usage: php cleanupImages.php [--fix]
@@ -24,11 +24,12 @@
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/gpl.html
*
+ * @file
* @author Brion Vibber <brion at pobox.com>
* @ingroup Maintenance
*/
-require_once( dirname(__FILE__) . '/cleanupTable.inc' );
+require_once( dirname( __FILE__ ) . '/cleanupTable.inc' );
class ImageCleanup extends TableCleanup {
protected $defaultParams = array(
@@ -47,38 +48,38 @@ class ImageCleanup extends TableCleanup {
global $wgContLang;
$source = $row->img_name;
- if( $source == '' ) {
+ if ( $source == '' ) {
// Ye olde empty rows. Just kill them.
$this->killRow( $source );
return $this->progress( 1 );
}
-
+
$cleaned = $source;
-
+
// About half of old bad image names have percent-codes
$cleaned = rawurldecode( $cleaned );
// We also have some HTML entities there
$cleaned = Sanitizer::decodeCharReferences( $cleaned );
-
+
// Some are old latin-1
$cleaned = $wgContLang->checkTitleEncoding( $cleaned );
-
+
// Many of remainder look like non-normalized unicode
$cleaned = $wgContLang->normalize( $cleaned );
-
+
$title = Title::makeTitleSafe( NS_FILE, $cleaned );
-
- if( is_null( $title ) ) {
+
+ if ( is_null( $title ) ) {
$this->output( "page $source ($cleaned) is illegal.\n" );
$safe = $this->buildSafeTitle( $cleaned );
- if( $safe === false )
+ if ( $safe === false )
return $this->progress( 0 );
$this->pokeFile( $source, $safe );
return $this->progress( 1 );
}
- if( $title->getDBkey() !== $source ) {
+ if ( $title->getDBkey() !== $source ) {
$munged = $title->getDBkey();
$this->output( "page $source ($munged) doesn't match self.\n" );
$this->pokeFile( $source, $munged );
@@ -89,7 +90,7 @@ class ImageCleanup extends TableCleanup {
}
private function killRow( $name ) {
- if( $this->dryrun ) {
+ if ( $this->dryrun ) {
$this->output( "DRY RUN: would delete bogus row '$name'\n" );
} else {
$this->output( "deleting bogus row '$name'\n" );
@@ -99,7 +100,7 @@ class ImageCleanup extends TableCleanup {
__METHOD__ );
}
}
-
+
private function filePath( $name ) {
if ( !isset( $this->repo ) ) {
$this->repo = RepoGroup::singleton()->getLocalRepo();
@@ -114,14 +115,14 @@ class ImageCleanup extends TableCleanup {
private function pageExists( $name, $db ) {
return $db->selectField( 'page', '1', array( 'page_namespace' => NS_FILE, 'page_title' => $name ), __METHOD__ );
}
-
+
private function pokeFile( $orig, $new ) {
$path = $this->filePath( $orig );
- if( !file_exists( $path ) ) {
+ if ( !file_exists( $path ) ) {
$this->output( "missing file: $path\n" );
return $this->killRow( $orig );
}
-
+
$db = wfGetDB( DB_MASTER );
/*
@@ -134,18 +135,18 @@ class ImageCleanup extends TableCleanup {
$version = 0;
$final = $new;
$conflict = ( $this->imageExists( $final, $db ) ||
- ( $this->pageExists( $orig, $db ) && $this->pageExists( $final, $db ) ) );
-
- while( $conflict ) {
+ ( $this->pageExists( $orig, $db ) && $this->pageExists( $final, $db ) ) );
+
+ while ( $conflict ) {
$this->output( "Rename conflicts with '$final'...\n" );
$version++;
$final = $this->appendTitle( $new, "_$version" );
$conflict = ( $this->imageExists( $final, $db ) || $this->pageExists( $final, $db ) );
}
-
+
$finalPath = $this->filePath( $final );
-
- if( $this->dryrun ) {
+
+ if ( $this->dryrun ) {
$this->output( "DRY RUN: would rename $path to $finalPath\n" );
} else {
$this->output( "renaming $path to $finalPath\n" );
@@ -164,14 +165,14 @@ class ImageCleanup extends TableCleanup {
array( 'page_title' => $orig, 'page_namespace' => NS_FILE ),
__METHOD__ );
$dir = dirname( $finalPath );
- if( !file_exists( $dir ) ) {
- if( !wfMkdirParents( $dir ) ) {
+ if ( !file_exists( $dir ) ) {
+ if ( !wfMkdirParents( $dir ) ) {
$this->log( "RENAME FAILED, COULD NOT CREATE $dir" );
$db->rollback();
return;
}
}
- if( rename( $path, $finalPath ) ) {
+ if ( rename( $path, $finalPath ) ) {
$db->commit();
} else {
$this->error( "RENAME FAILED" );
@@ -191,16 +192,16 @@ class ImageCleanup extends TableCleanup {
"/([^$wgLegalTitleChars]|~)/",
array( $this, 'hexChar' ),
$name );
-
+
$test = Title::makeTitleSafe( NS_FILE, $x );
- if( is_null( $test ) || $test->getDBkey() !== $x ) {
+ if ( is_null( $test ) || $test->getDBkey() !== $x ) {
$this->error( "Unable to generate safe title from '$name', got '$x'" );
return false;
}
-
+
return $x;
}
}
$maintClass = "ImageCleanup";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
diff --git a/maintenance/cleanupRemovedModules.php b/maintenance/cleanupRemovedModules.php
new file mode 100644
index 00000000..fb8afd2d
--- /dev/null
+++ b/maintenance/cleanupRemovedModules.php
@@ -0,0 +1,89 @@
+<?php
+/**
+ * Maintenance script to remove cache entries for removed ResourceLoader modules
+ * from the database
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License along
+ * with this program; if not, write to the Free Software Foundation, Inc.,
+ * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ * http://www.gnu.org/copyleft/gpl.html
+ *
+ * @file
+ * @ingroup Maintenance
+ * @author Roan Kattouw
+ */
+
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
+
+class CleanupRemovedModules extends Maintenance {
+
+ public function __construct() {
+ parent::__construct();
+ $this->mDescription = 'Remove cache entries for removed ResourceLoader modules from the database';
+ $this->addOption( 'batchsize', 'Delete rows in batches of this size. Default: 500', false, true );
+ $this->addOption( 'max-slave-lag', 'If the slave lag exceeds this many seconds, wait until it drops below this value. Default: 5', false, true );
+ }
+
+ public function execute() {
+ $dbw = wfGetDB( DB_MASTER );
+ $rl = new ResourceLoader();
+ $moduleNames = $rl->getModuleNames();
+ $moduleList = implode( ', ', array_map( array( $dbw, 'addQuotes' ), $moduleNames ) );
+ $limit = max( 1, intval( $this->getOption( 'batchsize', 500 ) ) );
+ $maxlag = intval( $this->getOption( 'max-slave-lag', 5 ) );
+
+ $this->output( "Cleaning up module_deps table...\n" );
+ $i = 1;
+ $modDeps = $dbw->tableName( 'module_deps' );
+ do {
+ // $dbw->delete() doesn't support LIMIT :(
+ $where = $moduleList ? "md_module NOT IN ($moduleList)" : '1=1';
+ $dbw->query( "DELETE FROM $modDeps WHERE $where LIMIT $limit", __METHOD__ );
+ $numRows = $dbw->affectedRows();
+ $this->output( "Batch $i: $numRows rows\n" );
+ $i++;
+ wfWaitForSlaves( $maxlag );
+ } while( $numRows > 0 );
+ $this->output( "done\n" );
+
+ $this->output( "Cleaning up msg_resource table...\n" );
+ $i = 1;
+
+ $mrRes = $dbw->tableName( 'msg_resource' );
+ do {
+ $where = $moduleList ? "mr_resource NOT IN ($moduleList)" : '1=1';
+ $dbw->query( "DELETE FROM $mrRes WHERE $where LIMIT $limit", __METHOD__ );
+ $numRows = $dbw->affectedRows();
+ $this->output( "Batch $i: $numRows rows\n" );
+ $i++;
+ wfWaitForSlaves( $maxlag );
+ } while( $numRows > 0 );
+ $this->output( "done\n" );
+
+ $this->output( "Cleaning up msg_resource_links table...\n" );
+ $i = 1;
+ $msgResLinks = $dbw->tableName( 'msg_resource_links' );
+ do {
+ $where = $moduleList ? "mrl_resource NOT IN ($moduleList)" : '1=1';
+ $dbw->query( "DELETE FROM $msgResLinks WHERE $where LIMIT $limit", __METHOD__ );
+ $numRows = $dbw->affectedRows();
+ $this->output( "Batch $i: $numRows rows\n" );
+ $i++;
+ wfWaitForSlaves( $maxlag );
+ } while( $numRows > 0 );
+ $this->output( "done\n" );
+ }
+}
+
+$maintClass = "CleanupRemovedModules";
+require_once( RUN_MAINTENANCE_IF_MAIN );
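cleanupRemovedModules.php repeats the same batched DELETE ... LIMIT loop for module_deps, msg_resource and msg_resource_links. A generalised sketch of that pattern, assuming the usual maintenance helpers ($dbw, wfWaitForSlaves) are available and the WHERE clause is already escaped:

<?php
// Sketch of the batched-delete loop used above; not part of this commit.
function deleteInBatches( $dbw, $table, $where, $limit, $maxlag ) {
	$tableName = $dbw->tableName( $table );
	$batch = 1;
	do {
		// DatabaseBase::delete() has no LIMIT support, hence raw SQL
		$dbw->query( "DELETE FROM $tableName WHERE $where LIMIT $limit", __METHOD__ );
		$numRows = $dbw->affectedRows();
		print "Batch $batch: $numRows rows\n";
		$batch++;
		wfWaitForSlaves( $maxlag );
	} while ( $numRows > 0 );
}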
diff --git a/maintenance/cleanupSpam.php b/maintenance/cleanupSpam.php
index e78ffe41..39abe4c5 100644
--- a/maintenance/cleanupSpam.php
+++ b/maintenance/cleanupSpam.php
@@ -17,10 +17,11 @@
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/gpl.html
*
+ * @file
* @ingroup Maintenance
*/
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class CleanupSpam extends Maintenance {
public function __construct() {
@@ -31,7 +32,7 @@ class CleanupSpam extends Maintenance {
}
public function execute() {
- global $wgLocalDatabases;
+ global $wgLocalDatabases, $wgUser;
$username = wfMsg( 'spambot_username' );
$wgUser = User::newFromName( $username );
@@ -44,15 +45,15 @@ class CleanupSpam extends Maintenance {
if ( !$like ) {
$this->error( "Not a valid hostname specification: $spec", true );
}
-
- if ( $this->hasOption('all') ) {
+
+ if ( $this->hasOption( 'all' ) ) {
// Clean up spam on all wikis
$this->output( "Finding spam on " . count( $wgLocalDatabases ) . " wikis\n" );
$found = false;
foreach ( $wgLocalDatabases as $wikiID ) {
$dbr = wfGetDB( DB_SLAVE, array(), $wikiID );
- $count = $dbr->selectField( 'externallinks', 'COUNT(*)',
+ $count = $dbr->selectField( 'externallinks', 'COUNT(*)',
array( 'el_index' . $dbr->buildLike( $like ) ), __METHOD__ );
if ( $count ) {
$found = true;
@@ -68,7 +69,7 @@ class CleanupSpam extends Maintenance {
// Clean up spam on this wiki
$dbr = wfGetDB( DB_SLAVE );
- $res = $dbr->select( 'externallinks', array( 'DISTINCT el_from' ),
+ $res = $dbr->select( 'externallinks', array( 'DISTINCT el_from' ),
array( 'el_index' . $dbr->buildLike( $like ) ), __METHOD__ );
$count = $dbr->numRows( $res );
$this->output( "Found $count articles containing $spec\n" );
@@ -87,15 +88,15 @@ class CleanupSpam extends Maintenance {
$this->error( "Internal error: no page for ID $id" );
return;
}
-
+
$this->output( $title->getPrefixedDBkey() . " ..." );
$rev = Revision::newFromTitle( $title );
$revId = $rev->getId();
$currentRevId = $revId;
-
+
while ( $rev && LinkFilter::matchEntry( $rev->getText() , $domain ) ) {
# Revision::getPrevious can't be used in this way before MW 1.6 (Revision.php 1.26)
- #$rev = $rev->getPrevious();
+ # $rev = $rev->getPrevious();
$revId = $title->getPreviousRevisionID( $revId );
if ( $revId ) {
$rev = Revision::newFromTitle( $title, $revId );
@@ -114,14 +115,12 @@ class CleanupSpam extends Maintenance {
// Didn't find a non-spammy revision, blank the page
$this->output( "blanking\n" );
$article = new Article( $title );
- $article->updateArticle( '', wfMsg( 'spam_blanking', $domain ),
- false, false );
-
+ $article->doEdit( '', wfMsg( 'spam_blanking', $domain ) );
} else {
// Revert to this revision
$this->output( "reverting\n" );
$article = new Article( $title );
- $article->updateArticle( $rev->getText(), wfMsg( 'spam_reverting', $domain ), false, false );
+ $article->doEdit( $rev->getText(), wfMsg( 'spam_reverting', $domain ), EDIT_UPDATE );
}
$dbw->commit();
wfDoUpdates();
@@ -130,4 +129,4 @@ class CleanupSpam extends Maintenance {
}
$maintClass = "CleanupSpam";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
diff --git a/maintenance/cleanupTable.inc b/maintenance/cleanupTable.inc
index 3549a9a1..67a32510 100644
--- a/maintenance/cleanupTable.inc
+++ b/maintenance/cleanupTable.inc
@@ -17,10 +17,11 @@
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/gpl.html
*
+ * @file
* @ingroup Maintenance
*/
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class TableCleanup extends Maintenance {
protected $defaultParams = array(
@@ -44,7 +45,7 @@ class TableCleanup extends Maintenance {
global $wgUser;
$wgUser->setName( 'Conversion script' );
$this->dryrun = $this->hasOption( 'dry-run' );
- if( $this->dryrun ) {
+ if ( $this->dryrun ) {
$this->output( "Checking for bad titles...\n" );
} else {
$this->output( "Checking and fixing bad titles...\n" );
@@ -63,7 +64,7 @@ class TableCleanup extends Maintenance {
protected function progress( $updated ) {
$this->updated += $updated;
$this->processed++;
- if( $this->processed % $this->reportInterval != 0 ) {
+ if ( $this->processed % $this->reportInterval != 0 ) {
return;
}
$portion = $this->processed / $this->count;
@@ -74,7 +75,7 @@ class TableCleanup extends Maintenance {
$estimatedTotalTime = $delta / $portion;
$eta = $this->startTime + $estimatedTotalTime;
- $this->output(
+ $this->output(
sprintf( "%s %s: %6.2f%% done on %s; ETA %s [%d/%d] %.2f/sec <%.2f%% updated>\n",
wfWikiID(),
wfTimestamp( TS_DB, intval( $now ) ),
@@ -84,7 +85,7 @@ class TableCleanup extends Maintenance {
$this->processed,
$this->count,
$this->processed / $delta,
- $updateRate * 100.0
+ $updateRate * 100.0
)
);
flush();
@@ -94,9 +95,9 @@ class TableCleanup extends Maintenance {
$dbr = wfGetDB( DB_SLAVE );
if ( array_diff( array_keys( $params ),
- array( 'table', 'conds', 'index', 'callback' ) ) )
+ array( 'table', 'conds', 'index', 'callback' ) ) )
{
- throw new MWException( __METHOD__.': Missing parameter ' . implode( ', ', $params ) );
+ throw new MWException( __METHOD__ . ': Missing parameter ' . implode( ', ', $params ) );
}
$table = $params['table'];
diff --git a/maintenance/cleanupTitles.php b/maintenance/cleanupTitles.php
index ed714b2d..f03b7957 100644
--- a/maintenance/cleanupTitles.php
+++ b/maintenance/cleanupTitles.php
@@ -1,12 +1,12 @@
<?php
-/*
+/**
* Script to clean up broken, unparseable titles.
*
* Usage: php cleanupTitles.php [--fix]
* Options:
* --fix Actually clean up titles; otherwise just checks for them
*
- * Copyright (C) 2005 Brion Vibber <brion@pobox.com>
+ * Copyright © 2005 Brion Vibber <brion@pobox.com>
* http://www.mediawiki.org/
*
* This program is free software; you can redistribute it and/or modify
@@ -24,11 +24,12 @@
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/gpl.html
*
+ * @file
* @author Brion Vibber <brion at pobox.com>
* @ingroup Maintenance
*/
-require_once( dirname(__FILE__) . '/cleanupTable.inc' );
+require_once( dirname( __FILE__ ) . '/cleanupTable.inc' );
class TitleCleanup extends TableCleanup {
public function __construct() {
@@ -42,7 +43,7 @@ class TitleCleanup extends TableCleanup {
$verified = $wgContLang->normalize( $display );
$title = Title::newFromText( $verified );
- if( !is_null( $title )
+ if ( !is_null( $title )
&& $title->canExist()
&& $title->getNamespace() == $row->page_namespace
&& $title->getDBkey() === $row->page_title )
@@ -50,10 +51,10 @@ class TitleCleanup extends TableCleanup {
return $this->progress( 0 ); // all is fine
}
- if( $row->page_namespace == NS_FILE && $this->fileExists( $row->page_title ) ) {
+ if ( $row->page_namespace == NS_FILE && $this->fileExists( $row->page_title ) ) {
$this->output( "file $row->page_title needs cleanup, please run cleanupImages.php.\n" );
return $this->progress( 0 );
- } elseif( is_null( $title ) ) {
+ } elseif ( is_null( $title ) ) {
$this->output( "page $row->page_id ($display) is illegal.\n" );
$this->moveIllegalPage( $row );
return $this->progress( 1 );
@@ -77,23 +78,23 @@ class TitleCleanup extends TableCleanup {
$legalized = preg_replace_callback( "!([^$legal])!",
array( &$this, 'hexChar' ),
$row->page_title );
- if( $legalized == '.' ) $legalized = '(dot)';
- if( $legalized == '_' ) $legalized = '(space)';
+ if ( $legalized == '.' ) $legalized = '(dot)';
+ if ( $legalized == '_' ) $legalized = '(space)';
$legalized = 'Broken/' . $legalized;
$title = Title::newFromText( $legalized );
- if( is_null( $title ) ) {
+ if ( is_null( $title ) ) {
$clean = 'Broken/id:' . $row->page_id;
$this->output( "Couldn't legalize; form '$legalized' still invalid; using '$clean'\n" );
$title = Title::newFromText( $clean );
- } elseif( $title->exists() ) {
+ } elseif ( $title->exists() ) {
$clean = 'Broken/id:' . $row->page_id;
$this->output( "Legalized for '$legalized' exists; using '$clean'\n" );
$title = Title::newFromText( $clean );
}
$dest = $title->getDBkey();
- if( $this->dryrun ) {
+ if ( $this->dryrun ) {
$this->output( "DRY RUN: would rename $row->page_id ($row->page_namespace,'$row->page_title') to ($row->page_namespace,'$dest')\n" );
} else {
$this->output( "renaming $row->page_id ($row->page_namespace,'$row->page_title') to ($row->page_namespace,'$dest')\n" );
@@ -106,28 +107,34 @@ class TitleCleanup extends TableCleanup {
}
protected function moveInconsistentPage( $row, $title ) {
- if( $title->exists() || $title->getInterwiki() ) {
- if( $title->getInterwiki() ) {
+ if ( $title->exists() || $title->getInterwiki() || !$title->canExist() ) {
+ if ( $title->getInterwiki() || !$title->canExist() ) {
$prior = $title->getPrefixedDbKey();
} else {
$prior = $title->getDBkey();
}
+
+ # Old cleanupTitles could move articles there. See bug 23147.
+ $ns = $row->page_namespace;
+ if ( $ns < 0 ) $ns = 0;
+
$clean = 'Broken/' . $prior;
- $verified = Title::makeTitleSafe( $row->page_namespace, $clean );
- if( $verified->exists() ) {
+ $verified = Title::makeTitleSafe( $ns, $clean );
+ if ( $verified->exists() ) {
$blah = "Broken/id:" . $row->page_id;
$this->output( "Couldn't legalize; form '$clean' exists; using '$blah'\n" );
- $verified = Title::makeTitleSafe( $row->page_namespace, $blah );
+ $verified = Title::makeTitleSafe( $ns, $blah );
}
$title = $verified;
}
- if( is_null( $title ) ) {
+ if ( is_null( $title ) ) {
$this->error( "Something awry; empty title.", true );
}
$ns = $title->getNamespace();
$dest = $title->getDBkey();
- if( $this->dryrun ) {
- $this->output( "DRY RUN: would rename $row->page_id ($row->page_namespace,'$row->page_title') to ($row->page_namespace,'$dest')\n" );
+
+ if ( $this->dryrun ) {
+ $this->output( "DRY RUN: would rename $row->page_id ($row->page_namespace,'$row->page_title') to ($ns,'$dest')\n" );
} else {
$this->output( "renaming $row->page_id ($row->page_namespace,'$row->page_title') to ($ns,'$dest')\n" );
$dbw = wfGetDB( DB_MASTER );
@@ -145,4 +152,4 @@ class TitleCleanup extends TableCleanup {
}
$maintClass = "TitleCleanup";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
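
The hunk above hardens moveInconsistentPage(): titles that cannot exist are now routed to a "Broken/..." name as well, negative namespaces left behind by older cleanupTitles runs are clamped to the main namespace (bug 23147), and the dry-run message reports the corrected namespace. As an illustration only (not the MediaWiki code), here is a small standalone PHP sketch of that fallback chain; pickBrokenDestination() and the $takenTitles map are invented for the sketch, standing in for Title::makeTitleSafe() and Title::exists().

<?php
// Illustration of the renaming fallback used above: clamp namespaces that
// no longer exist to the main namespace (bug 23147) and fall back to an
// ID-based name when the preferred "Broken/..." destination is taken.
function pickBrokenDestination( $namespace, $priorName, $pageId, array $takenTitles ) {
	// Old cleanupTitles runs could leave rows in namespaces < 0; map them to 0.
	$ns = ( $namespace < 0 ) ? 0 : $namespace;

	$candidate = 'Broken/' . $priorName;
	if ( isset( $takenTitles["$ns:$candidate"] ) ) {
		// Preferred destination already exists; a page-ID-based name is
		// unique by construction, so use that instead.
		$candidate = 'Broken/id:' . $pageId;
	}
	return array( $ns, $candidate );
}

// Example: the preferred name collides, so the ID-based fallback is chosen.
$taken = array( '0:Broken/Some_page' => true );
list( $ns, $dest ) = pickBrokenDestination( -1, 'Some_page', 4711, $taken );
echo "($ns, '$dest')\n"; // prints (0, 'Broken/id:4711')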
diff --git a/maintenance/cleanupWatchlist.php b/maintenance/cleanupWatchlist.php
index ed84b268..a9b20fea 100644
--- a/maintenance/cleanupWatchlist.php
+++ b/maintenance/cleanupWatchlist.php
@@ -1,5 +1,5 @@
<?php
-/*
+/**
* Script to remove broken, unparseable titles in the Watchlist.
*
* Usage: php cleanupWatchlist.php [--fix]
@@ -24,11 +24,12 @@
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/gpl.html
*
+ * @file
* @author Brion Vibber <brion at pobox.com>
* @ingroup Maintenance
*/
-require_once( dirname(__FILE__) . '/cleanupTable.inc' );
+require_once( dirname( __FILE__ ) . '/cleanupTable.inc' );
class WatchlistCleanup extends TableCleanup {
protected $defaultParams = array(
@@ -58,7 +59,7 @@ class WatchlistCleanup extends TableCleanup {
$verified = $wgContLang->normalize( $display );
$title = Title::newFromText( $verified );
- if( $row->wl_user == 0 || is_null( $title ) || !$title->equals( $current ) ) {
+ if ( $row->wl_user == 0 || is_null( $title ) || !$title->equals( $current ) ) {
$this->output( "invalid watch by {$row->wl_user} for ({$row->wl_namespace}, \"{$row->wl_title}\")\n" );
$updated = $this->removeWatch( $row );
$this->progress( $updated );
@@ -68,7 +69,7 @@ class WatchlistCleanup extends TableCleanup {
}
private function removeWatch( $row ) {
- if( !$this->dryrun && $this->hasOption( 'fix' ) ) {
+ if ( !$this->dryrun && $this->hasOption( 'fix' ) ) {
$dbw = wfGetDB( DB_MASTER );
$dbw->delete( 'watchlist', array(
'wl_user' => $row->wl_user,
@@ -84,4 +85,4 @@ class WatchlistCleanup extends TableCleanup {
}
$maintClass = "WatchlistCleanup";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
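
WatchlistCleanup keeps its original strategy: a watchlist row is treated as broken when wl_user is 0 or when the stored title does not survive a normalize-and-reparse round trip. The standalone sketch below shows the shape of that round-trip test; isBrokenWatchRow() and its string munging are stand-ins invented for the illustration, where the real script goes through $wgContLang->normalize() and Title::newFromText().

<?php
// Sketch of the round-trip validity check used above (stand-ins only).
function isBrokenWatchRow( $userId, $storedTitle ) {
	// Stand-in "display form": underscores become spaces, as in the script.
	$display = str_replace( '_', ' ', $storedTitle );

	// Stand-in normalization + reparse: trim and collapse whitespace,
	// then convert back to the stored (underscore) form.
	$normalized = preg_replace( '/\s+/', ' ', trim( $display ) );
	$reparsed = ( $normalized === '' ) ? null : str_replace( ' ', '_', $normalized );

	// A row is broken if it belongs to no user, fails to parse, or does
	// not round-trip back to the stored form.
	return $userId == 0 || $reparsed === null || $reparsed !== $storedTitle;
}

var_dump( isBrokenWatchRow( 42, 'Main_Page' ) );    // bool(false)
var_dump( isBrokenWatchRow( 42, ' Main__Page ' ) ); // bool(true)
var_dump( isBrokenWatchRow( 0, 'Main_Page' ) );     // bool(true)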
diff --git a/maintenance/clear_interwiki_cache.php b/maintenance/clear_interwiki_cache.php
index a3510a06..953bd4ce 100644
--- a/maintenance/clear_interwiki_cache.php
+++ b/maintenance/clear_interwiki_cache.php
@@ -18,10 +18,11 @@
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/gpl.html
*
+ * @file
* @ingroup Maintenance
*/
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class ClearInterwikiCache extends Maintenance {
@@ -42,7 +43,7 @@ class ClearInterwikiCache extends Maintenance {
foreach ( $wgLocalDatabases as $db ) {
$this->output( "$db..." );
foreach ( $prefixes as $prefix ) {
- $wgMemc->delete("$db:interwiki:$prefix");
+ $wgMemc->delete( "$db:interwiki:$prefix" );
}
$this->output( "done\n" );
}
@@ -50,4 +51,4 @@ class ClearInterwikiCache extends Maintenance {
}
$maintClass = "ClearInterwikiCache";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
diff --git a/maintenance/clear_stats.php b/maintenance/clear_stats.php
index 6a6a4981..8f91864e 100644
--- a/maintenance/clear_stats.php
+++ b/maintenance/clear_stats.php
@@ -1,7 +1,7 @@
<?php
/**
* This script remove all statistics tracking from the cache
- *
+ *
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
@@ -17,10 +17,11 @@
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/gpl.html
*
+ * @file
* @ingroup Maintenance
*/
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class clear_stats extends Maintenance {
@@ -32,22 +33,22 @@ class clear_stats extends Maintenance {
public function execute() {
global $wgLocalDatabases, $wgMemc;
foreach ( $wgLocalDatabases as $db ) {
- $wgMemc->delete("$db:stats:request_with_session");
- $wgMemc->delete("$db:stats:request_without_session");
- $wgMemc->delete("$db:stats:pcache_hit");
- $wgMemc->delete("$db:stats:pcache_miss_invalid");
- $wgMemc->delete("$db:stats:pcache_miss_expired");
- $wgMemc->delete("$db:stats:pcache_miss_absent");
- $wgMemc->delete("$db:stats:pcache_miss_stub");
- $wgMemc->delete("$db:stats:image_cache_hit");
- $wgMemc->delete("$db:stats:image_cache_miss");
- $wgMemc->delete("$db:stats:image_cache_update");
- $wgMemc->delete("$db:stats:diff_cache_hit");
- $wgMemc->delete("$db:stats:diff_cache_miss");
- $wgMemc->delete("$db:stats:diff_uncacheable");
+ $wgMemc->delete( "$db:stats:request_with_session" );
+ $wgMemc->delete( "$db:stats:request_without_session" );
+ $wgMemc->delete( "$db:stats:pcache_hit" );
+ $wgMemc->delete( "$db:stats:pcache_miss_invalid" );
+ $wgMemc->delete( "$db:stats:pcache_miss_expired" );
+ $wgMemc->delete( "$db:stats:pcache_miss_absent" );
+ $wgMemc->delete( "$db:stats:pcache_miss_stub" );
+ $wgMemc->delete( "$db:stats:image_cache_hit" );
+ $wgMemc->delete( "$db:stats:image_cache_miss" );
+ $wgMemc->delete( "$db:stats:image_cache_update" );
+ $wgMemc->delete( "$db:stats:diff_cache_hit" );
+ $wgMemc->delete( "$db:stats:diff_cache_miss" );
+ $wgMemc->delete( "$db:stats:diff_uncacheable" );
}
}
}
$maintClass = "clear_stats";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
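
Both this script and clear_interwiki_cache.php above drop per-wiki cache entries whose keys follow the "<dbname>:<group>:<name>" pattern. The sketch below expresses the same deletions as a loop over counter names; clearStatsKeys() and RecordingCache are invented for the illustration, and the only assumption about the cache object is a delete( $key ) method, which $wgMemc provides.

<?php
// The statistics counters cleared above, expressed as a key loop.
$counters = array(
	'request_with_session', 'request_without_session',
	'pcache_hit', 'pcache_miss_invalid', 'pcache_miss_expired',
	'pcache_miss_absent', 'pcache_miss_stub',
	'image_cache_hit', 'image_cache_miss', 'image_cache_update',
	'diff_cache_hit', 'diff_cache_miss', 'diff_uncacheable',
);

function clearStatsKeys( array $localDatabases, array $counters, $cache ) {
	foreach ( $localDatabases as $db ) {
		foreach ( $counters as $name ) {
			// Keys are namespaced per wiki: "<dbname>:stats:<counter>".
			$cache->delete( "$db:stats:$name" );
		}
	}
}

// Example with a stub cache that just records the keys it was asked to drop.
class RecordingCache {
	public $deleted = array();
	public function delete( $key ) { $this->deleted[] = $key; }
}
$cache = new RecordingCache();
clearStatsKeys( array( 'enwiki', 'dewiki' ), $counters, $cache );
echo count( $cache->deleted ) . " keys\n"; // 26 keys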
diff --git a/maintenance/commandLine.inc b/maintenance/commandLine.inc
index 332527ba..4ae753ba 100644
--- a/maintenance/commandLine.inc
+++ b/maintenance/commandLine.inc
@@ -3,10 +3,11 @@
/**
* Backwards-compatibility wrapper for old-style maintenance scripts
*/
-require( dirname(__FILE__) . '/Maintenance.php' );
+require( dirname( __FILE__ ) . '/Maintenance.php' );
+global $optionsWithArgs;
if ( !isset( $optionsWithArgs ) ) {
- $optionsWithArgs = array();
+ $optionsWithArgs = array();
}
class CommandLineInc extends Maintenance {
@@ -42,5 +43,5 @@ class CommandLineInc extends Maintenance {
}
$maintClass = 'CommandLineInc';
-require( DO_MAINTENANCE );
+require( RUN_MAINTENANCE_IF_MAIN );
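
The functional change here is the added "global $optionsWithArgs;" line, presumably so the default assignment still lands in the global scope even when the file is pulled in from inside a function, where a plain assignment would only create a local variable. A minimal sketch of that PHP scoping rule (plain PHP, nothing MediaWiki-specific; both function names are invented):

<?php
// Code running inside a function (including a file include()d from one)
// sees its own local scope, so a plain assignment does not create or
// update the global variable unless it is declared global first.
function setDefaultWithoutGlobal() {
	if ( !isset( $optionsWithArgs ) ) {
		$optionsWithArgs = array(); // local to this function, lost on return
	}
}
function setDefaultWithGlobal() {
	global $optionsWithArgs;        // bind the name to the global scope
	if ( !isset( $optionsWithArgs ) ) {
		$optionsWithArgs = array(); // now visible to everything else
	}
}

setDefaultWithoutGlobal();
var_dump( isset( $GLOBALS['optionsWithArgs'] ) ); // bool(false)
setDefaultWithGlobal();
var_dump( isset( $GLOBALS['optionsWithArgs'] ) ); // bool(true)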
diff --git a/maintenance/convertLinks.inc b/maintenance/convertLinks.inc
deleted file mode 100644
index 7c7b8aff..00000000
--- a/maintenance/convertLinks.inc
+++ /dev/null
@@ -1,218 +0,0 @@
-<?php
-/**
- * @file
- * @todo document
- * @ingroup Maintenance
- */
-
-/** */
-function convertLinks() {
- global $wgDBtype;
- if( $wgDBtype == 'postgres' ) {
- wfOut( "Links table already ok on Postgres.\n" );
- return;
- }
-
- wfOut( "Converting links table to ID-ID...\n" );
-
- global $wgLang, $wgDBserver, $wgDBadminuser, $wgDBadminpassword, $wgDBname;
- global $noKeys, $logPerformance, $fh;
-
- $tuplesAdded = $numBadLinks = $curRowsRead = 0; #counters etc
- $totalTuplesInserted = 0; # total tuples INSERTed into links_temp
-
- $reportCurReadProgress = true; #whether or not to give progress reports while reading IDs from cur table
- $curReadReportInterval = 1000; #number of rows between progress reports
-
- $reportLinksConvProgress = true; #whether or not to give progress reports during conversion
- $linksConvInsertInterval = 1000; #number of rows per INSERT
-
- $initialRowOffset = 0;
- #$finalRowOffset = 0; # not used yet; highest row number from links table to process
-
- # Overwrite the old links table with the new one. If this is set to false,
- # the new table will be left at links_temp.
- $overwriteLinksTable = true;
-
- # Don't create keys, and so allow duplicates in the new links table.
- # This gives a huge speed improvement for very large links tables which are MyISAM. (What about InnoDB?)
- $noKeys = false;
-
-
- $logPerformance = false; # output performance data to a file
- $perfLogFilename = "convLinksPerf.txt";
- #--------------------------------------------------------------------
-
- $dbw = wfGetDB( DB_MASTER );
- list ($cur, $links, $links_temp, $links_backup) = $dbw->tableNamesN( 'cur', 'links', 'links_temp', 'links_backup' );
-
- // Get database-agnostic limit clause
- $sql_limit = $dbw->limitResult( "SELECT l_from FROM $links", 1 );
- $res = $dbw->query( $sql_limit );
- if ( $dbw->fieldType( $res, 0 ) == "int" ) {
- wfOut( "Schema already converted\n" );
- return;
- }
-
- $res = $dbw->query( "SELECT COUNT(*) AS count FROM $links" );
- $row = $dbw->fetchObject($res);
- $numRows = $row->count;
- $dbw->freeResult( $res );
-
- if ( $numRows == 0 ) {
- wfOut( "Updating schema (no rows to convert)...\n" );
- createTempTable();
- } else {
- if ( $logPerformance ) { $fh = fopen ( $perfLogFilename, "w" ); }
- $baseTime = $startTime = getMicroTime();
- # Create a title -> cur_id map
- wfOut( "Loading IDs from $cur table...\n" );
- performanceLog ( "Reading $numRows rows from cur table...\n" );
- performanceLog ( "rows read vs seconds elapsed:\n" );
-
- $dbw->bufferResults( false );
- $res = $dbw->query( "SELECT cur_namespace,cur_title,cur_id FROM $cur" );
- $ids = array();
-
- while ( $row = $dbw->fetchObject( $res ) ) {
- $title = $row->cur_title;
- if ( $row->cur_namespace ) {
- $title = $wgLang->getNsText( $row->cur_namespace ) . ":$title";
- }
- $ids[$title] = $row->cur_id;
- $curRowsRead++;
- if ($reportCurReadProgress) {
- if (($curRowsRead % $curReadReportInterval) == 0) {
- performanceLog( $curRowsRead . " " . (getMicroTime() - $baseTime) . "\n" );
- wfOut( "\t$curRowsRead rows of $cur table read.\n" );
- }
- }
- }
- $dbw->freeResult( $res );
- $dbw->bufferResults( true );
- wfOut( "Finished loading IDs.\n\n" );
- performanceLog( "Took " . (getMicroTime() - $baseTime) . " seconds to load IDs.\n\n" );
- #--------------------------------------------------------------------
-
- # Now, step through the links table (in chunks of $linksConvInsertInterval rows),
- # convert, and write to the new table.
- createTempTable();
- performanceLog( "Resetting timer.\n\n" );
- $baseTime = getMicroTime();
- wfOut( "Processing $numRows rows from $links table...\n" );
- performanceLog( "Processing $numRows rows from $links table...\n" );
- performanceLog( "rows inserted vs seconds elapsed:\n" );
-
- for ($rowOffset = $initialRowOffset; $rowOffset < $numRows; $rowOffset += $linksConvInsertInterval) {
- $sqlRead = "SELECT * FROM $links ";
- $sqlRead = $dbw->limitResult($sqlRead, $linksConvInsertInterval,$rowOffset);
- $res = $dbw->query($sqlRead);
- if ( $noKeys ) {
- $sqlWrite = array("INSERT INTO $links_temp (l_from,l_to) VALUES ");
- } else {
- $sqlWrite = array("INSERT IGNORE INTO $links_temp (l_from,l_to) VALUES ");
- }
-
- $tuplesAdded = 0; # no tuples added to INSERT yet
- while ( $row = $dbw->fetchObject($res) ) {
- $fromTitle = $row->l_from;
- if ( array_key_exists( $fromTitle, $ids ) ) { # valid title
- $from = $ids[$fromTitle];
- $to = $row->l_to;
- if ( $tuplesAdded != 0 ) {
- $sqlWrite[] = ",";
- }
- $sqlWrite[] = "($from,$to)";
- $tuplesAdded++;
- } else { # invalid title
- $numBadLinks++;
- }
- }
- $dbw->freeResult($res);
- #wfOut( "rowOffset: $rowOffset\ttuplesAdded: $tuplesAdded\tnumBadLinks: $numBadLinks\n" );
- if ( $tuplesAdded != 0 ) {
- if ($reportLinksConvProgress) {
- wfOut( "Inserting $tuplesAdded tuples into $links_temp..." );
- }
- $dbw->query( implode("",$sqlWrite) );
- $totalTuplesInserted += $tuplesAdded;
- if ($reportLinksConvProgress)
- wfOut( " done. Total $totalTuplesInserted tuples inserted.\n" );
- performanceLog( $totalTuplesInserted . " " . (getMicroTime() - $baseTime) . "\n" );
- }
- }
- wfOut( "$totalTuplesInserted valid titles and $numBadLinks invalid titles were processed.\n\n" );
- performanceLog( "$totalTuplesInserted valid titles and $numBadLinks invalid titles were processed.\n" );
- performanceLog( "Total execution time: " . (getMicroTime() - $startTime) . " seconds.\n" );
- if ( $logPerformance ) { fclose ( $fh ); }
- }
- #--------------------------------------------------------------------
-
- if ( $overwriteLinksTable ) {
- $dbConn = Database::newFromParams( $wgDBserver, $wgDBadminuser, $wgDBadminpassword, $wgDBname );
- if (!($dbConn->isOpen())) {
- wfOut( "Opening connection to database failed.\n" );
- return;
- }
- # Check for existing links_backup, and delete it if it exists.
- wfOut( "Dropping backup links table if it exists..." );
- $dbConn->query( "DROP TABLE IF EXISTS $links_backup", DB_MASTER);
- wfOut( " done.\n" );
-
- # Swap in the new table, and move old links table to links_backup
- wfOut( "Swapping tables '$links' to '$links_backup'; '$links_temp' to '$links'..." );
- $dbConn->query( "RENAME TABLE links TO $links_backup, $links_temp TO $links", DB_MASTER );
- wfOut( " done.\n\n" );
-
- $dbConn->close();
- wfOut( "Conversion complete. The old table remains at $links_backup;\n" );
- wfOut( "delete at your leisure.\n" );
- } else {
- wfOut( "Conversion complete. The converted table is at $links_temp;\n" );
- wfOut( "the original links table is unchanged.\n" );
- }
-}
-
-#--------------------------------------------------------------------
-
-function createTempTable() {
- global $wgDBserver, $wgDBadminuser, $wgDBadminpassword, $wgDBname;
- global $noKeys;
- $dbConn = Database::newFromParams( $wgDBserver, $wgDBadminuser, $wgDBadminpassword, $wgDBname );
-
- if (!($dbConn->isOpen())) {
- wfOut( "Opening connection to database failed.\n" );
- return;
- }
- $links_temp = $dbConn->tableName( 'links_temp' );
-
- wfOut( "Dropping temporary links table if it exists..." );
- $dbConn->query( "DROP TABLE IF EXISTS $links_temp");
- wfOut( " done.\n" );
-
- wfOut( "Creating temporary links table..." );
- if ( $noKeys ) {
- $dbConn->query( "CREATE TABLE $links_temp ( " .
- "l_from int(8) unsigned NOT NULL default '0', " .
- "l_to int(8) unsigned NOT NULL default '0')");
- } else {
- $dbConn->query( "CREATE TABLE $links_temp ( " .
- "l_from int(8) unsigned NOT NULL default '0', " .
- "l_to int(8) unsigned NOT NULL default '0', " .
- "UNIQUE KEY l_from(l_from,l_to), " .
- "KEY (l_to))");
- }
- wfOut( " done.\n\n" );
-}
-
-function performanceLog( $text ) {
- global $logPerformance, $fh;
- if ( $logPerformance ) {
- fwrite( $fh, $text );
- }
-}
-
-function getMicroTime() { # return time in seconds, with microsecond accuracy
- list($usec, $sec) = explode(" ", microtime());
- return ((float)$usec + (float)$sec);
-}
diff --git a/maintenance/convertLinks.php b/maintenance/convertLinks.php
index 415662a0..b7a55d57 100644
--- a/maintenance/convertLinks.php
+++ b/maintenance/convertLinks.php
@@ -18,95 +18,111 @@
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/gpl.html
*
+ * @file
* @ingroup Maintenance
*/
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class ConvertLinks extends Maintenance {
+ private $logPerformance;
public function __construct() {
parent::__construct();
$this->mDescription = "Convert from the old links schema (string->ID) to the new schema (ID->ID)
The wiki should be put into read-only mode while this script executes";
+
+ $this->addArg( 'logperformance', "Log performance to perfLogFilename.", false );
+ $this->addArg( 'perfLogFilename', "Filename where performance is logged if --logperformance was set (defaults to 'convLinksPerf.txt').", false );
+ $this->addArg( 'keep-links-table', "Don't overwrite the old links table with the new one, leave the new table at links_temp.", false );
+ $this->addArg( 'nokeys', "Don't create keys, and so allow duplicates in the new links table.\n
+This gives a huge speed improvement for very large links tables which are MyISAM." /* (What about InnoDB?) */, false );
+ }
+
+ public function getDbType() {
+ return Maintenance::DB_ADMIN;
}
public function execute() {
- global $wgDBtype;
- if( $wgDBtype == 'postgres' ) {
- $this->output( "Links table already ok on Postgres.\n" );
+ $dbw = wfGetDB( DB_MASTER );
+
+ $type = $dbw->getType();
+ if ( $type != 'mysql' ) {
+ $this->output( "Link table conversion not necessary for $type\n" );
return;
}
- $this->output( "Converting links table to ID-ID...\n" );
+ global $wgContLang;
- global $wgLang, $wgDBserver, $wgDBadminuser, $wgDBadminpassword, $wgDBname;
- global $noKeys, $logPerformance, $fh;
-
- $tuplesAdded = $numBadLinks = $curRowsRead = 0; #counters etc
+ $numBadLinks = $curRowsRead = 0; # counters etc
$totalTuplesInserted = 0; # total tuples INSERTed into links_temp
-
- $reportCurReadProgress = true; #whether or not to give progress reports while reading IDs from cur table
- $curReadReportInterval = 1000; #number of rows between progress reports
-
- $reportLinksConvProgress = true; #whether or not to give progress reports during conversion
- $linksConvInsertInterval = 1000; #number of rows per INSERT
-
+
+ $reportCurReadProgress = true; # whether or not to give progress reports while reading IDs from cur table
+ $curReadReportInterval = 1000; # number of rows between progress reports
+
+ $reportLinksConvProgress = true; # whether or not to give progress reports during conversion
+ $linksConvInsertInterval = 1000; # number of rows per INSERT
+
$initialRowOffset = 0;
- #$finalRowOffset = 0; # not used yet; highest row number from links table to process
-
- # Overwrite the old links table with the new one. If this is set to false,
- # the new table will be left at links_temp.
- $overwriteLinksTable = true;
-
- # Don't create keys, and so allow duplicates in the new links table.
- # This gives a huge speed improvement for very large links tables which are MyISAM. (What about InnoDB?)
- $noKeys = false;
-
-
- $logPerformance = false; # output performance data to a file
- $perfLogFilename = "convLinksPerf.txt";
- #--------------------------------------------------------------------
-
- $dbw = wfGetDB( DB_MASTER );
- list ($cur, $links, $links_temp, $links_backup) = $dbw->tableNamesN( 'cur', 'links', 'links_temp', 'links_backup' );
-
+ # $finalRowOffset = 0; # not used yet; highest row number from links table to process
+
+ $overwriteLinksTable = !$this->hasOption( 'keep-links-table' );
+ $noKeys = $this->hasOption( 'noKeys' );
+ $this->logPerformance = $this->hasOption( 'logperformance' );
+ $perfLogFilename = $this->getArg( 'perfLogFilename', "convLinksPerf.txt" );
+
+ # --------------------------------------------------------------------
+
+ list ( $cur, $links, $links_temp, $links_backup ) = $dbw->tableNamesN( 'cur', 'links', 'links_temp', 'links_backup' );
+
+ if( $dbw->tableExists( 'pagelinks' ) ) {
+ $this->output( "...have pagelinks; skipping old links table updates\n" );
+ return;
+ }
+
$res = $dbw->query( "SELECT l_from FROM $links LIMIT 1" );
if ( $dbw->fieldType( $res, 0 ) == "int" ) {
$this->output( "Schema already converted\n" );
return;
}
-
+
$res = $dbw->query( "SELECT COUNT(*) AS count FROM $links" );
- $row = $dbw->fetchObject($res);
+ $row = $dbw->fetchObject( $res );
$numRows = $row->count;
$dbw->freeResult( $res );
-
+
if ( $numRows == 0 ) {
$this->output( "Updating schema (no rows to convert)...\n" );
$this->createTempTable();
} else {
- if ( $logPerformance ) { $fh = fopen ( $perfLogFilename, "w" ); }
+ $fh = false;
+ if ( $this->logPerformance ) {
+ $fh = fopen ( $perfLogFilename, "w" );
+ if ( !$fh ) {
+ $this->error( "Couldn't open $perfLogFilename" );
+ $this->logPerformance = false;
+ }
+ }
$baseTime = $startTime = $this->getMicroTime();
# Create a title -> cur_id map
$this->output( "Loading IDs from $cur table...\n" );
- $this->performanceLog ( "Reading $numRows rows from cur table...\n" );
- $this->performanceLog ( "rows read vs seconds elapsed:\n" );
+ $this->performanceLog ( $fh, "Reading $numRows rows from cur table...\n" );
+ $this->performanceLog ( $fh, "rows read vs seconds elapsed:\n" );
$dbw->bufferResults( false );
$res = $dbw->query( "SELECT cur_namespace,cur_title,cur_id FROM $cur" );
$ids = array();
- while ( $row = $dbw->fetchObject( $res ) ) {
+ foreach ( $res as $row ) {
$title = $row->cur_title;
if ( $row->cur_namespace ) {
- $title = $wgLang->getNsText( $row->cur_namespace ) . ":$title";
+ $title = $wgContLang->getNsText( $row->cur_namespace ) . ":$title";
}
$ids[$title] = $row->cur_id;
$curRowsRead++;
- if ($reportCurReadProgress) {
- if (($curRowsRead % $curReadReportInterval) == 0) {
- $this->performanceLog( $curRowsRead . " " . ($this->getMicroTime() - $baseTime) . "\n" );
+ if ( $reportCurReadProgress ) {
+ if ( ( $curRowsRead % $curReadReportInterval ) == 0 ) {
+ $this->performanceLog( $fh, $curRowsRead . " " . ( $this->getMicroTime() - $baseTime ) . "\n" );
$this->output( "\t$curRowsRead rows of $cur table read.\n" );
}
}
@@ -114,30 +130,31 @@ The wiki should be put into read-only mode while this script executes";
$dbw->freeResult( $res );
$dbw->bufferResults( true );
$this->output( "Finished loading IDs.\n\n" );
- $this->performanceLog( "Took " . ($this->getMicroTime() - $baseTime) . " seconds to load IDs.\n\n" );
- #--------------------------------------------------------------------
-
+ $this->performanceLog( $fh, "Took " . ( $this->getMicroTime() - $baseTime ) . " seconds to load IDs.\n\n" );
+
+ # --------------------------------------------------------------------
+
# Now, step through the links table (in chunks of $linksConvInsertInterval rows),
# convert, and write to the new table.
$this->createTempTable();
- $this->performanceLog( "Resetting timer.\n\n" );
+ $this->performanceLog( $fh, "Resetting timer.\n\n" );
$baseTime = $this->getMicroTime();
$this->output( "Processing $numRows rows from $links table...\n" );
- $this->performanceLog( "Processing $numRows rows from $links table...\n" );
- $this->performanceLog( "rows inserted vs seconds elapsed:\n" );
-
- for ($rowOffset = $initialRowOffset; $rowOffset < $numRows; $rowOffset += $linksConvInsertInterval) {
+ $this->performanceLog( $fh, "Processing $numRows rows from $links table...\n" );
+ $this->performanceLog( $fh, "rows inserted vs seconds elapsed:\n" );
+
+ for ( $rowOffset = $initialRowOffset; $rowOffset < $numRows; $rowOffset += $linksConvInsertInterval ) {
$sqlRead = "SELECT * FROM $links ";
- $sqlRead = $dbw->limitResult($sqlRead, $linksConvInsertInterval,$rowOffset);
- $res = $dbw->query($sqlRead);
+ $sqlRead = $dbw->limitResult( $sqlRead, $linksConvInsertInterval, $rowOffset );
+ $res = $dbw->query( $sqlRead );
if ( $noKeys ) {
- $sqlWrite = array("INSERT INTO $links_temp (l_from,l_to) VALUES ");
+ $sqlWrite = array( "INSERT INTO $links_temp (l_from,l_to) VALUES " );
} else {
- $sqlWrite = array("INSERT IGNORE INTO $links_temp (l_from,l_to) VALUES ");
+ $sqlWrite = array( "INSERT IGNORE INTO $links_temp (l_from,l_to) VALUES " );
}
-
+
$tuplesAdded = 0; # no tuples added to INSERT yet
- while ( $row = $dbw->fetchObject($res) ) {
+ foreach ( $res as $row ) {
$fromTitle = $row->l_from;
if ( array_key_exists( $fromTitle, $ids ) ) { # valid title
$from = $ids[$fromTitle];
@@ -151,43 +168,40 @@ The wiki should be put into read-only mode while this script executes";
$numBadLinks++;
}
}
- $dbw->freeResult($res);
- #$this->output( "rowOffset: $rowOffset\ttuplesAdded: $tuplesAdded\tnumBadLinks: $numBadLinks\n" );
+ $dbw->freeResult( $res );
+ # $this->output( "rowOffset: $rowOffset\ttuplesAdded: $tuplesAdded\tnumBadLinks: $numBadLinks\n" );
if ( $tuplesAdded != 0 ) {
- if ($reportLinksConvProgress) {
+ if ( $reportLinksConvProgress ) {
$this->output( "Inserting $tuplesAdded tuples into $links_temp..." );
}
- $dbw->query( implode("",$sqlWrite) );
+ $dbw->query( implode( "", $sqlWrite ) );
$totalTuplesInserted += $tuplesAdded;
- if ($reportLinksConvProgress)
+ if ( $reportLinksConvProgress )
$this->output( " done. Total $totalTuplesInserted tuples inserted.\n" );
- $this->performanceLog( $totalTuplesInserted . " " . ($this->getMicroTime() - $baseTime) . "\n" );
+ $this->performanceLog( $fh, $totalTuplesInserted . " " . ( $this->getMicroTime() - $baseTime ) . "\n" );
}
}
$this->output( "$totalTuplesInserted valid titles and $numBadLinks invalid titles were processed.\n\n" );
- $this->performanceLog( "$totalTuplesInserted valid titles and $numBadLinks invalid titles were processed.\n" );
- $this->performanceLog( "Total execution time: " . ($this->getMicroTime() - $startTime) . " seconds.\n" );
- if ( $logPerformance ) { fclose ( $fh ); }
+ $this->performanceLog( $fh, "$totalTuplesInserted valid titles and $numBadLinks invalid titles were processed.\n" );
+ $this->performanceLog( $fh, "Total execution time: " . ( $this->getMicroTime() - $startTime ) . " seconds.\n" );
+ if ( $this->logPerformance ) {
+ fclose ( $fh );
+ }
}
- #--------------------------------------------------------------------
-
+ # --------------------------------------------------------------------
+
if ( $overwriteLinksTable ) {
- $dbConn = Database::newFromParams( $wgDBserver, $wgDBadminuser, $wgDBadminpassword, $wgDBname );
- if (!($dbConn->isOpen())) {
- $this->output( "Opening connection to database failed.\n" );
- return;
- }
# Check for existing links_backup, and delete it if it exists.
$this->output( "Dropping backup links table if it exists..." );
- $dbConn->query( "DROP TABLE IF EXISTS $links_backup", DB_MASTER);
+ $dbw->query( "DROP TABLE IF EXISTS $links_backup", DB_MASTER );
$this->output( " done.\n" );
-
+
# Swap in the new table, and move old links table to links_backup
$this->output( "Swapping tables '$links' to '$links_backup'; '$links_temp' to '$links'..." );
- $dbConn->query( "RENAME TABLE links TO $links_backup, $links_temp TO $links", DB_MASTER );
+ $dbw->query( "RENAME TABLE links TO $links_backup, $links_temp TO $links", DB_MASTER );
$this->output( " done.\n\n" );
-
- $dbConn->close();
+
+ $dbw->close();
$this->output( "Conversion complete. The old table remains at $links_backup;\n" );
$this->output( "delete at your leisure.\n" );
} else {
@@ -197,47 +211,44 @@ The wiki should be put into read-only mode while this script executes";
}
private function createTempTable() {
- global $wgDBserver, $wgDBadminuser, $wgDBadminpassword, $wgDBname;
- global $noKeys;
- $dbConn = Database::newFromParams( $wgDBserver, $wgDBadminuser, $wgDBadminpassword, $wgDBname );
+ $dbConn = wfGetDB( DB_MASTER );
- if (!($dbConn->isOpen())) {
+ if ( !( $dbConn->isOpen() ) ) {
$this->output( "Opening connection to database failed.\n" );
return;
}
$links_temp = $dbConn->tableName( 'links_temp' );
$this->output( "Dropping temporary links table if it exists..." );
- $dbConn->query( "DROP TABLE IF EXISTS $links_temp");
+ $dbConn->query( "DROP TABLE IF EXISTS $links_temp" );
$this->output( " done.\n" );
$this->output( "Creating temporary links table..." );
- if ( $noKeys ) {
+ if ( $this->hasOption( 'noKeys' ) ) {
$dbConn->query( "CREATE TABLE $links_temp ( " .
"l_from int(8) unsigned NOT NULL default '0', " .
- "l_to int(8) unsigned NOT NULL default '0')");
+ "l_to int(8) unsigned NOT NULL default '0')" );
} else {
$dbConn->query( "CREATE TABLE $links_temp ( " .
"l_from int(8) unsigned NOT NULL default '0', " .
"l_to int(8) unsigned NOT NULL default '0', " .
"UNIQUE KEY l_from(l_from,l_to), " .
- "KEY (l_to))");
+ "KEY (l_to))" );
}
$this->output( " done.\n\n" );
}
- private function performanceLog( $text ) {
- global $logPerformance, $fh;
- if ( $logPerformance ) {
+ private function performanceLog( $fh, $text ) {
+ if ( $this->logPerformance ) {
fwrite( $fh, $text );
}
}
private function getMicroTime() { # return time in seconds, with microsecond accuracy
- list($usec, $sec) = explode(" ", microtime());
- return ((float)$usec + (float)$sec);
+ list( $usec, $sec ) = explode( " ", microtime() );
+ return ( (float)$usec + (float)$sec );
}
}
$maintClass = "ConvertLinks";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
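
The rewritten script keeps the original conversion strategy: build a title-to-cur_id map once, then walk the old links table in fixed-size windows, translating each l_from title to its ID and flushing multi-row INSERT ... VALUES statements into links_temp while counting titles that no longer resolve. The sketch below replays that batching on in-memory arrays and only builds the SQL strings instead of executing them; the statement layout matches the queries above, everything else is invented for the illustration.

<?php
// Sketch of the title -> ID batching above, on in-memory data only.
$ids = array( 'Main_Page' => 1, 'Help:Contents' => 2 ); // title => cur_id map

// Old-style rows: l_from is a title string, l_to is already a page ID.
$oldLinks = array(
	array( 'l_from' => 'Main_Page',     'l_to' => 2 ),
	array( 'l_from' => 'No_such_page',  'l_to' => 1 ), // unknown title, skipped
	array( 'l_from' => 'Help:Contents', 'l_to' => 1 ),
);

$batchSize = 2;
$statements = array();
$numBadLinks = 0;

foreach ( array_chunk( $oldLinks, $batchSize ) as $chunk ) {
	$tuples = array();
	foreach ( $chunk as $row ) {
		if ( isset( $ids[$row['l_from']] ) ) {
			$tuples[] = '(' . $ids[$row['l_from']] . ',' . (int)$row['l_to'] . ')';
		} else {
			$numBadLinks++; // title no longer exists; drop the link
		}
	}
	if ( $tuples ) {
		// IGNORE keeps duplicate (l_from,l_to) pairs from aborting the insert.
		$statements[] = 'INSERT IGNORE INTO links_temp (l_from,l_to) VALUES ' .
			implode( ',', $tuples );
	}
}

print_r( $statements );
echo "$numBadLinks invalid titles skipped\n";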
diff --git a/maintenance/convertUserOptions.php b/maintenance/convertUserOptions.php
index 657a82c1..278d40ff 100644
--- a/maintenance/convertUserOptions.php
+++ b/maintenance/convertUserOptions.php
@@ -17,10 +17,11 @@
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/gpl.html
*
+ * @file
* @ingroup Maintenance
*/
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class ConvertUserOptions extends Maintenance {
@@ -30,24 +31,24 @@ class ConvertUserOptions extends Maintenance {
parent::__construct();
$this->mDescription = "Convert user options from old to new system";
}
-
+
public function execute() {
$this->output( "Beginning batch conversion of user options.\n" );
$id = 0;
$dbw = wfGetDB( DB_MASTER );
- while ($id !== null) {
- $idCond = 'user_id>'.$dbw->addQuotes( $id );
- $optCond = "user_options!=".$dbw->addQuotes( '' ); // For compatibility
+ while ( $id !== null ) {
+ $idCond = 'user_id>' . $dbw->addQuotes( $id );
+ $optCond = "user_options!=" . $dbw->addQuotes( '' ); // For compatibility
$res = $dbw->select( 'user', '*',
array( $optCond, $idCond ), __METHOD__,
array( 'LIMIT' => 50, 'FOR UPDATE' ) );
$id = $this->convertOptionBatch( $res, $dbw );
$dbw->commit();
-
+
wfWaitForSlaves( 1 );
-
- if ($id)
+
+ if ( $id )
$this->output( "--Converted to ID $id\n" );
}
$this->output( "Conversion done. Converted " . $this->mConversionCount . " user records.\n" );
@@ -57,16 +58,16 @@ class ConvertUserOptions extends Maintenance {
$id = null;
foreach ( $res as $row ) {
$this->mConversionCount++;
-
+
$u = User::newFromRow( $row );
-
+
$u->saveSettings();
$id = $row->user_id;
}
-
+
return $id;
}
}
$maintClass = "ConvertUserOptions";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
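
The conversion loop above is a keyset-paginated batch job: select up to 50 rows with user_id greater than the last ID handled, convert them, commit, wait for the slaves, and stop once a batch comes back empty. A hedged sketch of that control flow over an in-memory ID list follows (fetchBatch() is invented for the sketch; the real script selects from the user table and calls User::saveSettings() per row):

<?php
// Sketch of the "id > last seen, LIMIT n" batching used above.
function fetchBatch( array $allIds, $afterId, $limit ) {
	$batch = array();
	foreach ( $allIds as $id ) {
		if ( $id > $afterId ) {
			$batch[] = $id;
			if ( count( $batch ) === $limit ) {
				break;
			}
		}
	}
	return $batch;
}

$allIds = range( 1, 7 ); // pretend user IDs with rows needing conversion
$lastId = 0;
$converted = 0;

while ( true ) {
	$batch = fetchBatch( $allIds, $lastId, 3 ); // LIMIT 3 per round
	if ( !$batch ) {
		break; // nothing left above $lastId: conversion finished
	}
	foreach ( $batch as $id ) {
		$converted++;        // stand-in for converting one user record
	}
	$lastId = max( $batch ); // resume after the highest ID processed
	// The real script commits and calls wfWaitForSlaves( 1 ) between batches.
}

echo "Converted $converted user records.\n"; // Converted 7 user records.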
diff --git a/maintenance/createAndPromote.php b/maintenance/createAndPromote.php
index 391d1226..8bff284a 100644
--- a/maintenance/createAndPromote.php
+++ b/maintenance/createAndPromote.php
@@ -1,5 +1,4 @@
<?php
-
/**
* Maintenance script to create an account and grant it administrator rights
*
@@ -18,11 +17,12 @@
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/gpl.html
*
+ * @file
* @ingroup Maintenance
* @author Rob Church <robchur@gmail.com>
*/
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class CreateAndPromote extends Maintenance {
@@ -35,41 +35,41 @@ class CreateAndPromote extends Maintenance {
}
public function execute() {
- $username = $this->getArg(0);
- $password = $this->getArg(1);
-
+ $username = $this->getArg( 0 );
+ $password = $this->getArg( 1 );
+
$this->output( wfWikiID() . ": Creating and promoting User:{$username}..." );
-
+
$user = User::newFromName( $username );
- if( !is_object( $user ) ) {
+ if ( !is_object( $user ) ) {
$this->error( "invalid username.", true );
- } elseif( 0 != $user->idForName() ) {
+ } elseif ( 0 != $user->idForName() ) {
$this->error( "account exists.", true );
}
# Try to set the password
try {
$user->setPassword( $password );
- } catch( PasswordError $pwe ) {
+ } catch ( PasswordError $pwe ) {
$this->error( $pwe->getText(), true );
}
# Insert the account into the database
$user->addToDatabase();
$user->saveSettings();
-
+
# Promote user
$user->addGroup( 'sysop' );
- if( $this->hasOption( 'bureaucrat' ) )
+ if ( $this->hasOption( 'bureaucrat' ) )
$user->addGroup( 'bureaucrat' );
-
+
# Increment site_stats.ss_users
$ssu = new SiteStatsUpdate( 0, 0, 0, 0, 1 );
$ssu->doUpdate();
-
+
$this->output( "done.\n" );
}
}
$maintClass = "CreateAndPromote";
-require_once( DO_MAINTENANCE ); \ No newline at end of file
+require_once( RUN_MAINTENANCE_IF_MAIN ); \ No newline at end of file
diff --git a/maintenance/cssjanus/COPYING b/maintenance/cssjanus/COPYING
new file mode 100644
index 00000000..3f2c8953
--- /dev/null
+++ b/maintenance/cssjanus/COPYING
@@ -0,0 +1,13 @@
+ Copyright 2008 Google Inc.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/maintenance/cssjanus/LICENSE b/maintenance/cssjanus/LICENSE
new file mode 100644
index 00000000..d6456956
--- /dev/null
+++ b/maintenance/cssjanus/LICENSE
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/maintenance/cssjanus/README b/maintenance/cssjanus/README
new file mode 100644
index 00000000..9b922156
--- /dev/null
+++ b/maintenance/cssjanus/README
@@ -0,0 +1,91 @@
+=CSSJanus=
+
+_Flips CSS from LTR to an RTL orientation and vice-versa_
+
+Author: `Lindsey Simon <elsigh@google.com>`
+
+==Introduction==
+
+CSSJanus is a CSS parser utility designed to aid the conversion of a website's
+layout from left-to-right (LTR) to right-to-left (RTL). The script was born out of
+a need to convert CSS for RTL languages when tables are not being used for layout (since tables will automatically reorder TDs in RTL).
+CSSJanus will change most of the obvious CSS property names and their values as
+well as some not-so-obvious ones (cursor, background-position %, etc...).
+The script is designed to offer flexibility to account for cases when you do
+not want to change certain rules which exist to account for bidirectional text
+display bugs, as well as situations where you may or may not want to flip annotations inside of the background url string.
+Note that you can disable CSSJanus from running on an entire class or any
+rule within a class by prepending a /* @noflip */ comment before the rule(s)
+you want CSSJanus to ignore.
+
+CSSJanus itself is not always enough to make a website that works in an LTR
+language context work in an RTL language all the way, but it is a start.
+
+==Getting the code==
+
+View the trunk at:
+
+ http://cssjanus.googlecode.com/svn/trunk/
+
+Check out the latest development version anonymously with:
+
+{{{
+ $ svn checkout http://cssjanus.googlecode.com/svn/trunk/ cssjanus
+}}}
+
+==Using==
+
+Usage:
+ ./cssjanus.py < file.css > file-rtl.css
+Flags:
+ --swap_left_right_in_url: Fixes "left"/"right" string within urls.
+ Ex: ./cssjanus.py --swap_left_right_in_url < file.css > file_rtl.css
+ --swap_ltr_rtl_in_url: Fixes "ltr"/"rtl" string within urls.
+ Ex: ./cssjanus.py --swap_ltr_rtl_in_url < file.css > file_rtl.css
+
+If you'd like to make use of the webapp version of cssjanus, you'll need to
+download the Google App Engine SDK
+ http://code.google.com/appengine/downloads.html
+and also drop a "django" directory into this directory, with the latest svn
+from django. You should be good to go with that setup. Please let me know
+otherwise.
+
+==Bugs, Patches==
+
+Patches and bug reports are welcome, just please keep the style
+consistent with the original source. If you find a bug, please include a diff
+of cssjanus_test.py with the bug included as a new unit test which fails. It
+will make understanding and fixing the bug easier.
+
+==Todo==
+
+* Include some helpers for some typical bidi text solutions?
+* Aural CSS (azimuth) swapping?
+
+==Contributors==
+
+Additional thanks to Mike Samuel for his work on csslex.py, Andy Perelson for
+his help coding and reviewing, Stephen Zabel for his help with i18n and my sanity,
+and to Eric Meyer for his thoughtful input.
+Thanks to Junyu Wang for the Chinese translation.
+Thanks to Masashi Kawashima for the Japanese translation.
+Thanks to Taaryk Taar and Tariq Al-Omaireeni for an updated Arabic translation.
+Thanks to Jens Meiert for the German translation.
+
+==License==
+
+{{{
+ Copyright 2008 Google Inc. All Rights Reserved.
+
+ Licensed under the Apache License, Version 2.0 (the 'License');
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an 'AS IS' BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+}}}
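
At its core, CSSJanus swaps directional keywords without letting the second substitution undo the first by routing one side through a temporary token, the TMP_TOKEN idea visible in cssjanus.py below. The following PHP fragment is a deliberately simplified rendering of that swap for illustration only; naiveFlipLeftRight() is made up here and omits the @noflip handling and the url()/class-name guards that the real tool implements with look-around assertions.

<?php
// Simplified illustration of the two-step token swap CSSJanus performs:
// left -> TMP, right -> left, TMP -> right. Without the temporary token,
// the second substitution would re-flip everything back.
function naiveFlipLeftRight( $css ) {
	$tmp = "\x01TMP\x01"; // placeholder that cannot occur in the input
	$css = preg_replace( '/\bleft\b/i',  $tmp,   $css );
	$css = preg_replace( '/\bright\b/i', 'left', $css );
	return str_replace( $tmp, 'right', $css );
}

echo naiveFlipLeftRight( 'padding-left: 2px; margin-right: 1px; float: left;' );
// padding-right: 2px; margin-left: 1px; float: right;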
diff --git a/maintenance/cssjanus/cssjanus.py b/maintenance/cssjanus/cssjanus.py
new file mode 100644
index 00000000..dd14bd58
--- /dev/null
+++ b/maintenance/cssjanus/cssjanus.py
@@ -0,0 +1,574 @@
+#!/usr/bin/python
+#
+# Copyright 2008 Google Inc. All Rights Reserved.
+
+"""Converts a LeftToRight Cascading Style Sheet into a RightToLeft one.
+
+ This is a utility script for replacing "left" oriented things in a CSS file
+ like float, padding, margin with "right" oriented values.
+ It also does the opposite.
+ The goal is to be able to conditionally serve one large, cat'd, compiled CSS
+ file appropriate for LeftToRight oriented languages and RightToLeft ones.
+ This utility will hopefully help your structural layout done in CSS in
+ terms of its RTL compatibility. It will not help with some of the more
+ complicated bidirectional text issues.
+"""
+
+__author__ = 'elsigh@google.com (Lindsey Simon)'
+__version__ = '0.1'
+
+import logging
+import re
+import sys
+import getopt
+import os
+
+import csslex
+
+logging.getLogger().setLevel(logging.INFO)
+
+# Global for the command line flags.
+SWAP_LTR_RTL_IN_URL_DEFAULT = False
+SWAP_LEFT_RIGHT_IN_URL_DEFAULT = False
+FLAGS = {'swap_ltr_rtl_in_url': SWAP_LTR_RTL_IN_URL_DEFAULT,
+ 'swap_left_right_in_url': SWAP_LEFT_RIGHT_IN_URL_DEFAULT}
+
+# Generic token delimiter character.
+TOKEN_DELIMITER = '~'
+
+# This is a temporary match token we use when swapping strings.
+TMP_TOKEN = '%sTMP%s' % (TOKEN_DELIMITER, TOKEN_DELIMITER)
+
+# Token to be used for joining lines.
+TOKEN_LINES = '%sJ%s' % (TOKEN_DELIMITER, TOKEN_DELIMITER)
+
+# Global constant text strings for CSS value matches.
+LTR = 'ltr'
+RTL = 'rtl'
+LEFT = 'left'
+RIGHT = 'right'
+
+# This is a lookbehind match to ensure that we don't replace instances
+# of our string token (left, rtl, etc...) if there's a letter in front of it.
+# Specifically, this prevents replacements like 'background: url(bright.png)'.
+LOOKBEHIND_NOT_LETTER = r'(?<![a-zA-Z])'
+
+# This is a lookahead match to make sure we don't replace left and right
+# in actual classnames, so that we don't break the HTML/CSS dependencies.
+# Read literally, it says ignore cases where the word left, for instance, is
+# directly followed by valid classname characters and a curly brace.
+# ex: .column-left {float: left} will become .column-left {float: right}
+LOOKAHEAD_NOT_OPEN_BRACE = (r'(?!(?:%s|%s|%s|#|\:|\.|\,|\+|>)*?{)' %
+ (csslex.NMCHAR, TOKEN_LINES, csslex.SPACE))
+
+
+# These two lookaheads are to test whether or not we are within a
+# background: url(HERE) situation.
+# Ref: http://www.w3.org/TR/CSS21/syndata.html#uri
+VALID_AFTER_URI_CHARS = r'[\'\"]?%s' % csslex.WHITESPACE
+LOOKAHEAD_NOT_CLOSING_PAREN = r'(?!%s?%s\))' % (csslex.URL_CHARS,
+ VALID_AFTER_URI_CHARS)
+LOOKAHEAD_FOR_CLOSING_PAREN = r'(?=%s?%s\))' % (csslex.URL_CHARS,
+ VALID_AFTER_URI_CHARS)
+
+# Compile a regex to swap left and right values in 4 part notations.
+# We need to match negatives and decimal numeric values.
+# ex. 'margin: .25em -2px 3px 0' becomes 'margin: .25em 0 3px -2px'.
+POSSIBLY_NEGATIVE_QUANTITY = r'((?:-?%s)|(?:inherit|auto))' % csslex.QUANTITY
+POSSIBLY_NEGATIVE_QUANTITY_SPACE = r'%s%s%s' % (POSSIBLY_NEGATIVE_QUANTITY,
+ csslex.SPACE,
+ csslex.WHITESPACE)
+FOUR_NOTATION_QUANTITY_RE = re.compile(r'%s%s%s%s' %
+ (POSSIBLY_NEGATIVE_QUANTITY_SPACE,
+ POSSIBLY_NEGATIVE_QUANTITY_SPACE,
+ POSSIBLY_NEGATIVE_QUANTITY_SPACE,
+ POSSIBLY_NEGATIVE_QUANTITY),
+ re.I)
+COLOR = r'(%s|%s)' % (csslex.NAME, csslex.HASH)
+COLOR_SPACE = r'%s%s' % (COLOR, csslex.SPACE)
+FOUR_NOTATION_COLOR_RE = re.compile(r'(-color%s:%s)%s%s%s(%s)' %
+ (csslex.WHITESPACE,
+ csslex.WHITESPACE,
+ COLOR_SPACE,
+ COLOR_SPACE,
+ COLOR_SPACE,
+ COLOR),
+ re.I)
+
+# Compile the cursor resize regexes
+CURSOR_EAST_RE = re.compile(LOOKBEHIND_NOT_LETTER + '([ns]?)e-resize')
+CURSOR_WEST_RE = re.compile(LOOKBEHIND_NOT_LETTER + '([ns]?)w-resize')
+
+# Matches the condition where we need to replace the horizontal component
+# of a background-position value when expressed in horizontal percentage.
+# Had to make two regexes because in the case of position-x there is only
+# one quantity, and otherwise we don't want to match and change cases with only
+# one quantity.
+BG_HORIZONTAL_PERCENTAGE_RE = re.compile(r'background(-position)?(%s:%s)'
+ '([^%%]*?)(%s)%%'
+ '(%s(?:%s|%s))' % (csslex.WHITESPACE,
+ csslex.WHITESPACE,
+ csslex.NUM,
+ csslex.WHITESPACE,
+ csslex.QUANTITY,
+ csslex.IDENT))
+
+BG_HORIZONTAL_PERCENTAGE_X_RE = re.compile(r'background-position-x(%s:%s)'
+ '(%s)%%' % (csslex.WHITESPACE,
+ csslex.WHITESPACE,
+ csslex.NUM))
+
+# Matches the opening of a body selector.
+BODY_SELECTOR = r'body%s{%s' % (csslex.WHITESPACE, csslex.WHITESPACE)
+
+# Matches anything up until the closing of a selector.
+CHARS_WITHIN_SELECTOR = r'[^\}]*?'
+
+# Matches the direction property in a selector.
+DIRECTION_RE = r'direction%s:%s' % (csslex.WHITESPACE, csslex.WHITESPACE)
+
+# These allow us to swap "ltr" with "rtl" and vice versa ONLY within the
+# body selector and on the same line.
+BODY_DIRECTION_LTR_RE = re.compile(r'(%s)(%s)(%s)(ltr)' %
+ (BODY_SELECTOR, CHARS_WITHIN_SELECTOR,
+ DIRECTION_RE),
+ re.I)
+BODY_DIRECTION_RTL_RE = re.compile(r'(%s)(%s)(%s)(rtl)' %
+ (BODY_SELECTOR, CHARS_WITHIN_SELECTOR,
+ DIRECTION_RE),
+ re.I)
+
+
+# Allows us to swap "direction:ltr" with "direction:rtl" and
+# vice versa anywhere in a line.
+DIRECTION_LTR_RE = re.compile(r'%s(ltr)' % DIRECTION_RE)
+DIRECTION_RTL_RE = re.compile(r'%s(rtl)' % DIRECTION_RE)
+
+# We want to be able to switch left with right and vice versa anywhere
+# we encounter left/right strings, EXCEPT inside the background:url(). The next
+# two regexes are for that purpose. We have alternate IN_URL versions of the
+# regexes compiled in case the user passes the flag that they do
+# actually want to have left and right swapped inside of background:urls.
+LEFT_RE = re.compile('%s(%s)%s%s' % (LOOKBEHIND_NOT_LETTER,
+ LEFT,
+ LOOKAHEAD_NOT_CLOSING_PAREN,
+ LOOKAHEAD_NOT_OPEN_BRACE),
+ re.I)
+RIGHT_RE = re.compile('%s(%s)%s%s' % (LOOKBEHIND_NOT_LETTER,
+ RIGHT,
+ LOOKAHEAD_NOT_CLOSING_PAREN,
+ LOOKAHEAD_NOT_OPEN_BRACE),
+ re.I)
+LEFT_IN_URL_RE = re.compile('%s(%s)%s' % (LOOKBEHIND_NOT_LETTER,
+ LEFT,
+ LOOKAHEAD_FOR_CLOSING_PAREN),
+ re.I)
+RIGHT_IN_URL_RE = re.compile('%s(%s)%s' % (LOOKBEHIND_NOT_LETTER,
+ RIGHT,
+ LOOKAHEAD_FOR_CLOSING_PAREN),
+ re.I)
+LTR_IN_URL_RE = re.compile('%s(%s)%s' % (LOOKBEHIND_NOT_LETTER,
+ LTR,
+ LOOKAHEAD_FOR_CLOSING_PAREN),
+ re.I)
+RTL_IN_URL_RE = re.compile('%s(%s)%s' % (LOOKBEHIND_NOT_LETTER,
+ RTL,
+ LOOKAHEAD_FOR_CLOSING_PAREN),
+ re.I)
+
+COMMENT_RE = re.compile('(%s)' % csslex.COMMENT, re.I)
+
+NOFLIP_TOKEN = r'\@noflip'
+# The NOFLIP_TOKEN inside of a comment. For now, this requires that comments
+# be in the input, which means users of a css compiler would have to run
+# this script first if they want this functionality.
+NOFLIP_ANNOTATION = r'/\*%s%s%s\*/' % (csslex.WHITESPACE,
+ NOFLIP_TOKEN,
+ csslex. WHITESPACE)
+
+# After a NOFLIP_ANNOTATION, and within a class selector, we want to be able
+# to set aside a single rule not to be flipped. We can do this by matching
+# our NOFLIP annotation and then using a lookahead to make sure there is not
+# an opening brace before the match.
+NOFLIP_SINGLE_RE = re.compile(r'(%s%s[^;}]+;?)' % (NOFLIP_ANNOTATION,
+ LOOKAHEAD_NOT_OPEN_BRACE),
+ re.I)
+
+# After a NOFLIP_ANNOTATION, we want to grab anything up until the next } which
+# means the entire following class block. This will prevent all of its
+# declarations from being flipped.
+NOFLIP_CLASS_RE = re.compile(r'(%s%s})' % (NOFLIP_ANNOTATION,
+ CHARS_WITHIN_SELECTOR),
+ re.I)
+
+
+class Tokenizer:
+ """Replaces any CSS comments with string tokens and vice versa."""
+
+ def __init__(self, token_re, token_string):
+ """Constructor for the Tokenizer.
+
+ Args:
+ token_re: A regex for the string to be replace by a token.
+ token_string: The string to put between token delimiters when tokenizing.
+ """
+ logging.debug('Tokenizer::init token_string=%s' % token_string)
+ self.token_re = token_re
+ self.token_string = token_string
+ self.originals = []
+
+ def Tokenize(self, line):
+ """Replaces any string matching token_re in line with string tokens.
+
+ By passing a function as an argument to the re.sub line below, we bypass
+ the usual rule where re.sub will only replace the left-most occurrence of
+ a match by calling the passed in function for each occurrence.
+
+ Args:
+ line: A line to replace token_re matches in.
+
+ Returns:
+ line: A line with token_re matches tokenized.
+ """
+ line = self.token_re.sub(self.TokenizeMatches, line)
+ logging.debug('Tokenizer::Tokenize returns: %s' % line)
+ return line
+
+ def DeTokenize(self, line):
+ """Replaces tokens with the original string.
+
+ Args:
+ line: A line with tokens.
+
+ Returns:
+ line with any tokens replaced by the original string.
+ """
+
+ # Put all of the comments back in by their comment token.
+ for i, original in enumerate(self.originals):
+ token = '%s%s_%s%s' % (TOKEN_DELIMITER, self.token_string, i + 1,
+ TOKEN_DELIMITER)
+ line = line.replace(token, original)
+ logging.debug('Tokenizer::DeTokenize i:%s w/%s' % (i, token))
+ logging.debug('Tokenizer::DeTokenize returns: %s' % line)
+ return line
+
+ def TokenizeMatches(self, m):
+ """Replaces matches with tokens and stores the originals.
+
+ Args:
+ m: A match object.
+
+ Returns:
+ A string token which replaces the CSS comment.
+ """
+ logging.debug('Tokenizer::TokenizeMatches %s' % m.group(1))
+ self.originals.append(m.group(1))
+ return '%s%s_%s%s' % (TOKEN_DELIMITER,
+ self.token_string,
+ len(self.originals),
+ TOKEN_DELIMITER)
+
+
+def FixBodyDirectionLtrAndRtl(line):
+ """Replaces ltr with rtl and vice versa ONLY in the body direction.
+
+ Args:
+ line: A string to replace instances of ltr with rtl.
+ Returns:
+ line with direction: ltr and direction: rtl swapped only in body selector.
+ line = FixBodyDirectionLtrAndRtl('body { direction:ltr }')
+ line will now be 'body { direction:rtl }'.
+ """
+
+ line = BODY_DIRECTION_LTR_RE.sub('\\1\\2\\3%s' % TMP_TOKEN, line)
+ line = BODY_DIRECTION_RTL_RE.sub('\\1\\2\\3%s' % LTR, line)
+ line = line.replace(TMP_TOKEN, RTL)
+ logging.debug('FixBodyDirectionLtrAndRtl returns: %s' % line)
+ return line
+
+
+def FixLeftAndRight(line):
+ """Replaces left with right and vice versa in line.
+
+ Args:
+ line: A string in which to perform the replacement.
+
+ Returns:
+ line with left and right swapped. For example:
+ line = FixLeftAndRight('padding-left: 2px; margin-right: 1px;')
+ line will now be 'padding-right: 2px; margin-left: 1px;'.
+ """
+
+ line = LEFT_RE.sub(TMP_TOKEN, line)
+ line = RIGHT_RE.sub(LEFT, line)
+ line = line.replace(TMP_TOKEN, RIGHT)
+ logging.debug('FixLeftAndRight returns: %s' % line)
+ return line
+
+
+def FixLeftAndRightInUrl(line):
+ """Replaces left with right and vice versa ONLY within background urls.
+
+ Args:
+ line: A string in which to replace left with right and vice versa.
+
+ Returns:
+ line with left and right swapped in the url string. For example:
+ line = FixLeftAndRightInUrl('background:url(right.png)')
+ line will now be 'background:url(left.png)'.
+ """
+
+ line = LEFT_IN_URL_RE.sub(TMP_TOKEN, line)
+ line = RIGHT_IN_URL_RE.sub(LEFT, line)
+ line = line.replace(TMP_TOKEN, RIGHT)
+ logging.debug('FixLeftAndRightInUrl returns: %s' % line)
+ return line
+
+
+def FixLtrAndRtlInUrl(line):
+ """Replaces ltr with rtl and vice versa ONLY within background urls.
+
+ Args:
+ line: A string in which to replace ltr with rtl and vice versa.
+
+ Returns:
+ line with ltr and rtl swapped in the url string. For example:
+ line = FixLtrAndRtlInUrl('background:url(rtl.png)')
+ line will now be 'background:url(ltr.png)'.
+ """
+
+ line = LTR_IN_URL_RE.sub(TMP_TOKEN, line)
+ line = RTL_IN_URL_RE.sub(LTR, line)
+ line = line.replace(TMP_TOKEN, RTL)
+ logging.debug('FixLtrAndRtlInUrl returns: %s' % line)
+ return line
+
+
+def FixCursorProperties(line):
+ """Fixes directional CSS cursor properties.
+
+ Args:
+ line: A string to fix CSS cursor properties in.
+
+ Returns:
+ line reformatted with the cursor properties substituted. For example:
+ line = FixCursorProperties('cursor: ne-resize')
+ line will now be 'cursor: nw-resize'.
+ """
+
+ line = CURSOR_EAST_RE.sub('\\1' + TMP_TOKEN, line)
+ line = CURSOR_WEST_RE.sub('\\1e-resize', line)
+ line = line.replace(TMP_TOKEN, 'w-resize')
+ logging.debug('FixCursorProperties returns: %s' % line)
+ return line
+
+
+def FixFourPartNotation(line):
+ """Fixes the second and fourth positions in 4 part CSS notation.
+
+ Args:
+ line: A string to fix 4 part CSS notation in.
+
+ Returns:
+ line reformatted with the 4 part notations swapped. For example:
+ line = FixFourPartNotation('padding: 1px 2px 3px 4px')
+ line will now be 'padding: 1px 4px 3px 2px'.
+ """
+ line = FOUR_NOTATION_QUANTITY_RE.sub('\\1 \\4 \\3 \\2', line)
+ line = FOUR_NOTATION_COLOR_RE.sub('\\1\\2 \\5 \\4 \\3', line)
+ logging.debug('FixFourPartNotation returns: %s' % line)
+ return line
+
+
+def FixBackgroundPosition(line):
+ """Fixes horizontal background percentage values in line.
+
+ Args:
+ line: A string to fix horizontal background position values in.
+
+ Returns:
+ line with horizontal background-position percentages flipped. For example:
+ line = FixBackgroundPosition('background-position: 75% 50%')
+ line will now be 'background-position: 25% 50%'.
+ """
+ line = BG_HORIZONTAL_PERCENTAGE_RE.sub(CalculateNewBackgroundPosition, line)
+ line = BG_HORIZONTAL_PERCENTAGE_X_RE.sub(CalculateNewBackgroundPositionX,
+ line)
+ logging.debug('FixBackgroundPosition returns: %s' % line)
+ return line
+
+
+def CalculateNewBackgroundPosition(m):
+ """Fixes horizontal background-position percentages.
+
+ This function should be used as an argument to re.sub since it needs to
+ perform replacement specific calculations.
+
+ Args:
+ m: A match object.
+
+ Returns:
+ A string with the horizontal background position percentage fixed.
+ BG_HORIZONTAL_PERCENTAGE_RE.sub(CalculateNewBackgroundPosition,
+ 'background-position: 75% 50%')
+ will return 'background-position: 25% 50%'.
+ """
+
+ # The flipped value is the offset from 100%
+ new_x = str(100-int(m.group(4)))
+
+ # m.group(1) may be None, and we need a string here.
+ if m.group(1):
+ position_string = m.group(1)
+ else:
+ position_string = ''
+
+ return 'background%s%s%s%s%%%s' % (position_string, m.group(2), m.group(3),
+ new_x, m.group(5))
+
+
+def CalculateNewBackgroundPositionX(m):
+ """Fixes percent based background-position-x.
+
+ This function should be used as an argument to re.sub since it needs to
+ perform replacement specific calculations.
+
+ Args:
+ m: A match object.
+
+ Returns:
+ A string with the background-position-x percentage fixed.
+ BG_HORIZONTAL_PERCENTAGE_X_RE.sub(CalculateNewBackgroundPositionX,
+ 'background-position-x: 75%')
+ will return 'background-position-x: 25%'.
+ """
+
+ # The flipped value is the offset from 100%
+ new_x = str(100-int(m.group(2)))
+
+ return 'background-position-x%s%s%%' % (m.group(1), new_x)
+
+
+def ChangeLeftToRightToLeft(lines,
+ swap_ltr_rtl_in_url=None,
+ swap_left_right_in_url=None):
+ """Turns lines into a stream and runs the fixing functions against it.
+
+ Args:
+ lines: A list of CSS lines.
+ swap_ltr_rtl_in_url: If set, overrides the corresponding global flag.
+ swap_left_right_in_url: If set, overrides the corresponding global flag.
+
+ Returns:
+ The same lines, but with left and right fixes.
+ """
+
+ global FLAGS
+
+ # Possibly override flags with params.
+ logging.debug('ChangeLeftToRightToLeft swap_ltr_rtl_in_url=%s, '
+ 'swap_left_right_in_url=%s' % (swap_ltr_rtl_in_url,
+ swap_left_right_in_url))
+ if swap_ltr_rtl_in_url is None:
+ swap_ltr_rtl_in_url = FLAGS['swap_ltr_rtl_in_url']
+ if swap_left_right_in_url is None:
+ swap_left_right_in_url = FLAGS['swap_left_right_in_url']
+
+ # Turns the array of lines into a single line stream.
+ logging.debug('LINES COUNT: %s' % len(lines))
+ line = TOKEN_LINES.join(lines)
+
+ # Tokenize any single line rules with the /* noflip */ annotation.
+ noflip_single_tokenizer = Tokenizer(NOFLIP_SINGLE_RE, 'NOFLIP_SINGLE')
+ line = noflip_single_tokenizer.Tokenize(line)
+
+ # Tokenize any class rules with the /* noflip */ annotation.
+ noflip_class_tokenizer = Tokenizer(NOFLIP_CLASS_RE, 'NOFLIP_CLASS')
+ line = noflip_class_tokenizer.Tokenize(line)
+
+ # Tokenize the comments so we can preserve them through the changes.
+ comment_tokenizer = Tokenizer(COMMENT_RE, 'C')
+ line = comment_tokenizer.Tokenize(line)
+
+ # Here starteth the various left/right orientation fixes.
+ line = FixBodyDirectionLtrAndRtl(line)
+
+ if swap_left_right_in_url:
+ line = FixLeftAndRightInUrl(line)
+
+ if swap_ltr_rtl_in_url:
+ line = FixLtrAndRtlInUrl(line)
+
+ line = FixLeftAndRight(line)
+ line = FixCursorProperties(line)
+ line = FixFourPartNotation(line)
+ line = FixBackgroundPosition(line)
+
+ # DeTokenize the single line noflips.
+ line = noflip_single_tokenizer.DeTokenize(line)
+
+ # DeTokenize the class-level noflips.
+ line = noflip_class_tokenizer.DeTokenize(line)
+
+ # DeTokenize the comments.
+ line = comment_tokenizer.DeTokenize(line)
+
+ # Rejoin the lines back together.
+ lines = line.split(TOKEN_LINES)
+
+ return lines
+
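For orientation, the whole pipeline can be exercised directly on a list of CSS strings (an illustrative sketch, not part of the patch; it assumes the FLAGS defaults defined earlier in the file):

    css = ['.nav { float: left; padding: 1px 2px 3px 4px }']
    print ChangeLeftToRightToLeft(css)
    # expected, given the fixes above:
    # ['.nav { float: right; padding: 1px 4px 3px 2px }']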
+def usage():
+ """Prints out usage information."""
+
+ print 'Usage:'
+ print ' ./cssjanus.py < file.css > file-rtl.css'
+ print 'Flags:'
+ print ' --swap_left_right_in_url: Fixes "left"/"right" string within urls.'
+ print ' Ex: ./cssjanus.py --swap_left_right_in_url < file.css > file_rtl.css'
+ print ' --swap_ltr_rtl_in_url: Fixes "ltr"/"rtl" string within urls.'
+ print ' Ex: ./cssjanus.py --swap_ltr_rtl_in_url < file.css > file_rtl.css'
+
+def setflags(opts):
+ """Parse the passed in command line arguments and set the FLAGS global.
+
+ Args:
+ opts: getopt iterable intercepted from argv.
+ """
+
+ global FLAGS
+
+ # Parse the arguments.
+ for opt, arg in opts:
+ logging.debug('opt: %s, arg: %s' % (opt, arg))
+ if opt in ("-h", "--help"):
+ usage()
+ sys.exit()
+ elif opt in ("-d", "--debug"):
+ logging.getLogger().setLevel(logging.DEBUG)
+ elif opt == '--swap_ltr_rtl_in_url':
+ FLAGS['swap_ltr_rtl_in_url'] = True
+ elif opt == '--swap_left_right_in_url':
+ FLAGS['swap_left_right_in_url'] = True
+
+
+def main(argv):
+ """Sends stdin lines to ChangeLeftToRightToLeft and writes to stdout."""
+
+ # Define the flags.
+ try:
+ opts, args = getopt.getopt(argv, 'hd', ['help', 'debug',
+ 'swap_left_right_in_url',
+ 'swap_ltr_rtl_in_url'])
+ except getopt.GetoptError:
+ usage()
+ sys.exit(2)
+
+ # Parse and set the flags.
+ setflags(opts)
+
+ # Call the main routine with all our functionality.
+ fixed_lines = ChangeLeftToRightToLeft(sys.stdin.readlines())
+ sys.stdout.write(''.join(fixed_lines))
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/maintenance/cssjanus/csslex.py b/maintenance/cssjanus/csslex.py
new file mode 100644
index 00000000..1fc7304e
--- /dev/null
+++ b/maintenance/cssjanus/csslex.py
@@ -0,0 +1,114 @@
+#!/usr/bin/python
+#
+# Copyright 2007 Google Inc. All Rights Reserved.
+
+"""CSS Lexical Grammar rules.
+
+CSS lexical grammar from http://www.w3.org/TR/CSS21/grammar.html
+"""
+
+__author__ = ['elsigh@google.com (Lindsey Simon)',
+ 'msamuel@google.com (Mike Samuel)']
+
+# public symbols
+__all__ = [ "NEWLINE", "HEX", "NON_ASCII", "UNICODE", "ESCAPE", "NMSTART", "NMCHAR", "STRING1", "STRING2", "IDENT", "NAME", "HASH", "NUM", "STRING", "URL", "SPACE", "WHITESPACE", "COMMENT", "QUANTITY", "PUNC" ]
+
+# The comments below are mostly copied verbatim from the grammar.
+
+# "@import" {return IMPORT_SYM;}
+# "@page" {return PAGE_SYM;}
+# "@media" {return MEDIA_SYM;}
+# "@charset" {return CHARSET_SYM;}
+KEYWORD = r'(?:\@(?:import|page|media|charset))'
+
+# nl \n|\r\n|\r|\f ; a newline
+NEWLINE = r'\n|\r\n|\r|\f'
+
+# h [0-9a-f] ; a hexadecimal digit
+HEX = r'[0-9a-f]'
+
+# nonascii [\200-\377]
+NON_ASCII = r'[\200-\377]'
+
+# unicode \\{h}{1,6}(\r\n|[ \t\r\n\f])?
+UNICODE = r'(?:(?:\\' + HEX + r'{1,6})(?:\r\n|[ \t\r\n\f])?)'
+
+# escape {unicode}|\\[^\r\n\f0-9a-f]
+ESCAPE = r'(?:' + UNICODE + r'|\\[^\r\n\f0-9a-f])'
+
+# nmstart [_a-z]|{nonascii}|{escape}
+NMSTART = r'(?:[_a-z]|' + NON_ASCII + r'|' + ESCAPE + r')'
+
+# nmchar [_a-z0-9-]|{nonascii}|{escape}
+NMCHAR = r'(?:[_a-z0-9-]|' + NON_ASCII + r'|' + ESCAPE + r')'
+
+# ident -?{nmstart}{nmchar}*
+IDENT = r'-?' + NMSTART + NMCHAR + '*'
+
+# name {nmchar}+
+NAME = NMCHAR + r'+'
+
+# hash
+HASH = r'#' + NAME
+
+# string1 \"([^\n\r\f\\"]|\\{nl}|{escape})*\" ; "string"
+STRING1 = r'"(?:[^\"\\]|\\.)*"'
+
+# string2 \'([^\n\r\f\\']|\\{nl}|{escape})*\' ; 'string'
+STRING2 = r"'(?:[^\'\\]|\\.)*'"
+
+# string {string1}|{string2}
+STRING = '(?:' + STRING1 + r'|' + STRING2 + ')'
+
+# num [0-9]+|[0-9]*"."[0-9]+
+NUM = r'(?:[0-9]*\.[0-9]+|[0-9]+)'
+
+# s [ \t\r\n\f]
+SPACE = r'[ \t\r\n\f]'
+
+# w {s}*
+WHITESPACE = '(?:' + SPACE + r'*)'
+
+# url special chars
+URL_SPECIAL_CHARS = r'[!#$%&*-~]'
+
+# url chars ({url_special_chars}|{nonascii}|{escape})*
+URL_CHARS = r'(?:%s|%s|%s)*' % (URL_SPECIAL_CHARS, NON_ASCII, ESCAPE)
+
+# url
+URL = r'url\(%s(%s|%s)%s\)' % (WHITESPACE, STRING, URL_CHARS, WHITESPACE)
+
+# comments
+# see http://www.w3.org/TR/CSS21/grammar.html
+COMMENT = r'/\*[^*]*\*+([^/*][^*]*\*+)*/'
+
+# {E}{M} {return EMS;}
+# {E}{X} {return EXS;}
+# {P}{X} {return LENGTH;}
+# {C}{M} {return LENGTH;}
+# {M}{M} {return LENGTH;}
+# {I}{N} {return LENGTH;}
+# {P}{T} {return LENGTH;}
+# {P}{C} {return LENGTH;}
+# {D}{E}{G} {return ANGLE;}
+# {R}{A}{D} {return ANGLE;}
+# {G}{R}{A}{D} {return ANGLE;}
+# {M}{S} {return TIME;}
+# {S} {return TIME;}
+# {H}{Z} {return FREQ;}
+# {K}{H}{Z} {return FREQ;}
+# % {return PERCENTAGE;}
+UNIT = r'(?:em|ex|px|cm|mm|in|pt|pc|deg|rad|grad|ms|s|hz|khz|%)'
+
+# {num}{UNIT|IDENT} {return NUMBER;}
+QUANTITY = '%s(?:%s%s|%s)?' % (NUM, WHITESPACE, UNIT, IDENT)
+
+# "<!--" {return CDO;}
+# "-->" {return CDC;}
+# "~=" {return INCLUDES;}
+# "|=" {return DASHMATCH;}
+# {w}"{" {return LBRACE;}
+# {w}"+" {return PLUS;}
+# {w}">" {return GREATER;}
+# {w}"," {return COMMA;}
+PUNC = r'<!--|-->|~=|\|=|[\{\+>,:;]'
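These constants are plain pattern fragments meant to be composed and compiled by consumers such as cssjanus.py; a quick sketch of that usage (illustrative only, not part of the patch):

    import re
    import csslex

    # Match a CSS quantity such as '10px' or '1.5em' (case-insensitive, as cssjanus does).
    quantity_re = re.compile(csslex.QUANTITY, re.I)
    print quantity_re.match('10px').group(0)   # -> '10px'
    print quantity_re.match('1.5em').group(0)  # -> '1.5em'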
diff --git a/maintenance/deleteArchivedFiles.inc b/maintenance/deleteArchivedFiles.inc
new file mode 100644
index 00000000..e0ac225e
--- /dev/null
+++ b/maintenance/deleteArchivedFiles.inc
@@ -0,0 +1,62 @@
+<?php
+/**
+ * Core functions for deleteArchivedFiles.php
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License along
+ * with this program; if not, write to the Free Software Foundation, Inc.,
+ * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ * http://www.gnu.org/copyleft/gpl.html
+ *
+ * @ingroup Maintenance
+ */
+
+class DeleteArchivedFilesImplementation {
+ static public function doDelete( $output, $force ) {
+ # Data should come off the master, wrapped in a transaction
+ $dbw = wfGetDB( DB_MASTER );
+ $dbw->begin();
+ $tbl_arch = $dbw->tableName( 'filearchive' );
+ $repo = RepoGroup::singleton()->getLocalRepo();
+ # Get "active" revisions from the filearchive table
+ $output->handleOutput( "Searching for and deleting archived files...\n" );
+ $res = $dbw->query( "SELECT fa_id,fa_storage_group,fa_storage_key FROM $tbl_arch" );
+ $count = 0;
+ foreach ( $res as $row ) {
+ $key = $row->fa_storage_key;
+ $group = $row->fa_storage_group;
+ $id = $row->fa_id;
+ $path = $repo->getZonePath( 'deleted' ) . '/' . $repo->getDeletedHashPath( $key ) . $key;
+ $sha1 = substr( $key, 0, strcspn( $key, '.' ) );
+ // Check if the file is used anywhere...
+ $inuse = $dbw->selectField( 'oldimage', '1',
+ array( 'oi_sha1' => $sha1,
+ 'oi_deleted & ' . File::DELETED_FILE => File::DELETED_FILE ),
+ __METHOD__,
+ array( 'FOR UPDATE' )
+ );
+ if ( $path && file_exists( $path ) && !$inuse ) {
+ unlink( $path ); // delete
+ $count++;
+ $dbw->query( "DELETE FROM $tbl_arch WHERE fa_id = $id" );
+ } else {
+ $output->handleOutput( "Notice - file '$key' not found in group '$group'\n" );
+ if ( $force ) {
+ $output->handleOutput( "Got --force, deleting DB entry\n" );
+ $dbw->query( "DELETE FROM $tbl_arch WHERE fa_id = $id" );
+ }
+ }
+ }
+ $dbw->commit();
+ $output->handleOutput( "Done! [$count file(s)]\n" );
+ }
+}
\ No newline at end of file
diff --git a/maintenance/deleteArchivedFiles.php b/maintenance/deleteArchivedFiles.php
index af4bbb74..6067c807 100644
--- a/maintenance/deleteArchivedFiles.php
+++ b/maintenance/deleteArchivedFiles.php
@@ -23,7 +23,8 @@
* Based on deleteOldRevisions.php by Rob Church
*/
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/deleteArchivedFiles.inc' );
class DeleteArchivedFiles extends Maintenance {
public function __construct() {
@@ -33,50 +34,19 @@ class DeleteArchivedFiles extends Maintenance {
$this->addOption( 'force', 'Force deletion of rows from filearchive' );
}
+ public function handleOutput( $str ) {
+ return $this->output( $str );
+ }
+
public function execute() {
- if( !$this->hasOption('delete') ) {
+ if ( !$this->hasOption( 'delete' ) ) {
$this->output( "Use --delete to actually confirm this script\n" );
return;
}
$force = $this->hasOption( 'force' );
- # Data should come off the master, wrapped in a transaction
- $dbw = wfGetDB( DB_MASTER );
- $dbw->begin();
- $tbl_arch = $dbw->tableName( 'filearchive' );
- $repo = RepoGroup::singleton()->getLocalRepo();
- # Get "active" revisions from the filearchive table
- $this->output( "Searching for and deleting archived files...\n" );
- $res = $dbw->query( "SELECT fa_id,fa_storage_group,fa_storage_key FROM $tbl_arch" );
- $count = 0;
- foreach( $res as $row ) {
- $key = $row->fa_storage_key;
- $group = $row->fa_storage_group;
- $id = $row->fa_id;
- $path = $repo->getZonePath( 'deleted' ).'/'.$repo->getDeletedHashPath($key).$key;
- $sha1 = substr( $key, 0, strcspn( $key, '.' ) );
- // Check if the file is used anywhere...
- $inuse = $dbw->selectField( 'oldimage', '1',
- array( 'oi_sha1' => $sha1,
- 'oi_deleted & '.File::DELETED_FILE => File::DELETED_FILE ),
- __METHOD__,
- array( 'FOR UPDATE' )
- );
- if ( $path && file_exists($path) && !$inuse ) {
- unlink($path); // delete
- $count++;
- $dbw->query( "DELETE FROM $tbl_arch WHERE fa_id = $id" );
- } else {
- $this->output( "Notice - file '$key' not found in group '$group'\n" );
- if ( $force ) {
- $this->output( "Got --force, deleting DB entry\n" );
- $dbw->query( "DELETE FROM $tbl_arch WHERE fa_id = $id" );
- }
- }
- }
- $dbw->commit();
- $this->output( "Done! [$count file(s)]\n" );
+ DeleteArchivedFilesImplementation::doDelete( $this, $force );
}
}
$maintClass = "DeleteArchivedFiles";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
diff --git a/maintenance/deleteArchivedRevisions.inc b/maintenance/deleteArchivedRevisions.inc
new file mode 100644
index 00000000..10bd4cff
--- /dev/null
+++ b/maintenance/deleteArchivedRevisions.inc
@@ -0,0 +1,57 @@
+<?php
+
+/**
+ * Delete archived (deleted from public) revisions from the database
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License along
+ * with this program; if not, write to the Free Software Foundation, Inc.,
+ * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ * http://www.gnu.org/copyleft/gpl.html
+ *
+ * @ingroup Maintenance
+ */
+
+class DeleteArchivedRevisionsImplementation {
+
+ /**
+ * Perform the delete on archived revisions.
+ *
+ * @param $maint Object An object (typically of class Maintenance)
+ * that implements two methods: handleOutput() and
+ * purgeRedundantText(). See Maintenance for a description of
+ * those methods.
+ */
+ static public function doDelete( $maint ) {
+ $dbw = wfGetDB( DB_MASTER );
+
+ $dbw->begin();
+
+ $tbl_arch = $dbw->tableName( 'archive' );
+
+ # Delete as appropriate
+ $maint->handleOutput( "Deleting archived revisions... " );
+ $dbw->query( "DELETE FROM $tbl_arch" );
+
+ $count = $dbw->affectedRows();
+ $deletedRows = $count != 0;
+
+ $maint->handleOutput( "done. $count revisions deleted.\n" );
+
+ # This bit's done
+ # Purge redundant text records
+ $dbw->commit();
+ if ( $deletedRows ) {
+ $maint->purgeRedundantText( true );
+ }
+ }
+}
\ No newline at end of file
diff --git a/maintenance/deleteArchivedRevisions.php b/maintenance/deleteArchivedRevisions.php
index c3f8bf11..0faa0abb 100644
--- a/maintenance/deleteArchivedRevisions.php
+++ b/maintenance/deleteArchivedRevisions.php
@@ -23,7 +23,8 @@
* Shamelessly stolen from deleteOldRevisions.php by Rob Church :)
*/
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/deleteArchivedRevisions.inc' );
class DeleteArchivedRevisions extends Maintenance {
public function __construct() {
@@ -32,31 +33,17 @@ class DeleteArchivedRevisions extends Maintenance {
$this->addOption( 'delete', 'Performs the deletion' );
}
+ public function handleOutput( $str ) {
+ $this->output( $str );
+ }
+
public function execute() {
$this->output( "Delete archived revisions\n\n" );
# Data should come off the master, wrapped in a transaction
- $dbw = wfGetDB( DB_MASTER );
- if( $this->hasOption('delete') ) {
- $dbw->begin();
-
- $tbl_arch = $dbw->tableName( 'archive' );
-
- # Delete as appropriate
- $this->output( "Deleting archived revisions... " );
- $dbw->query( "TRUNCATE TABLE $tbl_arch" );
-
- $count = $dbw->affectedRows();
- $deletedRows = $count != 0;
-
- $this->output( "done. $count revisions deleted.\n" );
-
- # This bit's done
- # Purge redundant text records
- $dbw->commit();
- if( $deletedRows ) {
- $this->purgeRedundantText( true );
- }
+ if ( $this->hasOption( 'delete' ) ) {
+ DeleteArchivedRevisionsImplementation::doDelete( $this );
} else {
+ $dbw = wfGetDB( DB_MASTER );
$res = $dbw->selectRow( 'archive', 'COUNT(*) as count', array(), __FUNCTION__ );
$this->output( "Found {$res->count} revisions to delete.\n" );
$this->output( "Please run the script again with the --delete option to really delete the revisions.\n" );
@@ -65,4 +52,4 @@ class DeleteArchivedRevisions extends Maintenance {
}
$maintClass = "DeleteArchivedRevisions";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
diff --git a/maintenance/deleteBatch.php b/maintenance/deleteBatch.php
index 56afd86c..c8bb4803 100644
--- a/maintenance/deleteBatch.php
+++ b/maintenance/deleteBatch.php
@@ -26,39 +26,40 @@
*
* @ingroup Maintenance
*/
-
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class DeleteBatch extends Maintenance {
-
+
public function __construct() {
parent::__construct();
$this->mDescription = "Deletes a batch of pages";
$this->addOption( 'u', "User to perform deletion", false, true );
$this->addOption( 'r', "Reason to delete page", false, true );
$this->addOption( 'i', "Interval to sleep between deletions" );
- $this->addArg( 'listfile', 'File with titles to delete, separated by newlines', false );
+ $this->addArg( 'listfile', 'File with titles to delete, separated by newlines. ' .
+ 'If not given, stdin will be used.', false );
}
-
+
public function execute() {
global $wgUser;
# Change to current working directory
$oldCwd = getcwd();
chdir( $oldCwd );
-
+
# Options processing
$user = $this->getOption( 'u', 'Delete page script' );
$reason = $this->getOption( 'r', '' );
$interval = $this->getOption( 'i', 0 );
- if( $this->hasArg() ) {
+ if ( $this->hasArg() ) {
$file = fopen( $this->getArg(), 'r' );
} else {
$file = $this->getStdin();
}
# Setup
- if( !$file ) {
+ if ( !$file ) {
$this->error( "Unable to read file, exiting", true );
}
$wgUser = User::newFromName( $user );
@@ -75,18 +76,18 @@ class DeleteBatch extends Maintenance {
$this->output( "Invalid title '$line' on line $linenum\n" );
continue;
}
- if( !$page->exists() ) {
+ if ( !$page->exists() ) {
$this->output( "Skipping nonexistent page '$line'\n" );
continue;
}
-
-
+
+
$this->output( $page->getPrefixedText() );
$dbw->begin();
- if( $page->getNamespace() == NS_FILE ) {
+ if ( $page->getNamespace() == NS_FILE ) {
$art = new ImagePage( $page );
$img = wfFindFile( $art->mTitle );
- if( !$img || !$img->delete( $reason ) ) {
+ if ( !$img || !$img->delete( $reason ) ) {
$this->output( "FAILED to delete image file... " );
}
} else {
@@ -99,7 +100,7 @@ class DeleteBatch extends Maintenance {
} else {
$this->output( " FAILED to delete article\n" );
}
-
+
if ( $interval ) {
sleep( $interval );
}
@@ -109,4 +110,4 @@ class DeleteBatch extends Maintenance {
}
$maintClass = "DeleteBatch";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
diff --git a/maintenance/deleteDefaultMessages.php b/maintenance/deleteDefaultMessages.php
index 3f0e1b1c..fc482ac0 100644
--- a/maintenance/deleteDefaultMessages.php
+++ b/maintenance/deleteDefaultMessages.php
@@ -1,6 +1,6 @@
<?php
/**
- * Deletes all pages in the MediaWiki namespace which were last edited by
+ * Deletes all pages in the MediaWiki namespace which were last edited by
* "MediaWiki default".
*
* This program is free software; you can redistribute it and/or modify
@@ -21,7 +21,7 @@
* @ingroup Maintenance
*/
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class DeleteDefaultMessages extends Maintenance {
public function __construct() {
@@ -31,17 +31,11 @@ class DeleteDefaultMessages extends Maintenance {
}
public function execute() {
- self::reallyExecute();
- }
-
- public static function reallyExecute() {
+
$user = 'MediaWiki default';
$reason = 'No longer required';
- global $wgUser;
- $wgUser = User::newFromName( $user );
- $wgUser->addGroup( 'bot' );
-
+ $this->output( "Checking existence of old default messages..." );
$dbr = wfGetDB( DB_SLAVE );
$res = $dbr->select( array( 'page', 'revision' ),
array( 'page_namespace', 'page_title' ),
@@ -52,6 +46,20 @@ class DeleteDefaultMessages extends Maintenance {
)
);
+ if( $dbr->numRows( $res ) == 0 ) {
+ # No more messages left
+ $this->output( "done.\n" );
+ return;
+ }
+
+ # Deletions will be made by $user, temporarily added to the bot group
+ # in order to hide them in RecentChanges.
+ global $wgUser;
+ $wgUser = User::newFromName( $user );
+ $wgUser->addGroup( 'bot' );
+
+ # Handle deletion
+ $this->output( "\n...deleting old default messages (this may take a long time!)...", 'msg' );
$dbw = wfGetDB( DB_MASTER );
foreach ( $res as $row ) {
@@ -65,8 +73,10 @@ class DeleteDefaultMessages extends Maintenance {
$article->doDeleteArticle( $reason );
$dbw->commit();
}
+
+ $this->output( 'done!', 'msg' );
}
}
$maintClass = "DeleteDefaultMessages";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
diff --git a/maintenance/deleteImageMemcached.php b/maintenance/deleteImageMemcached.php
index 9becddb8..007f0d17 100644
--- a/maintenance/deleteImageMemcached.php
+++ b/maintenance/deleteImageMemcached.php
@@ -23,7 +23,7 @@
* @ingroup Maintenance
*/
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class DeleteImageCache extends Maintenance {
public function __construct() {
@@ -36,8 +36,8 @@ class DeleteImageCache extends Maintenance {
public function execute() {
global $wgMemc;
- $until = preg_replace( "/[^\d]/", '', $this->getOption('until') );
- $sleep = (int)$this->getOption('sleep') * 1000; // milliseconds
+ $until = preg_replace( "/[^\d]/", '', $this->getOption( 'until' ) );
+ $sleep = (int)$this->getOption( 'sleep' ) * 1000; // milliseconds
ini_set( 'display_errors', false );
@@ -53,12 +53,12 @@ class DeleteImageCache extends Maintenance {
$total = $this->getImageCount();
foreach ( $res as $row ) {
- if ($i % $this->report == 0)
- $this->output( sprintf("%s: %13s done (%s)\n", wfWikiID(), "$i/$total", wfPercent( $i / $total * 100 ) ) );
+ if ( $i % $this->report == 0 )
+ $this->output( sprintf( "%s: %13s done (%s)\n", wfWikiID(), "$i/$total", wfPercent( $i / $total * 100 ) ) );
$md5 = md5( $row->img_name );
$wgMemc->delete( wfMemcKey( 'Image', $md5 ) );
- if ($sleep != 0)
+ if ( $sleep != 0 )
usleep( $sleep );
++$i;
@@ -72,4 +72,4 @@ class DeleteImageCache extends Maintenance {
}
$maintClass = "DeleteImageCache";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
diff --git a/maintenance/deleteOldRevisions.php b/maintenance/deleteOldRevisions.php
index 1f4dc4c9..ba76e9e9 100644
--- a/maintenance/deleteOldRevisions.php
+++ b/maintenance/deleteOldRevisions.php
@@ -22,7 +22,7 @@
* @author Rob Church <robchur@gmail.com>
*/
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class DeleteOldRevisions extends Maintenance {
public function __construct() {
@@ -31,24 +31,24 @@ class DeleteOldRevisions extends Maintenance {
$this->addOption( 'delete', 'Actually perform the deletion' );
$this->addOption( 'page_id', 'List of page ids to work on', false );
}
-
+
public function execute() {
$this->output( "Delete old revisions\n\n" );
$this->doDelete( $this->hasOption( 'delete' ), $this->mArgs );
}
-
+
function doDelete( $delete = false, $args = array() ) {
# Data should come off the master, wrapped in a transaction
$dbw = wfGetDB( DB_MASTER );
$dbw->begin();
-
+
$tbl_pag = $dbw->tableName( 'page' );
$tbl_rev = $dbw->tableName( 'revision' );
-
+
$pageIdClause = '';
$revPageClause = '';
-
+
# If a list of page_ids was provided, limit results to that set of page_ids
if ( sizeof( $args ) > 0 ) {
$pageIdList = implode( ',', $args );
@@ -56,46 +56,46 @@ class DeleteOldRevisions extends Maintenance {
$revPageClause = " AND rev_page IN ({$pageIdList})";
$this->output( "Limiting to {$tbl_pag}.page_id IN ({$pageIdList})\n" );
}
-
+
# Get "active" revisions from the page table
$this->output( "Searching for active revisions..." );
$res = $dbw->query( "SELECT page_latest FROM $tbl_pag{$pageIdClause}" );
- foreach( $res as $row ) {
+ foreach ( $res as $row ) {
$cur[] = $row->page_latest;
}
$this->output( "done.\n" );
-
+
# Get all revisions that aren't in this set
$old = array();
$this->output( "Searching for inactive revisions..." );
$set = implode( ', ', $cur );
$res = $dbw->query( "SELECT rev_id FROM $tbl_rev WHERE rev_id NOT IN ( $set ){$revPageClause}" );
- foreach( $res as $row ) {
+ foreach ( $res as $row ) {
$old[] = $row->rev_id;
}
$this->output( "done.\n" );
-
+
# Inform the user of what we're going to do
$count = count( $old );
$this->output( "$count old revisions found.\n" );
-
+
# Delete as appropriate
- if( $delete && $count ) {
+ if ( $delete && $count ) {
$this->output( "Deleting..." );
$set = implode( ', ', $old );
$dbw->query( "DELETE FROM $tbl_rev WHERE rev_id IN ( $set )" );
$this->output( "done.\n" );
}
-
+
# This bit's done
# Purge redundant text records
$dbw->commit();
- if( $delete ) {
+ if ( $delete ) {
$this->purgeRedundantText( true );
}
}
}
$maintClass = "DeleteOldRevisions";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
diff --git a/maintenance/deleteOrphanedRevisions.php b/maintenance/deleteOrphanedRevisions.php
index 1146befb..e972d1fa 100644
--- a/maintenance/deleteOrphanedRevisions.php
+++ b/maintenance/deleteOrphanedRevisions.php
@@ -24,7 +24,7 @@
* @todo More efficient cleanup of text records
*/
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class DeleteOrphanedRevisions extends Maintenance {
public function __construct() {
@@ -36,7 +36,7 @@ class DeleteOrphanedRevisions extends Maintenance {
public function execute() {
$this->output( "Delete Orphaned Revisions\n" );
- $report = $this->hasOption('report');
+ $report = $this->hasOption( 'report' );
$dbw = wfGetDB( DB_MASTER );
$dbw->begin();
@@ -46,45 +46,44 @@ class DeleteOrphanedRevisions extends Maintenance {
$this->output( "Checking for orphaned revisions..." );
$sql = "SELECT rev_id FROM {$revision} LEFT JOIN {$page} ON rev_page = page_id WHERE page_namespace IS NULL";
$res = $dbw->query( $sql, 'deleteOrphanedRevisions' );
-
+
# Stash 'em all up for deletion (if needed)
$revisions = array();
- foreach( $res as $row )
+ foreach ( $res as $row )
$revisions[] = $row->rev_id;
- $dbw->freeResult( $res );
$count = count( $revisions );
$this->output( "found {$count}.\n" );
-
+
# Nothing to do?
- if( $report || $count == 0 ) {
+ if ( $report || $count == 0 ) {
$dbw->commit();
- exit(0);
+ exit( 0 );
}
-
+
# Delete each revision
$this->output( "Deleting..." );
$this->deleteRevs( $revisions, $dbw );
$this->output( "done.\n" );
-
+
# Close the transaction and call the script to purge unused text records
$dbw->commit();
$this->purgeRedundantText( true );
}
-
+
/**
* Delete one or more revisions from the database
* Do this inside a transaction
*
* @param $id Array of revision id values
- * @param $db Database class (needs to be a master)
+ * @param $dbw Database class (needs to be a master)
*/
private function deleteRevs( $id, &$dbw ) {
- if( !is_array( $id ) )
+ if ( !is_array( $id ) )
$id = array( $id );
$dbw->delete( 'revision', array( 'rev_id' => $id ), __METHOD__ );
}
}
$maintClass = "DeleteOrphanedRevisions";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
diff --git a/maintenance/deleteRevision.php b/maintenance/deleteRevision.php
index 5dc0b59f..5e8ecaac 100644
--- a/maintenance/deleteRevision.php
+++ b/maintenance/deleteRevision.php
@@ -20,24 +20,24 @@
* @ingroup Maintenance
*/
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class DeleteRevision extends Maintenance {
-
+
public function __construct() {
parent::__construct();
$this->mDescription = "Delete one or more revisions by moving them to the archive table";
}
-
+
public function execute() {
- if( count( $this->mArgs ) == 0 ) {
+ if ( count( $this->mArgs ) == 0 ) {
$this->error( "No revisions specified", true );
}
- $this->output( "Deleting revision(s) " . implode( ',', $this->mArgs ) .
+ $this->output( "Deleting revision(s) " . implode( ',', $this->mArgs ) .
" from " . wfWikiID() . "...\n" );
$dbw = wfGetDB( DB_MASTER );
-
+
$affected = 0;
foreach ( $this->mArgs as $revID ) {
$dbw->insertSelect( 'archive', array( 'page', 'revision' ),
@@ -78,4 +78,4 @@ class DeleteRevision extends Maintenance {
}
$maintClass = "DeleteRevision";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
diff --git a/maintenance/deleteSelfExternals.php b/maintenance/deleteSelfExternals.php
index 1ab2839e..db23e92c 100644
--- a/maintenance/deleteSelfExternals.php
+++ b/maintenance/deleteSelfExternals.php
@@ -2,7 +2,7 @@
/**
* We want to make this whole thing as seamless as possible to the
* end-user. Unfortunately, we can't do _all_ of the work in the class
- * because A) included files are not in global scope, but in the scope
+ * because A) included files are not in global scope, but in the scope
* of their caller, and B) MediaWiki has way too many globals. So instead
* we'll kinda fake it, and do the requires() inline. <3 PHP
*
@@ -24,7 +24,7 @@
* @ingroup Maintenance
*/
-require_once( "Maintenance.php" );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class DeleteSelfExternals extends Maintenance {
@@ -33,22 +33,22 @@ class DeleteSelfExternals extends Maintenance {
$this->mDescription = 'Delete self-references to $wgServer from externallinks';
$this->mBatchSize = 1000;
}
-
+
public function execute() {
global $wgServer;
$this->output( "Deleting self externals from $wgServer\n" );
- $db = wfGetDB(DB_MASTER);
- while (1) {
+ $db = wfGetDB( DB_MASTER );
+ while ( 1 ) {
wfWaitForSlaves( 2 );
$db->commit();
- $q = $db->limitResult( "DELETE /* deleteSelfExternals */ FROM externallinks WHERE el_to"
+ $q = $db->limitResult( "DELETE /* deleteSelfExternals */ FROM externallinks WHERE el_to"
. $db->buildLike( $wgServer . '/', $db->anyString() ), $this->mBatchSize );
$this->output( "Deleting a batch\n" );
- $db->query($q);
- if (!$db->affectedRows()) exit(0);
+ $db->query( $q );
+ if ( !$db->affectedRows() ) return;
}
}
}
$maintClass = "DeleteSelfExternals";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
diff --git a/maintenance/doMaintenance.php b/maintenance/doMaintenance.php
index 008c5b87..a9f5fae7 100644
--- a/maintenance/doMaintenance.php
+++ b/maintenance/doMaintenance.php
@@ -2,7 +2,7 @@
/**
* We want to make this whole thing as seamless as possible to the
* end-user. Unfortunately, we can't do _all_ of the work in the class
- * because A) included files are not in global scope, but in the scope
+ * because A) included files are not in global scope, but in the scope
* of their caller, and B) MediaWiki has way too many globals. So instead
* we'll kinda fake it, and do the requires() inline. <3 PHP
*
@@ -26,18 +26,21 @@
* @ingroup Maintenance
*/
-if ( !defined( 'DO_MAINTENANCE' ) ) {
+if ( !defined( 'RUN_MAINTENANCE_IF_MAIN' ) ) {
echo "This file must be included after Maintenance.php\n";
exit( 1 );
}
-if( !$maintClass || !class_exists( $maintClass ) ) {
- echo "\$maintClass is not set or is set to a non-existent class.\n";
- exit( 1 );
+// Not invoked from file scope, so halt execution (the includer probably just wanted the class).
+// Scripts using commandLine.inc (old-school maintenance) cannot be included this way,
+// so they always proceed with execution.
+if( !Maintenance::shouldExecute() && $maintClass != 'CommandLineInc' ) {
+ return;
}
-if( defined( 'MW_NO_SETUP' ) ) {
- return;
+if ( !$maintClass || !class_exists( $maintClass ) ) {
+ echo "\$maintClass is not set or is set to a non-existent class.\n";
+ exit( 1 );
}
// Get an object to start us off
@@ -51,6 +54,7 @@ $maintenance->setup();
$self = $maintenance->getName();
# Setup the profiler
+global $IP;
if ( file_exists( "$IP/StartProfiler.php" ) ) {
require_once( "$IP/StartProfiler.php" );
} else {
@@ -60,10 +64,19 @@ if ( file_exists( "$IP/StartProfiler.php" ) ) {
// Some other requires
require_once( "$IP/includes/AutoLoader.php" );
require_once( "$IP/includes/Defines.php" );
+require_once( "$IP/includes/DefaultSettings.php" );
-// Load settings, using wikimedia-mode if needed
-// Fixme: replace this hack with general farm-friendly code
-if( file_exists( "$IP/wmf-config/wikimedia-mode" ) ) {
+if ( defined( 'MW_CONFIG_CALLBACK' ) ) {
+ # Use a callback function to configure MediaWiki
+ $callback = MW_CONFIG_CALLBACK;
+ # PHP 5.1 doesn't support "class::method" for call_user_func, so split it
+ if ( strpos( $callback, '::' ) !== false ) {
+ $callback = explode( '::', $callback, 2 );
+ }
+ call_user_func( $callback );
+} elseif ( file_exists( "$IP/wmf-config/wikimedia-mode" ) ) {
+ // Load settings, using wikimedia-mode if needed
+ // Fixme: replace this hack with general farm-friendly code
# TODO FIXME! Wikimedia-specific stuff needs to go away to an ext
# Maybe a hook?
global $cluster;
@@ -72,10 +85,11 @@ if( file_exists( "$IP/wmf-config/wikimedia-mode" ) ) {
require_once( "$IP/includes/SiteConfiguration.php" );
require( "$IP/wmf-config/wgConf.php" );
$maintenance->loadWikimediaSettings();
- require( $IP.'/wmf-config/CommonSettings.php' );
+ require( $IP . '/wmf-config/CommonSettings.php' );
} else {
require_once( $maintenance->loadSettings() );
}
+
if ( $maintenance->getDbType() === Maintenance::DB_ADMIN &&
is_readable( "$IP/AdminSettings.php" ) )
{
@@ -87,7 +101,7 @@ require_once( "$IP/includes/Setup.php" );
require_once( "$IP/maintenance/install-utils.inc" );
// Much much faster startup than creating a title object
-$wgTitle = null;
+$wgTitle = null;
// Do the work
try {
@@ -95,7 +109,7 @@ try {
// Potentially debug globals
$maintenance->globals();
-} catch( MWException $mwe ) {
+} catch ( MWException $mwe ) {
echo( $mwe->getText() );
exit( 1 );
}
diff --git a/maintenance/dumpBackup.php b/maintenance/dumpBackup.php
index 3f4530ed..90e8f72f 100644
--- a/maintenance/dumpBackup.php
+++ b/maintenance/dumpBackup.php
@@ -1,6 +1,9 @@
<?php
/**
- * Copyright (C) 2005 Brion Vibber <brion@pobox.com>
+ * Script that dumps wiki pages or logging database into an XML interchange
+ * wrapper format for export or backup
+ *
+ * Copyright © 2005 Brion Vibber <brion@pobox.com>
* http://www.mediawiki.org/
*
* This program is free software; you can redistribute it and/or modify
@@ -26,12 +29,12 @@ $originalDir = getcwd();
$optionsWithArgs = array( 'pagelist', 'start', 'end' );
-require_once( dirname(__FILE__) . '/commandLine.inc' );
+require_once( dirname( __FILE__ ) . '/commandLine.inc' );
require_once( 'backup.inc' );
$dumper = new BackupDumper( $argv );
-if( isset( $options['quiet'] ) ) {
+if ( isset( $options['quiet'] ) ) {
$dumper->reporting = false;
}
@@ -47,10 +50,10 @@ if ( isset( $options['pagelist'] ) ) {
$dumper->pages = array_filter( $pages, create_function( '$x', 'return $x !== "";' ) );
}
-if( isset( $options['start'] ) ) {
+if ( isset( $options['start'] ) ) {
$dumper->startId = intval( $options['start'] );
}
-if( isset( $options['end'] ) ) {
+if ( isset( $options['end'] ) ) {
$dumper->endId = intval( $options['end'] );
}
$dumper->skipHeader = isset( $options['skip-header'] );
@@ -59,13 +62,13 @@ $dumper->dumpUploads = isset( $options['uploads'] );
$textMode = isset( $options['stub'] ) ? WikiExporter::STUB : WikiExporter::TEXT;
-if( isset( $options['full'] ) ) {
+if ( isset( $options['full'] ) ) {
$dumper->dump( WikiExporter::FULL, $textMode );
-} elseif( isset( $options['current'] ) ) {
+} elseif ( isset( $options['current'] ) ) {
$dumper->dump( WikiExporter::CURRENT, $textMode );
-} elseif( isset( $options['stable'] ) ) {
+} elseif ( isset( $options['stable'] ) ) {
$dumper->dump( WikiExporter::STABLE, $textMode );
-} elseif( isset( $options['logs'] ) ) {
+} elseif ( isset( $options['logs'] ) ) {
$dumper->dump( WikiExporter::LOGS );
} else {
$dumper->progress( <<<ENDS
@@ -79,11 +82,14 @@ Actions:
--full Dump all revisions of every page.
--current Dump only the latest revision of every page.
--logs Dump all log events.
+ --stable Dump the stable versions of pages.
+ --pagelist=<file>
+ Where <file> is a list of page titles to be dumped
Options:
--quiet Don't dump status reports to stderr.
--report=n Report position and speed after every n pages processed.
- (Default: 100)
+ (Default: 100)
--server=h Force reading from MySQL server h
--start=n Start from page_id or log_id n
--end=n Stop before page_id or log_id n (exclusive)
@@ -91,11 +97,14 @@ Options:
--skip-footer Don't output the </mediawiki> footer
--stub Don't perform old_text lookups; for 2-pass dump
--uploads Include upload records (experimental)
+ --conf=<file> Use the specified configuration file (LocalSettings.php)
+
+ --wiki=<wiki> Only back up the specified <wiki>
Fancy stuff: (Works? Add examples please.)
--plugin=<class>[:<file>] Load a dump plugin class
--output=<type>:<file> Begin a filtered output stream;
- <type>s: file, gzip, bzip2, 7zip
+ <type>s: file, gzip, bzip2, 7zip
--filter=<type>[:<options>] Add a filter on an output branch
ENDS
diff --git a/maintenance/dumpInterwiki.inc b/maintenance/dumpInterwiki.inc
deleted file mode 100644
index c366b08c..00000000
--- a/maintenance/dumpInterwiki.inc
+++ /dev/null
@@ -1,209 +0,0 @@
-<?php
-/**
- * Rebuild interwiki table using the file on meta and the language list
- * Wikimedia specific!
- *
- * @file
- * @todo document
- * @ingroup Maintenance
- * @ingroup Wikimedia
- */
-
-/**
- * @todo document
- * @ingroup Maintenance
- */
-class Site {
- var $suffix, $lateral, $url;
-
- function __construct( $s, $l, $u ) {
- $this->suffix = $s;
- $this->lateral = $l;
- $this->url = $u;
- }
-
- function getURL( $lang ) {
- $xlang = str_replace( '_', '-', $lang );
- return "http://$xlang.{$this->url}/wiki/\$1";
- }
-}
-
-function getRebuildInterwikiDump() {
- global $langlist, $languageAliases, $prefixRewrites;
-
- # Multi-language sites
- # db suffix => db suffix, iw prefix, hostname
- $sites = array(
- 'wiki' => new Site( 'wiki', 'w', 'wikipedia.org' ),
- 'wiktionary' => new Site( 'wiktionary', 'wikt', 'wiktionary.org' ),
- 'wikiquote' => new Site( 'wikiquote', 'q', 'wikiquote.org' ),
- 'wikibooks' => new Site( 'wikibooks', 'b', 'wikibooks.org' ),
- 'wikinews' => new Site( 'wikinews', 'n', 'wikinews.org' ),
- 'wikisource' => new Site( 'wikisource', 's', 'wikisource.org' ),
- 'wikimedia' => new Site( 'wikimedia', 'chapter', 'wikimedia.org' ),
- 'wikiversity' => new Site( 'wikiversity', 'v', 'wikiversity.org' ),
- );
-
- # List of language prefixes likely to be found in multi-language sites
- $langlist = array_map( "trim", file( "/home/wikipedia/common/langlist" ) );
-
- # List of all database names
- $dblist = array_map( "trim", file( "/home/wikipedia/common/all.dblist" ) );
-
- # Special-case databases
- $specials = array_flip(
- array_map( "trim",
- file( "/home/wikipedia/common/special.dblist" ) ) );
-
- # Extra interwiki links that can't be in the intermap for some reason
- $extraLinks = array(
- array( 'm', 'http://meta.wikimedia.org/wiki/$1', 1 ),
- array( 'meta', 'http://meta.wikimedia.org/wiki/$1', 1 ),
- array( 'sep11', 'http://sep11.wikipedia.org/wiki/$1', 1 ),
- );
-
- # Language aliases, usually configured as redirects to the real wiki in apache
- # Interlanguage links are made directly to the real wiki
- # Something horrible happens if you forget to list an alias here, I can't
- # remember what
- $languageAliases = array(
- 'zh-cn' => 'zh',
- 'zh-tw' => 'zh',
- 'dk' => 'da',
- 'nb' => 'no',
- );
-
- # Special case prefix rewrites, for the benefit of Swedish which uses s:t
- # as an abbreviation for saint
- $prefixRewrites = array(
- 'svwiki' => array ( 's' => 'src'),
- );
-
- # Construct a list of reserved prefixes
- $reserved = array();
- foreach ( $langlist as $lang ) {
- $reserved[$lang] = 1;
- }
- foreach ( $languageAliases as $alias => $lang ) {
- $reserved[$alias] = 1;
- }
- foreach( $sites as $site ) {
- $reserved[$site->lateral] = 1;
- }
-
- # Extract the intermap from meta
- $intermap = Http::get( 'http://meta.wikimedia.org/w/index.php?title=Interwiki_map&action=raw', 30 );
- $lines = array_map( 'trim', explode( "\n", trim( $intermap ) ) );
-
- if ( !$lines || count( $lines ) < 2 ) {
- wfDie( "m:Interwiki_map not found" );
- }
-
- # Global iterwiki map
- foreach ( $lines as $line ) {
- if ( preg_match( '/^\|\s*(.*?)\s*\|\|\s*(.*?)\s*$/', $line, $matches ) ) {
- $prefix = strtolower( $matches[1] );
- $url = $matches[2];
- if ( preg_match( '/(wikipedia|wiktionary|wikisource|wikiquote|wikibooks|wikimedia)\.org/', $url ) ) {
- $local = 1;
- } else {
- $local = 0;
- }
-
- if ( empty( $reserved[$prefix] ) ) {
- $imap = array( "iw_prefix" => $prefix, "iw_url" => $url, "iw_local" => $local );
- makeLink ($imap, "__global");
- }
- }
- }
-
- # Exclude Wikipedia for Wikipedia
- makeLink ( array ('iw_prefix' => 'wikipedia', 'is_url' => null ), "_wiki" );
-
- #Multilanguage sites
- foreach ($sites as $site)
- makeLanguageLinks ( $site, "_".$site->suffix );
-
-
- foreach ( $dblist as $db ) {
- if ( isset( $specials[$db] ) ) {
- # Special wiki
- # Has interwiki links and interlanguage links to wikipedia
-
- makeLink( array( 'iw_prefix' => $db, 'iw_url' => "wiki"), "__sites" );
- # Links to multilanguage sites
- foreach ( $sites as $targetSite ) {
- makeLink( array( 'iw_prefix' => $targetSite->lateral,
- 'iw_url' =>$targetSite->getURL( 'en' ),
- 'iw_local' => 1 ), $db );
- }
-
- } else {
- # Find out which site this DB belongs to
- $site = false;
- foreach( $sites as $candidateSite ) {
- $suffix = $candidateSite->suffix;
- if ( preg_match( "/(.*)$suffix$/", $db, $matches ) ) {
- $site = $candidateSite;
- break;
- }
- }
- makeLink( array( 'iw_prefix' => $db, 'iw_url' => $site->suffix), "__sites" );
- if ( !$site ) {
- print "Invalid database $db\n";
- continue;
- }
- $lang = $matches[1];
-
- # Lateral links
- foreach ( $sites as $targetSite ) {
- if ( $targetSite->suffix != $site->suffix ) {
- makeLink( array( 'iw_prefix' => $targetSite->lateral,
- 'iw_url' => $targetSite->getURL( $lang ),
- 'iw_local' => 1 ), $db );
- }
- }
-
- if ( $site->suffix == "wiki" ) {
- makeLink( array('iw_prefix' => 'w',
- 'iw_url' => "http://en.wikipedia.org/wiki/$1",
- 'iw_local' => 1), $db );
- }
-
- }
- }
- foreach ( $extraLinks as $link )
- makeLink( $link, "__global" );
-}
-
-# ------------------------------------------------------------------------------------------
-
-# Executes part of an INSERT statement, corresponding to all interlanguage links to a particular site
-function makeLanguageLinks( &$site, $source ) {
- global $langlist, $languageAliases;
- # Actual languages with their own databases
- foreach ( $langlist as $targetLang ) {
- makeLink( array( $targetLang, $site->getURL( $targetLang ), 1 ), $source );
- }
-
- # Language aliases
- foreach ( $languageAliases as $alias => $lang ) {
- makeLink( array( $alias, $site->getURL( $lang ), 1 ), $source );
- }
-}
-
-function makeLink( $entry, $source ) {
- global $prefixRewrites, $dbFile;
- if ( isset( $prefixRewrites[$source] ) && isset( $prefixRewrites[$source][$entry[0]] ) )
- $entry[0] = $prefixRewrites[$source][$entry[0]];
- if (!array_key_exists("iw_prefix",$entry))
- $entry = array("iw_prefix" => $entry[0], "iw_url" => $entry[1], "iw_local" => $entry[2]);
- if ( array_key_exists($source,$prefixRewrites) &&
- array_key_exists($entry['iw_prefix'],$prefixRewrites[$source]))
- $entry['iw_prefix'] = $prefixRewrites[$source][$entry['iw_prefix']];
- if ($dbFile)
- $dbFile->set( "{$source}:{$entry['iw_prefix']}", trim("{$entry['iw_local']} {$entry['iw_url']}") );
- else
- print "{$source}:{$entry['iw_prefix']} {$entry['iw_url']} {$entry['iw_local']}\n";
-
- }
diff --git a/maintenance/dumpInterwiki.php b/maintenance/dumpInterwiki.php
index 045e393b..4a4b6791 100644
--- a/maintenance/dumpInterwiki.php
+++ b/maintenance/dumpInterwiki.php
@@ -1,6 +1,6 @@
<?php
/**
- * Rebuild interwiki table using the file on meta and the language list
+ * Build a constant, slightly compact database (CDB) of interwiki prefixes
* Wikimedia specific!
*
* @file
@@ -9,19 +9,231 @@
* @ingroup Wikimedia
*/
-/** */
-$oldCwd = getcwd();
+/**
+ * @todo document
+ * @ingroup Maintenance
+ */
+class Site {
+ var $suffix, $lateral, $url;
+
+ function __construct( $s, $l, $u ) {
+ $this->suffix = $s;
+ $this->lateral = $l;
+ $this->url = $u;
+ }
+
+ function getURL( $lang ) {
+ $xlang = str_replace( '_', '-', $lang );
+ return "http://$xlang.{$this->url}/wiki/\$1";
+ }
+}
+
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
+
+class DumpInterwiki extends Maintenance {
+
+ public function __construct() {
+ parent::__construct();
+ $this->mDescription = "Build constant slightly compact database of interwiki prefixes.";
+ $this->addOption( 'langlist', 'File with one language code per line', false, true );
+ $this->addOption( 'dblist', 'File with one db per line', false, true );
+ $this->addOption( 'specialdbs', "File with one 'special' db per line", false, true );
+ $this->addOption( 'o', 'Cdb output file', false, true );
+ }
+
+ function execute() {
+ # List of language prefixes likely to be found in multi-language sites
+ $this->langlist = array_map( "trim", file( $this->getOption( 'langlist', "/home/wikipedia/common/langlist" ) ) );
+
+ # List of all database names
+ $this->dblist = array_map( "trim", file( $this->getOption( 'dblist', "/home/wikipedia/common/all.dblist" ) ) );
+
+ # Special-case databases
+ $this->specials = array_flip( array_map( "trim", file( $this->getOption( 'specialdbs', "/home/wikipedia/common/special.dblist" ) ) ) );
+
+ if ( $this->hasOption( 'o' ) ) {
+ $this->dbFile = CdbWriter::open( $this->getOption( 'o' ) ) ;
+ } else {
+ $this->dbFile = false;
+ }
+
+ $this->getRebuildInterwikiDump();
+ }
+
+ function getRebuildInterwikiDump() {
+ global $wgContLang;
+
+ # Multi-language sites
+ # db suffix => db suffix, iw prefix, hostname
+ $sites = array(
+ 'wiki' => new Site( 'wiki', 'w', 'wikipedia.org' ),
+ 'wiktionary' => new Site( 'wiktionary', 'wikt', 'wiktionary.org' ),
+ 'wikiquote' => new Site( 'wikiquote', 'q', 'wikiquote.org' ),
+ 'wikibooks' => new Site( 'wikibooks', 'b', 'wikibooks.org' ),
+ 'wikinews' => new Site( 'wikinews', 'n', 'wikinews.org' ),
+ 'wikisource' => new Site( 'wikisource', 's', 'wikisource.org' ),
+ 'wikimedia' => new Site( 'wikimedia', 'chapter', 'wikimedia.org' ),
+ 'wikiversity' => new Site( 'wikiversity', 'v', 'wikiversity.org' ),
+ );
+
+ # Extra interwiki links that can't be in the intermap for some reason
+ $extraLinks = array(
+ array( 'm', 'http://meta.wikimedia.org/wiki/$1', 1 ),
+ array( 'meta', 'http://meta.wikimedia.org/wiki/$1', 1 ),
+ array( 'sep11', 'http://sep11.wikipedia.org/wiki/$1', 1 ),
+ );
+
+ # Language aliases, usually configured as redirects to the real wiki in apache
+ # Interlanguage links are made directly to the real wiki
+ # Something horrible happens if you forget to list an alias here, I can't
+ # remember what
+ $this->languageAliases = array(
+ 'zh-cn' => 'zh',
+ 'zh-tw' => 'zh',
+ 'dk' => 'da',
+ 'nb' => 'no',
+ );
+
+ # Special case prefix rewrites, for the benefit of Swedish which uses s:t
+ # as an abbreviation for saint
+ $this->prefixRewrites = array(
+ 'svwiki' => array( 's' => 'src' ),
+ );
+
+ # Construct a list of reserved prefixes
+ $reserved = array();
+ foreach ( $this->langlist as $lang ) {
+ $reserved[$lang] = 1;
+ }
+ foreach ( $this->languageAliases as $alias => $lang ) {
+ $reserved[$alias] = 1;
+ }
+ foreach ( $sites as $site ) {
+ $reserved[$site->lateral] = 1;
+ }
+
+ # Extract the intermap from meta
+ $intermap = Http::get( 'http://meta.wikimedia.org/w/index.php?title=Interwiki_map&action=raw', 30 );
+ $lines = array_map( 'trim', explode( "\n", trim( $intermap ) ) );
+
+ if ( !$lines || count( $lines ) < 2 ) {
+ $this->error( "m:Interwiki_map not found", true );
+ }
+
+ # Global interwiki map
+ foreach ( $lines as $line ) {
+ if ( preg_match( '/^\|\s*(.*?)\s*\|\|\s*(.*?)\s*$/', $line, $matches ) ) {
+ $prefix = $wgContLang->lc( $matches[1] );
+ $prefix = str_replace( ' ', '_', $prefix );
+
+ $url = $matches[2];
+ if ( preg_match( '/(wikipedia|wiktionary|wikisource|wikiquote|wikibooks|wikimedia)\.org/', $url ) ) {
+ $local = 1;
+ } else {
+ $local = 0;
+ }
+
+ if ( empty( $reserved[$prefix] ) ) {
+ $imap = array( "iw_prefix" => $prefix, "iw_url" => $url, "iw_local" => $local );
+ $this->makeLink ( $imap, "__global" );
+ }
+ }
+ }
+
+ # Exclude Wikipedia for Wikipedia
+ $this->makeLink ( array ( 'iw_prefix' => 'wikipedia', 'is_url' => null ), "_wiki" );
+
+ # Multilanguage sites
+ foreach ( $sites as $site ) {
+ $this->makeLanguageLinks ( $site, "_" . $site->suffix );
+ }
+
+ foreach ( $this->dblist as $db ) {
+ if ( isset( $this->specials[$db] ) ) {
+ # Special wiki
+ # Has interwiki links and interlanguage links to wikipedia
+
+ $this->makeLink( array( 'iw_prefix' => $db, 'iw_url' => "wiki" ), "__sites" );
+ # Links to multilanguage sites
+ foreach ( $sites as $targetSite ) {
+ $this->makeLink( array( 'iw_prefix' => $targetSite->lateral,
+ 'iw_url' => $targetSite->getURL( 'en' ),
+ 'iw_local' => 1 ), $db );
+ }
+ } else {
+ # Find out which site this DB belongs to
+ $site = false;
+ foreach ( $sites as $candidateSite ) {
+ $suffix = $candidateSite->suffix;
+ if ( preg_match( "/(.*)$suffix$/", $db, $matches ) ) {
+ $site = $candidateSite;
+ break;
+ }
+ }
+ $this->makeLink( array( 'iw_prefix' => $db, 'iw_url' => $site->suffix ), "__sites" );
+ if ( !$site ) {
+ $this->error( "Invalid database $db\n" );
+ continue;
+ }
+ $lang = $matches[1];
+
+ # Lateral links
+ foreach ( $sites as $targetSite ) {
+ if ( $targetSite->suffix != $site->suffix ) {
+ $this->makeLink( array( 'iw_prefix' => $targetSite->lateral,
+ 'iw_url' => $targetSite->getURL( $lang ),
+ 'iw_local' => 1 ), $db );
+ }
+ }
+
+ if ( $site->suffix == "wiki" ) {
+ $this->makeLink( array( 'iw_prefix' => 'w',
+ 'iw_url' => "http://en.wikipedia.org/wiki/$1",
+ 'iw_local' => 1 ), $db );
+ }
+
+ }
+ }
+ foreach ( $extraLinks as $link ) {
+ $this->makeLink( $link, "__global" );
+ }
+ }
+
+ # ------------------------------------------------------------------------------------------
+
+ # Executes part of an INSERT statement, corresponding to all interlanguage links to a particular site
+ function makeLanguageLinks( &$site, $source ) {
+ # Actual languages with their own databases
+ foreach ( $this->langlist as $targetLang ) {
+ $this->makeLink( array( $targetLang, $site->getURL( $targetLang ), 1 ), $source );
+ }
+
+ # Language aliases
+ foreach ( $this->languageAliases as $alias => $lang ) {
+ $this->makeLink( array( $alias, $site->getURL( $lang ), 1 ), $source );
+ }
+ }
+
+ function makeLink( $entry, $source ) {
+ if ( isset( $this->prefixRewrites[$source] ) && isset( $this->prefixRewrites[$source][$entry[0]] ) )
+ $entry[0] = $this->prefixRewrites[$source][$entry[0]];
-$optionsWithArgs = array( "o" );
-require_once( dirname(__FILE__) . '/commandLine.inc' );
-require( dirname(__FILE__)."/dumpInterwiki.inc" );
-chdir( $oldCwd );
+ if ( !array_key_exists( "iw_prefix", $entry ) ) {
+ $entry = array( "iw_prefix" => $entry[0], "iw_url" => $entry[1], "iw_local" => $entry[2] );
+ }
+ if ( array_key_exists( $source, $this->prefixRewrites ) &&
+ array_key_exists( $entry['iw_prefix'], $this->prefixRewrites[$source] ) ) {
+ $entry['iw_prefix'] = $this->prefixRewrites[$source][$entry['iw_prefix']];
+ }
-# Output
-if ( isset( $options['o'] ) ) {
- # To database specified with -o
- $dbFile = CdbWriter::open( $options['o'] );
-}
+ if ( $this->dbFile ) {
+ $this->dbFile->set( "{$source}:{$entry['iw_prefix']}", trim( "{$entry['iw_local']} {$entry['iw_url']}" ) );
+ } else {
+ $this->output( "{$source}:{$entry['iw_prefix']} {$entry['iw_url']} {$entry['iw_local']}\n" );
+ }
+ }
+}
-getRebuildInterwikiDump();
+$maintClass = "DumpInterwiki";
+require_once( RUN_MAINTENANCE_IF_MAIN );
diff --git a/maintenance/dumpLinks.php b/maintenance/dumpLinks.php
index 529cd1aa..39a9e955 100644
--- a/maintenance/dumpLinks.php
+++ b/maintenance/dumpLinks.php
@@ -29,7 +29,7 @@
* @ingroup Mainatenance
*/
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class DumpLinks extends Maintenance {
public function __construct() {
@@ -49,11 +49,11 @@ class DumpLinks extends Maintenance {
array( 'page_id=pl_from' ),
__METHOD__,
array( 'ORDER BY' => 'page_id' ) );
-
+
$lastPage = null;
- foreach( $result as $row ) {
- if( $lastPage != $row->page_id ) {
- if( isset( $lastPage ) ) {
+ foreach ( $result as $row ) {
+ if ( $lastPage != $row->page_id ) {
+ if ( isset( $lastPage ) ) {
$this->output( "\n" );
}
$page = Title::makeTitle( $row->page_namespace, $row->page_title );
@@ -63,11 +63,11 @@ class DumpLinks extends Maintenance {
$link = Title::makeTitle( $row->pl_namespace, $row->pl_title );
$this->output( " " . $link->getPrefixedUrl() );
}
- if( isset( $lastPage ) )
+ if ( isset( $lastPage ) )
$this->output( "\n" );
}
}
$maintClass = "DumpLinks";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
diff --git a/maintenance/dumpSisterSites.php b/maintenance/dumpSisterSites.php
index d9fd28a6..f5abcd1b 100644
--- a/maintenance/dumpSisterSites.php
+++ b/maintenance/dumpSisterSites.php
@@ -24,14 +24,14 @@
* @ingroup Maintenance
*/
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class DumpSisterSites extends Maintenance {
public function __construct() {
parent::__construct();
$this->mDescription = "Quickie page name dump script for SisterSites usage";
}
-
+
public function execute() {
$dbr = wfGetDB( DB_SLAVE );
$dbr->bufferResults( false );
@@ -42,15 +42,14 @@ class DumpSisterSites extends Maintenance {
),
__METHOD__ );
- foreach( $result as $row ) {
+ foreach ( $result as $row ) {
$title = Title::makeTitle( $row->page_namespace, $row->page_title );
$url = $title->getFullUrl();
$text = $title->getPrefixedText();
$this->output( "$url $text\n" );
}
- $dbr->freeResult( $result );
}
}
$maintClass = "DumpSisterSites";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
diff --git a/maintenance/dumpTextPass.php b/maintenance/dumpTextPass.php
index 2e639e68..98d4af0e 100644
--- a/maintenance/dumpTextPass.php
+++ b/maintenance/dumpTextPass.php
@@ -1,6 +1,8 @@
<?php
/**
- * Copyright (C) 2005 Brion Vibber <brion@pobox.com>
+ * Script that postprocesses XML dumps from dumpBackup.php to add page text
+ *
+ * Copyright © 2005 Brion Vibber <brion@pobox.com>, 2010 Alexandre Emsenhuber
* http://www.mediawiki.org/
*
* This program is free software; you can redistribute it and/or modify
@@ -24,7 +26,7 @@
$originalDir = getcwd();
-require_once( dirname(__FILE__) . '/commandLine.inc' );
+require_once( dirname( __FILE__ ) . '/commandLine.inc' );
require_once( 'backup.inc' );
/**
@@ -33,14 +35,15 @@ require_once( 'backup.inc' );
class TextPassDumper extends BackupDumper {
var $prefetch = null;
var $input = "php://stdin";
- var $history = WikiExporter::FULL;
var $fetchCount = 0;
var $prefetchCount = 0;
-
+
var $failures = 0;
- var $maxFailures = 200;
+ var $maxFailures = 5;
+ var $failedTextRetrievals = 0;
+ var $maxConsecutiveFailedTextRetrievals = 200;
var $failureTimeout = 5; // Seconds to sleep after db failure
-
+
var $php = "php";
var $spawn = false;
var $spawnProc = false;
@@ -48,29 +51,22 @@ class TextPassDumper extends BackupDumper {
var $spawnRead = false;
var $spawnErr = false;
- function dump() {
+ function dump( $history, $text = WikiExporter::TEXT ) {
# This shouldn't happen if on console... ;)
header( 'Content-type: text/html; charset=UTF-8' );
# Notice messages will foul up your XML output even if they're
# relatively harmless.
- if( ini_get( 'display_errors' ) )
+ if ( ini_get( 'display_errors' ) )
ini_set( 'display_errors', 'stderr' );
- $this->initProgress( $this->history );
+ $this->initProgress( $history );
$this->db = $this->backupDb();
- $this->egress = new ExportProgressFilter( $this->sink, $this );
+ $this->readDump();
- $input = fopen( $this->input, "rt" );
- $result = $this->readDump( $input );
-
- if( WikiError::isError( $result ) ) {
- wfDie( $result->getMessage() );
- }
-
- if( $this->spawnProc ) {
+ if ( $this->spawnProc ) {
$this->closeSpawn();
}
@@ -78,59 +74,63 @@ class TextPassDumper extends BackupDumper {
}
function processOption( $opt, $val, $param ) {
+ global $IP;
$url = $this->processFileOpt( $val, $param );
-
+
switch( $opt ) {
case 'prefetch':
- global $IP;
require_once "$IP/maintenance/backupPrefetch.inc";
$this->prefetch = new BaseDump( $url );
break;
case 'stub':
$this->input = $url;
break;
- case 'current':
- $this->history = WikiExporter::CURRENT;
- break;
- case 'full':
- $this->history = WikiExporter::FULL;
- break;
case 'spawn':
$this->spawn = true;
- if( $val ) {
+ if ( $val ) {
$this->php = $val;
}
break;
}
}
-
+
function processFileOpt( $val, $param ) {
- switch( $val ) {
- case "file":
- return $param;
- case "gzip":
- return "compress.zlib://$param";
- case "bzip2":
- return "compress.bzip2://$param";
- case "7zip":
- return "mediawiki.compress.7z://$param";
- default:
- return $val;
+ $fileURIs = explode( ';', $param );
+ $newFileURIs = array();
+ foreach ( $fileURIs as $URI ) {
+ switch( $val ) {
+ case "file":
+ $newURI = $URI;
+ break;
+ case "gzip":
+ $newURI = "compress.zlib://$URI";
+ break;
+ case "bzip2":
+ $newURI = "compress.bzip2://$URI";
+ break;
+ case "7zip":
+ $newURI = "mediawiki.compress.7z://$URI";
+ break;
+ default:
+ $newURI = $URI;
+ }
+ $newFileURIs[] = $newURI;
}
+ $val = implode( ';', $newFileURIs );
+ return $val;
}
/**
* Overridden to include prefetch ratio if enabled.
*/
function showReport() {
- if( !$this->prefetch ) {
+ if ( !$this->prefetch ) {
return parent::showReport();
}
-
- if( $this->reporting ) {
+
+ if ( $this->reporting ) {
$delta = wfTime() - $this->startTime;
$now = wfTimestamp( TS_DB );
- if( $delta ) {
+ if ( $delta ) {
$rate = $this->pageCount / $delta;
$revrate = $this->revCount / $delta;
$portion = $this->revCount / $this->maxCount;
@@ -148,150 +148,207 @@ class TextPassDumper extends BackupDumper {
}
}
- function readDump( $input ) {
- $this->buffer = "";
- $this->openElement = false;
- $this->atStart = true;
- $this->state = "";
- $this->lastName = "";
+ function readDump() {
+ $state = '';
+ $lastName = '';
$this->thisPage = 0;
$this->thisRev = 0;
- $parser = xml_parser_create( "UTF-8" );
- xml_parser_set_option( $parser, XML_OPTION_CASE_FOLDING, false );
+ $reader = new XMLReader();
+ $reader->open( $this->input );
+ $writer = new XMLWriter();
+ $writer->openMemory();
+
- xml_set_element_handler( $parser, array( &$this, 'startElement' ), array( &$this, 'endElement' ) );
- xml_set_character_data_handler( $parser, array( &$this, 'characterData' ) );
+ while ( $reader->read() ) {
+ $tag = $reader->name;
+ $type = $reader->nodeType;
- $offset = 0; // for context extraction on error reporting
- $bufferSize = 512 * 1024;
- do {
- $chunk = fread( $input, $bufferSize );
- if( !xml_parse( $parser, $chunk, feof( $input ) ) ) {
- wfDebug( "TextDumpPass::readDump encountered XML parsing error\n" );
- return new WikiXmlError( $parser, 'XML import parse failure', $chunk, $offset );
+ if ( $type == XmlReader::END_ELEMENT ) {
+ $writer->endElement();
+
+ if ( $tag == 'revision' ) {
+ $this->revCount();
+ $this->thisRev = '';
+ } elseif ( $tag == 'page' ) {
+ $this->reportPage();
+ $this->thisPage = '';
+ }
+ } elseif ( $type == XmlReader::ELEMENT ) {
+ $attribs = array();
+ if ( $reader->hasAttributes ) {
+ for ( $i = 0; $reader->moveToAttributeNo( $i ); $i++ ) {
+ $attribs[$reader->name] = $reader->value;
+ }
+ }
+
+ if ( $reader->isEmptyElement && $tag == 'text' && isset( $attribs['id'] ) ) {
+ $writer->startElement( 'text' );
+ $writer->writeAttribute( 'xml:space', 'preserve' );
+ $text = $this->getText( $attribs['id'] );
+ if ( strlen( $text ) ) {
+ $writer->text( $text );
+ }
+ $writer->endElement();
+ } else {
+ $writer->startElement( $tag );
+ foreach ( $attribs as $name => $val ) {
+ $writer->writeAttribute( $name, $val );
+ }
+ if ( $reader->isEmptyElement ) {
+ $writer->endElement();
+ }
+ }
+
+ $lastName = $tag;
+ if ( $tag == 'revision' ) {
+ $state = 'revision';
+ } elseif ( $tag == 'page' ) {
+ $state = 'page';
+ }
+ } elseif ( $type == XMLReader::SIGNIFICANT_WHITESPACE || $type == XMLReader::TEXT ) {
+ if ( $lastName == 'id' ) {
+ if ( $state == 'revision' ) {
+ $this->thisRev .= $reader->value;
+ } elseif ( $state == 'page' ) {
+ $this->thisPage .= $reader->value;
+ }
+ }
+ $writer->text( $reader->value );
}
- $offset += strlen( $chunk );
- } while( $chunk !== false && !feof( $input ) );
- xml_parser_free( $parser );
-
- return true;
+ $this->sink->write( $writer->outputMemory() );
+ }
}
function getText( $id ) {
$this->fetchCount++;
- if( isset( $this->prefetch ) ) {
+ if ( isset( $this->prefetch ) ) {
$text = $this->prefetch->prefetch( $this->thisPage, $this->thisRev );
- if( $text === null ) {
- // Entry missing from prefetch dump
- } elseif( $text === "" ) {
- // Blank entries may indicate that the prior dump was broken.
- // To be safe, reload it.
- } else {
- $this->prefetchCount++;
- return $text;
+ if ( $text !== null ) { // Entry missing from prefetch dump
+ $dbr = wfGetDB( DB_SLAVE );
+ $revID = intval( $this->thisRev );
+ $revLength = $dbr->selectField( 'revision', 'rev_len', array( 'rev_id' => $revID ) );
+ // if length of rev text in file doesn't match length in db, we reload
+ // this avoids carrying forward broken data from previous xml dumps
+ if ( strlen( $text ) == $revLength ) {
+ $this->prefetchCount++;
+ return $text;
+ }
}
}
return $this->doGetText( $id );
}
-
+
private function doGetText( $id ) {
- if( $this->spawn ) {
- return $this->getTextSpawned( $id );
- } else {
- return $this->getTextDbSafe( $id );
+ $id = intval( $id );
+ $this->failures = 0;
+ $ex = new MWException( "Graceful storage failure" );
+ while ( true ) {
+ if ( $this->spawn ) {
+ if ( $this->failures ) {
+ // we don't know why it failed, could be the child process
+ // borked, could be db entry busted, could be db server out to lunch,
+ // so cover all bases
+ $this->closeSpawn();
+ $this->openSpawn();
+ }
+ $text = $this->getTextSpawned( $id );
+ } else {
+ $text = $this->getTextDbSafe( $id );
+ }
+ if ( $text === false ) {
+ $this->failures++;
+ if ( $this->failures > $this->maxFailures ) {
+ $this->progress( "Failed to retrieve revision text for text id " .
+ "$id after $this->maxFailures tries, giving up" );
+ // were there so many bad retrievals in a row we want to bail?
+ // at some point we have to declare the dump irretrievably broken
+ $this->failedTextRetrievals++;
+ if ( $this->failedTextRetrievals > $this->maxConsecutiveFailedTextRetrievals ) {
+ throw $ex;
+ } else {
+ // would be nice to return something better to the caller someday,
+ // log what we know about the failure and about the revision
+ return "";
+ }
+ } else {
+ $this->progress( "Error $this->failures " .
+ "of allowed $this->maxFailures retrieving revision text for text id $id! " .
+ "Pausing $this->failureTimeout seconds before retry..." );
+ sleep( $this->failureTimeout );
+ }
+ } else {
+ $this->failedTextRetrievals = 0;
+ return $text;
+ }
}
+
}
-
+
/**
* Fetch a text revision from the database, retrying in case of failure.
* This may survive some transitory errors by reconnecting, but
* may not survive a long-term server outage.
*/
private function getTextDbSafe( $id ) {
- while( true ) {
+ while ( true ) {
try {
$text = $this->getTextDb( $id );
- $ex = new MWException("Graceful storage failure");
- } catch (DBQueryError $ex) {
+ } catch ( DBQueryError $ex ) {
$text = false;
}
- if( $text === false ) {
- $this->failures++;
- if( $this->failures > $this->maxFailures ) {
- throw $ex;
- } else {
- $this->progress( "Database failure $this->failures " .
- "of allowed $this->maxFailures for revision $id! " .
- "Pausing $this->failureTimeout seconds..." );
- sleep( $this->failureTimeout );
- }
- } else {
- return $text;
- }
+ return $text;
}
}
-
+
/**
* May throw a database error if, say, the server dies during query.
*/
private function getTextDb( $id ) {
global $wgContLang;
- $id = intval( $id );
$row = $this->db->selectRow( 'text',
array( 'old_text', 'old_flags' ),
array( 'old_id' => $id ),
- 'TextPassDumper::getText' );
+ __METHOD__ );
$text = Revision::getRevisionText( $row );
- if( $text === false ) {
+ if ( $text === false ) {
return false;
}
$stripped = str_replace( "\r", "", $text );
$normalized = $wgContLang->normalize( $stripped );
return $normalized;
}
-
+
private function getTextSpawned( $id ) {
wfSuppressWarnings();
- if( !$this->spawnProc ) {
+ if ( !$this->spawnProc ) {
// First time?
$this->openSpawn();
}
- while( true ) {
-
- $text = $this->getTextSpawnedOnce( $id );
- if( !is_string( $text ) ) {
- $this->progress("Database subprocess failed. Respawning...");
-
- $this->closeSpawn();
- sleep( $this->failureTimeout );
- $this->openSpawn();
-
- continue;
- }
- wfRestoreWarnings();
- return $text;
- }
+ $text = $this->getTextSpawnedOnce( $id );
+ wfRestoreWarnings();
+ return $text;
}
-
+
function openSpawn() {
- global $IP, $wgDBname;
-
+ global $IP;
+
$cmd = implode( " ",
array_map( 'wfEscapeShellArg',
array(
$this->php,
"$IP/maintenance/fetchText.php",
- $wgDBname ) ) );
+ '--wiki', wfWikiID() ) ) );
$spec = array(
0 => array( "pipe", "r" ),
1 => array( "pipe", "w" ),
2 => array( "file", "/dev/null", "a" ) );
$pipes = array();
-
+
$this->progress( "Spawning database subprocess: $cmd" );
$this->spawnProc = proc_open( $cmd, $spec, $pipes );
- if( !$this->spawnProc ) {
+ if ( !$this->spawnProc ) {
// shit
$this->progress( "Subprocess spawn failed." );
return false;
@@ -300,138 +357,83 @@ class TextPassDumper extends BackupDumper {
$this->spawnWrite, // -> stdin
$this->spawnRead, // <- stdout
) = $pipes;
-
+
return true;
}
-
+
private function closeSpawn() {
wfSuppressWarnings();
- if( $this->spawnRead )
+ if ( $this->spawnRead )
fclose( $this->spawnRead );
$this->spawnRead = false;
- if( $this->spawnWrite )
+ if ( $this->spawnWrite )
fclose( $this->spawnWrite );
$this->spawnWrite = false;
- if( $this->spawnErr )
+ if ( $this->spawnErr )
fclose( $this->spawnErr );
$this->spawnErr = false;
- if( $this->spawnProc )
+ if ( $this->spawnProc )
pclose( $this->spawnProc );
$this->spawnProc = false;
wfRestoreWarnings();
}
-
+
private function getTextSpawnedOnce( $id ) {
global $wgContLang;
$ok = fwrite( $this->spawnWrite, "$id\n" );
- //$this->progress( ">> $id" );
- if( !$ok ) return false;
-
+ // $this->progress( ">> $id" );
+ if ( !$ok ) return false;
+
$ok = fflush( $this->spawnWrite );
- //$this->progress( ">> [flush]" );
- if( !$ok ) return false;
-
+ // $this->progress( ">> [flush]" );
+ if ( !$ok ) return false;
+
+ // check that the text id they are sending is the one we asked for
+ // this avoids out of sync revision text errors we have encountered in the past
+ $newId = fgets( $this->spawnRead );
+ if ( $newId === false ) {
+ return false;
+ }
+ if ( $id != intval( $newId ) ) {
+ return false;
+ }
+
$len = fgets( $this->spawnRead );
- //$this->progress( "<< " . trim( $len ) );
- if( $len === false ) return false;
-
+ // $this->progress( "<< " . trim( $len ) );
+ if ( $len === false ) return false;
+
$nbytes = intval( $len );
+ // actual error, not zero-length text
+ if ( $nbytes < 0 ) return false;
+
$text = "";
-
+
// Subprocess may not send everything at once, we have to loop.
- while( $nbytes > strlen( $text ) ) {
+ while ( $nbytes > strlen( $text ) ) {
$buffer = fread( $this->spawnRead, $nbytes - strlen( $text ) );
- if( $buffer === false ) break;
+ if ( $buffer === false ) break;
$text .= $buffer;
}
-
+
$gotbytes = strlen( $text );
- if( $gotbytes != $nbytes ) {
- $this->progress( "Expected $nbytes bytes from database subprocess, got $gotbytes ");
+ if ( $gotbytes != $nbytes ) {
+ $this->progress( "Expected $nbytes bytes from database subprocess, got $gotbytes " );
return false;
}
-
+
// Do normalization in the dump thread...
$stripped = str_replace( "\r", "", $text );
$normalized = $wgContLang->normalize( $stripped );
return $normalized;
}
-
- function startElement( $parser, $name, $attribs ) {
- $this->clearOpenElement( null );
- $this->lastName = $name;
-
- if( $name == 'revision' ) {
- $this->state = $name;
- $this->egress->writeOpenPage( null, $this->buffer );
- $this->buffer = "";
- } elseif( $name == 'page' ) {
- $this->state = $name;
- if( $this->atStart ) {
- $this->egress->writeOpenStream( $this->buffer );
- $this->buffer = "";
- $this->atStart = false;
- }
- }
-
- if( $name == "text" && isset( $attribs['id'] ) ) {
- $text = $this->getText( $attribs['id'] );
- $this->openElement = array( $name, array( 'xml:space' => 'preserve' ) );
- if( strlen( $text ) > 0 ) {
- $this->characterData( $parser, $text );
- }
- } else {
- $this->openElement = array( $name, $attribs );
- }
- }
-
- function endElement( $parser, $name ) {
- if( $this->openElement ) {
- $this->clearOpenElement( "" );
- } else {
- $this->buffer .= "</$name>";
- }
-
- if( $name == 'revision' ) {
- $this->egress->writeRevision( null, $this->buffer );
- $this->buffer = "";
- $this->thisRev = "";
- } elseif( $name == 'page' ) {
- $this->egress->writeClosePage( $this->buffer );
- $this->buffer = "";
- $this->thisPage = "";
- } elseif( $name == 'mediawiki' ) {
- $this->egress->writeCloseStream( $this->buffer );
- $this->buffer = "";
- }
- }
-
- function characterData( $parser, $data ) {
- $this->clearOpenElement( null );
- if( $this->lastName == "id" ) {
- if( $this->state == "revision" ) {
- $this->thisRev .= $data;
- } elseif( $this->state == "page" ) {
- $this->thisPage .= $data;
- }
- }
- $this->buffer .= htmlspecialchars( $data );
- }
-
- function clearOpenElement( $style ) {
- if( $this->openElement ) {
- $this->buffer .= Xml::element( $this->openElement[0], $this->openElement[1], $style );
- $this->openElement = false;
- }
- }
}
$dumper = new TextPassDumper( $argv );
-if( true ) {
- $dumper->dump();
+if ( !isset( $options['help'] ) ) {
+ $dumper->dump( WikiExporter::FULL );
} else {
$dumper->progress( <<<ENDS
This script postprocesses XML dumps from dumpBackup.php to add
@@ -444,14 +446,16 @@ Usage: php dumpTextPass.php [<options>]
Options:
--stub=<type>:<file> To load a compressed stub dump instead of stdin
--prefetch=<type>:<file> Use a prior dump file as a text source, to save
- pressure on the database.
- (Requires PHP 5.0+ and the XMLReader PECL extension)
- --quiet Don't dump status reports to stderr.
+ pressure on the database.
+ --quiet Don't dump status reports to stderr.
--report=n Report position and speed after every n pages processed.
- (Default: 100)
+ (Default: 100)
--server=h Force reading from MySQL server h
- --current Base ETA on number of pages in database instead of all revisions
- --spawn Spawn a subprocess for loading text records
+ --output=<type>:<file> Write to a file instead of stdout
+ <type>s: file, gzip, bzip2, 7zip
+ --current Base ETA on number of pages in database instead of all revisions
+ --spawn Spawn a subprocess for loading text records
+ --help Display this help message
ENDS
);
}
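
Note on the processFileOpt() change above: --stub and --prefetch now accept a semicolon-separated list of files, each of which is mapped onto a PHP stream-wrapper URI. A standalone sketch of that expansion, with made-up file names:

<?php
// Editor's sketch of the stream-wrapper expansion performed by processFileOpt();
// the option value and file names are illustrative only.
$val = 'gzip';                          // <type> part of --stub=<type>:<file>[;<file>...]
$param = 'stub1.xml.gz;stub2.xml.gz';   // <file> list
$newFileURIs = array();
foreach ( explode( ';', $param ) as $uri ) {
	switch ( $val ) {
		case 'gzip':
			$newFileURIs[] = "compress.zlib://$uri";
			break;
		case 'bzip2':
			$newFileURIs[] = "compress.bzip2://$uri";
			break;
		case '7zip':
			$newFileURIs[] = "mediawiki.compress.7z://$uri";
			break;
		default: // 'file' or an already usable URI
			$newFileURIs[] = $uri;
	}
}
echo implode( ';', $newFileURIs ) . "\n";
// compress.zlib://stub1.xml.gz;compress.zlib://stub2.xml.gz
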
diff --git a/maintenance/dumpUploads.php b/maintenance/dumpUploads.php
index c8f1667b..74c0cb0b 100644
--- a/maintenance/dumpUploads.php
+++ b/maintenance/dumpUploads.php
@@ -20,7 +20,7 @@
* @ingroup Maintenance
*/
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class UploadDumper extends Maintenance {
public function __construct() {
@@ -34,22 +34,22 @@ By default, outputs relative paths against the parent directory of \$wgUploadDir
}
public function execute() {
- global $IP, $wgUseSharedUploads;
+ global $IP;
$this->mAction = 'fetchLocal';
$this->mBasePath = $this->getOption( 'base', $IP );
$this->mShared = false;
$this->mSharedSupplement = false;
- if( $this->hasOption('local') ) {
+ if ( $this->hasOption( 'local' ) ) {
$this->mAction = 'fetchLocal';
}
-
- if( $this->hasOption('used') ) {
+
+ if ( $this->hasOption( 'used' ) ) {
$this->mAction = 'fetchUsed';
}
-
- if( $this->hasOption('shared') ) {
- if( $this->hasOption('used') ) {
+
+ if ( $this->hasOption( 'shared' ) ) {
+ if ( $this->hasOption( 'used' ) ) {
// Include shared-repo files in the used check
$this->mShared = true;
} else {
@@ -57,51 +57,53 @@ By default, outputs relative paths against the parent directory of \$wgUploadDir
$this->mSharedSupplement = true;
}
}
- $this->{$this->mAction}( $this->mShared );
- if( $this->mSharedSupplement ) {
+ $this->{$this->mAction}( $this->mShared );
+ if ( $this->mSharedSupplement ) {
$this->fetchUsed( true );
}
}
/**
- * Fetch a list of all or used images from a particular image source.
- * @param string $table
- * @param string $directory Base directory where files are located
- * @param bool $shared true to pass shared-dir settings to hash func
+ * Fetch a list of used images from a particular image source.
+ *
+ * @param $shared Boolean: true to pass shared-dir settings to hash func
*/
function fetchUsed( $shared ) {
$dbr = wfGetDB( DB_SLAVE );
$image = $dbr->tableName( 'image' );
$imagelinks = $dbr->tableName( 'imagelinks' );
-
+
$sql = "SELECT DISTINCT il_to, img_name
FROM $imagelinks
LEFT OUTER JOIN $image
ON il_to=img_name";
$result = $dbr->query( $sql );
-
- foreach( $result as $row ) {
+
+ foreach ( $result as $row ) {
$this->outputItem( $row->il_to, $shared );
}
- $dbr->freeResult( $result );
}
+ /**
+ * Fetch a list of all images from a particular image source.
+ *
+ * @param $shared Boolean: true to pass shared-dir settings to hash func
+ */
function fetchLocal( $shared ) {
$dbr = wfGetDB( DB_SLAVE );
$result = $dbr->select( 'image',
array( 'img_name' ),
'',
__METHOD__ );
-
- foreach( $result as $row ) {
+
+ foreach ( $result as $row ) {
$this->outputItem( $row->img_name, $shared );
}
- $dbr->freeResult( $result );
}
-
+
function outputItem( $name, $shared ) {
$file = wfFindFile( $name );
- if( $file && $this->filterItem( $file, $shared ) ) {
+ if ( $file && $this->filterItem( $file, $shared ) ) {
$filename = $file->getFullPath();
$rel = wfRelativePath( $filename, $this->mBasePath );
$this->output( "$rel\n" );
@@ -116,4 +118,4 @@ By default, outputs relative paths against the parent directory of \$wgUploadDir
}
$maintClass = "UploadDumper";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
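
Note on UploadDumper::execute() above: the action chosen from the command-line options is stored in $this->mAction and invoked through PHP's variable-method syntax. A small self-contained sketch of that dispatch (the class and values are illustrative, not MediaWiki code):

<?php
// Editor's sketch of variable-method dispatch.
class Dispatcher {
	public $mAction = 'fetchLocal';

	function fetchLocal( $shared ) {
		echo "fetchLocal, shared=" . var_export( $shared, true ) . "\n";
	}

	function fetchUsed( $shared ) {
		echo "fetchUsed, shared=" . var_export( $shared, true ) . "\n";
	}

	function run() {
		// Equivalent to $this->{$this->mAction}( $this->mShared ) in the script.
		$this->{$this->mAction}( false );
	}
}

$d = new Dispatcher();
$d->run(); // prints: fetchLocal, shared=false
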
diff --git a/maintenance/edit.php b/maintenance/edit.php
index 8d0068c3..40623afb 100644
--- a/maintenance/edit.php
+++ b/maintenance/edit.php
@@ -20,7 +20,7 @@
* @ingroup Maintenance
*/
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class EditCLI extends Maintenance {
public function __construct() {
@@ -36,7 +36,7 @@ class EditCLI extends Maintenance {
}
public function execute() {
- global $wgUser, $wgTitle, $wgArticle;
+ global $wgUser, $wgTitle;
$userName = $this->getOption( 'u', 'Maintenance script' );
$summary = $this->getOption( 's', '' );
@@ -44,7 +44,7 @@ class EditCLI extends Maintenance {
$bot = $this->hasOption( 'b' );
$autoSummary = $this->hasOption( 'a' );
$noRC = $this->hasOption( 'no-rc' );
-
+
$wgUser = User::newFromName( $userName );
if ( !$wgUser ) {
$this->error( "Invalid username", true );
@@ -52,22 +52,22 @@ class EditCLI extends Maintenance {
if ( $wgUser->isAnon() ) {
$wgUser->addToDatabase();
}
-
+
$wgTitle = Title::newFromText( $this->getArg() );
if ( !$wgTitle ) {
$this->error( "Invalid title", true );
}
-
- $wgArticle = new Article( $wgTitle );
-
+
+ $article = new Article( $wgTitle );
+
# Read the text
$text = $this->getStdin( Maintenance::STDIN_ALL );
-
+
# Do the edit
$this->output( "Saving... " );
- $status = $wgArticle->doEdit( $text, $summary,
+ $status = $article->doEdit( $text, $summary,
( $minor ? EDIT_MINOR : 0 ) |
- ( $bot ? EDIT_FORCE_BOT : 0 ) |
+ ( $bot ? EDIT_FORCE_BOT : 0 ) |
( $autoSummary ? EDIT_AUTOSUMMARY : 0 ) |
( $noRC ? EDIT_SUPPRESS_RC : 0 ) );
if ( $status->isOK() ) {
@@ -85,5 +85,5 @@ class EditCLI extends Maintenance {
}
$maintClass = "EditCLI";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
diff --git a/maintenance/eval.php b/maintenance/eval.php
index a990a4d8..3cc1d16a 100644
--- a/maintenance/eval.php
+++ b/maintenance/eval.php
@@ -16,7 +16,7 @@
* @ingroup Maintenance
*/
-$wgUseNormalUser = (bool)getenv('MW_WIKIUSER');
+$wgUseNormalUser = (bool)getenv( 'MW_WIKIUSER' );
$optionsWithArgs = array( 'd' );
@@ -39,8 +39,8 @@ if ( isset( $options['d'] ) ) {
}
}
-if ( function_exists( 'readline_add_history' )
- && function_exists( 'posix_isatty' ) && posix_isatty( 0 /*STDIN*/ ) )
+if ( function_exists( 'readline_add_history' )
+ && posix_isatty( 0 /*STDIN*/ ) )
{
$useReadline = true;
} else {
@@ -48,19 +48,20 @@ if ( function_exists( 'readline_add_history' )
}
if ( $useReadline ) {
- $historyFile = "{$_ENV['HOME']}/.mweval_history";
+ $historyFile = isset( $_ENV['HOME'] ) ?
+ "{$_ENV['HOME']}/.mweval_history" : "$IP/maintenance/.mweval_history";
readline_read_history( $historyFile );
}
-while ( ( $line = readconsole( '> ' ) ) !== false ) {
+while ( ( $line = Maintenance::readconsole() ) !== false ) {
if ( $useReadline ) {
readline_add_history( $line );
readline_write_history( $historyFile );
}
$val = eval( $line . ";" );
- if( is_null( $val ) ) {
+ if ( is_null( $val ) ) {
echo "\n";
- } elseif( is_string( $val ) || is_numeric( $val ) ) {
+ } elseif ( is_string( $val ) || is_numeric( $val ) ) {
echo "$val\n";
} else {
var_dump( $val );
diff --git a/maintenance/fetchText.php b/maintenance/fetchText.php
index 746ef8ad..ea56535d 100644
--- a/maintenance/fetchText.php
+++ b/maintenance/fetchText.php
@@ -20,7 +20,7 @@
* @ingroup Maintenance
*/
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class FetchText extends Maintenance {
public function __construct() {
@@ -28,35 +28,52 @@ class FetchText extends Maintenance {
$this->mDescription = "Fetch the revision text from an old_id";
}
- public function execute() {
+ /**
+ * Returns a string containing the following in order:
+ * textid
+ * \n
+ * length of text (-1 on error = failure to retrieve/unserialize/gunzip/etc)
+ * \n
+ * text (may be empty)
+ *
+ * Note that the text string itself is *not* followed by a newline.
+ */
+ public function execute() {
$db = wfGetDB( DB_SLAVE );
$stdin = $this->getStdin();
- while( !feof( $stdin ) ) {
+ while ( !feof( $stdin ) ) {
$line = fgets( $stdin );
- if( $line === false ) {
+ if ( $line === false ) {
// We appear to have lost contact...
break;
}
$textId = intval( $line );
$text = $this->doGetText( $db, $textId );
- $this->output( strlen( $text ) . "\n". $text );
+ if ( $text === false ) {
+ # actual error, not zero-length text
+ $textLen = "-1";
+ } else {
+ $textLen = strlen( $text );
+ }
+ $this->output( $textId . "\n" . $textLen . "\n" . $text );
}
}
-
+
/**
- * May throw a database error if, say, the server dies during query.
+ * May throw a database error if, say, the server dies during query.
* @param $db Database object
* @param $id int The old_id
* @return String
- */
+ */
private function doGetText( $db, $id ) {
$id = intval( $id );
$row = $db->selectRow( 'text',
array( 'old_text', 'old_flags' ),
array( 'old_id' => $id ),
- 'TextPassDumper::getText' );
+ __METHOD__ );
$text = Revision::getRevisionText( $row );
- if( $text === false ) {
+ if ( $text === false ) {
return false;
}
return $text;
@@ -64,4 +81,4 @@ class FetchText extends Maintenance {
}
$maintClass = "FetchText";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
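
Note on the protocol documented above: fetchText.php now echoes the text id back before the length, so the parent dumpTextPass.php process can detect out-of-sync responses. A sketch of the reading side, assuming $spawnRead is an open read pipe to fetchText.php; the helper name is illustrative:

<?php
// Editor's sketch of reading one framed response over the pipe
// (text id, newline, byte length or -1, newline, raw text with no trailing newline).
function readOneText( $spawnRead, $requestedId ) {
	$newId = fgets( $spawnRead );
	if ( $newId === false || intval( $newId ) != $requestedId ) {
		return false; // EOF or out-of-sync response
	}
	$nbytes = intval( fgets( $spawnRead ) );
	if ( $nbytes < 0 ) {
		return false; // fetchText.php signalled a retrieval error
	}
	$text = '';
	while ( strlen( $text ) < $nbytes ) {
		$buffer = fread( $spawnRead, $nbytes - strlen( $text ) );
		if ( $buffer === false ) {
			break;
		}
		$text .= $buffer;
	}
	return ( strlen( $text ) == $nbytes ) ? $text : false;
}
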
diff --git a/maintenance/findhooks.php b/maintenance/findhooks.php
index 13236b6b..04a5faef 100644
--- a/maintenance/findhooks.php
+++ b/maintenance/findhooks.php
@@ -2,7 +2,7 @@
/**
 * Simple script that tries to find documented hooks and hooks actually
 * in the code, and shows what's missing.
- *
+ *
* This script assumes that:
 * - hook names in hooks.txt are at the beginning of a line and single quoted.
 * - hook names in code are the first parameter of wfRunHooks.
@@ -12,6 +12,8 @@
*
* Any instance of wfRunHooks that doesn't meet these parameters will be noted.
*
+ * Copyright © Ashar Voultoiz
+ *
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
@@ -27,14 +29,12 @@
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/gpl.html
*
+ * @file
* @ingroup Maintenance
- *
- * @author Ashar Voultoiz <hashar@altern.org>
- * @copyright Copyright © Ashar voultoiz
- * @license http://www.gnu.org/copyleft/gpl.html GNU General Public Licence 2.0 or later
+ * @author Ashar Voultoiz <hashar at free dot fr>
*/
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class FindHooks extends Maintenance {
public function __construct() {
@@ -54,37 +54,42 @@ class FindHooks extends Maintenance {
$potential = array();
$bad = array();
$pathinc = array(
- $IP.'/',
- $IP.'/includes/',
- $IP.'/includes/api/',
- $IP.'/includes/db/',
- $IP.'/includes/diff/',
- $IP.'/includes/filerepo/',
- $IP.'/includes/parser/',
- $IP.'/includes/search/',
- $IP.'/includes/specials/',
- $IP.'/includes/upload/',
- $IP.'/languages/',
- $IP.'/maintenance/',
- $IP.'/skins/',
+ $IP . '/',
+ $IP . '/includes/',
+ $IP . '/includes/api/',
+ $IP . '/includes/db/',
+ $IP . '/includes/diff/',
+ $IP . '/includes/filerepo/',
+ $IP . '/includes/installer/',
+ $IP . '/includes/parser/',
+ $IP . '/includes/resourceloader/',
+ $IP . '/includes/revisiondelete/',
+ $IP . '/includes/search/',
+ $IP . '/includes/specials/',
+ $IP . '/includes/upload/',
+ $IP . '/languages/',
+ $IP . '/maintenance/',
+ $IP . '/maintenance/tests/',
+ $IP . '/maintenance/tests/parser/',
+ $IP . '/skins/',
);
- foreach( $pathinc as $dir ) {
+ foreach ( $pathinc as $dir ) {
$potential = array_merge( $potential, $this->getHooksFromPath( $dir ) );
$bad = array_merge( $bad, $this->getBadHooksFromPath( $dir ) );
}
-
+
$potential = array_unique( $potential );
$bad = array_unique( $bad );
$todo = array_diff( $potential, $documented );
$deprecated = array_diff( $documented, $potential );
-
+
// let's show the results:
- $this->printArray('Undocumented', $todo );
- $this->printArray('Documented and not found', $deprecated );
- $this->printArray('Unclear hook calls', $bad );
-
- if ( count( $todo ) == 0 && count( $deprecated ) == 0 && count( $bad ) == 0 )
+ $this->printArray( 'Undocumented', $todo );
+ $this->printArray( 'Documented and not found', $deprecated );
+ $this->printArray( 'Unclear hook calls', $bad );
+
+ if ( count( $todo ) == 0 && count( $deprecated ) == 0 && count( $bad ) == 0 )
$this->output( "Looks good!\n" );
}
@@ -93,14 +98,14 @@ class FindHooks extends Maintenance {
* @return array of documented hooks
*/
private function getHooksFromDoc( $doc ) {
- if( $this->hasOption( 'online' ) ){
+ if ( $this->hasOption( 'online' ) ) {
// All hooks
$allhookdata = Http::get( 'http://www.mediawiki.org/w/api.php?action=query&list=categorymembers&cmtitle=Category:MediaWiki_hooks&cmlimit=500&format=php' );
$allhookdata = unserialize( $allhookdata );
$allhooks = array();
- foreach( $allhookdata['query']['categorymembers'] as $page ) {
+ foreach ( $allhookdata['query']['categorymembers'] as $page ) {
$found = preg_match( '/Manual\:Hooks\/([a-zA-Z0-9- :]+)/', $page['title'], $matches );
- if( $found ) {
+ if ( $found ) {
$hook = str_replace( ' ', '_', $matches[1] );
$allhooks[] = $hook;
}
@@ -109,9 +114,9 @@ class FindHooks extends Maintenance {
$oldhookdata = Http::get( 'http://www.mediawiki.org/w/api.php?action=query&list=categorymembers&cmtitle=Category:Removed_hooks&cmlimit=500&format=php' );
$oldhookdata = unserialize( $oldhookdata );
$removed = array();
- foreach( $oldhookdata['query']['categorymembers'] as $page ) {
+ foreach ( $oldhookdata['query']['categorymembers'] as $page ) {
$found = preg_match( '/Manual\:Hooks\/([a-zA-Z0-9- :]+)/', $page['title'], $matches );
- if( $found ) {
+ if ( $found ) {
$hook = str_replace( ' ', '_', $matches[1] );
$removed[] = $hook;
}
@@ -133,7 +138,7 @@ class FindHooks extends Maintenance {
private function getHooksFromFile( $file ) {
$content = file_get_contents( $file );
$m = array();
- preg_match_all( '/wfRunHooks\(\s*([\'"])(.*?)\1/', $content, $m);
+ preg_match_all( '/wfRunHooks\(\s*([\'"])(.*?)\1/', $content, $m );
return $m[2];
}
@@ -144,13 +149,14 @@ class FindHooks extends Maintenance {
*/
private function getHooksFromPath( $path ) {
$hooks = array();
- if( $dh = opendir($path) ) {
- while(($file = readdir($dh)) !== false) {
- if( filetype($path.$file) == 'file' ) {
- $hooks = array_merge( $hooks, $this->getHooksFromFile($path.$file) );
+ $dh = opendir( $path );
+ if ( $dh ) {
+ while ( ( $file = readdir( $dh ) ) !== false ) {
+ if ( filetype( $path . $file ) == 'file' ) {
+ $hooks = array_merge( $hooks, $this->getHooksFromFile( $path . $file ) );
}
}
- closedir($dh);
+ closedir( $dh );
}
return $hooks;
}
@@ -164,9 +170,9 @@ class FindHooks extends Maintenance {
$content = file_get_contents( $file );
$m = array();
# We want to skip the "function wfRunHooks()" one. :)
- preg_match_all( '/(?<!function )wfRunHooks\(\s*[^\s\'"].*/', $content, $m);
+ preg_match_all( '/(?<!function )wfRunHooks\(\s*[^\s\'"].*/', $content, $m );
$list = array();
- foreach( $m[0] as $match ){
+ foreach ( $m[0] as $match ) {
$list[] = $match . "(" . $file . ")";
}
return $list;
@@ -179,14 +185,15 @@ class FindHooks extends Maintenance {
*/
private function getBadHooksFromPath( $path ) {
$hooks = array();
- if( $dh = opendir($path) ) {
- while(($file = readdir($dh)) !== false) {
+ $dh = opendir( $path );
+ if ( $dh ) {
+ while ( ( $file = readdir( $dh ) ) !== false ) {
# We don't want to read this file as it contains bad calls to wfRunHooks()
- if( filetype( $path.$file ) == 'file' && !$path.$file == __FILE__ ) {
- $hooks = array_merge( $hooks, $this->getBadHooksFromFile($path.$file) );
+ if ( filetype( $path . $file ) == 'file' && $path . $file != __FILE__ ) {
+ $hooks = array_merge( $hooks, $this->getBadHooksFromFile( $path . $file ) );
}
}
- closedir($dh);
+ closedir( $dh );
}
return $hooks;
}
@@ -198,10 +205,10 @@ class FindHooks extends Maintenance {
 * @param $sort Boolean: whether to sort the array (Default: true)
*/
private function printArray( $msg, $arr, $sort = true ) {
- if($sort) asort($arr);
- foreach($arr as $v) $this->output( "$msg: $v\n" );
+ if ( $sort ) asort( $arr );
+ foreach ( $arr as $v ) $this->output( "$msg: $v\n" );
}
}
$maintClass = "FindHooks";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
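
Note on getHooksFromFile() above: the regular expression captures the quoted first argument of each wfRunHooks() call. A small runnable sketch with sample calls (the hook invocations shown are examples only):

<?php
// Editor's sketch of the hook-name extraction regex used by FindHooks.
$content = "wfRunHooks( 'ArticleSave', array( &\$article ) );\n" .
	'wfRunHooks( "SkinTemplateOutputPageBeforeExec", array( &$skin, &$tpl ) );';
$m = array();
preg_match_all( '/wfRunHooks\(\s*([\'"])(.*?)\1/', $content, $m );
print_r( $m[2] );
// Array ( [0] => ArticleSave [1] => SkinTemplateOutputPageBeforeExec )
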
diff --git a/maintenance/fixSlaveDesync.php b/maintenance/fixSlaveDesync.php
index c585beb1..fe892944 100644
--- a/maintenance/fixSlaveDesync.php
+++ b/maintenance/fixSlaveDesync.php
@@ -18,32 +18,29 @@
* @ingroup Maintenance
*/
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class FixSlaveDesync extends Maintenance {
public function __construct() {
- global $wgUseRootUser;
- $wgUseRootUser = true;
-
parent::__construct();
$this->mDescription = "";
-
}
-
+
+ public function getDbType() {
+ return Maintenance::DB_ADMIN;
+ }
+
public function execute() {
- global $slaveIndexes, $wgDBservers;
- $slaveIndexes = array();
- for ( $i = 1; $i < count( $wgDBservers ); $i++ ) {
+ $this->slaveIndexes = array();
+ for ( $i = 1; $i < wfGetLB()->getServerCount(); $i++ ) {
if ( wfGetLB()->isNonZeroLoad( $i ) ) {
- $slaveIndexes[] = $i;
+ $this->slaveIndexes[] = $i;
}
}
if ( $this->hasArg() ) {
$this->desyncFixPage( $this->getArg() );
} else {
- $dbw = wfGetDB( DB_MASTER );
- $maxPage = $dbw->selectField( 'page', 'MAX(page_id)', false, __METHOD__ );
$corrupt = $this->findPageLatestCorruption();
foreach ( $corrupt as $id => $dummy ) {
$this->desyncFixPage( $id );
@@ -69,10 +66,8 @@ class FixSlaveDesync extends Maintenance {
}
}
$this->output( "\n" );
- $dbw->freeResult( $res );
- global $slaveIndexes;
- foreach ( $slaveIndexes as $i ) {
+ foreach ( $this->slaveIndexes as $i ) {
$db = wfGetDB( $i );
$res = $db->select( 'page', array( 'page_id', 'page_latest' ), array( 'page_id<6054123' ), __METHOD__ );
foreach ( $res as $row ) {
@@ -81,7 +76,6 @@ class FixSlaveDesync extends Maintenance {
$this->output( $row->page_id . "\t" );
}
}
- $db->freeResult( $res );
}
$this->output( "\n" );
return $desync;
@@ -92,16 +86,14 @@ class FixSlaveDesync extends Maintenance {
* @param $pageID int The page_id to fix
*/
private function desyncFixPage( $pageID ) {
- global $slaveIndexes;
-
# Check for a corrupted page_latest
$dbw = wfGetDB( DB_MASTER );
$dbw->begin();
- $realLatest = $dbw->selectField( 'page', 'page_latest', array( 'page_id' => $pageID ),
+ $realLatest = $dbw->selectField( 'page', 'page_latest', array( 'page_id' => $pageID ),
__METHOD__, 'FOR UPDATE' );
- #list( $masterFile, $masterPos ) = $dbw->getMasterPos();
+ # list( $masterFile, $masterPos ) = $dbw->getMasterPos();
$found = false;
- foreach ( $slaveIndexes as $i ) {
+ foreach ( $this->slaveIndexes as $i ) {
$db = wfGetDB( $i );
/*
if ( !$db->masterPosWait( $masterFile, $masterPos, 10 ) ) {
@@ -109,7 +101,7 @@ class FixSlaveDesync extends Maintenance {
$dbw->commit();
sleep(10);
return;
- }*/
+ }*/
$latest = $db->selectField( 'page', 'page_latest', array( 'page_id' => $pageID ), __METHOD__ );
$max = $db->selectField( 'revision', 'MAX(rev_id)', false, __METHOD__ );
if ( $latest != $realLatest && $realLatest < $max ) {
@@ -125,20 +117,18 @@ class FixSlaveDesync extends Maintenance {
}
# Find the missing revisions
- $res = $dbw->select( 'revision', array( 'rev_id' ), array( 'rev_page' => $pageID ),
+ $res = $dbw->select( 'revision', array( 'rev_id' ), array( 'rev_page' => $pageID ),
__METHOD__, 'FOR UPDATE' );
$masterIDs = array();
foreach ( $res as $row ) {
$masterIDs[] = $row->rev_id;
}
- $dbw->freeResult( $res );
$res = $db->select( 'revision', array( 'rev_id' ), array( 'rev_page' => $pageID ), __METHOD__ );
$slaveIDs = array();
foreach ( $res as $row ) {
$slaveIDs[] = $row->rev_id;
}
- $db->freeResult( $res );
if ( count( $masterIDs ) < count( $slaveIDs ) ) {
$missingIDs = array_diff( $slaveIDs, $masterIDs );
if ( count( $missingIDs ) ) {
@@ -167,7 +157,7 @@ class FixSlaveDesync extends Maintenance {
# Revision
$row = $dbFrom->selectRow( 'revision', '*', array( 'rev_id' => $rid ), __METHOD__ );
if ( $toMaster ) {
- $id = $dbw->selectField( 'revision', 'rev_id', array( 'rev_id' => $rid ),
+ $id = $dbw->selectField( 'revision', 'rev_id', array( 'rev_id' => $rid ),
__METHOD__, 'FOR UPDATE' );
if ( $id ) {
$this->output( "Revision already exists\n" );
@@ -177,7 +167,7 @@ class FixSlaveDesync extends Maintenance {
$dbw->insert( 'revision', get_object_vars( $row ), __METHOD__, 'IGNORE' );
}
} else {
- foreach ( $slaveIndexes as $i ) {
+ foreach ( $this->slaveIndexes as $i ) {
$db = wfGetDB( $i );
$db->insert( 'revision', get_object_vars( $row ), __METHOD__, 'IGNORE' );
}
@@ -188,7 +178,7 @@ class FixSlaveDesync extends Maintenance {
if ( $toMaster ) {
$dbw->insert( 'text', get_object_vars( $row ), __METHOD__, 'IGNORE' );
} else {
- foreach ( $slaveIndexes as $i ) {
+ foreach ( $this->slaveIndexes as $i ) {
$db = wfGetDB( $i );
$db->insert( 'text', get_object_vars( $row ), __METHOD__, 'IGNORE' );
}
@@ -200,9 +190,9 @@ class FixSlaveDesync extends Maintenance {
if ( $found ) {
$this->output( "Fixing page_latest... " );
if ( $toMaster ) {
- #$dbw->update( 'page', array( 'page_latest' => $realLatest ), array( 'page_id' => $pageID ), __METHOD__ );
+ # $dbw->update( 'page', array( 'page_latest' => $realLatest ), array( 'page_id' => $pageID ), __METHOD__ );
} else {
- foreach ( $slaveIndexes as $i ) {
+ foreach ( $this->slaveIndexes as $i ) {
$db = wfGetDB( $i );
$db->update( 'page', array( 'page_latest' => $realLatest ), array( 'page_id' => $pageID ), __METHOD__ );
}
@@ -214,4 +204,4 @@ class FixSlaveDesync extends Maintenance {
}
$maintClass = "FixSlaveDesync";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
diff --git a/maintenance/fixTimestamps.php b/maintenance/fixTimestamps.php
index ea102fb8..3e3bd0a5 100644
--- a/maintenance/fixTimestamps.php
+++ b/maintenance/fixTimestamps.php
@@ -1,9 +1,9 @@
<?php
/**
- * This script fixes timestamp corruption caused by one or more webservers
+ * This script fixes timestamp corruption caused by one or more webservers
* temporarily being set to the wrong time. The time offset must be known and
- * consistent. Start and end times (in 14-character format) restrict the search,
- * and must bracket the damage. There must be a majority of good timestamps in the
+ * consistent. Start and end times (in 14-character format) restrict the search,
+ * and must bracket the damage. There must be a majority of good timestamps in the
* search period.
*
* This program is free software; you can redistribute it and/or modify
@@ -23,8 +23,8 @@
*
* @ingroup Maintenance
*/
-
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class FixTimestamps extends Maintenance {
public function __construct() {
@@ -36,25 +36,25 @@ class FixTimestamps extends Maintenance {
}
public function execute() {
- $offset = $this->getArg(0) * 3600;
- $start = $this->getArg(1);
- $end = $this->getArg(2);
+ $offset = $this->getArg( 0 ) * 3600;
+ $start = $this->getArg( 1 );
+ $end = $this->getArg( 2 );
$grace = 60; // maximum normal clock offset
-
+
# Find bounding revision IDs
$dbw = wfGetDB( DB_MASTER );
$revisionTable = $dbw->tableName( 'revision' );
$res = $dbw->query( "SELECT MIN(rev_id) as minrev, MAX(rev_id) as maxrev FROM $revisionTable " .
"WHERE rev_timestamp BETWEEN '{$start}' AND '{$end}'", __METHOD__ );
$row = $dbw->fetchObject( $res );
-
+
if ( is_null( $row->minrev ) ) {
$this->error( "No revisions in search period.", true );
}
-
+
$minRev = $row->minrev;
$maxRev = $row->maxrev;
-
+
# Select all timestamps and IDs
$sql = "SELECT rev_id, rev_timestamp FROM $revisionTable " .
"WHERE rev_id BETWEEN $minRev AND $maxRev";
@@ -64,13 +64,13 @@ class FixTimestamps extends Maintenance {
} else {
$expectedSign = 1;
}
-
+
$res = $dbw->query( $sql, __METHOD__ );
-
+
$lastNormal = 0;
$badRevs = array();
$numGoodRevs = 0;
-
+
foreach ( $res as $row ) {
$timestamp = wfTimestamp( TS_UNIX, $row->rev_timestamp );
$delta = $timestamp - $lastNormal;
@@ -89,26 +89,25 @@ class FixTimestamps extends Maintenance {
$badRevs[] = $row->rev_id;
}
}
- $dbw->freeResult( $res );
-
+
$numBadRevs = count( $badRevs );
if ( $numBadRevs > $numGoodRevs ) {
- $this->error(
+ $this->error(
"The majority of revisions in the search interval are marked as bad.
- Are you sure the offset ($offset) has the right sign? Positive means the clock
+ Are you sure the offset ($offset) has the right sign? Positive means the clock
was incorrectly set forward, negative means the clock was incorrectly set back.
- If the offset is right, then increase the search interval until there are enough
+ If the offset is right, then increase the search interval until there are enough
good revisions to provide a majority reference.", true );
} elseif ( $numBadRevs == 0 ) {
$this->output( "No bad revisions found.\n" );
- exit(0);
+ exit( 0 );
}
-
- $this->output( sprintf( "Fixing %d revisions (%.2f%% of revisions in search interval)\n",
- $numBadRevs, $numBadRevs / ($numGoodRevs + $numBadRevs) * 100 ) );
-
+
+ $this->output( sprintf( "Fixing %d revisions (%.2f%% of revisions in search interval)\n",
+ $numBadRevs, $numBadRevs / ( $numGoodRevs + $numBadRevs ) * 100 ) );
+
$fixup = -$offset;
$sql = "UPDATE $revisionTable " .
"SET rev_timestamp=DATE_FORMAT(DATE_ADD(rev_timestamp, INTERVAL $fixup SECOND), '%Y%m%d%H%i%s') " .
@@ -119,4 +118,4 @@ class FixTimestamps extends Maintenance {
}
$maintClass = "FixTimestamps";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
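
Note on the offset handling above: the first argument is given in hours, converted to seconds, and then negated to form the correction applied by the UPDATE query. A standalone sketch of the arithmetic, with an illustrative timestamp:

<?php
// Editor's sketch of the FixTimestamps offset arithmetic: an offset argument of
// +2 (hours, clock set forward) becomes a -7200 second fixup.
$offsetHours = 2;
$offset = $offsetHours * 3600;   // seconds, as computed in execute()
$fixup = -$offset;               // correction applied to each bad rev_timestamp

$bad = gmmktime( 12, 0, 0, 1, 1, 2011 );        // a corrupted 20110101120000
echo gmdate( 'YmdHis', $bad + $fixup ) . "\n";  // 20110101100000
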
diff --git a/maintenance/fixUserRegistration.php b/maintenance/fixUserRegistration.php
index d3305358..d4ff7c23 100644
--- a/maintenance/fixUserRegistration.php
+++ b/maintenance/fixUserRegistration.php
@@ -21,7 +21,7 @@
* @ingroup Maintenance
*/
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class FixUserRegistration extends Maintenance {
public function __construct() {
@@ -52,4 +52,4 @@ class FixUserRegistration extends Maintenance {
}
$maintClass = "FixUserRegistration";
-require_once( DO_MAINTENANCE );
+require_once( RUN_MAINTENANCE_IF_MAIN );
diff --git a/maintenance/fuzz-tester.php b/maintenance/fuzz-tester.php
index 6d8c57f2..a78522cd 100644
--- a/maintenance/fuzz-tester.php
+++ b/maintenance/fuzz-tester.php
@@ -13,9 +13,9 @@ Description:
How:
- Generate lots of nasty wiki text.
- - Ask the Parser to render that wiki text to HTML, or ask MediaWiki's forms
- to deal with that wiki text.
- - Check MediaWiki's output for problems.
+ - Ask the Parser to render that wiki text to HTML, or ask MediaWiki's forms
+ to deal with that wiki text.
+ - Check MediaWiki's output for problems.
- Repeat.
Why:
@@ -32,7 +32,7 @@ What type of problems are being checked for:
- Optionally checking for malformed HTML using the W3C validator.
Background:
- Many of the wikiFuzz class methods are a modified PHP port,
+ Many of the wikiFuzz class methods are a modified PHP port,
of a "shameless" Python port, of LCAMTUF'S MANGELME:
- http://www.securiteam.com/tools/6Z00N1PBFK.html
- http://www.securityfocus.com/archive/1/378632/2004-10-15/2004-10-21/0
@@ -43,15 +43,15 @@ Video:
Requirements:
To run this, you will need:
- - Command-line PHP5, with PHP-curl enabled (not all installations have this
- enabled - try "apt-get install php5-curl" if you're on Debian to install).
+ - Command-line PHP5, with PHP-curl enabled (not all installations have this
+ enabled - try "apt-get install php5-curl" if you're on Debian to install).
- the Tidy standalone executable. ("apt-get install tidy").
Optional:
- If you want to run the curl scripts, you'll need standalone curl installed
- ("apt-get install curl")
+ ("apt-get install curl")
- For viewing the W3C validator output on a command line, the "html2text"
- program may be useful ("apt-get install html2text")
+ program may be useful ("apt-get install html2text")
Saving tests and test results:
Any of the fuzz tests which find problems are saved for later review.
@@ -65,7 +65,7 @@ Saving tests and test results:
Wiki configuration for testing:
You should make some additions to LocalSettings.php in order to catch the most
errors. Note this configuration is for **TESTING PURPOSES ONLY**, and is IN NO
- WAY, SHAPE, OR FORM suitable for deployment on a hostile network. That said,
+ WAY, SHAPE, OR FORM suitable for deployment on a hostile network. That said,
personally I find these additions to be the most helpful for testing purposes:
// --------- Start ---------
@@ -99,7 +99,7 @@ Wiki configuration for testing:
$wgGroupPermissions['*']['makesysop'] = true;
// Enable weird and wonderful options:
- // Increase default error reporting level.
+ // Increase default error reporting level.
error_reporting (E_ALL); // At a later date could be increased to E_ALL | E_STRICT
$wgBlockOpenProxies = true; // Some block pages require this to be true in order to test.
$wgEnableUploads = true; // enable uploads.
@@ -127,14 +127,14 @@ Wiki configuration for testing:
require_once("extensions/Renameuser/SpecialRenameuser.php");
require_once("extensions/LinkSearch/LinkSearch.php");
// --------- End ---------
-
+
If you want to try E_STRICT error logging, add this to the above:
// --------- Start ---------
error_reporting (E_ALL | E_STRICT);
set_error_handler( 'error_handler' );
function error_handler ($type, $message, $file=__FILE__, $line=__LINE__) {
- if ($message == "var: Deprecated. Please use the public/private/protected modifiers") return;
- print "<br />\n<b>Strict Standards:</b> Type: <b>$type</b>: $message in <b>$file</b> on line <b>$line</b><br />\n";
+ if ($message == "var: Deprecated. Please use the public/private/protected modifiers") return;
+ print "<br />\n<b>Strict Standards:</b> Type: <b>$type</b>: $message in <b>$file</b> on line <b>$line</b><br />\n";
}
// --------- End ---------
@@ -152,62 +152,62 @@ Usage:
Console output:
- If requested, first any previously failed tests will be rerun.
- Then new tests will be generated and run. Any tests that fail will be saved,
- and a brief message about why they failed will be printed on the console.
+ and a brief message about why they failed will be printed on the console.
- The console will show the number of tests run, time run, number of tests
- failed, number of tests being done per minute, and the name of the current test.
+ failed, number of tests being done per minute, and the name of the current test.
TODO:
Some known things that could improve this script:
- - Logging in with cookie jar storage needed for some tests (as there are some
- pages that cannot be tested without being logged in, and which are currently
- untested - e.g. Special:Emailuser, Special:Preferences, adding to Watchist).
+ - Logging in with cookie jar storage needed for some tests (as there are some
+ pages that cannot be tested without being logged in, and which are currently
+ untested - e.g. Special:Emailuser, Special:Preferences, adding to Watchlist).
- Testing of Timeline extension (I cannot test as ploticus has/had issues on
- my architecture).
+ my architecture).
*/
-/////////////////////////// COMMAND LINE HELP ////////////////////////////////////
+// ///////////////////////// COMMAND LINE HELP ////////////////////////////////////
// This is a command line script, load MediaWiki env (gives command line options);
-require_once( dirname(__FILE__) . '/commandLine.inc' );
+require_once( dirname( __FILE__ ) . '/commandLine.inc' );
// if the user asked for an explanation of command line options.
if ( isset( $options["help"] ) ) {
- print <<<ENDS
+ print <<<ENDS
MediaWiki $wgVersion fuzz tester
Usage: php {$_SERVER["SCRIPT_NAME"]} [--quiet] [--base-url=<url-to-test-wiki>]
- [--directory=<failed-test-path>] [--include-binary]
- [--w3c-validate] [--delete-passed-retests] [--help]
- [--user=<username>] [--password=<password>]
- [--rerun-failed-tests] [--max-errors=<int>]
- [--max-runtime=<num-minutes>]
- [--specific-test=<test-name>]
+ [--directory=<failed-test-path>] [--include-binary]
+ [--w3c-validate] [--delete-passed-retests] [--help]
+ [--user=<username>] [--password=<password>]
+ [--rerun-failed-tests] [--max-errors=<int>]
+ [--max-runtime=<num-minutes>]
+ [--specific-test=<test-name>]
Options:
--quiet : Hides passed tests, shows only failed tests.
- --base-url : URL to a wiki on which to run the tests.
- The "http://" is optional and can be omitted.
+ --base-url : URL to a wiki on which to run the tests.
+ The "http://" is optional and can be omitted.
--directory : Full path to directory for storing failed tests.
- Will be created if it does not exist.
+ Will be created if it does not exist.
--include-binary : Includes non-alphanumeric characters in the tests.
- --w3c-validate : Validates pages using the W3C's web validator.
- Slow. Currently many pages fail validation.
+ --w3c-validate : Validates pages using the W3C's web validator.
+ Slow. Currently many pages fail validation.
--user : Login name of a valid user on your test wiki.
- --password : Password for the valid user on your test wiki.
+ --password : Password for the valid user on your test wiki.
--delete-passed-retests : Will delete retests that now pass.
- Requires --rerun-failed-tests to be meaningful.
+ Requires --rerun-failed-tests to be meaningful.
--rerun-failed-tests : Whether to rerun any previously failed tests.
--max-errors : Maximum number of errors to report before exiting.
- Does not include errors from --rerun-failed-tests
+ Does not include errors from --rerun-failed-tests
--max-runtime : Maximum runtime, in minutes, to run before exiting.
- Only applies to new tests, not --rerun-failed-tests
- --specific-test : Runs only the specified fuzz test.
- Only applies to new tests, not --rerun-failed-tests
+ Only applies to new tests, not --rerun-failed-tests
+ --specific-test : Runs only the specified fuzz test.
+ Only applies to new tests, not --rerun-failed-tests
--keep-passed-tests : Saves all test files, even those that pass.
--help : Show this help message.
Example:
- If you wanted to fuzz test a nightly MediaWiki checkout using cron for 1 hour,
+ If you wanted to fuzz test a nightly MediaWiki checkout using cron for 1 hour,
and only wanted to be informed of errors, and did not want to redo previously
failed tests, and wanted a maximum of 100 errors, then you could do:
php {$_SERVER["SCRIPT_NAME"]} --quiet --max-errors=100 --max-runtime=60
@@ -215,659 +215,660 @@ Example:
ENDS;
- exit( 0 );
+ exit( 0 );
}
// if we got command line options, check they look valid.
-$validOptions = array ("quiet", "base-url", "directory", "include-binary",
- "w3c-validate", "user", "password", "delete-passed-retests",
- "rerun-failed-tests", "max-errors",
- "max-runtime", "specific-test", "keep-passed-tests", "help" );
-if (!empty($options)) {
- $unknownArgs = array_diff (array_keys($options), $validOptions);
- foreach ($unknownArgs as $invalidArg) {
- print "Ignoring invalid command-line option: --$invalidArg\n";
- }
+$validOptions = array ( "quiet", "base-url", "directory", "include-binary",
+ "w3c-validate", "user", "password", "delete-passed-retests",
+ "rerun-failed-tests", "max-errors",
+ "max-runtime", "specific-test", "keep-passed-tests", "help" );
+if ( !empty( $options ) ) {
+ $unknownArgs = array_diff ( array_keys( $options ), $validOptions );
+ foreach ( $unknownArgs as $invalidArg ) {
+ print "Ignoring invalid command-line option: --$invalidArg\n";
+ }
}
-///////////////////////////// CONFIGURATION ////////////////////////////////////
+// /////////////////////////// CONFIGURATION ////////////////////////////////////
// URL to some wiki on which we can run our tests.
-if (!empty($options["base-url"])) {
- define("WIKI_BASE_URL", $options["base-url"]);
+if ( !empty( $options["base-url"] ) ) {
+ define( "WIKI_BASE_URL", $options["base-url"] );
} else {
- define("WIKI_BASE_URL", $wgServer . $wgScriptPath . '/');
+ define( "WIKI_BASE_URL", $wgServer . $wgScriptPath . '/' );
}
// The directory name where we store the output.
// Example for Windows: "c:\\temp\\wiki-fuzz"
-if (!empty($options["directory"])) {
- define("DIRECTORY", $options["directory"] );
+if ( !empty( $options["directory"] ) ) {
+ define( "DIRECTORY", $options["directory"] );
} else {
- define("DIRECTORY", "{$wgUploadDirectory}/fuzz-tests");
+ define( "DIRECTORY", "{$wgUploadDirectory}/fuzz-tests" );
}
// Should our test fuzz data include binary strings?
-define("INCLUDE_BINARY", isset($options["include-binary"]) );
+define( "INCLUDE_BINARY", isset( $options["include-binary"] ) );
// Whether we want to validate HTML output on the web.
// At the moment very few generated pages will validate, so not recommended.
-define("VALIDATE_ON_WEB", isset($options["w3c-validate"]) );
+define( "VALIDATE_ON_WEB", isset( $options["w3c-validate"] ) );
// URL to use to validate our output:
-define("VALIDATOR_URL", "http://validator.w3.org/check");
+define( "VALIDATOR_URL", "http://validator.w3.org/check" );
// Location of Tidy standalone executable.
-define("PATH_TO_TIDY", "/usr/bin/tidy");
+define( "PATH_TO_TIDY", "/usr/bin/tidy" );
-// The name of a user who has edited on your wiki. Used
+// The name of a user who has edited on your wiki. Used
// when testing the Special:Contributions and Special:Userlogin page.
-if (!empty($options["user"])) {
- define("USER_ON_WIKI", $options["user"] );
+if ( !empty( $options["user"] ) ) {
+ define( "USER_ON_WIKI", $options["user"] );
} else {
- define("USER_ON_WIKI", "nickj");
+ define( "USER_ON_WIKI", "nickj" );
}
// The password of the above user. Used when testing the login page,
-// and to do this we sometimes need to login successfully.
-if (!empty($options["password"])) {
- define("USER_PASSWORD", $options["password"] );
+// and to do this we sometimes need to login successfully.
+if ( !empty( $options["password"] ) ) {
+ define( "USER_PASSWORD", $options["password"] );
} else {
- // And no, this is not a valid password on any public wiki.
- define("USER_PASSWORD", "nickj");
+ // And no, this is not a valid password on any public wiki.
+ define( "USER_PASSWORD", "nickj" );
}
// If we have a test that failed, and then we run it again, and it passes,
// do you want to delete it or keep it?
-define("DELETE_PASSED_RETESTS", isset($options["delete-passed-retests"]) );
+define( "DELETE_PASSED_RETESTS", isset( $options["delete-passed-retests"] ) );
// Do we want to rerun old saved tests at script startup?
// Set to true to help catch regressions, or false if you only want new stuff.
-define("RERUN_OLD_TESTS", isset($options["rerun-failed-tests"]) );
+define( "RERUN_OLD_TESTS", isset( $options["rerun-failed-tests"] ) );
// File where the database errors are logged. Should be defined in LocalSettings.php.
-define("DB_ERROR_LOG_FILE", $wgDBerrorLog );
+define( "DB_ERROR_LOG_FILE", $wgDBerrorLog );
// Run in chatty mode (all output, default), or run in quiet mode (only prints out details of failed tests)?
-define("QUIET", isset($options["quiet"]) );
+define( "QUIET", isset( $options["quiet"] ) );
// Keep all test files, even those that pass. Potentially useful for tracking input that causes something
// unusual to happen, if you don't know what "unusual" is until later.
-define("KEEP_PASSED_TESTS", isset($options["keep-passed-tests"]) );
+define( "KEEP_PASSED_TESTS", isset( $options["keep-passed-tests"] ) );
// The maximum runtime, if specified.
-if (!empty($options["max-runtime"]) && intval($options["max-runtime"])>0) {
- define("MAX_RUNTIME", intval($options["max-runtime"]) );
+if ( !empty( $options["max-runtime"] ) && intval( $options["max-runtime"] ) > 0 ) {
+ define( "MAX_RUNTIME", intval( $options["max-runtime"] ) );
}
// The maximum number of problems to find, if specified. Excludes retest errors.
-if (!empty($options["max-errors"]) && intval($options["max-errors"])>0) {
- define("MAX_ERRORS", intval($options["max-errors"]) );
+if ( !empty( $options["max-errors"] ) && intval( $options["max-errors"] ) > 0 ) {
+ define( "MAX_ERRORS", intval( $options["max-errors"] ) );
}
// If the user has requested a specific test (instead of all tests), check that the test they asked for looks valid.
-if (!empty($options["specific-test"])) {
- if (class_exists($options["specific-test"]) && get_parent_class($options["specific-test"])=="pageTest") {
- define("SPECIFIC_TEST", $options["specific-test"] );
- }
- else {
- print "Ignoring invalid --specific-test\n";
- }
+if ( !empty( $options["specific-test"] ) ) {
+ if ( class_exists( $options["specific-test"] ) && get_parent_class( $options["specific-test"] ) == "pageTest" ) {
+ define( "SPECIFIC_TEST", $options["specific-test"] );
+ }
+ else {
+ print "Ignoring invalid --specific-test\n";
+ }
}
// Define the file extensions we'll use:
-define("PHP_TEST" , ".test.php");
-define("CURL_TEST", ".curl.sh" );
-define("DATA_FILE", ".data.bin");
-define("INFO_FILE", ".info.txt");
-define("HTML_FILE", ".wiki_preview.html");
+define( "PHP_TEST" , ".test.php" );
+define( "CURL_TEST", ".curl.sh" );
+define( "DATA_FILE", ".data.bin" );
+define( "INFO_FILE", ".info.txt" );
+define( "HTML_FILE", ".wiki_preview.html" );
// If it goes wrong, we want to know about it.
-error_reporting(E_ALL | E_STRICT);
+error_reporting( E_ALL | E_STRICT );
-//////////////// A CLASS THAT GENERATES RANDOM NASTY WIKI & HTML STRINGS //////////////////////
+// ////////////// A CLASS THAT GENERATES RANDOM NASTY WIKI & HTML STRINGS //////////////////////
class wikiFuzz {
- // Only some HTML tags are understood with params by MediaWiki; the rest are ignored.
- // List the tags that accept params below, as well as what those params are.
- public static $data = array(
- "B" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
- "CAPTION" => array("CLASS", "ID", "STYLE", "align", "lang", "dir", "title"),
- "CENTER" => array("CLASS", "STYLE", "ID", "lang", "dir", "title"),
- "DIV" => array("CLASS", "STYLE", "ID", "align", "lang", "dir", "title"),
- "FONT" => array("CLASS", "STYLE", "ID", "lang", "dir", "title", "face", "size", "color"),
- "H1" => array("STYLE", "CLASS", "ID", "align", "lang", "dir", "title"),
- "H2" => array("STYLE", "CLASS", "ID", "align", "lang", "dir", "title"),
- "HR" => array("STYLE", "CLASS", "ID", "WIDTH", "lang", "dir", "title", "size", "noshade"),
- "LI" => array("CLASS", "ID", "STYLE", "lang", "dir", "title", "type", "value"),
- "TABLE" => array("STYLE", "CLASS", "ID", "BGCOLOR", "WIDTH", "ALIGN", "BORDER", "CELLPADDING",
- "CELLSPACING", "lang", "dir", "title", "summary", "frame", "rules"),
- "TD" => array("STYLE", "CLASS", "ID", "BGCOLOR", "WIDTH", "ALIGN", "COLSPAN", "ROWSPAN",
- "VALIGN", "abbr", "axis", "headers", "scope", "nowrap", "height", "lang",
- "dir", "title", "char", "charoff"),
- "TH" => array("STYLE", "CLASS", "ID", "BGCOLOR", "WIDTH", "ALIGN", "COLSPAN", "ROWSPAN",
- "VALIGN", "abbr", "axis", "headers", "scope", "nowrap", "height", "lang",
- "dir", "title", "char", "charoff"),
- "TR" => array("CLASS", "STYLE", "ID", "BGCOLOR", "ALIGN", "VALIGN", "lang", "dir", "title", "char", "charoff"),
- "UL" => array("CLASS", "STYLE", "ID", "lang", "dir", "title", "type"),
- "P" => array("style", "class", "id", "align", "lang", "dir", "title"),
- "blockquote" => array("CLASS", "ID", "STYLE", "lang", "dir", "title", "cite"),
- "span" => array("CLASS", "ID", "STYLE", "align", "lang", "dir", "title"),
- "code" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
- "tt" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
- "small" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
- "big" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
- "s" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
- "u" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
- "del" => array("CLASS", "ID", "STYLE", "lang", "dir", "title", "datetime", "cite"),
- "ins" => array("CLASS", "ID", "STYLE", "lang", "dir", "title", "datetime", "cite"),
- "sub" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
- "sup" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
- "ol" => array("CLASS", "ID", "STYLE", "lang", "dir", "title", "type", "start"),
- "br" => array("CLASS", "ID", "STYLE", "title", "clear"),
- "cite" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
- "var" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
- "dl" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
- "ruby" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
- "rt" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
- "rp" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
- "dt" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
- "dl" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
- "em" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
- "strong" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
- "i" => array("CLASS", "ID", "STYLE", "lang", "dir", "title"),
- "thead" => array("CLASS", "ID", "STYLE", "lang", "dir", "title", 'align', 'char', 'charoff', 'valign'),
- "tfoot" => array("CLASS", "ID", "STYLE", "lang", "dir", "title", 'align', 'char', 'charoff', 'valign'),
- "tbody" => array("CLASS", "ID", "STYLE", "lang", "dir", "title", 'align', 'char', 'charoff', 'valign'),
- "colgroup" => array("CLASS", "ID", "STYLE", "lang", "dir", "title", 'align', 'char', 'charoff', 'valign', 'span', 'width'),
- "col" => array("CLASS", "ID", "STYLE", "lang", "dir", "title", 'align', 'char', 'charoff', 'valign', 'span', 'width'),
- "pre" => array("CLASS", "ID", "STYLE", "lang", "dir", "title", "width"),
-
- // extension tags that accept parameters:
- "sort" => array("order", "class"),
- "ref" => array("name"),
- "categorytree" => array("hideroot", "mode", "style"),
- "chemform" => array("link", "wikilink", "query"),
- "section" => array("begin", "new"),
-
- // older MW transclusion.
- "transclude" => array("page"),
- );
-
- // The types of HTML tags that we will be testing are defined above.
- // Note: this needs to be initialized later to be equal to: array_keys(wikiFuzz::$data);
- // as such, it also needs to be publicly modifiable.
- public static $types;
-
-
- // Some attribute values.
- static private $other = array("&","=",":","?","\"","\n","%n%n%n%n%n%n%n%n%n%n%n%n","\\");
- static private $ints = array(
- // various numbers
- "0","-1","127","-7897","89000","808080","90928345",
- "0xfffffff","ffff",
-
- // Different ways of saying: '
- "&#0000039;", // Long UTF-8 Unicode encoding
- "&#39;", // dec version.
- "&#x27;", // hex version.
- "&#xA7;", // malformed hex variant, MSB not zero.
-
- // Different ways of saying: "
- "&#0000034;", // Long UTF-8 Unicode encoding
- "&#34;",
- "&#x22;", // hex version.
- "&#xA2;", // malformed hex variant, MSB not zero.
-
- // Different ways of saying: <
- "<",
- "&#0000060", // Long UTF-8 Unicode encoding without semicolon (Mediawiki wants the colon)
- "&#0000060;", // Long UTF-8 Unicode encoding with semicolon
- "&#60;",
- "&#x3C;", // hex version.
- "&#xBC;", // malformed hex variant, MSB not zero.
- "&#x0003C;", // mid-length hex version
- "&#X00003C;", // slightly longer hex version, with capital "X"
-
- // Different ways of saying: >
- ">",
- "&#0000062;", // Long UTF-8 Unicode encoding
- "&#62;",
- "&#x3E;", // hex version.
- "&#xBE;", // malformed variant, MSB not zero.
-
- // Different ways of saying: [
- "&#0000091;", // Long UTF-8 Unicode encoding
- "&#91;",
- "&#x5B;", // hex version.
-
- // Different ways of saying: {{
- "&#0000123;&#0000123;", // Long UTF-8 Unicode encoding
- "&#123;&#123;",
- "&#x7B;&#x7B;", // hex version.
-
- // Different ways of saying: |
- "&#0000124;", // Long UTF-8 Unicode encoding
- "&#124;",
- "&#x7C;", // hex version.
- "&#xFC;", // malformed hex variant, MSB not zero.
-
- // a "lignature" - http://www.robinlionheart.com/stds/html4/spchars#ligature
- "&zwnj;"
- );
-
- // Defines various wiki-related bits of syntax that can potentially cause
- // MediaWiki to do something other than just print that literal text.
- static private $ext = array(
- // links, templates, parameters.
- "[[", "]]", "{{", "}}", "|", "[", "]", "{{{", "}}}", "|]]",
-
- // wiki tables.
- "\n{|", "\n|}",
- "!",
- "\n!",
- "!!",
- "||",
- "\n|-", "| ", "\n|",
-
- // section headings.
- "=", "==", "===", "====", "=====", "======",
-
- // lists (ordered and unordered) and indentation.
- "\n*", "*", "\n:", ":",
- "\n#", "#",
-
- // definition lists (dl, dt, dd), newline, and newline with pre, and a tab.
- "\n;", ";", "\n ",
-
- // Whitespace: newline, tab, space.
- "\n", "\t", " ",
-
- // Some XSS attack vectors from http://ha.ckers.org/xss.html
- "&#x09;", // tab
- "&#x0A;", // newline
- "&#x0D;", // carriage return
- "\0", // null character
- " &#14; ", // spaces and meta characters
- "'';!--\"<XSS>=&{()}", // compact injection of XSS & SQL tester
-
- // various NULL fields
- "%00",
- "&#00;",
- "\0",
-
- // horizontal rule.
- "-----", "\n-----",
-
- // signature, redirect, bold, italics.
- "~~~~", "#REDIRECT [[", "'''", "''",
-
- // comments.
- "<!--", "-->",
-
- // quotes.
- "\"", "'",
-
- // tag start and tag end.
- "<", ">",
-
- // implicit link creation on URIs.
- "http://",
- "https://",
- "ftp://",
- "irc://",
- "news:",
- 'gopher://',
- 'telnet://',
- 'nntp://',
- 'worldwind://',
- 'mailto:',
-
- // images.
- "[[image:",
- ".gif",
- ".png",
- ".jpg",
- ".jpeg",
- 'thumbnail=',
- 'thumbnail',
- 'thumb=',
- 'thumb',
- 'right',
- 'none',
- 'left',
- 'framed',
- 'frame',
- 'enframed',
- 'centre',
- 'center',
- "Image:",
- "[[:Image",
- 'px',
- 'upright=',
- 'border',
-
- // misc stuff to throw at the Parser.
- '%08X',
- '/',
- ":x{|",
- "\n|+",
- "<noinclude>",
- "</noinclude>",
- " \302\273",
- " :",
- " !",
- " ;",
- "\302\253",
- "[[category:",
- "?=",
- "(",
- ")",
- "]]]",
- "../",
- "{{{{",
- "}}}}",
- "[[Special:",
- "<includeonly>",
- "</includeonly>",
- "<!--MWTEMPLATESECTION=",
- '<!--MWTOC-->',
-
- // implicit link creation on booknum, RFC, and PubMed ID usage (both with and without IDs)
- "ISBN 2",
- "RFC 000",
- "PMID 000",
- "ISBN ",
- "RFC ",
- "PMID ",
-
- // magic words:
- '__NOTOC__',
- '__FORCETOC__',
- '__NOEDITSECTION__',
- '__START__',
- '__NOTITLECONVERT__',
- '__NOCONTENTCONVERT__',
- '__END__',
- '__TOC__',
- '__NOTC__',
- '__NOCC__',
- "__FORCETOC__",
- "__NEWSECTIONLINK__",
- "__NOGALLERY__",
-
- // more magic words / internal templates.
- '{{PAGENAME}}',
- '{{PAGENAMEE}}',
- '{{NAMESPACE}}',
- "{{MSG:",
- "}}",
- "{{MSGNW:",
- "}}",
- "{{INT:",
- "}}",
- '{{SITENAME}}',
- "{{NS:",
- "}}",
- "{{LOCALURL:",
- "}}",
- "{{LOCALURLE:",
- "}}",
- "{{SCRIPTPATH}}",
- "{{GRAMMAR:gentiv|",
- "}}",
- "{{REVISIONID}}",
- "{{SUBPAGENAME}}",
- "{{SUBPAGENAMEE}}",
- "{{ns:0}}",
- "{{fullurle:",
- "}}",
- "{{subst::",
- "}}",
- "{{UCFIRST:",
- "}}",
- "{{UC:",
- '{{SERVERNAME}}',
- '{{SERVER}}',
- "{{RAW:",
- "}}",
- "{{PLURAL:",
- "}}",
- "{{LCFIRST:",
- "}}",
- "{{LC:",
- "}}",
- '{{CURRENTWEEK}}',
- '{{CURRENTDOW}}',
- "{{INT:{{LC:contribs-showhideminor}}|",
- "}}",
- "{{INT:googlesearch|",
- "}}",
- "{{BASEPAGENAME}}",
- "{{CONTENTLANGUAGE}}",
- "{{PAGESINNAMESPACE:}}",
- "{{#language:",
- "}}",
- "{{#special:",
- "}}",
- "{{#special:emailuser",
- "}}",
-
- // Some raw link for magic words.
- "{{NUMBEROFPAGES:R",
- "}}",
- "{{NUMBEROFUSERS:R",
- "}}",
- "{{NUMBEROFARTICLES:R",
- "}}",
- "{{NUMBEROFFILES:R",
- "}}",
- "{{NUMBEROFADMINS:R",
- "}}",
- "{{padleft:",
- "}}",
- "{{padright:",
- "}}",
- "{{DEFAULTSORT:",
- "}}",
-
- // internal Math "extension":
- "<math>",
- "</math>",
-
- // Parser extension functions:
- "{{#expr:",
- "{{#if:",
- "{{#ifeq:",
- "{{#ifexist:",
- "{{#ifexpr:",
- "{{#switch:",
- "{{#time:",
- "}}",
-
- // references table for the Cite extension.
- "<references/>",
-
- // Internal Parser tokens - try inserting some of these.
- "UNIQ25f46b0524f13e67NOPARSE",
- "UNIQ17197916557e7cd6-HTMLCommentStrip46238afc3bb0cf5f00000002",
- "\x07UNIQ17197916557e7cd6-HTMLCommentStrip46238afc3bb0cf5f00000002-QINU",
-
- // Inputbox extension:
- "<inputbox>\ntype=search\nsearchbuttonlabel=\n",
- "</inputbox>",
-
- // charInsert extension:
- "<charInsert>",
- "</charInsert>",
-
- // wikiHiero extension:
- "<hiero>",
- "</hiero>",
-
- // Image gallery:
- "<gallery>",
- "</gallery>",
-
- // FixedImage extension.
- "<fundraising/>",
-
- // Timeline extension: currently untested.
-
- // Nowiki:
- "<nOwIkI>",
- "</nowiki>",
-
- // an external image to test the external image displaying code
- "http://debian.org/Pics/debian.png",
-
- // LabeledSectionTransclusion extension.
- "{{#lstx:",
- "}}",
- "{{#lst:",
- "}}",
- "{{#lst:Main Page|",
- "}}"
- );
-
- /**
- ** Randomly returns one element of the input array.
- */
- static public function chooseInput(array $input) {
- $randindex = wikiFuzz::randnum(count($input) - 1);
- return $input[$randindex];
- }
-
- // Max number of parameters for HTML attributes.
- static private $maxparams = 10;
-
- /**
- ** Returns a random number between $start and $finish (inclusive).
- */
- static public function randnum($finish,$start=0) {
- return mt_rand($start,$finish);
- }
-
- /**
- ** Returns a mix of random text and random wiki syntax.
- */
- static private function randstring() {
- $thestring = "";
-
- for ($i=0; $i<40; $i++) {
- $what = wikiFuzz::randnum(1);
-
- if ($what == 0) { // include some random wiki syntax
- $which = wikiFuzz::randnum(count(wikiFuzz::$ext) - 1);
- $thestring .= wikiFuzz::$ext[$which];
- }
- else { // include some random text
- $char = INCLUDE_BINARY
- // Decimal version:
- // "&#" . wikiFuzz::randnum(255) . ";"
- // Hex version:
- ? "&#x" . str_pad(dechex(wikiFuzz::randnum(255)), wikiFuzz::randnum(2, 7), "0", STR_PAD_LEFT) . ";"
- // A truly binary version:
- // ? chr(wikiFuzz::randnum(0,255))
- : chr(wikiFuzz::randnum(126,32));
-
- $length = wikiFuzz::randnum(8);
- $thestring .= str_repeat ($char, $length);
- }
- }
- return $thestring;
- }
-
- /**
- ** Returns either random text, or random wiki syntax, or random data from "ints",
- ** or random data from "other".
- */
- static private function makestring() {
- $what = wikiFuzz::randnum(2);
- if ($what == 0) {
- return wikiFuzz::randstring();
- }
- elseif ($what == 1) {
- return wikiFuzz::$ints[wikiFuzz::randnum(count(wikiFuzz::$ints) - 1)];
- }
- else {
- return wikiFuzz::$other[wikiFuzz::randnum(count(wikiFuzz::$other) - 1)];
- }
- }
-
-
- /**
- ** Strips out the stuff that Mediawiki balks at in a page's title.
- ** Implementation copied/pasted from cleanupTable.inc & cleanupImages.php
- */
- static public function makeTitleSafe($str) {
- $legalTitleChars = " %!\"$&'()*,\\-.\\/0-9:;=?@A-Z\\\\^_`a-z~\\x80-\\xFF";
- return preg_replace_callback(
- "/([^$legalTitleChars])/",
- create_function(
- // single quotes are essential here,
- // or alternatively escape all $ as \$
- '$matches',
- 'return sprintf( "\\x%02x", ord( $matches[1] ) );'
- ),
- $str );
- }
-
- /**
- ** Returns a string of fuzz text.
- */
- static private function loop() {
- switch ( wikiFuzz::randnum(3) ) {
- case 1: // an opening tag, with parameters.
- $string = "";
- $i = wikiFuzz::randnum(count(wikiFuzz::$types) - 1);
- $t = wikiFuzz::$types[$i];
- $arr = wikiFuzz::$data[$t];
- $string .= "<" . $t . " ";
- $num_params = min(wikiFuzz::$maxparams, count($arr));
- for ($z=0; $z<$num_params; $z++) {
- $badparam = $arr[wikiFuzz::randnum(count($arr) - 1)];
- $badstring = wikiFuzz::makestring();
- $string .= $badparam . "=" . wikiFuzz::getRandQuote() . $badstring . wikiFuzz::getRandQuote() . " ";
- }
- $string .= ">\n";
- return $string;
- case 2: // a closing tag.
- $i = wikiFuzz::randnum(count(wikiFuzz::$types) - 1);
- return "</". wikiFuzz::$types[$i] . ">";
- case 3: // a random string, between tags.
- return wikiFuzz::makeString();
- }
- return ""; // catch-all, should never be called.
- }
-
- /**
- ** Returns one of the three styles of random quote: ', ", and nothing.
- */
- static private function getRandQuote() {
- switch ( wikiFuzz::randnum(3) ) {
- case 1 : return "'";
- case 2 : return "\"";
- default: return "";
- }
- }
-
- /**
- ** Returns fuzz text, with the parameter indicating approximately how many lines of text you want.
- */
- static public function makeFuzz($maxtypes = 2) {
- $page = "";
- for ($k=0; $k<$maxtypes; $k++) {
- $page .= wikiFuzz::loop();
- }
- return $page;
- }
-}
-
-
-//////// MEDIAWIKI PAGES TO TEST, AND HOW TO TEST THEM ///////
+ // Only some HTML tags are understood with params by MediaWiki; the rest are ignored.
+ // List the tags that accept params below, as well as what those params are.
+ public static $data = array(
+ "B" => array( "CLASS", "ID", "STYLE", "lang", "dir", "title" ),
+ "CAPTION" => array( "CLASS", "ID", "STYLE", "align", "lang", "dir", "title" ),
+ "CENTER" => array( "CLASS", "STYLE", "ID", "lang", "dir", "title" ),
+ "DIV" => array( "CLASS", "STYLE", "ID", "align", "lang", "dir", "title" ),
+ "FONT" => array( "CLASS", "STYLE", "ID", "lang", "dir", "title", "face", "size", "color" ),
+ "H1" => array( "STYLE", "CLASS", "ID", "align", "lang", "dir", "title" ),
+ "H2" => array( "STYLE", "CLASS", "ID", "align", "lang", "dir", "title" ),
+ "HR" => array( "STYLE", "CLASS", "ID", "WIDTH", "lang", "dir", "title", "size", "noshade" ),
+ "LI" => array( "CLASS", "ID", "STYLE", "lang", "dir", "title", "type", "value" ),
+ "TABLE" => array( "STYLE", "CLASS", "ID", "BGCOLOR", "WIDTH", "ALIGN", "BORDER", "CELLPADDING",
+ "CELLSPACING", "lang", "dir", "title", "summary", "frame", "rules" ),
+ "TD" => array( "STYLE", "CLASS", "ID", "BGCOLOR", "WIDTH", "ALIGN", "COLSPAN", "ROWSPAN",
+ "VALIGN", "abbr", "axis", "headers", "scope", "nowrap", "height", "lang",
+ "dir", "title", "char", "charoff" ),
+ "TH" => array( "STYLE", "CLASS", "ID", "BGCOLOR", "WIDTH", "ALIGN", "COLSPAN", "ROWSPAN",
+ "VALIGN", "abbr", "axis", "headers", "scope", "nowrap", "height", "lang",
+ "dir", "title", "char", "charoff" ),
+ "TR" => array( "CLASS", "STYLE", "ID", "BGCOLOR", "ALIGN", "VALIGN", "lang", "dir", "title", "char", "charoff" ),
+ "UL" => array( "CLASS", "STYLE", "ID", "lang", "dir", "title", "type" ),
+ "P" => array( "style", "class", "id", "align", "lang", "dir", "title" ),
+ "blockquote" => array( "CLASS", "ID", "STYLE", "lang", "dir", "title", "cite" ),
+ "span" => array( "CLASS", "ID", "STYLE", "align", "lang", "dir", "title" ),
+ "code" => array( "CLASS", "ID", "STYLE", "lang", "dir", "title" ),
+ "tt" => array( "CLASS", "ID", "STYLE", "lang", "dir", "title" ),
+ "small" => array( "CLASS", "ID", "STYLE", "lang", "dir", "title" ),
+ "big" => array( "CLASS", "ID", "STYLE", "lang", "dir", "title" ),
+ "s" => array( "CLASS", "ID", "STYLE", "lang", "dir", "title" ),
+ "u" => array( "CLASS", "ID", "STYLE", "lang", "dir", "title" ),
+ "del" => array( "CLASS", "ID", "STYLE", "lang", "dir", "title", "datetime", "cite" ),
+ "ins" => array( "CLASS", "ID", "STYLE", "lang", "dir", "title", "datetime", "cite" ),
+ "sub" => array( "CLASS", "ID", "STYLE", "lang", "dir", "title" ),
+ "sup" => array( "CLASS", "ID", "STYLE", "lang", "dir", "title" ),
+ "ol" => array( "CLASS", "ID", "STYLE", "lang", "dir", "title", "type", "start" ),
+ "br" => array( "CLASS", "ID", "STYLE", "title", "clear" ),
+ "cite" => array( "CLASS", "ID", "STYLE", "lang", "dir", "title" ),
+ "var" => array( "CLASS", "ID", "STYLE", "lang", "dir", "title" ),
+ "dl" => array( "CLASS", "ID", "STYLE", "lang", "dir", "title" ),
+ "ruby" => array( "CLASS", "ID", "STYLE", "lang", "dir", "title" ),
+ "rt" => array( "CLASS", "ID", "STYLE", "lang", "dir", "title" ),
+ "rp" => array( "CLASS", "ID", "STYLE", "lang", "dir", "title" ),
+ "dt" => array( "CLASS", "ID", "STYLE", "lang", "dir", "title" ),
+ "dl" => array( "CLASS", "ID", "STYLE", "lang", "dir", "title" ),
+ "em" => array( "CLASS", "ID", "STYLE", "lang", "dir", "title" ),
+ "strong" => array( "CLASS", "ID", "STYLE", "lang", "dir", "title" ),
+ "i" => array( "CLASS", "ID", "STYLE", "lang", "dir", "title" ),
+ "thead" => array( "CLASS", "ID", "STYLE", "lang", "dir", "title", 'align', 'char', 'charoff', 'valign' ),
+ "tfoot" => array( "CLASS", "ID", "STYLE", "lang", "dir", "title", 'align', 'char', 'charoff', 'valign' ),
+ "tbody" => array( "CLASS", "ID", "STYLE", "lang", "dir", "title", 'align', 'char', 'charoff', 'valign' ),
+ "colgroup" => array( "CLASS", "ID", "STYLE", "lang", "dir", "title", 'align', 'char', 'charoff', 'valign', 'span', 'width' ),
+ "col" => array( "CLASS", "ID", "STYLE", "lang", "dir", "title", 'align', 'char', 'charoff', 'valign', 'span', 'width' ),
+ "pre" => array( "CLASS", "ID", "STYLE", "lang", "dir", "title", "width" ),
+
+ // extension tags that accept parameters:
+ "sort" => array( "order", "class" ),
+ "ref" => array( "name" ),
+ "categorytree" => array( "hideroot", "mode", "style" ),
+ "chemform" => array( "link", "wikilink", "query" ),
+ "section" => array( "begin", "new" ),
+
+ // older MW transclusion.
+ "transclude" => array( "page" ),
+ );
+
+ // The types of HTML tags that we will be testing are defined above.
+ // Note: this needs to be initialized later to be equal to: array_keys(wikiFuzz::$data);
+ // as such, it also needs to be publicly modifiable.
+ public static $types;
+
+
+ // Some attribute values.
+ static private $other = array( "&", "=", ":", "?", "\"", "\n", "%n%n%n%n%n%n%n%n%n%n%n%n", "\\" );
+ static private $ints = array(
+ // various numbers
+ "0", "-1", "127", "-7897", "89000", "808080", "90928345",
+ "0xfffffff", "ffff",
+
+ // Different ways of saying: '
+ "&#0000039;", // Long UTF-8 Unicode encoding
+ "&#39;", // dec version.
+ "&#x27;", // hex version.
+ "&#xA7;", // malformed hex variant, MSB not zero.
+
+ // Different ways of saying: "
+ "&#0000034;", // Long UTF-8 Unicode encoding
+ "&#34;",
+ "&#x22;", // hex version.
+ "&#xA2;", // malformed hex variant, MSB not zero.
+
+ // Different ways of saying: <
+ "<",
+ "&#0000060", // Long UTF-8 Unicode encoding without semicolon (Mediawiki wants the colon)
+ "&#0000060;", // Long UTF-8 Unicode encoding with semicolon
+ "&#60;",
+ "&#x3C;", // hex version.
+ "&#xBC;", // malformed hex variant, MSB not zero.
+ "&#x0003C;", // mid-length hex version
+ "&#X00003C;", // slightly longer hex version, with capital "X"
+
+ // Different ways of saying: >
+ ">",
+ "&#0000062;", // Long UTF-8 Unicode encoding
+ "&#62;",
+ "&#x3E;", // hex version.
+ "&#xBE;", // malformed variant, MSB not zero.
+
+ // Different ways of saying: [
+ "&#0000091;", // Long UTF-8 Unicode encoding
+ "&#91;",
+ "&#x5B;", // hex version.
+
+ // Different ways of saying: {{
+ "&#0000123;&#0000123;", // Long UTF-8 Unicode encoding
+ "&#123;&#123;",
+ "&#x7B;&#x7B;", // hex version.
+
+ // Different ways of saying: |
+ "&#0000124;", // Long UTF-8 Unicode encoding
+ "&#124;",
+ "&#x7C;", // hex version.
+ "&#xFC;", // malformed hex variant, MSB not zero.
+
+ // a "lignature" - http://www.robinlionheart.com/stds/html4/spchars#ligature
+ // &#8204; == &zwnj;
+ "&#8204;"
+ );
+
+ // Defines various wiki-related bits of syntax that can potentially cause
+ // MediaWiki to do something other than just print that literal text.
+ static private $ext = array(
+ // links, templates, parameters.
+ "[[", "]]", "{{", "}}", "|", "[", "]", "{{{", "}}}", "|]]",
+
+ // wiki tables.
+ "\n{|", "\n|}",
+ "!",
+ "\n!",
+ "!!",
+ "||",
+ "\n|-", "| ", "\n|",
+
+ // section headings.
+ "=", "==", "===", "====", "=====", "======",
+
+ // lists (ordered and unordered) and indentation.
+ "\n*", "*", "\n:", ":",
+ "\n#", "#",
+
+ // definition lists (dl, dt, dd), newline, and newline with pre, and a tab.
+ "\n;", ";", "\n ",
+
+ // Whitespace: newline, tab, space.
+ "\n", "\t", " ",
+
+ // Some XSS attack vectors from http://ha.ckers.org/xss.html
+ "&#x09;", // tab
+ "&#x0A;", // newline
+ "&#x0D;", // carriage return
+ "\0", // null character
+ " &#14; ", // spaces and meta characters
+ "'';!--\"<XSS>=&{()}", // compact injection of XSS & SQL tester
+
+ // various NULL fields
+ "%00",
+ "&#00;",
+ "\0",
+
+ // horizontal rule.
+ "-----", "\n-----",
+
+ // signature, redirect, bold, italics.
+ "~~~~", "#REDIRECT [[", "'''", "''",
+
+ // comments.
+ "<!--", "-->",
+
+ // quotes.
+ "\"", "'",
+
+ // tag start and tag end.
+ "<", ">",
+
+ // implicit link creation on URIs.
+ "http://",
+ "https://",
+ "ftp://",
+ "irc://",
+ "news:",
+ 'gopher://',
+ 'telnet://',
+ 'nntp://',
+ 'worldwind://',
+ 'mailto:',
+
+ // images.
+ "[[image:",
+ ".gif",
+ ".png",
+ ".jpg",
+ ".jpeg",
+ 'thumbnail=',
+ 'thumbnail',
+ 'thumb=',
+ 'thumb',
+ 'right',
+ 'none',
+ 'left',
+ 'framed',
+ 'frame',
+ 'enframed',
+ 'centre',
+ 'center',
+ "Image:",
+ "[[:Image",
+ 'px',
+ 'upright=',
+ 'border',
+
+ // misc stuff to throw at the Parser.
+ '%08X',
+ '/',
+ ":x{|",
+ "\n|+",
+ "<noinclude>",
+ "</noinclude>",
+ " \302\273",
+ " :",
+ " !",
+ " ;",
+ "\302\253",
+ "[[category:",
+ "?=",
+ "(",
+ ")",
+ "]]]",
+ "../",
+ "{{{{",
+ "}}}}",
+ "[[Special:",
+ "<includeonly>",
+ "</includeonly>",
+ "<!--MWTEMPLATESECTION=",
+ '<!--MWTOC-->',
+
+ // implicit link creation on booknum, RFC, and PubMed ID usage (both with and without IDs)
+ "ISBN 2",
+ "RFC 000",
+ "PMID 000",
+ "ISBN ",
+ "RFC ",
+ "PMID ",
+
+ // magic words:
+ '__NOTOC__',
+ '__FORCETOC__',
+ '__NOEDITSECTION__',
+ '__START__',
+ '__NOTITLECONVERT__',
+ '__NOCONTENTCONVERT__',
+ '__END__',
+ '__TOC__',
+ '__NOTC__',
+ '__NOCC__',
+ "__FORCETOC__",
+ "__NEWSECTIONLINK__",
+ "__NOGALLERY__",
+
+ // more magic words / internal templates.
+ '{{PAGENAME}}',
+ '{{PAGENAMEE}}',
+ '{{NAMESPACE}}',
+ "{{MSG:",
+ "}}",
+ "{{MSGNW:",
+ "}}",
+ "{{INT:",
+ "}}",
+ '{{SITENAME}}',
+ "{{NS:",
+ "}}",
+ "{{LOCALURL:",
+ "}}",
+ "{{LOCALURLE:",
+ "}}",
+ "{{SCRIPTPATH}}",
+ "{{GRAMMAR:gentiv|",
+ "}}",
+ "{{REVISIONID}}",
+ "{{SUBPAGENAME}}",
+ "{{SUBPAGENAMEE}}",
+ "{{ns:0}}",
+ "{{fullurle:",
+ "}}",
+ "{{subst::",
+ "}}",
+ "{{UCFIRST:",
+ "}}",
+ "{{UC:",
+ '{{SERVERNAME}}',
+ '{{SERVER}}',
+ "{{RAW:",
+ "}}",
+ "{{PLURAL:",
+ "}}",
+ "{{LCFIRST:",
+ "}}",
+ "{{LC:",
+ "}}",
+ '{{CURRENTWEEK}}',
+ '{{CURRENTDOW}}',
+ "{{INT:{{LC:contribs-showhideminor}}|",
+ "}}",
+ "{{INT:googlesearch|",
+ "}}",
+ "{{BASEPAGENAME}}",
+ "{{CONTENTLANGUAGE}}",
+ "{{PAGESINNAMESPACE:}}",
+ "{{#language:",
+ "}}",
+ "{{#special:",
+ "}}",
+ "{{#special:emailuser",
+ "}}",
+
+ // Some raw link for magic words.
+ "{{NUMBEROFPAGES:R",
+ "}}",
+ "{{NUMBEROFUSERS:R",
+ "}}",
+ "{{NUMBEROFARTICLES:R",
+ "}}",
+ "{{NUMBEROFFILES:R",
+ "}}",
+ "{{NUMBEROFADMINS:R",
+ "}}",
+ "{{padleft:",
+ "}}",
+ "{{padright:",
+ "}}",
+ "{{DEFAULTSORT:",
+ "}}",
+
+ // internal Math "extension":
+ "<math>",
+ "</math>",
+
+ // Parser extension functions:
+ "{{#expr:",
+ "{{#if:",
+ "{{#ifeq:",
+ "{{#ifexist:",
+ "{{#ifexpr:",
+ "{{#switch:",
+ "{{#time:",
+ "}}",
+
+ // references table for the Cite extension.
+ "<references/>",
+
+ // Internal Parser tokens - try inserting some of these.
+ "UNIQ25f46b0524f13e67NOPARSE",
+ "UNIQ17197916557e7cd6-HTMLCommentStrip46238afc3bb0cf5f00000002",
+ "\x07UNIQ17197916557e7cd6-HTMLCommentStrip46238afc3bb0cf5f00000002-QINU",
+
+ // Inputbox extension:
+ "<inputbox>\ntype=search\nsearchbuttonlabel=\n",
+ "</inputbox>",
+
+ // charInsert extension:
+ "<charInsert>",
+ "</charInsert>",
+
+ // wikiHiero extension:
+ "<hiero>",
+ "</hiero>",
+
+ // Image gallery:
+ "<gallery>",
+ "</gallery>",
+
+ // FixedImage extension.
+ "<fundraising/>",
+
+ // Timeline extension: currently untested.
+
+ // Nowiki:
+ "<nOwIkI>",
+ "</nowiki>",
+
+ // an external image to test the external image displaying code
+ "http://debian.org/Pics/debian.png",
+
+ // LabeledSectionTransclusion extension.
+ "{{#lstx:",
+ "}}",
+ "{{#lst:",
+ "}}",
+ "{{#lst:Main Page|",
+ "}}"
+ );
+
+ /**
+ ** Randomly returns one element of the input array.
+ */
+ static public function chooseInput( array $input ) {
+ $randindex = wikiFuzz::randnum( count( $input ) - 1 );
+ return $input[$randindex];
+ }
+
+ // Max number of parameters for HTML attributes.
+ static private $maxparams = 10;
+
+ /**
+ ** Returns a random number between $start and $finish (inclusive).
+ */
+ static public function randnum( $finish, $start = 0 ) {
+ return mt_rand( $start, $finish );
+ }
+
+ /**
+ ** Returns a mix of random text and random wiki syntax.
+ */
+ static private function randstring() {
+ $thestring = "";
+
+ for ( $i = 0; $i < 40; $i++ ) {
+ $what = wikiFuzz::randnum( 1 );
+
+ if ( $what == 0 ) { // include some random wiki syntax
+ $which = wikiFuzz::randnum( count( wikiFuzz::$ext ) - 1 );
+ $thestring .= wikiFuzz::$ext[$which];
+ }
+ else { // include some random text
+ $char = INCLUDE_BINARY
+ // Decimal version:
+ // "&#" . wikiFuzz::randnum(255) . ";"
+ // Hex version:
+ ? "&#x" . str_pad( dechex( wikiFuzz::randnum( 255 ) ), wikiFuzz::randnum( 2, 7 ), "0", STR_PAD_LEFT ) . ";"
+ // A truly binary version:
+ // ? chr(wikiFuzz::randnum(0,255))
+ : chr( wikiFuzz::randnum( 126, 32 ) );
+
+ $length = wikiFuzz::randnum( 8 );
+ $thestring .= str_repeat ( $char, $length );
+ }
+ }
+ return $thestring;
+ }
+
+ /**
+ ** Returns either random text, or random wiki syntax, or random data from "ints",
+ ** or random data from "other".
+ */
+ static private function makestring() {
+ $what = wikiFuzz::randnum( 2 );
+ if ( $what == 0 ) {
+ return wikiFuzz::randstring();
+ }
+ elseif ( $what == 1 ) {
+ return wikiFuzz::$ints[wikiFuzz::randnum( count( wikiFuzz::$ints ) - 1 )];
+ }
+ else {
+ return wikiFuzz::$other[wikiFuzz::randnum( count( wikiFuzz::$other ) - 1 )];
+ }
+ }
+
+
+ /**
+ ** Strips out the stuff that Mediawiki balks at in a page's title.
+ ** Implementation copied/pasted from cleanupTable.inc & cleanupImages.php
+ */
+ static public function makeTitleSafe( $str ) {
+ $legalTitleChars = " %!\"$&'()*,\\-.\\/0-9:;=?@A-Z\\\\^_`a-z~\\x80-\\xFF";
+ return preg_replace_callback(
+ "/([^$legalTitleChars])/",
+ create_function(
+ // single quotes are essential here,
+ // or alternatively escape all $ as \$
+ '$matches',
+ 'return sprintf( "\\x%02x", ord( $matches[1] ) );'
+ ),
+ $str );
+ }
+
+ /**
+ ** Returns a string of fuzz text.
+ */
+ static private function loop() {
+ switch ( wikiFuzz::randnum( 3 ) ) {
+ case 1: // an opening tag, with parameters.
+ $string = "";
+ $i = wikiFuzz::randnum( count( wikiFuzz::$types ) - 1 );
+ $t = wikiFuzz::$types[$i];
+ $arr = wikiFuzz::$data[$t];
+ $string .= "<" . $t . " ";
+ $num_params = min( wikiFuzz::$maxparams, count( $arr ) );
+ for ( $z = 0; $z < $num_params; $z++ ) {
+ $badparam = $arr[wikiFuzz::randnum( count( $arr ) - 1 )];
+ $badstring = wikiFuzz::makestring();
+ $string .= $badparam . "=" . wikiFuzz::getRandQuote() . $badstring . wikiFuzz::getRandQuote() . " ";
+ }
+ $string .= ">\n";
+ return $string;
+ case 2: // a closing tag.
+ $i = wikiFuzz::randnum( count( wikiFuzz::$types ) - 1 );
+ return "</" . wikiFuzz::$types[$i] . ">";
+ case 3: // a random string, between tags.
+ return wikiFuzz::makeString();
+ }
+ return ""; // catch-all, should never be called.
+ }
+
+ /**
+ ** Returns one of the three styles of random quote: ', ", and nothing.
+ */
+ static private function getRandQuote() {
+ switch ( wikiFuzz::randnum( 3 ) ) {
+ case 1 : return "'";
+ case 2 : return "\"";
+ default: return "";
+ }
+ }
+
+ /**
+ ** Returns fuzz text, with the parameter indicating approximately how many lines of text you want.
+ */
+ static public function makeFuzz( $maxtypes = 2 ) {
+ $page = "";
+ for ( $k = 0; $k < $maxtypes; $k++ ) {
+ $page .= wikiFuzz::loop();
+ }
+ return $page;
+ }
+}
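Everything in the class above is self-contained except for the $types member, which (as its own comment notes) must be filled in from array_keys( wikiFuzz::$data ) before any fuzz is generated; the script presumably does that further down, outside this hunk. Purely as an illustrative sketch, and assuming the configuration constants defined earlier (notably INCLUDE_BINARY) are already in place, the generator could be exercised on its own like this:

	// Illustrative sketch only -- not part of the patch.
	// Mirrors the initialisation the class comment says must happen later.
	wikiFuzz::$types = array_keys( wikiFuzz::$data );
	for ( $i = 0; $i < 3; $i++ ) {
		// Each call returns roughly ten lines of mixed wiki syntax, HTML tags and junk text.
		print wikiFuzz::makeFuzz( 10 ) . "\n----\n";
	}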
+
+
+// ////// MEDIAWIKI PAGES TO TEST, AND HOW TO TEST THEM ///////
/**
** A page test has just these things:
@@ -875,30 +876,30 @@ class wikiFuzz {
** 2) the URL we are going to test those parameters on.
** 3) Any cookies required for the test.
** 4) Whether Tidy should validate the page. Defaults to true, but can be turned off.
- ** Declared abstract because it should be extended by a class
+ ** Declared abstract because it should be extended by a class
** that supplies these parameters.
*/
abstract class pageTest {
- protected $params;
- protected $pagePath;
- protected $cookie = "";
- protected $tidyValidate = true;
+ protected $params;
+ protected $pagePath;
+ protected $cookie = "";
+ protected $tidyValidate = true;
+
+ public function getParams() {
+ return $this->params;
+ }
- public function getParams() {
- return $this->params;
- }
+ public function getPagePath() {
+ return $this->pagePath;
+ }
- public function getPagePath() {
- return $this->pagePath;
- }
+ public function getCookie() {
+ return $this->cookie;
+ }
- public function getCookie() {
- return $this->cookie;
- }
-
- public function tidyValidate() {
- return $this->tidyValidate;
- }
+ public function tidyValidate() {
+ return $this->tidyValidate;
+ }
}
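The four accessors above are, presumably, all the test runner needs from a test: a page path to append to WIKI_BASE_URL, the POST parameters, an optional cookie header, and a flag saying whether the returned HTML should be checked with Tidy. The submission code itself is not part of this hunk, so the following is only a guessed-at sketch of that contract using PHP's curl extension; runPageTest() and validateWithTidy() are invented names, not functions from the patch.

	// Hypothetical consumer of a pageTest, for illustration only.
	function runPageTest( pageTest $test ) {
		$ch = curl_init( WIKI_BASE_URL . $test->getPagePath() );
		curl_setopt( $ch, CURLOPT_POST, true );
		curl_setopt( $ch, CURLOPT_POSTFIELDS, http_build_query( $test->getParams() ) );
		if ( $test->getCookie() != "" ) {
			curl_setopt( $ch, CURLOPT_COOKIE, $test->getCookie() );
		}
		curl_setopt( $ch, CURLOPT_RETURNTRANSFER, true );
		$html = curl_exec( $ch );
		curl_close( $ch );
		// validateWithTidy() is a stand-in for whatever Tidy check the real script performs;
		// tests such as specialExportTest turn validation off because they do not emit HTML.
		return $test->tidyValidate() ? validateWithTidy( $html ) : true;
	}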
@@ -906,31 +907,31 @@ abstract class pageTest {
** a page test for the "Edit" page. Tests Parser.php and Sanitizer.php.
*/
class editPageTest extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=WIKIFUZZ";
-
- $this->params = array (
- "action" => "submit",
- "wpMinoredit" => wikiFuzz::makeFuzz(2),
- "wpPreview" => wikiFuzz::makeFuzz(2),
- "wpSection" => wikiFuzz::makeFuzz(2),
- "wpEdittime" => wikiFuzz::makeFuzz(2),
- "wpSummary" => wikiFuzz::makeFuzz(2),
- "wpScrolltop" => wikiFuzz::makeFuzz(2),
- "wpStarttime" => wikiFuzz::makeFuzz(2),
- "wpAutoSummary" => wikiFuzz::makeFuzz(2),
- "wpTextbox1" => wikiFuzz::makeFuzz(40) // the main wiki text, need lots of this.
- );
-
- // sometimes we don't want to specify certain parameters.
- if (wikiFuzz::randnum(6) == 0) unset($this->params["wpSection"]);
- if (wikiFuzz::randnum(6) == 0) unset($this->params["wpEdittime"]);
- if (wikiFuzz::randnum(6) == 0) unset($this->params["wpSummary"]);
- if (wikiFuzz::randnum(6) == 0) unset($this->params["wpScrolltop"]);
- if (wikiFuzz::randnum(6) == 0) unset($this->params["wpStarttime"]);
- if (wikiFuzz::randnum(6) == 0) unset($this->params["wpAutoSummary"]);
- if (wikiFuzz::randnum(6) == 0) unset($this->params["wpTextbox1"]);
- }
+ function __construct() {
+ $this->pagePath = "index.php?title=WIKIFUZZ";
+
+ $this->params = array (
+ "action" => "submit",
+ "wpMinoredit" => wikiFuzz::makeFuzz( 2 ),
+ "wpPreview" => wikiFuzz::makeFuzz( 2 ),
+ "wpSection" => wikiFuzz::makeFuzz( 2 ),
+ "wpEdittime" => wikiFuzz::makeFuzz( 2 ),
+ "wpSummary" => wikiFuzz::makeFuzz( 2 ),
+ "wpScrolltop" => wikiFuzz::makeFuzz( 2 ),
+ "wpStarttime" => wikiFuzz::makeFuzz( 2 ),
+ "wpAutoSummary" => wikiFuzz::makeFuzz( 2 ),
+ "wpTextbox1" => wikiFuzz::makeFuzz( 40 ) // the main wiki text, need lots of this.
+ );
+
+ // sometimes we don't want to specify certain parameters.
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["wpSection"] );
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["wpEdittime"] );
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["wpSummary"] );
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["wpScrolltop"] );
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["wpStarttime"] );
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["wpAutoSummary"] );
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["wpTextbox1"] );
+ }
}
@@ -938,18 +939,18 @@ class editPageTest extends pageTest {
** a page test for "Special:Listusers".
*/
class listusersTest extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Listusers";
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Listusers";
- $this->params = array (
- "title" => wikiFuzz::makeFuzz(2),
- "group" => wikiFuzz::makeFuzz(2),
- "username" => wikiFuzz::makeFuzz(2),
- "Go" => wikiFuzz::makeFuzz(2),
- "limit" => wikiFuzz::chooseInput( array("0", "-1", "---'----------0", "+1", "8134", wikiFuzz::makeFuzz(2)) ),
- "offset" => wikiFuzz::chooseInput( array("0", "-1", "--------'-----0", "+1", "81343242346234234", wikiFuzz::makeFuzz(2)) )
- );
- }
+ $this->params = array (
+ "title" => wikiFuzz::makeFuzz( 2 ),
+ "group" => wikiFuzz::makeFuzz( 2 ),
+ "username" => wikiFuzz::makeFuzz( 2 ),
+ "Go" => wikiFuzz::makeFuzz( 2 ),
+ "limit" => wikiFuzz::chooseInput( array( "0", "-1", "---'----------0", "+1", "8134", wikiFuzz::makeFuzz( 2 ) ) ),
+ "offset" => wikiFuzz::chooseInput( array( "0", "-1", "--------'-----0", "+1", "81343242346234234", wikiFuzz::makeFuzz( 2 ) ) )
+ );
+ }
}
@@ -957,34 +958,34 @@ class listusersTest extends pageTest {
** a page test for "Special:Search".
*/
class searchTest extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Search";
-
- $this->params = array (
- "action" => "index.php?title=Special:Search",
- "ns0" => wikiFuzz::makeFuzz(2),
- "ns1" => wikiFuzz::makeFuzz(2),
- "ns2" => wikiFuzz::makeFuzz(2),
- "ns3" => wikiFuzz::makeFuzz(2),
- "ns4" => wikiFuzz::makeFuzz(2),
- "ns5" => wikiFuzz::makeFuzz(2),
- "ns6" => wikiFuzz::makeFuzz(2),
- "ns7" => wikiFuzz::makeFuzz(2),
- "ns8" => wikiFuzz::makeFuzz(2),
- "ns9" => wikiFuzz::makeFuzz(2),
- "ns10" => wikiFuzz::makeFuzz(2),
- "ns11" => wikiFuzz::makeFuzz(2),
- "ns12" => wikiFuzz::makeFuzz(2),
- "ns13" => wikiFuzz::makeFuzz(2),
- "ns14" => wikiFuzz::makeFuzz(2),
- "ns15" => wikiFuzz::makeFuzz(2),
- "redirs" => wikiFuzz::makeFuzz(2),
- "search" => wikiFuzz::makeFuzz(2),
- "offset" => wikiFuzz::chooseInput( array("", "0", "-1", "--------'-----0", "+1", "81343242346234234", wikiFuzz::makeFuzz(2)) ),
- "fulltext" => wikiFuzz::chooseInput( array("", "0", "1", "--------'-----0", "+1", wikiFuzz::makeFuzz(2)) ),
- "searchx" => wikiFuzz::chooseInput( array("", "0", "1", "--------'-----0", "+1", wikiFuzz::makeFuzz(2)) )
- );
- }
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Search";
+
+ $this->params = array (
+ "action" => "index.php?title=Special:Search",
+ "ns0" => wikiFuzz::makeFuzz( 2 ),
+ "ns1" => wikiFuzz::makeFuzz( 2 ),
+ "ns2" => wikiFuzz::makeFuzz( 2 ),
+ "ns3" => wikiFuzz::makeFuzz( 2 ),
+ "ns4" => wikiFuzz::makeFuzz( 2 ),
+ "ns5" => wikiFuzz::makeFuzz( 2 ),
+ "ns6" => wikiFuzz::makeFuzz( 2 ),
+ "ns7" => wikiFuzz::makeFuzz( 2 ),
+ "ns8" => wikiFuzz::makeFuzz( 2 ),
+ "ns9" => wikiFuzz::makeFuzz( 2 ),
+ "ns10" => wikiFuzz::makeFuzz( 2 ),
+ "ns11" => wikiFuzz::makeFuzz( 2 ),
+ "ns12" => wikiFuzz::makeFuzz( 2 ),
+ "ns13" => wikiFuzz::makeFuzz( 2 ),
+ "ns14" => wikiFuzz::makeFuzz( 2 ),
+ "ns15" => wikiFuzz::makeFuzz( 2 ),
+ "redirs" => wikiFuzz::makeFuzz( 2 ),
+ "search" => wikiFuzz::makeFuzz( 2 ),
+ "offset" => wikiFuzz::chooseInput( array( "", "0", "-1", "--------'-----0", "+1", "81343242346234234", wikiFuzz::makeFuzz( 2 ) ) ),
+ "fulltext" => wikiFuzz::chooseInput( array( "", "0", "1", "--------'-----0", "+1", wikiFuzz::makeFuzz( 2 ) ) ),
+ "searchx" => wikiFuzz::chooseInput( array( "", "0", "1", "--------'-----0", "+1", wikiFuzz::makeFuzz( 2 ) ) )
+ );
+ }
}
@@ -992,28 +993,28 @@ class searchTest extends pageTest {
** a page test for "Special:Recentchanges".
*/
class recentchangesTest extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Recentchanges";
-
- $this->params = array (
- "action" => wikiFuzz::makeFuzz(2),
- "title" => wikiFuzz::makeFuzz(2),
- "namespace" => wikiFuzz::chooseInput( range(-1, 15) ),
- "Go" => wikiFuzz::makeFuzz(2),
- "invert" => wikiFuzz::chooseInput( array("-1", "---'----------0", "+1", "8134", wikiFuzz::makeFuzz(2)) ),
- "hideanons" => wikiFuzz::chooseInput( array("-1", "------'-------0", "+1", "8134", wikiFuzz::makeFuzz(2)) ),
- 'limit' => wikiFuzz::chooseInput( array("0", "-1", "---------'----0", "+1", "81340909772349234", wikiFuzz::makeFuzz(2)) ),
- "days" => wikiFuzz::chooseInput( array("-1", "----------'---0", "+1", "8134", wikiFuzz::makeFuzz(2)) ),
- "hideminor" => wikiFuzz::chooseInput( array("-1", "-----------'--0", "+1", "8134", wikiFuzz::makeFuzz(2)) ),
- "hidebots" => wikiFuzz::chooseInput( array("-1", "---------'----0", "+1", "8134", wikiFuzz::makeFuzz(2)) ),
- "hideliu" => wikiFuzz::chooseInput( array("-1", "-------'------0", "+1", "8134", wikiFuzz::makeFuzz(2)) ),
- "hidepatrolled" => wikiFuzz::chooseInput( array("-1", "-----'--------0", "+1", "8134", wikiFuzz::makeFuzz(2)) ),
- "hidemyself" => wikiFuzz::chooseInput( array("-1", "--'-----------0", "+1", "8134", wikiFuzz::makeFuzz(2)) ),
- 'categories_any'=> wikiFuzz::chooseInput( array("-1", "--'-----------0", "+1", "8134", wikiFuzz::makeFuzz(2)) ),
- 'categories' => wikiFuzz::chooseInput( array("-1", "--'-----------0", "+1", "8134", wikiFuzz::makeFuzz(2)) ),
- 'feed' => wikiFuzz::chooseInput( array("-1", "--'-----------0", "+1", "8134", wikiFuzz::makeFuzz(2)) )
- );
- }
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Recentchanges";
+
+ $this->params = array (
+ "action" => wikiFuzz::makeFuzz( 2 ),
+ "title" => wikiFuzz::makeFuzz( 2 ),
+ "namespace" => wikiFuzz::chooseInput( range( -1, 15 ) ),
+ "Go" => wikiFuzz::makeFuzz( 2 ),
+ "invert" => wikiFuzz::chooseInput( array( "-1", "---'----------0", "+1", "8134", wikiFuzz::makeFuzz( 2 ) ) ),
+ "hideanons" => wikiFuzz::chooseInput( array( "-1", "------'-------0", "+1", "8134", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'limit' => wikiFuzz::chooseInput( array( "0", "-1", "---------'----0", "+1", "81340909772349234", wikiFuzz::makeFuzz( 2 ) ) ),
+ "days" => wikiFuzz::chooseInput( array( "-1", "----------'---0", "+1", "8134", wikiFuzz::makeFuzz( 2 ) ) ),
+ "hideminor" => wikiFuzz::chooseInput( array( "-1", "-----------'--0", "+1", "8134", wikiFuzz::makeFuzz( 2 ) ) ),
+ "hidebots" => wikiFuzz::chooseInput( array( "-1", "---------'----0", "+1", "8134", wikiFuzz::makeFuzz( 2 ) ) ),
+ "hideliu" => wikiFuzz::chooseInput( array( "-1", "-------'------0", "+1", "8134", wikiFuzz::makeFuzz( 2 ) ) ),
+ "hidepatrolled" => wikiFuzz::chooseInput( array( "-1", "-----'--------0", "+1", "8134", wikiFuzz::makeFuzz( 2 ) ) ),
+ "hidemyself" => wikiFuzz::chooseInput( array( "-1", "--'-----------0", "+1", "8134", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'categories_any' => wikiFuzz::chooseInput( array( "-1", "--'-----------0", "+1", "8134", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'categories' => wikiFuzz::chooseInput( array( "-1", "--'-----------0", "+1", "8134", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'feed' => wikiFuzz::chooseInput( array( "-1", "--'-----------0", "+1", "8134", wikiFuzz::makeFuzz( 2 ) ) )
+ );
+ }
}
@@ -1021,25 +1022,25 @@ class recentchangesTest extends pageTest {
** a page test for "Special:Prefixindex".
*/
class prefixindexTest extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Prefixindex";
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Prefixindex";
- $this->params = array (
- "title" => "Special:Prefixindex",
- "namespace" => wikiFuzz::randnum(-10,101),
- "Go" => wikiFuzz::makeFuzz(2)
- );
+ $this->params = array (
+ "title" => "Special:Prefixindex",
+ "namespace" => wikiFuzz::randnum( -10, 101 ),
+ "Go" => wikiFuzz::makeFuzz( 2 )
+ );
- // sometimes we want 'prefix', sometimes we want 'from', and sometimes we want nothing.
- if (wikiFuzz::randnum(3) == 0) {
- $this->params["prefix"] = wikiFuzz::chooseInput( array("-1", "-----'--------0", "+++--+1",
- wikiFuzz::randnum(-10,8134), wikiFuzz::makeFuzz(2)) );
- }
- if (wikiFuzz::randnum(3) == 0) {
- $this->params["from"] = wikiFuzz::chooseInput( array("-1", "-----'--------0", "+++--+1",
- wikiFuzz::randnum(-10,8134), wikiFuzz::makeFuzz(2)) );
- }
- }
+ // sometimes we want 'prefix', sometimes we want 'from', and sometimes we want nothing.
+ if ( wikiFuzz::randnum( 3 ) == 0 ) {
+ $this->params["prefix"] = wikiFuzz::chooseInput( array( "-1", "-----'--------0", "+++--+1",
+ wikiFuzz::randnum( -10, 8134 ), wikiFuzz::makeFuzz( 2 ) ) );
+ }
+ if ( wikiFuzz::randnum( 3 ) == 0 ) {
+ $this->params["from"] = wikiFuzz::chooseInput( array( "-1", "-----'--------0", "+++--+1",
+ wikiFuzz::randnum( -10, 8134 ), wikiFuzz::makeFuzz( 2 ) ) );
+ }
+ }
}
@@ -1047,16 +1048,16 @@ class prefixindexTest extends pageTest {
** a page test for "Special:MIMEsearch".
*/
class mimeSearchTest extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:MIMEsearch";
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:MIMEsearch";
- $this->params = array (
- "action" => "index.php?title=Special:MIMEsearch",
- "mime" => wikiFuzz::makeFuzz(3),
- 'limit' => wikiFuzz::chooseInput( array("0", "-1", "-------'------0", "+1", "81342321351235325", wikiFuzz::makeFuzz(2)) ),
- 'offset' => wikiFuzz::chooseInput( array("0", "-1", "-----'--------0", "+1", "81341231235365252234324", wikiFuzz::makeFuzz(2)) )
- );
- }
+ $this->params = array (
+ "action" => "index.php?title=Special:MIMEsearch",
+ "mime" => wikiFuzz::makeFuzz( 3 ),
+ 'limit' => wikiFuzz::chooseInput( array( "0", "-1", "-------'------0", "+1", "81342321351235325", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'offset' => wikiFuzz::chooseInput( array( "0", "-1", "-----'--------0", "+1", "81341231235365252234324", wikiFuzz::makeFuzz( 2 ) ) )
+ );
+ }
}
@@ -1064,19 +1065,19 @@ class mimeSearchTest extends pageTest {
** a page test for "Special:Log".
*/
class specialLogTest extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Log";
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Log";
- $this->params = array (
- "type" => wikiFuzz::chooseInput( array("", wikiFuzz::makeFuzz(2)) ),
- "par" => wikiFuzz::makeFuzz(2),
- "user" => wikiFuzz::makeFuzz(2),
- "page" => wikiFuzz::makeFuzz(2),
- "from" => wikiFuzz::makeFuzz(2),
- "until" => wikiFuzz::makeFuzz(2),
- "title" => wikiFuzz::makeFuzz(2)
- );
- }
+ $this->params = array (
+ "type" => wikiFuzz::chooseInput( array( "", wikiFuzz::makeFuzz( 2 ) ) ),
+ "par" => wikiFuzz::makeFuzz( 2 ),
+ "user" => wikiFuzz::makeFuzz( 2 ),
+ "page" => wikiFuzz::makeFuzz( 2 ),
+ "from" => wikiFuzz::makeFuzz( 2 ),
+ "until" => wikiFuzz::makeFuzz( 2 ),
+ "title" => wikiFuzz::makeFuzz( 2 )
+ );
+ }
}
@@ -1084,18 +1085,18 @@ class specialLogTest extends pageTest {
** a page test for "Special:Userlogin", with a successful login.
*/
class successfulUserLoginTest extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Userlogin&action=submitlogin&type=login&returnto=" . wikiFuzz::makeFuzz(2);
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Userlogin&action=submitlogin&type=login&returnto=" . wikiFuzz::makeFuzz( 2 );
- $this->params = array (
- "wpName" => USER_ON_WIKI,
- // sometimes real password, sometimes not:
- 'wpPassword' => wikiFuzz::chooseInput( array( wikiFuzz::makeFuzz(2), USER_PASSWORD ) ),
- 'wpRemember' => wikiFuzz::makeFuzz(2)
- );
+ $this->params = array (
+ "wpName" => USER_ON_WIKI,
+ // sometimes real password, sometimes not:
+ 'wpPassword' => wikiFuzz::chooseInput( array( wikiFuzz::makeFuzz( 2 ), USER_PASSWORD ) ),
+ 'wpRemember' => wikiFuzz::makeFuzz( 2 )
+ );
- $this->cookie = "wikidb_session=" . wikiFuzz::chooseInput( array("1" , wikiFuzz::makeFuzz(2) ) );
- }
+ $this->cookie = "wikidb_session=" . wikiFuzz::chooseInput( array( "1" , wikiFuzz::makeFuzz( 2 ) ) );
+ }
}
@@ -1103,30 +1104,30 @@ class successfulUserLoginTest extends pageTest {
** a page test for "Special:Userlogin".
*/
class userLoginTest extends pageTest {
- function __construct() {
+ function __construct() {
- $this->pagePath = "index.php?title=Special:Userlogin";
+ $this->pagePath = "index.php?title=Special:Userlogin";
- $this->params = array (
- 'wpRetype' => wikiFuzz::makeFuzz(2),
- 'wpRemember' => wikiFuzz::makeFuzz(2),
- 'wpRealName' => wikiFuzz::makeFuzz(2),
- 'wpPassword' => wikiFuzz::makeFuzz(2),
- 'wpName' => wikiFuzz::makeFuzz(2),
- 'wpMailmypassword'=> wikiFuzz::makeFuzz(2),
- 'wpLoginattempt' => wikiFuzz::makeFuzz(2),
- 'wpEmail' => wikiFuzz::makeFuzz(2),
- 'wpDomain' => wikiFuzz::chooseInput( array("", "local", wikiFuzz::makeFuzz(2)) ),
- 'wpCreateaccountMail' => wikiFuzz::chooseInput( array("", wikiFuzz::makeFuzz(2)) ),
- 'wpCreateaccount' => wikiFuzz::chooseInput( array("", wikiFuzz::makeFuzz(2)) ),
- 'wpCookieCheck' => wikiFuzz::chooseInput( array("", wikiFuzz::makeFuzz(2)) ),
- 'type' => wikiFuzz::chooseInput( array("signup", "login", "", wikiFuzz::makeFuzz(2)) ),
- 'returnto' => wikiFuzz::makeFuzz(2),
- 'action' => wikiFuzz::chooseInput( array("", "submitlogin", wikiFuzz::makeFuzz(2)) )
- );
+ $this->params = array (
+ 'wpRetype' => wikiFuzz::makeFuzz( 2 ),
+ 'wpRemember' => wikiFuzz::makeFuzz( 2 ),
+ 'wpRealName' => wikiFuzz::makeFuzz( 2 ),
+ 'wpPassword' => wikiFuzz::makeFuzz( 2 ),
+ 'wpName' => wikiFuzz::makeFuzz( 2 ),
+ 'wpMailmypassword' => wikiFuzz::makeFuzz( 2 ),
+ 'wpLoginattempt' => wikiFuzz::makeFuzz( 2 ),
+ 'wpEmail' => wikiFuzz::makeFuzz( 2 ),
+ 'wpDomain' => wikiFuzz::chooseInput( array( "", "local", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'wpCreateaccountMail' => wikiFuzz::chooseInput( array( "", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'wpCreateaccount' => wikiFuzz::chooseInput( array( "", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'wpCookieCheck' => wikiFuzz::chooseInput( array( "", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'type' => wikiFuzz::chooseInput( array( "signup", "login", "", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'returnto' => wikiFuzz::makeFuzz( 2 ),
+ 'action' => wikiFuzz::chooseInput( array( "", "submitlogin", wikiFuzz::makeFuzz( 2 ) ) )
+ );
- $this->cookie = "wikidb_session=" . wikiFuzz::chooseInput( array("1" , wikiFuzz::makeFuzz(2) ) );
- }
+ $this->cookie = "wikidb_session=" . wikiFuzz::chooseInput( array( "1" , wikiFuzz::makeFuzz( 2 ) ) );
+ }
}
@@ -1134,32 +1135,32 @@ class userLoginTest extends pageTest {
** a page test for "Special:Ipblocklist" (also includes unblocking)
*/
class ipblocklistTest extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Ipblocklist";
-
- $this->params = array (
- 'wpUnblockAddress'=> wikiFuzz::makeFuzz(2),
- 'ip' => wikiFuzz::chooseInput( array("20398702394", "", "Nickj2", wikiFuzz::makeFuzz(2),
- // something like an IP address, sometimes invalid:
- ( wikiFuzz::randnum(300,-20) . "." . wikiFuzz::randnum(300,-20) . "."
- . wikiFuzz::randnum(300,-20) . "." .wikiFuzz::randnum(300,-20) ) ) ),
- 'id' => wikiFuzz::makeFuzz(2),
- 'wpUnblockReason' => wikiFuzz::makeFuzz(2),
- 'action' => wikiFuzz::chooseInput( array(wikiFuzz::makeFuzz(2), "success", "submit", "unblock") ),
- 'wpEditToken' => wikiFuzz::makeFuzz(2),
- 'wpBlock' => wikiFuzz::chooseInput( array(wikiFuzz::makeFuzz(2), "") ),
- 'limit' => wikiFuzz::chooseInput( array("0", "-1", "--------'-----0", "+1",
- "09700982312351132098234", wikiFuzz::makeFuzz(2)) ),
- 'offset' => wikiFuzz::chooseInput( array("0", "-1", "------'-------0", "+1",
- "09700980982341535324234234", wikiFuzz::makeFuzz(2)) )
- );
-
- // sometimes we don't want to specify certain parameters.
- if (wikiFuzz::randnum(4) == 0) unset($this->params["action"]);
- if (wikiFuzz::randnum(3) == 0) unset($this->params["ip"]);
- if (wikiFuzz::randnum(2) == 0) unset($this->params["id"]);
- if (wikiFuzz::randnum(3) == 0) unset($this->params["wpUnblockAddress"]);
- }
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Ipblocklist";
+
+ $this->params = array (
+ 'wpUnblockAddress' => wikiFuzz::makeFuzz( 2 ),
+ 'ip' => wikiFuzz::chooseInput( array( "20398702394", "", "Nickj2", wikiFuzz::makeFuzz( 2 ),
+ // something like an IP address, sometimes invalid:
+ ( wikiFuzz::randnum( 300, -20 ) . "." . wikiFuzz::randnum( 300, -20 ) . "."
+ . wikiFuzz::randnum( 300, -20 ) . "." . wikiFuzz::randnum( 300, -20 ) ) ) ),
+ 'id' => wikiFuzz::makeFuzz( 2 ),
+ 'wpUnblockReason' => wikiFuzz::makeFuzz( 2 ),
+ 'action' => wikiFuzz::chooseInput( array( wikiFuzz::makeFuzz( 2 ), "success", "submit", "unblock" ) ),
+ 'wpEditToken' => wikiFuzz::makeFuzz( 2 ),
+ 'wpBlock' => wikiFuzz::chooseInput( array( wikiFuzz::makeFuzz( 2 ), "" ) ),
+ 'limit' => wikiFuzz::chooseInput( array( "0", "-1", "--------'-----0", "+1",
+ "09700982312351132098234", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'offset' => wikiFuzz::chooseInput( array( "0", "-1", "------'-------0", "+1",
+ "09700980982341535324234234", wikiFuzz::makeFuzz( 2 ) ) )
+ );
+
+ // sometimes we don't want to specify certain parameters.
+ if ( wikiFuzz::randnum( 4 ) == 0 ) unset( $this->params["action"] );
+ if ( wikiFuzz::randnum( 3 ) == 0 ) unset( $this->params["ip"] );
+ if ( wikiFuzz::randnum( 2 ) == 0 ) unset( $this->params["id"] );
+ if ( wikiFuzz::randnum( 3 ) == 0 ) unset( $this->params["wpUnblockAddress"] );
+ }
}
@@ -1167,20 +1168,20 @@ class ipblocklistTest extends pageTest {
** a page test for "Special:Newimages".
*/
class newImagesTest extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Newimages";
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Newimages";
- $this->params = array (
- 'hidebots' => wikiFuzz::chooseInput( array(wikiFuzz::makeFuzz(2), "1", "", "-1") ),
- 'wpIlMatch' => wikiFuzz::makeFuzz(2),
- 'until' => wikiFuzz::makeFuzz(2),
- 'from' => wikiFuzz::makeFuzz(2)
- );
+ $this->params = array (
+ 'hidebots' => wikiFuzz::chooseInput( array( wikiFuzz::makeFuzz( 2 ), "1", "", "-1" ) ),
+ 'wpIlMatch' => wikiFuzz::makeFuzz( 2 ),
+ 'until' => wikiFuzz::makeFuzz( 2 ),
+ 'from' => wikiFuzz::makeFuzz( 2 )
+ );
- // sometimes we don't want to specify certain parameters.
- if (wikiFuzz::randnum(6) == 0) unset($this->params["until"]);
- if (wikiFuzz::randnum(6) == 0) unset($this->params["from"]);
- }
+ // sometimes we don't want to specify certain parameters.
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["until"] );
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["from"] );
+ }
}
@@ -1188,16 +1189,16 @@ class newImagesTest extends pageTest {
** a page test for the "Special:Imagelist" page.
*/
class imagelistTest extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Imagelist";
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Imagelist";
- $this->params = array (
- 'sort' => wikiFuzz::chooseInput( array("bysize", "byname" , "bydate", wikiFuzz::makeFuzz(2)) ),
- 'limit' => wikiFuzz::chooseInput( array("0", "-1", "--------'-----0", "+1", "09700982312351132098234", wikiFuzz::makeFuzz(2)) ),
- 'offset' => wikiFuzz::chooseInput( array("0", "-1", "------'-------0", "+1", "09700980982341535324234234", wikiFuzz::makeFuzz(2)) ),
- 'wpIlMatch' => wikiFuzz::makeFuzz(2)
- );
- }
+ $this->params = array (
+ 'sort' => wikiFuzz::chooseInput( array( "bysize", "byname" , "bydate", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'limit' => wikiFuzz::chooseInput( array( "0", "-1", "--------'-----0", "+1", "09700982312351132098234", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'offset' => wikiFuzz::chooseInput( array( "0", "-1", "------'-------0", "+1", "09700980982341535324234234", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'wpIlMatch' => wikiFuzz::makeFuzz( 2 )
+ );
+ }
}
@@ -1205,27 +1206,27 @@ class imagelistTest extends pageTest {
** a page test for "Special:Export".
*/
class specialExportTest extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Export";
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Export";
- $this->params = array (
- 'action' => wikiFuzz::chooseInput( array("submit", "", wikiFuzz::makeFuzz(2)) ),
- 'pages' => wikiFuzz::makeFuzz(2),
- 'curonly' => wikiFuzz::chooseInput( array("", "0", "-1", wikiFuzz::makeFuzz(2)) ),
- 'listauthors' => wikiFuzz::chooseInput( array("", "0", "-1", wikiFuzz::makeFuzz(2)) ),
- 'history' => wikiFuzz::chooseInput( array("0", "-1", "------'-------0", "+1", "09700980982341535324234234", wikiFuzz::makeFuzz(2)) ),
+ $this->params = array (
+ 'action' => wikiFuzz::chooseInput( array( "submit", "", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'pages' => wikiFuzz::makeFuzz( 2 ),
+ 'curonly' => wikiFuzz::chooseInput( array( "", "0", "-1", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'listauthors' => wikiFuzz::chooseInput( array( "", "0", "-1", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'history' => wikiFuzz::chooseInput( array( "0", "-1", "------'-------0", "+1", "09700980982341535324234234", wikiFuzz::makeFuzz( 2 ) ) ),
- );
+ );
- // For the time being, need to disable "submit" action as Tidy barfs on MediaWiki's XML export.
- if ($this->params['action'] == 'submit') $this->params['action'] = '';
+ // For the time being, need to disable "submit" action as Tidy barfs on MediaWiki's XML export.
+ if ( $this->params['action'] == 'submit' ) $this->params['action'] = '';
- // Sometimes remove the history field.
- if (wikiFuzz::randnum(2) == 0) unset($this->params["history"]);
-
- // page does not produce HTML.
- $this->tidyValidate = false;
- }
+ // Sometimes remove the history field.
+ if ( wikiFuzz::randnum( 2 ) == 0 ) unset( $this->params["history"] );
+
+ // page does not produce HTML.
+ $this->tidyValidate = false;
+ }
}
@@ -1233,15 +1234,15 @@ class specialExportTest extends pageTest {
** a page test for "Special:Booksources".
*/
class specialBooksourcesTest extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Booksources";
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Booksources";
- $this->params = array (
- 'go' => wikiFuzz::makeFuzz(2),
- // ISBN codes have to contain some semi-numeric stuff or will be ignored:
- 'isbn' => "0X0" . wikiFuzz::makeFuzz(2)
- );
- }
+ $this->params = array (
+ 'go' => wikiFuzz::makeFuzz( 2 ),
+ // ISBN codes have to contain some semi-numeric stuff or will be ignored:
+ 'isbn' => "0X0" . wikiFuzz::makeFuzz( 2 )
+ );
+ }
}
@@ -1249,15 +1250,15 @@ class specialBooksourcesTest extends pageTest {
** a page test for "Special:Allpages".
*/
class specialAllpagesTest extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special%3AAllpages";
+ function __construct() {
+ $this->pagePath = "index.php?title=Special%3AAllpages";
- $this->params = array (
- 'from' => wikiFuzz::makeFuzz(2),
- 'namespace' => wikiFuzz::chooseInput( range(-1, 15) ),
- 'go' => wikiFuzz::makeFuzz(2)
- );
- }
+ $this->params = array (
+ 'from' => wikiFuzz::makeFuzz( 2 ),
+ 'namespace' => wikiFuzz::chooseInput( range( -1, 15 ) ),
+ 'go' => wikiFuzz::makeFuzz( 2 )
+ );
+ }
}
@@ -1265,19 +1266,19 @@ class specialAllpagesTest extends pageTest {
** a page test for the page History.
*/
class pageHistoryTest extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Main_Page&action=history";
+ function __construct() {
+ $this->pagePath = "index.php?title=Main_Page&action=history";
- $this->params = array (
- 'limit' => wikiFuzz::chooseInput( array("-1", "0", "-------'------0", "+1", "8134", wikiFuzz::makeFuzz(2)) ),
- 'offset' => wikiFuzz::chooseInput( array("-1", "0", "------'-------0", "+1", "9823412312312412435", wikiFuzz::makeFuzz(2)) ),
- "go" => wikiFuzz::chooseInput( array("first", "last", wikiFuzz::makeFuzz(2)) ),
- "dir" => wikiFuzz::chooseInput( array("prev", "next", wikiFuzz::makeFuzz(2)) ),
- "diff" => wikiFuzz::chooseInput( array("-1", "--------'-----0", "+1", "8134", wikiFuzz::makeFuzz(2)) ),
- "oldid" => wikiFuzz::chooseInput( array("prev", "-1", "+1", "8134", wikiFuzz::makeFuzz(2)) ),
- "feed" => wikiFuzz::makeFuzz(2)
- );
- }
+ $this->params = array (
+ 'limit' => wikiFuzz::chooseInput( array( "-1", "0", "-------'------0", "+1", "8134", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'offset' => wikiFuzz::chooseInput( array( "-1", "0", "------'-------0", "+1", "9823412312312412435", wikiFuzz::makeFuzz( 2 ) ) ),
+ "go" => wikiFuzz::chooseInput( array( "first", "last", wikiFuzz::makeFuzz( 2 ) ) ),
+ "dir" => wikiFuzz::chooseInput( array( "prev", "next", wikiFuzz::makeFuzz( 2 ) ) ),
+ "diff" => wikiFuzz::chooseInput( array( "-1", "--------'-----0", "+1", "8134", wikiFuzz::makeFuzz( 2 ) ) ),
+ "oldid" => wikiFuzz::chooseInput( array( "prev", "-1", "+1", "8134", wikiFuzz::makeFuzz( 2 ) ) ),
+ "feed" => wikiFuzz::makeFuzz( 2 )
+ );
+ }
}
@@ -1285,17 +1286,17 @@ class pageHistoryTest extends pageTest {
** a page test for the Special:Contributions".
*/
class contributionsTest extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Contributions/" . USER_ON_WIKI;
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Contributions/" . USER_ON_WIKI;
- $this->params = array (
- 'target' => wikiFuzz::chooseInput( array(wikiFuzz::makeFuzz(2), "newbies", USER_ON_WIKI) ),
- 'namespace' => wikiFuzz::chooseInput( array(-1, 15, 1, wikiFuzz::makeFuzz(2)) ),
- 'offset' => wikiFuzz::chooseInput( array("0", "-1", "------'-------0", "+1", "982342131232131231241", wikiFuzz::makeFuzz(2)) ),
- 'bot' => wikiFuzz::chooseInput( array("", "-1", "0", "1", wikiFuzz::makeFuzz(2)) ),
- 'go' => wikiFuzz::chooseInput( array("-1", 'prev', 'next', wikiFuzz::makeFuzz(2)) )
- );
- }
+ $this->params = array (
+ 'target' => wikiFuzz::chooseInput( array( wikiFuzz::makeFuzz( 2 ), "newbies", USER_ON_WIKI ) ),
+ 'namespace' => wikiFuzz::chooseInput( array( -1, 15, 1, wikiFuzz::makeFuzz( 2 ) ) ),
+ 'offset' => wikiFuzz::chooseInput( array( "0", "-1", "------'-------0", "+1", "982342131232131231241", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'bot' => wikiFuzz::chooseInput( array( "", "-1", "0", "1", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'go' => wikiFuzz::chooseInput( array( "-1", 'prev', 'next', wikiFuzz::makeFuzz( 2 ) ) )
+ );
+ }
}
@@ -1303,66 +1304,66 @@ class contributionsTest extends pageTest {
** a page test for viewing a normal page, whilst posting various params.
*/
class viewPageTest extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Main_Page";
-
- $this->params = array (
- "useskin" => wikiFuzz::chooseInput( array("chick", "cologneblue", "myskin",
- "nostalgia", "simple", "standard", wikiFuzz::makeFuzz(2)) ),
- "uselang" => wikiFuzz::chooseInput( array( wikiFuzz::makeFuzz(2),
- "ab", "af", "an", "ar", "arc", "as", "ast", "av", "ay", "az", "ba",
- "bat-smg", "be", "bg", "bm", "bn", "bo", "bpy", "br", "bs", "ca",
- "ce", "cs", "csb", "cv", "cy", "da", "de", "dv", "dz", "el", "en",
- "eo", "es", "et", "eu", "fa", "fi", "fo", "fr", "fur", "fy", "ga",
- "gn", "gsw", "gu", "he", "hi", "hr", "hu", "ia", "id", "ii", "is",
- "it", "ja", "jv", "ka", "km", "kn", "ko", "ks", "ku", "kv", "la",
- "li", "lo", "lt", "lv", "mk", "ml", "ms", "nah", "nap", "nds",
- "nds-nl", "nl", "nn", "no", "non", "nv", "oc", "or", "os", "pa",
- "pl", "pms", "ps", "pt", "pt-br", "qu", "rmy", "ro", "ru", "sc",
- "sd", "sk", "sl", "sq", "sr", "sr-ec", "sr-el",
- "su", "sv", "ta", "te", "th", "tr", "tt", "ty", "tyv", "udm",
- "ug", "uk", "ur", "utf8", "vec", "vi", "wa", "xal", "yi", "za",
- "zh", "zh-cn", "zh-hk", "zh-sg", "zh-tw", "zh-tw") ),
- "returnto" => wikiFuzz::makeFuzz(2),
- "feed" => wikiFuzz::chooseInput( array("atom", "rss", wikiFuzz::makeFuzz(2)) ),
- "rcid" => wikiFuzz::makeFuzz(2),
- "action" => wikiFuzz::chooseInput( array("view", "raw", "render", wikiFuzz::makeFuzz(2), "markpatrolled") ),
- "printable" => wikiFuzz::makeFuzz(2),
- "oldid" => wikiFuzz::makeFuzz(2),
- "redirect" => wikiFuzz::makeFuzz(2),
- "diff" => wikiFuzz::makeFuzz(2),
- "search" => wikiFuzz::makeFuzz(2),
- "rdfrom" => wikiFuzz::makeFuzz(2), // things from Article.php from here on:
- "token" => wikiFuzz::makeFuzz(2),
- "tbid" => wikiFuzz::makeFuzz(2),
- "action" => wikiFuzz::chooseInput( array("purge", wikiFuzz::makeFuzz(2)) ),
- "wpReason" => wikiFuzz::makeFuzz(2),
- "wpEditToken" => wikiFuzz::makeFuzz(2),
- "from" => wikiFuzz::makeFuzz(2),
- "bot" => wikiFuzz::makeFuzz(2),
- "summary" => wikiFuzz::makeFuzz(2),
- "direction" => wikiFuzz::chooseInput( array("next", "prev", wikiFuzz::makeFuzz(2)) ),
- "section" => wikiFuzz::makeFuzz(2),
- "preload" => wikiFuzz::makeFuzz(2),
-
- );
-
- // Tidy does not know how to valid atom or rss, so exclude from testing for the time being.
- if ($this->params["feed"] == "atom") { unset($this->params["feed"]); }
- else if ($this->params["feed"] == "rss") { unset($this->params["feed"]); }
-
- // Raw pages cannot really be validated
- if ($this->params["action"] == "raw") unset($this->params["action"]);
-
- // sometimes we don't want to specify certain parameters.
- if (wikiFuzz::randnum(6) == 0) unset($this->params["rcid"]);
- if (wikiFuzz::randnum(6) == 0) unset($this->params["diff"]);
- if (wikiFuzz::randnum(6) == 0) unset($this->params["rdfrom"]);
- if (wikiFuzz::randnum(3) == 0) unset($this->params["oldid"]);
-
- // usually don't want action == purge.
- if (wikiFuzz::randnum(6) > 1) unset($this->params["action"]);
- }
+ function __construct() {
+ $this->pagePath = "index.php?title=Main_Page";
+
+ $this->params = array (
+ "useskin" => wikiFuzz::chooseInput( array( "chick", "cologneblue", "myskin",
+ "nostalgia", "simple", "standard", wikiFuzz::makeFuzz( 2 ) ) ),
+ "uselang" => wikiFuzz::chooseInput( array( wikiFuzz::makeFuzz( 2 ),
+ "ab", "af", "an", "ar", "arc", "as", "ast", "av", "ay", "az", "ba",
+ "bat-smg", "be", "bg", "bm", "bn", "bo", "bpy", "br", "bs", "ca",
+ "ce", "cs", "csb", "cv", "cy", "da", "de", "dv", "dz", "el", "en",
+ "eo", "es", "et", "eu", "fa", "fi", "fo", "fr", "fur", "fy", "ga",
+ "gn", "gsw", "gu", "he", "hi", "hr", "hu", "ia", "id", "ii", "is",
+ "it", "ja", "jv", "ka", "km", "kn", "ko", "ks", "ku", "kv", "la",
+ "li", "lo", "lt", "lv", "mk", "ml", "ms", "nah", "nap", "nds",
+ "nds-nl", "nl", "nn", "no", "non", "nv", "oc", "or", "os", "pa",
+ "pl", "pms", "ps", "pt", "pt-br", "qu", "rmy", "ro", "ru", "sc",
+ "sd", "sk", "sl", "sq", "sr", "sr-ec", "sr-el",
+ "su", "sv", "ta", "te", "th", "tr", "tt", "ty", "tyv", "udm",
+ "ug", "uk", "ur", "utf8", "vec", "vi", "wa", "xal", "yi", "za",
+ "zh", "zh-cn", "zh-hk", "zh-sg", "zh-tw", "zh-tw" ) ),
+ "returnto" => wikiFuzz::makeFuzz( 2 ),
+ "feed" => wikiFuzz::chooseInput( array( "atom", "rss", wikiFuzz::makeFuzz( 2 ) ) ),
+ "rcid" => wikiFuzz::makeFuzz( 2 ),
+ "action" => wikiFuzz::chooseInput( array( "view", "raw", "render", wikiFuzz::makeFuzz( 2 ), "markpatrolled" ) ),
+ "printable" => wikiFuzz::makeFuzz( 2 ),
+ "oldid" => wikiFuzz::makeFuzz( 2 ),
+ "redirect" => wikiFuzz::makeFuzz( 2 ),
+ "diff" => wikiFuzz::makeFuzz( 2 ),
+ "search" => wikiFuzz::makeFuzz( 2 ),
+ "rdfrom" => wikiFuzz::makeFuzz( 2 ), // things from Article.php from here on:
+ "token" => wikiFuzz::makeFuzz( 2 ),
+ "tbid" => wikiFuzz::makeFuzz( 2 ),
+ "action" => wikiFuzz::chooseInput( array( "purge", wikiFuzz::makeFuzz( 2 ) ) ),
+ "wpReason" => wikiFuzz::makeFuzz( 2 ),
+ "wpEditToken" => wikiFuzz::makeFuzz( 2 ),
+ "from" => wikiFuzz::makeFuzz( 2 ),
+ "bot" => wikiFuzz::makeFuzz( 2 ),
+ "summary" => wikiFuzz::makeFuzz( 2 ),
+ "direction" => wikiFuzz::chooseInput( array( "next", "prev", wikiFuzz::makeFuzz( 2 ) ) ),
+ "section" => wikiFuzz::makeFuzz( 2 ),
+ "preload" => wikiFuzz::makeFuzz( 2 ),
+
+ );
+
+ // Tidy does not know how to valid atom or rss, so exclude from testing for the time being.
+ if ( $this->params["feed"] == "atom" ) { unset( $this->params["feed"] ); }
+ else if ( $this->params["feed"] == "rss" ) { unset( $this->params["feed"] ); }
+
+ // Raw pages cannot really be validated
+ if ( $this->params["action"] == "raw" ) unset( $this->params["action"] );
+
+ // sometimes we don't want to specify certain parameters.
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["rcid"] );
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["diff"] );
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["rdfrom"] );
+ if ( wikiFuzz::randnum( 3 ) == 0 ) unset( $this->params["oldid"] );
+
+ // usually don't want action == purge.
+ if ( wikiFuzz::randnum( 6 ) > 1 ) unset( $this->params["action"] );
+ }
}
@@ -1370,50 +1371,50 @@ class viewPageTest extends pageTest {
** a page test for "Special:Allmessages".
*/
class specialAllmessagesTest extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Allmessages";
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Allmessages";
- // only really has one parameter
- $this->params = array (
- "ot" => wikiFuzz::chooseInput( array("php", "html", wikiFuzz::makeFuzz(2)) )
- );
- }
+ // only really has one parameter
+ $this->params = array (
+ "ot" => wikiFuzz::chooseInput( array( "php", "html", wikiFuzz::makeFuzz( 2 ) ) )
+ );
+ }
}
/**
** a page test for "Special:Newpages".
*/
class specialNewpages extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Newpages";
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Newpages";
- $this->params = array (
- "namespace" => wikiFuzz::chooseInput( range(-1, 15) ),
- "feed" => wikiFuzz::chooseInput( array("atom", "rss", wikiFuzz::makeFuzz(2)) ),
- 'limit' => wikiFuzz::chooseInput( array("-1", "0", "-------'------0", "+1", "8134", wikiFuzz::makeFuzz(2)) ),
- 'offset' => wikiFuzz::chooseInput( array("-1", "0", "------'-------0", "+1", "9823412312312412435", wikiFuzz::makeFuzz(2)) )
- );
+ $this->params = array (
+ "namespace" => wikiFuzz::chooseInput( range( -1, 15 ) ),
+ "feed" => wikiFuzz::chooseInput( array( "atom", "rss", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'limit' => wikiFuzz::chooseInput( array( "-1", "0", "-------'------0", "+1", "8134", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'offset' => wikiFuzz::chooseInput( array( "-1", "0", "------'-------0", "+1", "9823412312312412435", wikiFuzz::makeFuzz( 2 ) ) )
+ );
- // Tidy does not know how to valid atom or rss, so exclude from testing for the time being.
- if ($this->params["feed"] == "atom") { unset($this->params["feed"]); }
- else if ($this->params["feed"] == "rss") { unset($this->params["feed"]); }
- }
+ // Tidy does not know how to valid atom or rss, so exclude from testing for the time being.
+ if ( $this->params["feed"] == "atom" ) { unset( $this->params["feed"] ); }
+ else if ( $this->params["feed"] == "rss" ) { unset( $this->params["feed"] ); }
+ }
}
/**
** a page test for "redirect.php"
*/
class redirectTest extends pageTest {
- function __construct() {
- $this->pagePath = "redirect.php";
+ function __construct() {
+ $this->pagePath = "redirect.php";
- $this->params = array (
- "wpDropdown" => wikiFuzz::makeFuzz(2)
- );
+ $this->params = array (
+ "wpDropdown" => wikiFuzz::makeFuzz( 2 )
+ );
- // sometimes we don't want to specify certain parameters.
- if (wikiFuzz::randnum(6) == 0) unset($this->params["wpDropdown"]);
- }
+ // sometimes we don't want to specify certain parameters.
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["wpDropdown"] );
+ }
}
@@ -1421,14 +1422,14 @@ class redirectTest extends pageTest {
** a page test for "Special:Confirmemail"
*/
class confirmEmail extends pageTest {
- function __construct() {
- // sometimes we send a bogus confirmation code, and sometimes we don't.
- $this->pagePath = "index.php?title=Special:Confirmemail" . wikiFuzz::chooseInput( array("", "/" . wikiFuzz::makeTitleSafe(wikiFuzz::makeFuzz(1)) ) );
+ function __construct() {
+ // sometimes we send a bogus confirmation code, and sometimes we don't.
+ $this->pagePath = "index.php?title=Special:Confirmemail" . wikiFuzz::chooseInput( array( "", "/" . wikiFuzz::makeTitleSafe( wikiFuzz::makeFuzz( 1 ) ) ) );
- $this->params = array (
- "token" => wikiFuzz::makeFuzz(2)
- );
- }
+ $this->params = array (
+ "token" => wikiFuzz::makeFuzz( 2 )
+ );
+ }
}
@@ -1437,24 +1438,24 @@ class confirmEmail extends pageTest {
** Note: this test would be better if we were logged in.
*/
class watchlistTest extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Watchlist";
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Watchlist";
- $this->params = array (
- "remove" => wikiFuzz::chooseInput( array("Remove checked items from watchlist", wikiFuzz::makeFuzz(2))),
- 'days' => wikiFuzz::chooseInput( array(0, -1, -230, "--", 3, 9, wikiFuzz::makeFuzz(2)) ),
- 'hideOwn' => wikiFuzz::chooseInput( array("", "0", "1", wikiFuzz::makeFuzz(2)) ),
- 'hideBots' => wikiFuzz::chooseInput( array("", "0", "1", wikiFuzz::makeFuzz(2)) ),
- 'namespace'=> wikiFuzz::chooseInput( array("", "0", "1", wikiFuzz::makeFuzz(2)) ),
- 'action' => wikiFuzz::chooseInput( array("submit", "clear", wikiFuzz::makeFuzz(2)) ),
- 'id[]' => wikiFuzz::makeFuzz(2),
- 'edit' => wikiFuzz::makeFuzz(2),
- 'token' => wikiFuzz::chooseInput( array("", "1243213", wikiFuzz::makeFuzz(2)) )
- );
+ $this->params = array (
+ "remove" => wikiFuzz::chooseInput( array( "Remove checked items from watchlist", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'days' => wikiFuzz::chooseInput( array( 0, -1, -230, "--", 3, 9, wikiFuzz::makeFuzz( 2 ) ) ),
+ 'hideOwn' => wikiFuzz::chooseInput( array( "", "0", "1", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'hideBots' => wikiFuzz::chooseInput( array( "", "0", "1", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'namespace' => wikiFuzz::chooseInput( array( "", "0", "1", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'action' => wikiFuzz::chooseInput( array( "submit", "clear", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'id[]' => wikiFuzz::makeFuzz( 2 ),
+ 'edit' => wikiFuzz::makeFuzz( 2 ),
+ 'token' => wikiFuzz::chooseInput( array( "", "1243213", wikiFuzz::makeFuzz( 2 ) ) )
+ );
- // sometimes we specifiy "reset", and sometimes we don't.
- if (wikiFuzz::randnum(3) == 0) $this->params["reset"] = wikiFuzz::chooseInput( array("", "0", "1", wikiFuzz::makeFuzz(2)) );
- }
+ // sometimes we specifiy "reset", and sometimes we don't.
+ if ( wikiFuzz::randnum( 3 ) == 0 ) $this->params["reset"] = wikiFuzz::chooseInput( array( "", "0", "1", wikiFuzz::makeFuzz( 2 ) ) );
+ }
}
@@ -1462,16 +1463,16 @@ class watchlistTest extends pageTest {
** a page test for "Special:Blockme"
*/
class specialBlockmeTest extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Blockme";
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Blockme";
- $this->params = array ( );
+ $this->params = array ( );
- // sometimes we specify "ip", and sometimes we don't.
- if (wikiFuzz::randnum(1) == 0) {
- $this->params["ip"] = wikiFuzz::chooseInput( array("10.12.41.213", wikiFuzz::randnum(-10,8134), wikiFuzz::makeFuzz(2)) );
- }
- }
+ // sometimes we specify "ip", and sometimes we don't.
+ if ( wikiFuzz::randnum( 1 ) == 0 ) {
+ $this->params["ip"] = wikiFuzz::chooseInput( array( "10.12.41.213", wikiFuzz::randnum( -10, 8134 ), wikiFuzz::makeFuzz( 2 ) ) );
+ }
+ }
}
@@ -1479,32 +1480,32 @@ class specialBlockmeTest extends pageTest {
** a page test for "Special:Movepage"
*/
class specialMovePage extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Movepage";
-
- $this->params = array (
- "action" => wikiFuzz::chooseInput( array("success", "submit", "", wikiFuzz::makeFuzz(2)) ),
- 'wpEditToken' => wikiFuzz::chooseInput( array('', 0, 34987987, wikiFuzz::makeFuzz(2)) ),
- 'target' => wikiFuzz::chooseInput( array("x", wikiFuzz::makeTitleSafe(wikiFuzz::makeFuzz(2)) ) ),
- 'wpOldTitle' => wikiFuzz::chooseInput( array("z", wikiFuzz::makeTitleSafe(wikiFuzz::makeFuzz(2)), wikiFuzz::makeFuzz(2) ) ),
- 'wpNewTitle' => wikiFuzz::chooseInput( array("y", wikiFuzz::makeTitleSafe(wikiFuzz::makeFuzz(2)), wikiFuzz::makeFuzz(2) ) ),
- 'wpReason' => wikiFuzz::chooseInput( array(wikiFuzz::makeFuzz(2)) ),
- 'wpMovetalk' => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
- 'wpDeleteAndMove' => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
- 'wpConfirm' => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
- 'talkmoved' => wikiFuzz::chooseInput( array("1", wikiFuzz::makeFuzz(2), "articleexists", 'notalkpage') ),
- 'oldtitle' => wikiFuzz::makeFuzz(2),
- 'newtitle' => wikiFuzz::makeFuzz(2),
- 'wpMovetalk' => wikiFuzz::chooseInput( array("1", "0", wikiFuzz::makeFuzz(2)) )
- );
-
- // sometimes we don't want to specify certain parameters.
- if (wikiFuzz::randnum(2) == 0) unset($this->params["wpEditToken"]);
- if (wikiFuzz::randnum(3) == 0) unset($this->params["target"]);
- if (wikiFuzz::randnum(3) == 0) unset($this->params["wpNewTitle"]);
- if (wikiFuzz::randnum(4) == 0) unset($this->params["wpReason"]);
- if (wikiFuzz::randnum(4) == 0) unset($this->params["wpOldTitle"]);
- }
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Movepage";
+
+ $this->params = array (
+ "action" => wikiFuzz::chooseInput( array( "success", "submit", "", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'wpEditToken' => wikiFuzz::chooseInput( array( '', 0, 34987987, wikiFuzz::makeFuzz( 2 ) ) ),
+ 'target' => wikiFuzz::chooseInput( array( "x", wikiFuzz::makeTitleSafe( wikiFuzz::makeFuzz( 2 ) ) ) ),
+ 'wpOldTitle' => wikiFuzz::chooseInput( array( "z", wikiFuzz::makeTitleSafe( wikiFuzz::makeFuzz( 2 ) ), wikiFuzz::makeFuzz( 2 ) ) ),
+ 'wpNewTitle' => wikiFuzz::chooseInput( array( "y", wikiFuzz::makeTitleSafe( wikiFuzz::makeFuzz( 2 ) ), wikiFuzz::makeFuzz( 2 ) ) ),
+ 'wpReason' => wikiFuzz::chooseInput( array( wikiFuzz::makeFuzz( 2 ) ) ),
+ 'wpMovetalk' => wikiFuzz::chooseInput( array( "0", "1", "++--34234", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'wpDeleteAndMove' => wikiFuzz::chooseInput( array( "0", "1", "++--34234", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'wpConfirm' => wikiFuzz::chooseInput( array( "0", "1", "++--34234", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'talkmoved' => wikiFuzz::chooseInput( array( "1", wikiFuzz::makeFuzz( 2 ), "articleexists", 'notalkpage' ) ),
+ 'oldtitle' => wikiFuzz::makeFuzz( 2 ),
+ 'newtitle' => wikiFuzz::makeFuzz( 2 ),
+ 'wpMovetalk' => wikiFuzz::chooseInput( array( "1", "0", wikiFuzz::makeFuzz( 2 ) ) )
+ );
+
+ // sometimes we don't want to specify certain parameters.
+ if ( wikiFuzz::randnum( 2 ) == 0 ) unset( $this->params["wpEditToken"] );
+ if ( wikiFuzz::randnum( 3 ) == 0 ) unset( $this->params["target"] );
+ if ( wikiFuzz::randnum( 3 ) == 0 ) unset( $this->params["wpNewTitle"] );
+ if ( wikiFuzz::randnum( 4 ) == 0 ) unset( $this->params["wpReason"] );
+ if ( wikiFuzz::randnum( 4 ) == 0 ) unset( $this->params["wpOldTitle"] );
+ }
}
@@ -1512,26 +1513,26 @@ class specialMovePage extends pageTest {
** a page test for "Special:Undelete"
*/
class specialUndelete extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Undelete";
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Undelete";
- $this->params = array (
- "action" => wikiFuzz::chooseInput( array("submit", "", wikiFuzz::makeFuzz(2)) ),
- 'wpEditToken' => wikiFuzz::chooseInput( array('', 0, 34987987, wikiFuzz::makeFuzz(2)) ),
- 'target' => wikiFuzz::chooseInput( array("x", wikiFuzz::makeTitleSafe(wikiFuzz::makeFuzz(2)) ) ),
- 'timestamp' => wikiFuzz::chooseInput( array("125223", wikiFuzz::makeFuzz(2) ) ),
- 'file' => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
- 'restore' => wikiFuzz::chooseInput( array("0", "1", wikiFuzz::makeFuzz(2)) ),
- 'preview' => wikiFuzz::chooseInput( array("0", "1", wikiFuzz::makeFuzz(2)) ),
- 'wpComment' => wikiFuzz::makeFuzz(2)
- );
+ $this->params = array (
+ "action" => wikiFuzz::chooseInput( array( "submit", "", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'wpEditToken' => wikiFuzz::chooseInput( array( '', 0, 34987987, wikiFuzz::makeFuzz( 2 ) ) ),
+ 'target' => wikiFuzz::chooseInput( array( "x", wikiFuzz::makeTitleSafe( wikiFuzz::makeFuzz( 2 ) ) ) ),
+ 'timestamp' => wikiFuzz::chooseInput( array( "125223", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'file' => wikiFuzz::chooseInput( array( "0", "1", "++--34234", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'restore' => wikiFuzz::chooseInput( array( "0", "1", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'preview' => wikiFuzz::chooseInput( array( "0", "1", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'wpComment' => wikiFuzz::makeFuzz( 2 )
+ );
- // sometimes we don't want to specify certain parameters.
- if (wikiFuzz::randnum(2) == 0) unset($this->params["wpEditToken"]);
- if (wikiFuzz::randnum(4) == 0) unset($this->params["target"]);
- if (wikiFuzz::randnum(1) == 0) unset($this->params["restore"]);
- if (wikiFuzz::randnum(1) == 0) unset($this->params["preview"]);
- }
+ // sometimes we don't want to specify certain parameters.
+ if ( wikiFuzz::randnum( 2 ) == 0 ) unset( $this->params["wpEditToken"] );
+ if ( wikiFuzz::randnum( 4 ) == 0 ) unset( $this->params["target"] );
+ if ( wikiFuzz::randnum( 1 ) == 0 ) unset( $this->params["restore"] );
+ if ( wikiFuzz::randnum( 1 ) == 0 ) unset( $this->params["preview"] );
+ }
}
@@ -1539,20 +1540,20 @@ class specialUndelete extends pageTest {
** a page test for "Special:Unlockdb"
*/
class specialUnlockdb extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Unlockdb";
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Unlockdb";
- $this->params = array (
- "action" => wikiFuzz::chooseInput( array("submit", "success", "", wikiFuzz::makeFuzz(2)) ),
- 'wpEditToken' => wikiFuzz::chooseInput( array("20398702394", "", wikiFuzz::makeFuzz(2)) ),
- 'wpLockConfirm' => wikiFuzz::chooseInput( array("0", "1", wikiFuzz::makeFuzz(2)) )
- );
+ $this->params = array (
+ "action" => wikiFuzz::chooseInput( array( "submit", "success", "", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'wpEditToken' => wikiFuzz::chooseInput( array( "20398702394", "", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'wpLockConfirm' => wikiFuzz::chooseInput( array( "0", "1", wikiFuzz::makeFuzz( 2 ) ) )
+ );
- // sometimes we don't want to specify certain parameters.
- if (wikiFuzz::randnum(4) == 0) unset($this->params["wpEditToken"]);
- if (wikiFuzz::randnum(4) == 0) unset($this->params["action"]);
- if (wikiFuzz::randnum(4) == 0) unset($this->params["wpLockConfirm"]);
- }
+ // sometimes we don't want to specify certain parameters.
+ if ( wikiFuzz::randnum( 4 ) == 0 ) unset( $this->params["wpEditToken"] );
+ if ( wikiFuzz::randnum( 4 ) == 0 ) unset( $this->params["action"] );
+ if ( wikiFuzz::randnum( 4 ) == 0 ) unset( $this->params["wpLockConfirm"] );
+ }
}
@@ -1560,21 +1561,21 @@ class specialUnlockdb extends pageTest {
** a page test for "Special:Lockdb"
*/
class specialLockdb extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Lockdb";
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Lockdb";
- $this->params = array (
- "action" => wikiFuzz::chooseInput( array("submit", "success", "", wikiFuzz::makeFuzz(2)) ),
- 'wpEditToken' => wikiFuzz::chooseInput( array("20398702394", "", wikiFuzz::makeFuzz(2)) ),
- 'wpLockReason' => wikiFuzz::makeFuzz(2),
- 'wpLockConfirm'=> wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) )
- );
+ $this->params = array (
+ "action" => wikiFuzz::chooseInput( array( "submit", "success", "", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'wpEditToken' => wikiFuzz::chooseInput( array( "20398702394", "", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'wpLockReason' => wikiFuzz::makeFuzz( 2 ),
+ 'wpLockConfirm' => wikiFuzz::chooseInput( array( "0", "1", "++--34234", wikiFuzz::makeFuzz( 2 ) ) )
+ );
- // sometimes we don't want to specify certain parameters.
- if (wikiFuzz::randnum(4) == 0) unset($this->params["wpEditToken"]);
- if (wikiFuzz::randnum(4) == 0) unset($this->params["action"]);
- if (wikiFuzz::randnum(4) == 0) unset($this->params["wpLockConfirm"]);
- }
+ // sometimes we don't want to specify certain parameters.
+ if ( wikiFuzz::randnum( 4 ) == 0 ) unset( $this->params["wpEditToken"] );
+ if ( wikiFuzz::randnum( 4 ) == 0 ) unset( $this->params["action"] );
+ if ( wikiFuzz::randnum( 4 ) == 0 ) unset( $this->params["wpLockConfirm"] );
+ }
}
@@ -1582,22 +1583,22 @@ class specialLockdb extends pageTest {
** a page test for "Special:Userrights"
*/
class specialUserrights extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Userrights";
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Userrights";
- $this->params = array (
- 'wpEditToken' => wikiFuzz::chooseInput( array("20398702394", "", wikiFuzz::makeFuzz(2)) ),
- 'user-editname' => wikiFuzz::chooseInput( array("Nickj2", "Nickj2\n<xyz>", wikiFuzz::makeFuzz(2)) ),
- 'ssearchuser' => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
- 'saveusergroups'=> wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)), "Save User Groups"),
- 'member[]' => wikiFuzz::chooseInput( array("0", "bot", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
- "available[]" => wikiFuzz::chooseInput( array("0", "sysop", "bureaucrat", "1", "++--34234", wikiFuzz::makeFuzz(2)) )
- );
+ $this->params = array (
+ 'wpEditToken' => wikiFuzz::chooseInput( array( "20398702394", "", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'user-editname' => wikiFuzz::chooseInput( array( "Nickj2", "Nickj2\n<xyz>", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'ssearchuser' => wikiFuzz::chooseInput( array( "0", "1", "++--34234", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'saveusergroups' => wikiFuzz::chooseInput( array( "0", "1", "++--34234", wikiFuzz::makeFuzz( 2 ) ), "Save User Groups" ),
+ 'member[]' => wikiFuzz::chooseInput( array( "0", "bot", "1", "++--34234", wikiFuzz::makeFuzz( 2 ) ) ),
+ "available[]" => wikiFuzz::chooseInput( array( "0", "sysop", "bureaucrat", "1", "++--34234", wikiFuzz::makeFuzz( 2 ) ) )
+ );
- // sometimes we don't want to specify certain parameters.
- if (wikiFuzz::randnum(3) == 0) unset($this->params['ssearchuser']);
- if (wikiFuzz::randnum(3) == 0) unset($this->params['saveusergroups']);
- }
+ // sometimes we don't want to specify certain parameters.
+ if ( wikiFuzz::randnum( 3 ) == 0 ) unset( $this->params['ssearchuser'] );
+ if ( wikiFuzz::randnum( 3 ) == 0 ) unset( $this->params['saveusergroups'] );
+ }
}
@@ -1605,23 +1606,23 @@ class specialUserrights extends pageTest {
** a test for page protection and unprotection.
*/
class pageProtectionForm extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Main_Page";
+ function __construct() {
+ $this->pagePath = "index.php?title=Main_Page";
- $this->params = array (
- "action" => "protect",
- 'wpEditToken' => wikiFuzz::chooseInput( array("20398702394", "", wikiFuzz::makeFuzz(2)) ),
- "mwProtect-level-edit" => wikiFuzz::chooseInput( array('', 'autoconfirmed', 'sysop', wikifuzz::makeFuzz(2)) ),
- "mwProtect-level-move" => wikiFuzz::chooseInput( array('', 'autoconfirmed', 'sysop', wikifuzz::makeFuzz(2)) ),
- "mwProtectUnchained" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
- 'mwProtect-reason' => wikiFuzz::chooseInput( array("because it was there", wikifuzz::makeFuzz(2)) )
- );
+ $this->params = array (
+ "action" => "protect",
+ 'wpEditToken' => wikiFuzz::chooseInput( array( "20398702394", "", wikiFuzz::makeFuzz( 2 ) ) ),
+ "mwProtect-level-edit" => wikiFuzz::chooseInput( array( '', 'autoconfirmed', 'sysop', wikiFuzz::makeFuzz( 2 ) ) ),
+ "mwProtect-level-move" => wikiFuzz::chooseInput( array( '', 'autoconfirmed', 'sysop', wikiFuzz::makeFuzz( 2 ) ) ),
+ "mwProtectUnchained" => wikiFuzz::chooseInput( array( "0", "1", "++--34234", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'mwProtect-reason' => wikiFuzz::chooseInput( array( "because it was there", wikiFuzz::makeFuzz( 2 ) ) )
+ );
- // sometimes we don't want to specify certain parameters.
- if (wikiFuzz::randnum(3) == 0) unset($this->params["mwProtectUnchained"]);
- if (wikiFuzz::randnum(3) == 0) unset($this->params['mwProtect-reason']);
- }
+ // sometimes we don't want to specify certain parameters.
+ if ( wikiFuzz::randnum( 3 ) == 0 ) unset( $this->params["mwProtectUnchained"] );
+ if ( wikiFuzz::randnum( 3 ) == 0 ) unset( $this->params['mwProtect-reason'] );
+ }
}
@@ -1629,38 +1630,38 @@ class pageProtectionForm extends pageTest {
** a page test for "Special:Blockip".
*/
class specialBlockip extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Blockip";
-
- $this->params = array (
- "action" => wikiFuzz::chooseInput( array("submit", "", wikiFuzz::makeFuzz(2)) ),
- 'wpEditToken' => wikiFuzz::chooseInput( array("20398702394", "", wikiFuzz::makeFuzz(2)) ),
- "wpBlockAddress" => wikiFuzz::chooseInput( array("20398702394", "", "Nickj2", wikiFuzz::makeFuzz(2),
- // something like an IP address, sometimes invalid:
- ( wikiFuzz::randnum(300,-20) . "." . wikiFuzz::randnum(300,-20) . "."
- . wikiFuzz::randnum(300,-20) . "." .wikiFuzz::randnum(300,-20) ) ) ),
- "ip" => wikiFuzz::chooseInput( array("20398702394", "", "Nickj2", wikiFuzz::makeFuzz(2),
- // something like an IP address, sometimes invalid:
- ( wikiFuzz::randnum(300,-20) . "." . wikiFuzz::randnum(300,-20) . "."
- . wikiFuzz::randnum(300,-20) . "." .wikiFuzz::randnum(300,-20) ) ) ),
- "wpBlockOther" => wikiFuzz::chooseInput( array('', 'Nickj2', wikifuzz::makeFuzz(2)) ),
- "wpBlockExpiry" => wikiFuzz::chooseInput( array("other", "2 hours", "1 day", "3 days", "1 week", "2 weeks",
- "1 month", "3 months", "6 months", "1 year", "infinite", wikiFuzz::makeFuzz(2)) ),
- "wpBlockReason" => wikiFuzz::chooseInput( array("because it was there", wikifuzz::makeFuzz(2)) ),
- "wpAnonOnly" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
- "wpCreateAccount" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
- "wpBlock" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) )
- );
-
- // sometimes we don't want to specify certain parameters.
- if (wikiFuzz::randnum(4) == 0) unset($this->params["wpBlockOther"]);
- if (wikiFuzz::randnum(4) == 0) unset($this->params["wpBlockExpiry"]);
- if (wikiFuzz::randnum(4) == 0) unset($this->params["wpBlockReason"]);
- if (wikiFuzz::randnum(4) == 0) unset($this->params["wpAnonOnly"]);
- if (wikiFuzz::randnum(4) == 0) unset($this->params["wpCreateAccount"]);
- if (wikiFuzz::randnum(4) == 0) unset($this->params["wpBlockAddress"]);
- if (wikiFuzz::randnum(4) == 0) unset($this->params["ip"]);
- }
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Blockip";
+
+ $this->params = array (
+ "action" => wikiFuzz::chooseInput( array( "submit", "", wikiFuzz::makeFuzz( 2 ) ) ),
+ 'wpEditToken' => wikiFuzz::chooseInput( array( "20398702394", "", wikiFuzz::makeFuzz( 2 ) ) ),
+ "wpBlockAddress" => wikiFuzz::chooseInput( array( "20398702394", "", "Nickj2", wikiFuzz::makeFuzz( 2 ),
+ // something like an IP address, sometimes invalid:
+ ( wikiFuzz::randnum( 300, -20 ) . "." . wikiFuzz::randnum( 300, -20 ) . "."
+ . wikiFuzz::randnum( 300, -20 ) . "." . wikiFuzz::randnum( 300, -20 ) ) ) ),
+ "ip" => wikiFuzz::chooseInput( array( "20398702394", "", "Nickj2", wikiFuzz::makeFuzz( 2 ),
+ // something like an IP address, sometimes invalid:
+ ( wikiFuzz::randnum( 300, -20 ) . "." . wikiFuzz::randnum( 300, -20 ) . "."
+ . wikiFuzz::randnum( 300, -20 ) . "." . wikiFuzz::randnum( 300, -20 ) ) ) ),
+ "wpBlockOther" => wikiFuzz::chooseInput( array( '', 'Nickj2', wikiFuzz::makeFuzz( 2 ) ) ),
+ "wpBlockExpiry" => wikiFuzz::chooseInput( array( "other", "2 hours", "1 day", "3 days", "1 week", "2 weeks",
+ "1 month", "3 months", "6 months", "1 year", "infinite", wikiFuzz::makeFuzz( 2 ) ) ),
+ "wpBlockReason" => wikiFuzz::chooseInput( array( "because it was there", wikiFuzz::makeFuzz( 2 ) ) ),
+ "wpAnonOnly" => wikiFuzz::chooseInput( array( "0", "1", "++--34234", wikiFuzz::makeFuzz( 2 ) ) ),
+ "wpCreateAccount" => wikiFuzz::chooseInput( array( "0", "1", "++--34234", wikiFuzz::makeFuzz( 2 ) ) ),
+ "wpBlock" => wikiFuzz::chooseInput( array( "0", "1", "++--34234", wikiFuzz::makeFuzz( 2 ) ) )
+ );
+
+ // sometimes we don't want to specify certain parameters.
+ if ( wikiFuzz::randnum( 4 ) == 0 ) unset( $this->params["wpBlockOther"] );
+ if ( wikiFuzz::randnum( 4 ) == 0 ) unset( $this->params["wpBlockExpiry"] );
+ if ( wikiFuzz::randnum( 4 ) == 0 ) unset( $this->params["wpBlockReason"] );
+ if ( wikiFuzz::randnum( 4 ) == 0 ) unset( $this->params["wpAnonOnly"] );
+ if ( wikiFuzz::randnum( 4 ) == 0 ) unset( $this->params["wpCreateAccount"] );
+ if ( wikiFuzz::randnum( 4 ) == 0 ) unset( $this->params["wpBlockAddress"] );
+ if ( wikiFuzz::randnum( 4 ) == 0 ) unset( $this->params["ip"] );
+ }
}
@@ -1668,22 +1669,22 @@ class specialBlockip extends pageTest {
** a test for the imagepage.
*/
class imagepageTest extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Image:Small-email.png";
+ function __construct() {
+ $this->pagePath = "index.php?title=Image:Small-email.png";
- $this->params = array (
- "image" => wikiFuzz::chooseInput( array("Small-email.png", wikifuzz::makeFuzz(2)) ),
- "wpReason" => wikifuzz::makeFuzz(2),
- "oldimage" => wikiFuzz::chooseInput( array("Small-email.png", wikifuzz::makeFuzz(2)) ),
- "wpEditToken" => wikiFuzz::chooseInput( array("20398702394", "", wikiFuzz::makeFuzz(2)) ),
- );
+ $this->params = array (
+ "image" => wikiFuzz::chooseInput( array( "Small-email.png", wikiFuzz::makeFuzz( 2 ) ) ),
+ "wpReason" => wikiFuzz::makeFuzz( 2 ),
+ "oldimage" => wikiFuzz::chooseInput( array( "Small-email.png", wikiFuzz::makeFuzz( 2 ) ) ),
+ "wpEditToken" => wikiFuzz::chooseInput( array( "20398702394", "", wikiFuzz::makeFuzz( 2 ) ) ),
+ );
- // sometimes we don't want to specify certain parameters.
- if (wikiFuzz::randnum(6) == 0) unset($this->params["image"]);
- if (wikiFuzz::randnum(6) == 0) unset($this->params["wpReason"]);
- if (wikiFuzz::randnum(6) == 0) unset($this->params["oldimage"]);
- if (wikiFuzz::randnum(6) == 0) unset($this->params["wpEditToken"]);
- }
+ // sometimes we don't want to specify certain parameters.
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["image"] );
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["wpReason"] );
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["oldimage"] );
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["wpEditToken"] );
+ }
}
@@ -1691,20 +1692,20 @@ class imagepageTest extends pageTest {
** a test for page deletion form.
*/
class pageDeletion extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Main_Page&action=delete";
+ function __construct() {
+ $this->pagePath = "index.php?title=Main_Page&action=delete";
- $this->params = array (
- "wpEditToken" => wikiFuzz::chooseInput( array("20398702394", "", wikiFuzz::makeFuzz(2)) ),
- "wpReason" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
- "wpConfirm" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
- );
+ $this->params = array (
+ "wpEditToken" => wikiFuzz::chooseInput( array( "20398702394", "", wikiFuzz::makeFuzz( 2 ) ) ),
+ "wpReason" => wikiFuzz::chooseInput( array( "0", "1", "++--34234", wikiFuzz::makeFuzz( 2 ) ) ),
+ "wpConfirm" => wikiFuzz::chooseInput( array( "0", "1", "++--34234", wikiFuzz::makeFuzz( 2 ) ) ),
+ );
- // sometimes we don't want to specify certain parameters.
- if (wikiFuzz::randnum(5) == 0) unset($this->params["wpReason"]);
- if (wikiFuzz::randnum(5) == 0) unset($this->params["wpEditToken"]);
- if (wikiFuzz::randnum(5) == 0) unset($this->params["wpConfirm"]);
- }
+ // sometimes we don't want to specify certain parameters.
+ if ( wikiFuzz::randnum( 5 ) == 0 ) unset( $this->params["wpReason"] );
+ if ( wikiFuzz::randnum( 5 ) == 0 ) unset( $this->params["wpEditToken"] );
+ if ( wikiFuzz::randnum( 5 ) == 0 ) unset( $this->params["wpConfirm"] );
+ }
}
@@ -1713,30 +1714,30 @@ class pageDeletion extends pageTest {
** a test for Revision Deletion.
*/
class specialRevisionDelete extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Revisiondelete";
-
- $this->params = array (
- "target" => wikiFuzz::chooseInput( array("Main Page", wikifuzz::makeFuzz(2)) ),
- "oldid" => wikifuzz::makeFuzz(2),
- "oldid[]" => wikifuzz::makeFuzz(2),
- "wpReason" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
- "revdelete-hide-text" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
- "revdelete-hide-comment" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
- "revdelete-hide-user" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
- "revdelete-hide-restricted" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
- );
-
- // sometimes we don't want to specify certain parameters.
- if (wikiFuzz::randnum(3) == 0) unset($this->params["target"]);
- if (wikiFuzz::randnum(6) == 0) unset($this->params["oldid"]);
- if (wikiFuzz::randnum(6) == 0) unset($this->params["oldid[]"]);
- if (wikiFuzz::randnum(6) == 0) unset($this->params["wpReason"]);
- if (wikiFuzz::randnum(6) == 0) unset($this->params["revdelete-hide-text"]);
- if (wikiFuzz::randnum(6) == 0) unset($this->params["revdelete-hide-comment"]);
- if (wikiFuzz::randnum(6) == 0) unset($this->params["revdelete-hide-user"]);
- if (wikiFuzz::randnum(6) == 0) unset($this->params["revdelete-hide-restricted"]);
- }
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Revisiondelete";
+
+ $this->params = array (
+ "target" => wikiFuzz::chooseInput( array( "Main Page", wikiFuzz::makeFuzz( 2 ) ) ),
+ "oldid" => wikiFuzz::makeFuzz( 2 ),
+ "oldid[]" => wikiFuzz::makeFuzz( 2 ),
+ "wpReason" => wikiFuzz::chooseInput( array( "0", "1", "++--34234", wikiFuzz::makeFuzz( 2 ) ) ),
+ "revdelete-hide-text" => wikiFuzz::chooseInput( array( "0", "1", "++--34234", wikiFuzz::makeFuzz( 2 ) ) ),
+ "revdelete-hide-comment" => wikiFuzz::chooseInput( array( "0", "1", "++--34234", wikiFuzz::makeFuzz( 2 ) ) ),
+ "revdelete-hide-user" => wikiFuzz::chooseInput( array( "0", "1", "++--34234", wikiFuzz::makeFuzz( 2 ) ) ),
+ "revdelete-hide-restricted" => wikiFuzz::chooseInput( array( "0", "1", "++--34234", wikiFuzz::makeFuzz( 2 ) ) ),
+ );
+
+ // sometimes we don't want to specify certain parameters.
+ if ( wikiFuzz::randnum( 3 ) == 0 ) unset( $this->params["target"] );
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["oldid"] );
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["oldid[]"] );
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["wpReason"] );
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["revdelete-hide-text"] );
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["revdelete-hide-comment"] );
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["revdelete-hide-user"] );
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["revdelete-hide-restricted"] );
+ }
}
@@ -1744,31 +1745,31 @@ class specialRevisionDelete extends pageTest {
** a test for Special:Import.
*/
class specialImport extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Import";
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Import";
- $this->params = array (
- "action" => "submit",
- "source" => wikiFuzz::chooseInput( array("upload", "interwiki", wikifuzz::makeFuzz(2)) ),
- "MAX_FILE_SIZE" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikifuzz::makeFuzz(2)) ),
- "xmlimport" => wikiFuzz::chooseInput( array("/var/www/hosts/mediawiki/wiki/AdminSettings.php", "1", "++--34234", wikiFuzz::makeFuzz(2)) ),
- "namespace" => wikiFuzz::chooseInput( array(wikiFuzz::randnum(30,-6), wikiFuzz::makeFuzz(2)) ),
- "interwiki" => wikiFuzz::makeFuzz(2),
- "interwikiHistory" => wikiFuzz::makeFuzz(2),
- "frompage" => wikiFuzz::makeFuzz(2),
- );
+ $this->params = array (
+ "action" => "submit",
+ "source" => wikiFuzz::chooseInput( array( "upload", "interwiki", wikiFuzz::makeFuzz( 2 ) ) ),
+ "MAX_FILE_SIZE" => wikiFuzz::chooseInput( array( "0", "1", "++--34234", wikiFuzz::makeFuzz( 2 ) ) ),
+ "xmlimport" => wikiFuzz::chooseInput( array( "/var/www/hosts/mediawiki/wiki/AdminSettings.php", "1", "++--34234", wikiFuzz::makeFuzz( 2 ) ) ),
+ "namespace" => wikiFuzz::chooseInput( array( wikiFuzz::randnum( 30, -6 ), wikiFuzz::makeFuzz( 2 ) ) ),
+ "interwiki" => wikiFuzz::makeFuzz( 2 ),
+ "interwikiHistory" => wikiFuzz::makeFuzz( 2 ),
+ "frompage" => wikiFuzz::makeFuzz( 2 ),
+ );
- // sometimes we don't want to specify certain parameters.
- if (wikiFuzz::randnum(6) == 0) unset($this->params["action"]);
- if (wikiFuzz::randnum(6) == 0) unset($this->params["source"]);
- if (wikiFuzz::randnum(6) == 0) unset($this->params["MAX_FILE_SIZE"]);
- if (wikiFuzz::randnum(6) == 0) unset($this->params["xmlimport"]);
- if (wikiFuzz::randnum(6) == 0) unset($this->params["interwiki"]);
- if (wikiFuzz::randnum(6) == 0) unset($this->params["interwikiHistory"]);
- if (wikiFuzz::randnum(6) == 0) unset($this->params["frompage"]);
+ // sometimes we don't want to specify certain parameters.
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["action"] );
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["source"] );
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["MAX_FILE_SIZE"] );
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["xmlimport"] );
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["interwiki"] );
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["interwikiHistory"] );
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["frompage"] );
- // Note: Need to do a file upload to fully test this Special page.
- }
+ // Note: Need to do a file upload to fully test this Special page.
+ }
}
@@ -1776,20 +1777,20 @@ class specialImport extends pageTest {
** a test for thumb.php
*/
class thumbTest extends pageTest {
- function __construct() {
- $this->pagePath = "thumb.php";
+ function __construct() {
+ $this->pagePath = "thumb.php";
- $this->params = array (
- "f" => wikiFuzz::chooseInput( array("..", "\\", "small-email.png", wikifuzz::makeFuzz(2)) ),
- "w" => wikiFuzz::chooseInput( array("80", wikiFuzz::randnum(6000,-200), wikifuzz::makeFuzz(2)) ),
- "r" => wikiFuzz::chooseInput( array("0", wikifuzz::makeFuzz(2)) ),
- );
+ $this->params = array (
+ "f" => wikiFuzz::chooseInput( array( "..", "\\", "small-email.png", wikiFuzz::makeFuzz( 2 ) ) ),
+ "w" => wikiFuzz::chooseInput( array( "80", wikiFuzz::randnum( 6000, -200 ), wikiFuzz::makeFuzz( 2 ) ) ),
+ "r" => wikiFuzz::chooseInput( array( "0", wikiFuzz::makeFuzz( 2 ) ) ),
+ );
- // sometimes we don't want to specify certain parameters.
- if (wikiFuzz::randnum(6) == 0) unset($this->params["f"]);
- if (wikiFuzz::randnum(6) == 0) unset($this->params["w"]);
- if (wikiFuzz::randnum(6) == 0) unset($this->params["r"]);
- }
+ // sometimes we don't want to specify certain parameters.
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["f"] );
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["w"] );
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["r"] );
+ }
}
@@ -1797,24 +1798,24 @@ class thumbTest extends pageTest {
** a test for trackback.php
*/
class trackbackTest extends pageTest {
- function __construct() {
- $this->pagePath = "trackback.php";
+ function __construct() {
+ $this->pagePath = "trackback.php";
+
+ $this->params = array (
+ "url" => wikiFuzz::makeFuzz( 2 ),
+ "blog_name" => wikiFuzz::chooseInput( array( "80", wikiFuzz::randnum( 6000, -200 ), wikiFuzz::makeFuzz( 2 ) ) ),
+ "article" => wikiFuzz::chooseInput( array( "Main Page", wikiFuzz::makeFuzz( 2 ) ) ),
+ "title" => wikiFuzz::chooseInput( array( "Main Page", wikiFuzz::makeFuzz( 2 ) ) ),
+ "excerpt" => wikiFuzz::makeFuzz( 2 ),
+ );
- $this->params = array (
- "url" => wikifuzz::makeFuzz(2),
- "blog_name" => wikiFuzz::chooseInput( array("80", wikiFuzz::randnum(6000,-200), wikifuzz::makeFuzz(2)) ),
- "article" => wikiFuzz::chooseInput( array("Main Page", wikifuzz::makeFuzz(2)) ),
- "title" => wikiFuzz::chooseInput( array("Main Page", wikifuzz::makeFuzz(2)) ),
- "excerpt" => wikifuzz::makeFuzz(2),
- );
+ // sometimes we don't want to specify certain parameters.
+ if ( wikiFuzz::randnum( 3 ) == 0 ) unset( $this->params["title"] );
+ if ( wikiFuzz::randnum( 3 ) == 0 ) unset( $this->params["excerpt"] );
- // sometimes we don't want to specify certain parameters.
- if (wikiFuzz::randnum(3) == 0) unset($this->params["title"]);
- if (wikiFuzz::randnum(3) == 0) unset($this->params["excerpt"]);
-
- // page does not produce HTML.
- $this->tidyValidate = false;
- }
+ // page does not produce HTML.
+ $this->tidyValidate = false;
+ }
}
@@ -1822,19 +1823,19 @@ class trackbackTest extends pageTest {
** a test for profileinfo.php
*/
class profileInfo extends pageTest {
- function __construct() {
- $this->pagePath = "profileinfo.php";
+ function __construct() {
+ $this->pagePath = "profileinfo.php";
- $this->params = array (
- "expand" => wikifuzz::makeFuzz(2),
- "sort" => wikiFuzz::chooseInput( array("time", "count", "name", wikifuzz::makeFuzz(2)) ),
- "filter" => wikiFuzz::chooseInput( array("Main Page", wikifuzz::makeFuzz(2)) ),
- );
+ $this->params = array (
+ "expand" => wikiFuzz::makeFuzz( 2 ),
+ "sort" => wikiFuzz::chooseInput( array( "time", "count", "name", wikiFuzz::makeFuzz( 2 ) ) ),
+ "filter" => wikiFuzz::chooseInput( array( "Main Page", wikiFuzz::makeFuzz( 2 ) ) ),
+ );
- // sometimes we don't want to specify certain parameters.
- if (wikiFuzz::randnum(3) == 0) unset($this->params["sort"]);
- if (wikiFuzz::randnum(3) == 0) unset($this->params["filter"]);
- }
+ // sometimes we don't want to specify certain parameters.
+ if ( wikiFuzz::randnum( 3 ) == 0 ) unset( $this->params["sort"] );
+ if ( wikiFuzz::randnum( 3 ) == 0 ) unset( $this->params["filter"] );
+ }
}
@@ -1842,18 +1843,18 @@ class profileInfo extends pageTest {
** a test for Special:Cite (extension Special page).
*/
class specialCite extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Cite";
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Cite";
- $this->params = array (
- "page" => wikiFuzz::chooseInput( array("\" onmouseover=\"alert(1);\"", "Main Page", wikifuzz::makeFuzz(2)) ),
- "id" => wikiFuzz::chooseInput( array("-1", "0", "------'-------0", "+1", "-9823412312312412435", wikiFuzz::makeFuzz(2)) ),
- );
+ $this->params = array (
+ "page" => wikiFuzz::chooseInput( array( "\" onmouseover=\"alert(1);\"", "Main Page", wikiFuzz::makeFuzz( 2 ) ) ),
+ "id" => wikiFuzz::chooseInput( array( "-1", "0", "------'-------0", "+1", "-9823412312312412435", wikiFuzz::makeFuzz( 2 ) ) ),
+ );
- // sometimes we don't want to specify certain parameters.
- if (wikiFuzz::randnum(6) == 0) unset($this->params["page"]);
- if (wikiFuzz::randnum(6) == 0) unset($this->params["id"]);
- }
+ // sometimes we don't want to specify certain parameters.
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["page"] );
+ if ( wikiFuzz::randnum( 6 ) == 0 ) unset( $this->params["id"] );
+ }
}
@@ -1861,13 +1862,13 @@ class specialCite extends pageTest {
** a test for Special:Filepath (extension Special page).
*/
class specialFilepath extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Filepath";
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Filepath";
- $this->params = array (
- "file" => wikiFuzz::chooseInput( array("Small-email.png", "Small-email.png" . wikifuzz::makeFuzz(1), wikiFuzz::makeFuzz(2)) ),
- );
- }
+ $this->params = array (
+ "file" => wikiFuzz::chooseInput( array( "Small-email.png", "Small-email.png" . wikiFuzz::makeFuzz( 1 ), wikiFuzz::makeFuzz( 2 ) ) ),
+ );
+ }
}
@@ -1875,22 +1876,22 @@ class specialFilepath extends pageTest {
** a test for Special:Makebot (extension Special page).
*/
class specialMakebot extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Makebot";
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Makebot";
- $this->params = array (
- "username" => wikiFuzz::chooseInput( array("Nickj2", "192.168.0.2", wikifuzz::makeFuzz(1) ) ),
- "dosearch" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikifuzz::makeFuzz(2)) ),
- "grant" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikifuzz::makeFuzz(2)) ),
- "comment" => wikiFuzz::chooseInput( array("20398702394", "", wikiFuzz::makeFuzz(2)) ),
- "token" => wikiFuzz::chooseInput( array("20398702394", "", wikiFuzz::makeFuzz(2)) ),
- );
+ $this->params = array (
+ "username" => wikiFuzz::chooseInput( array( "Nickj2", "192.168.0.2", wikiFuzz::makeFuzz( 1 ) ) ),
+ "dosearch" => wikiFuzz::chooseInput( array( "0", "1", "++--34234", wikiFuzz::makeFuzz( 2 ) ) ),
+ "grant" => wikiFuzz::chooseInput( array( "0", "1", "++--34234", wikiFuzz::makeFuzz( 2 ) ) ),
+ "comment" => wikiFuzz::chooseInput( array( "20398702394", "", wikiFuzz::makeFuzz( 2 ) ) ),
+ "token" => wikiFuzz::chooseInput( array( "20398702394", "", wikiFuzz::makeFuzz( 2 ) ) ),
+ );
- // sometimes we don't want to specify certain parameters.
- if (wikiFuzz::randnum(2) == 0) unset($this->params["dosearch"]);
- if (wikiFuzz::randnum(2) == 0) unset($this->params["grant"]);
- if (wikiFuzz::randnum(5) == 0) unset($this->params["token"]);
- }
+ // sometimes we don't want to specify certain parameters.
+ if ( wikiFuzz::randnum( 2 ) == 0 ) unset( $this->params["dosearch"] );
+ if ( wikiFuzz::randnum( 2 ) == 0 ) unset( $this->params["grant"] );
+ if ( wikiFuzz::randnum( 5 ) == 0 ) unset( $this->params["token"] );
+ }
}
@@ -1898,22 +1899,22 @@ class specialMakebot extends pageTest {
** a test for Special:Makesysop (extension Special page).
*/
class specialMakesysop extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Makesysop";
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Makesysop";
- $this->params = array (
- "wpMakesysopUser" => wikiFuzz::chooseInput( array("Nickj2", "192.168.0.2", wikifuzz::makeFuzz(1) ) ),
- "action" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikifuzz::makeFuzz(2)) ),
- "wpMakesysopSubmit" => wikiFuzz::chooseInput( array("0", "1", "++--34234", wikifuzz::makeFuzz(2)) ),
- "wpEditToken" => wikiFuzz::chooseInput( array("20398702394", "", wikiFuzz::makeFuzz(2)) ),
- "wpSetBureaucrat" => wikiFuzz::chooseInput( array("20398702394", "", wikiFuzz::makeFuzz(2)) ),
- );
+ $this->params = array (
+ "wpMakesysopUser" => wikiFuzz::chooseInput( array( "Nickj2", "192.168.0.2", wikiFuzz::makeFuzz( 1 ) ) ),
+ "action" => wikiFuzz::chooseInput( array( "0", "1", "++--34234", wikiFuzz::makeFuzz( 2 ) ) ),
+ "wpMakesysopSubmit" => wikiFuzz::chooseInput( array( "0", "1", "++--34234", wikiFuzz::makeFuzz( 2 ) ) ),
+ "wpEditToken" => wikiFuzz::chooseInput( array( "20398702394", "", wikiFuzz::makeFuzz( 2 ) ) ),
+ "wpSetBureaucrat" => wikiFuzz::chooseInput( array( "20398702394", "", wikiFuzz::makeFuzz( 2 ) ) ),
+ );
- // sometimes we don't want to specify certain parameters.
- if (wikiFuzz::randnum(3) == 0) unset($this->params["wpMakesysopSubmit"]);
- if (wikiFuzz::randnum(3) == 0) unset($this->params["wpEditToken"]);
- if (wikiFuzz::randnum(3) == 0) unset($this->params["wpSetBureaucrat"]);
- }
+ // sometimes we don't want to specify certain parameters.
+ if ( wikiFuzz::randnum( 3 ) == 0 ) unset( $this->params["wpMakesysopSubmit"] );
+ if ( wikiFuzz::randnum( 3 ) == 0 ) unset( $this->params["wpEditToken"] );
+ if ( wikiFuzz::randnum( 3 ) == 0 ) unset( $this->params["wpSetBureaucrat"] );
+ }
}
@@ -1921,15 +1922,15 @@ class specialMakesysop extends pageTest {
** a test for Special:Renameuser (extension Special page).
*/
class specialRenameuser extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Renameuser";
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Renameuser";
- $this->params = array (
- "oldusername" => wikiFuzz::chooseInput( array("Nickj2", "192.168.0.2", wikifuzz::makeFuzz(1) ) ),
- "newusername" => wikiFuzz::chooseInput( array("Nickj2", "192.168.0.2", wikifuzz::makeFuzz(1) ) ),
- "token" => wikiFuzz::chooseInput( array("20398702394", "", wikiFuzz::makeFuzz(2)) ),
- );
- }
+ $this->params = array (
+ "oldusername" => wikiFuzz::chooseInput( array( "Nickj2", "192.168.0.2", wikiFuzz::makeFuzz( 1 ) ) ),
+ "newusername" => wikiFuzz::chooseInput( array( "Nickj2", "192.168.0.2", wikiFuzz::makeFuzz( 1 ) ) ),
+ "token" => wikiFuzz::chooseInput( array( "20398702394", "", wikiFuzz::makeFuzz( 2 ) ) ),
+ );
+ }
}
@@ -1937,16 +1938,16 @@ class specialRenameuser extends pageTest {
** a test for Special:Linksearch (extension Special page).
*/
class specialLinksearch extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special%3ALinksearch";
+ function __construct() {
+ $this->pagePath = "index.php?title=Special%3ALinksearch";
- $this->params = array (
- "target" => wikifuzz::makeFuzz(2),
- );
+ $this->params = array (
+ "target" => wikiFuzz::makeFuzz( 2 ),
+ );
- // sometimes we don't want to specify certain parameters.
- if (wikiFuzz::randnum(10) == 0) unset($this->params["target"]);
- }
+ // sometimes we don't want to specify certain parameters.
+ if ( wikiFuzz::randnum( 10 ) == 0 ) unset( $this->params["target"] );
+ }
}
@@ -1954,20 +1955,20 @@ class specialLinksearch extends pageTest {
** a test for Special:CategoryTree (extension Special page).
*/
class specialCategoryTree extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:CategoryTree";
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:CategoryTree";
- $this->params = array (
- "target" => wikifuzz::makeFuzz(2),
- "from" => wikifuzz::makeFuzz(2),
- "until" => wikifuzz::makeFuzz(2),
- "showas" => wikifuzz::makeFuzz(2),
- "mode" => wikiFuzz::chooseInput( array("pages", "categories", "all", wikifuzz::makeFuzz(2)) ),
- );
+ $this->params = array (
+ "target" => wikiFuzz::makeFuzz( 2 ),
+ "from" => wikiFuzz::makeFuzz( 2 ),
+ "until" => wikiFuzz::makeFuzz( 2 ),
+ "showas" => wikiFuzz::makeFuzz( 2 ),
+ "mode" => wikiFuzz::chooseInput( array( "pages", "categories", "all", wikiFuzz::makeFuzz( 2 ) ) ),
+ );
- // sometimes we do want to specify certain parameters.
- if (wikiFuzz::randnum(5) == 0) $this->params["notree"] = wikiFuzz::chooseInput( array("1", 0, "", wikiFuzz::makeFuzz(2)) );
- }
+ // sometimes we do want to specify certain parameters.
+ if ( wikiFuzz::randnum( 5 ) == 0 ) $this->params["notree"] = wikiFuzz::chooseInput( array( "1", 0, "", wikiFuzz::makeFuzz( 2 ) ) );
+ }
}
@@ -1975,40 +1976,40 @@ class specialCategoryTree extends pageTest {
** a test for "Special:Chemicalsources" (extension Special page).
*/
class specialChemicalsourcesTest extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Chemicalsources";
-
- // choose an input format to use.
- $format = wikiFuzz::chooseInput(
- array( 'go',
- 'CAS',
- 'EINECS',
- 'CHEBI',
- 'PubChem',
- 'SMILES',
- 'InChI',
- 'ATCCode',
- 'KEGG',
- 'RTECS',
- 'ECNumber',
- 'DrugBank',
- 'Formula',
- 'Name'
- )
- );
-
- // values for different formats usually start with either letters or numbers.
- switch ($format) {
- case 'Name' : $value = "A"; break;
- case 'InChI' :
- case 'SMILES' :
- case 'Formula': $value = "C"; break;
- default : $value = "0"; break;
- }
-
- // and then we append the fuzz input.
- $this->params = array ($format => $value . wikifuzz::makeFuzz(2) );
- }
+ function __construct() {
+ $this->pagePath = "index.php?title=Special:Chemicalsources";
+
+ // choose an input format to use.
+ $format = wikiFuzz::chooseInput(
+ array( 'go',
+ 'CAS',
+ 'EINECS',
+ 'CHEBI',
+ 'PubChem',
+ 'SMILES',
+ 'InChI',
+ 'ATCCode',
+ 'KEGG',
+ 'RTECS',
+ 'ECNumber',
+ 'DrugBank',
+ 'Formula',
+ 'Name'
+ )
+ );
+
+ // values for different formats usually start with either letters or numbers.
+ switch ( $format ) {
+ case 'Name' : $value = "A"; break;
+ case 'InChI' :
+ case 'SMILES' :
+ case 'Formula': $value = "C"; break;
+ default : $value = "0"; break;
+ }
+
+ // and then we append the fuzz input.
+ $this->params = array ( $format => $value . wikiFuzz::makeFuzz( 2 ) );
+ }
}
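For reference, every test class in this file follows the same minimal pageTest contract: the constructor sets $this->pagePath and $this->params (and optionally $this->cookie and $this->tidyValidate), and the framework does the rest. A hedged sketch of the smallest possible test, not part of the patch, assuming the pageTest base class and the wikiFuzz helpers defined earlier in this file; the class name specialVersionTest is hypothetical.

// Hypothetical sketch only -- assumes pageTest and wikiFuzz from earlier in this file.
class specialVersionTest extends pageTest {
	function __construct() {
		// page to request, relative to WIKI_BASE_URL.
		$this->pagePath = "index.php?title=Special:Version";
		// fuzzed POST parameters; keys are form field names.
		$this->params = array(
			"uselang" => wikiFuzz::chooseInput( array( "en", "de", wikiFuzz::makeFuzz( 2 ) ) )
		);
	}
}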
@@ -2023,143 +2024,143 @@ class specialChemicalsourcesTest extends pageTest {
*/
class api extends pageTest {
- // API login mode.
- private static function loginMode() {
- $arr = array ( "lgname" => wikifuzz::makeFuzz(2),
- "lgpassword" => wikifuzz::makeFuzz(2),
- );
- // sometimes we want to specify the extra "lgdomain" parameter.
- if (wikiFuzz::randnum(3) == 0) {
- $arr["lgdomain"] = wikiFuzz::chooseInput( array("1", 0, "", wikiFuzz::makeFuzz(2)) );
- }
-
- return $arr;
- }
-
- // API OpenSearch mode.
- private static function opensearchMode() {
- return array ("search" => wikifuzz::makeFuzz(2));
- }
-
- // API watchlist feed mode.
- private static function feedwatchlistMode() {
- // FIXME: add "wikifuzz::makeFuzz(2)" as possible value below?
- return array ("feedformat" => wikiFuzz::chooseInput( array("rss", "atom") ) );
- }
-
- // API query mode.
- private static function queryMode() {
- // FIXME: add "wikifuzz::makeFuzz(2)" as possible params for the elements below?
- // Suspect this will stuff up the tests more, but need to check.
- $params = array (
- // FIXME: More titles.
- "titles" => wikiFuzz::chooseInput( array("Main Page")),
- // FIXME: More pageids.
- "pageids" => 1,
- "prop" => wikiFuzz::chooseInput( array("info", "revisions", "watchlist")),
- "list" => wikiFuzz::chooseInput( array("allpages", "logevents", "watchlist", "usercontribs", "recentchanges", "backlinks", "embeddedin", "imagelinks") ),
- "meta" => wikiFuzz::chooseInput( array("siteinfo")),
- "generator" => wikiFuzz::chooseInput( array("allpages", "logevents", "watchlist", "info", "revisions") ),
- "siprop" => wikiFuzz::chooseInput( array("general", "namespaces", "general|namespaces") ),
- );
-
- // Add extra parameters based on what list choice we got.
- switch ($params["list"]) {
- case "usercontribs" : self::addListParams ($params, "uc", array("limit", "start", "end", "user", "dir") ); break;
- case "allpages" : self::addListParams ($params, "ap", array("from", "prefix", "namespace", "filterredir", "limit") ); break;
- case "watchlist" : self::addListParams ($params, "wl", array("allrev", "start", "end", "namespace", "dir", "limit", "prop") ); break;
- case "logevents" : self::addListParams ($params, "le", array("limit", "type", "start", "end", "user", "dir") ); break;
- case "recentchanges": self::addListParams ($params, "rc", array("limit", "prop", "show", "namespace", "start", "end", "dir") ); break;
- case "backlinks" : self::addListParams ($params, "bl", array("continue", "namespace", "redirect", "limit") ); break;
- case "embeddedin" : self::addListParams ($params, "ei", array("continue", "namespace", "redirect", "limit") ); break;
- case "imagelinks" : self::addListParams ($params, "il", array("continue", "namespace", "redirect", "limit") ); break;
- }
-
- if ($params["prop"] == "revisions") {
- self::addListParams ($params, "rv", array("prop", "limit", "startid", "endid", "end", "dir") );
- }
-
- // Sometimes we want redirects, sometimes we don't.
- if (wikiFuzz::randnum(3) == 0) {
- $params["redirects"] = wikiFuzz::chooseInput( array("1", 0, "", wikiFuzz::makeFuzz(2)) );
- }
-
- return $params;
- }
-
- // Adds all the elements to the array, using the specified prefix.
- private static function addListParams(&$array, $prefix, $elements) {
- foreach ($elements as $element) {
- $array[$prefix . $element] = self::getParamDetails($element);
- }
- }
-
- // For a given element name, returns the data for that element.
- private static function getParamDetails($element) {
- switch ($element) {
- case 'startid' :
- case 'endid' :
- case 'start' :
- case 'end' :
- case 'limit' : return wikiFuzz::chooseInput( array("0", "-1", "---'----------0", "+1", "8134", "320742734234235", "20060230121212", wikiFuzz::randnum(9000, -100), wikiFuzz::makeFuzz(2)) );
- case 'dir' : return wikiFuzz::chooseInput( array("newer", "older", wikifuzz::makeFuzz(2) ) );
- case 'user' : return wikiFuzz::chooseInput( array(USER_ON_WIKI, wikifuzz::makeFuzz(2) ) );
- case 'namespace' : return wikiFuzz::chooseInput( array(-2, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 200000, wikifuzz::makeFuzz(2)) );
- case 'filterredir': return wikiFuzz::chooseInput( array("all", "redirects", "nonredirectsallpages", wikifuzz::makeFuzz(2)) );
- case 'allrev' : return wikiFuzz::chooseInput( array("1", 0, "", wikiFuzz::makeFuzz(2)) );
- case 'prop' : return wikiFuzz::chooseInput( array("user", "comment", "timestamp", "patrol", "flags", "user|user|comment|flags", wikifuzz::makeFuzz(2) ) );
- case 'type' : return wikiFuzz::chooseInput( array("block", "protect", "rights", "delete", "upload", "move", "import", "renameuser", "newusers", "makebot", wikifuzz::makeFuzz(2) ) );
- case 'hide' : return wikiFuzz::chooseInput( array("minor", "bots", "anons", "liu", "liu|bots|", wikifuzz::makeFuzz(2) ) );
- case 'show' : return wikiFuzz::chooseInput( array('minor', '!minor', 'bot', '!bot', 'anon', '!anon', wikifuzz::makeFuzz(2) ) );
- default : return wikifuzz::makeFuzz(2);
- }
- }
-
- // Entry point.
- function __construct() {
- $this->pagePath = "api.php";
-
- $modes = array ("help",
- "login",
- "opensearch",
- "feedwatchlist",
- "query");
- $action = wikiFuzz::chooseInput( array_merge ($modes, array(wikifuzz::makeFuzz(2))) );
-
- switch ($action) {
- case "login" : $this->params = self::loginMode();
- break;
- case "opensearch" : $this->params = self::opensearchMode();
- break;
- case "feedwatchlist" : $this->params = self::feedwatchlistMode();
- break;
- case "query" : $this->params = self::queryMode();
- break;
- case "help" :
- default : // Do something random - "Crazy Ivan" mode.
- $random_mode = wikiFuzz::chooseInput( $modes ) . "Mode";
- // There is no "helpMode".
- if ($random_mode == "helpMode") $random_mode = "queryMode";
- $this->params = self::$random_mode();
- break;
- }
-
- // Save the selected action.
- $this->params["action"] = $action;
-
- // Set the cookie:
- // FIXME: need to get this cookie dynamically set, rather than hard-coded.
- $this->cookie = "wikidbUserID=10001; wikidbUserName=Test; wikidb_session=178df0fe68c75834643af65dec9ec98a; wikidbToken=1adc6753d62c44aec950c024d7ae0540";
-
- // Output format
- $this->params["format"] = wikiFuzz::chooseInput( array("json", "jsonfm", "php", "phpfm",
- "wddx", "wddxfm", "xml", "xmlfm",
- "yaml", "yamlfm", "raw", "rawfm",
- wikifuzz::makeFuzz(2) ) );
-
- // Page does not produce HTML (sometimes).
- $this->tidyValidate = false;
- }
+ // API login mode.
+ private static function loginMode() {
+ $arr = array ( "lgname" => wikiFuzz::makeFuzz( 2 ),
+ "lgpassword" => wikiFuzz::makeFuzz( 2 ),
+ );
+ // sometimes we want to specify the extra "lgdomain" parameter.
+ if ( wikiFuzz::randnum( 3 ) == 0 ) {
+ $arr["lgdomain"] = wikiFuzz::chooseInput( array( "1", 0, "", wikiFuzz::makeFuzz( 2 ) ) );
+ }
+
+ return $arr;
+ }
+
+ // API OpenSearch mode.
+ private static function opensearchMode() {
+ return array ( "search" => wikiFuzz::makeFuzz( 2 ) );
+ }
+
+ // API watchlist feed mode.
+ private static function feedwatchlistMode() {
+ // FIXME: add "wikiFuzz::makeFuzz(2)" as possible value below?
+ return array ( "feedformat" => wikiFuzz::chooseInput( array( "rss", "atom" ) ) );
+ }
+
+ // API query mode.
+ private static function queryMode() {
+ // FIXME: add "wikiFuzz::makeFuzz(2)" as possible params for the elements below?
+ // Suspect this will stuff up the tests more, but need to check.
+ $params = array (
+ // FIXME: More titles.
+ "titles" => wikiFuzz::chooseInput( array( "Main Page" ) ),
+ // FIXME: More pageids.
+ "pageids" => 1,
+ "prop" => wikiFuzz::chooseInput( array( "info", "revisions", "watchlist" ) ),
+ "list" => wikiFuzz::chooseInput( array( "allpages", "logevents", "watchlist", "usercontribs", "recentchanges", "backlinks", "embeddedin", "imagelinks" ) ),
+ "meta" => wikiFuzz::chooseInput( array( "siteinfo" ) ),
+ "generator" => wikiFuzz::chooseInput( array( "allpages", "logevents", "watchlist", "info", "revisions" ) ),
+ "siprop" => wikiFuzz::chooseInput( array( "general", "namespaces", "general|namespaces" ) ),
+ );
+
+ // Add extra parameters based on what list choice we got.
+ switch ( $params["list"] ) {
+ case "usercontribs" : self::addListParams ( $params, "uc", array( "limit", "start", "end", "user", "dir" ) ); break;
+ case "allpages" : self::addListParams ( $params, "ap", array( "from", "prefix", "namespace", "filterredir", "limit" ) ); break;
+ case "watchlist" : self::addListParams ( $params, "wl", array( "allrev", "start", "end", "namespace", "dir", "limit", "prop" ) ); break;
+ case "logevents" : self::addListParams ( $params, "le", array( "limit", "type", "start", "end", "user", "dir" ) ); break;
+ case "recentchanges": self::addListParams ( $params, "rc", array( "limit", "prop", "show", "namespace", "start", "end", "dir" ) ); break;
+ case "backlinks" : self::addListParams ( $params, "bl", array( "continue", "namespace", "redirect", "limit" ) ); break;
+ case "embeddedin" : self::addListParams ( $params, "ei", array( "continue", "namespace", "redirect", "limit" ) ); break;
+ case "imagelinks" : self::addListParams ( $params, "il", array( "continue", "namespace", "redirect", "limit" ) ); break;
+ }
+
+ if ( $params["prop"] == "revisions" ) {
+ self::addListParams ( $params, "rv", array( "prop", "limit", "startid", "endid", "end", "dir" ) );
+ }
+
+ // Sometimes we want redirects, sometimes we don't.
+ if ( wikiFuzz::randnum( 3 ) == 0 ) {
+ $params["redirects"] = wikiFuzz::chooseInput( array( "1", 0, "", wikiFuzz::makeFuzz( 2 ) ) );
+ }
+
+ return $params;
+ }
+
+ // Adds all the elements to the array, using the specified prefix.
+ private static function addListParams( &$array, $prefix, $elements ) {
+ foreach ( $elements as $element ) {
+ $array[$prefix . $element] = self::getParamDetails( $element );
+ }
+ }
+
+ // For a given element name, returns the data for that element.
+ private static function getParamDetails( $element ) {
+ switch ( $element ) {
+ case 'startid' :
+ case 'endid' :
+ case 'start' :
+ case 'end' :
+ case 'limit' : return wikiFuzz::chooseInput( array( "0", "-1", "---'----------0", "+1", "8134", "320742734234235", "20060230121212", wikiFuzz::randnum( 9000, -100 ), wikiFuzz::makeFuzz( 2 ) ) );
+ case 'dir' : return wikiFuzz::chooseInput( array( "newer", "older", wikiFuzz::makeFuzz( 2 ) ) );
+ case 'user' : return wikiFuzz::chooseInput( array( USER_ON_WIKI, wikiFuzz::makeFuzz( 2 ) ) );
+ case 'namespace' : return wikiFuzz::chooseInput( array( -2, -1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 200000, wikiFuzz::makeFuzz( 2 ) ) );
+ case 'filterredir': return wikiFuzz::chooseInput( array( "all", "redirects", "nonredirectsallpages", wikiFuzz::makeFuzz( 2 ) ) );
+ case 'allrev' : return wikiFuzz::chooseInput( array( "1", 0, "", wikiFuzz::makeFuzz( 2 ) ) );
+ case 'prop' : return wikiFuzz::chooseInput( array( "user", "comment", "timestamp", "patrol", "flags", "user|user|comment|flags", wikiFuzz::makeFuzz( 2 ) ) );
+ case 'type' : return wikiFuzz::chooseInput( array( "block", "protect", "rights", "delete", "upload", "move", "import", "renameuser", "newusers", "makebot", wikiFuzz::makeFuzz( 2 ) ) );
+ case 'hide' : return wikiFuzz::chooseInput( array( "minor", "bots", "anons", "liu", "liu|bots|", wikiFuzz::makeFuzz( 2 ) ) );
+ case 'show' : return wikiFuzz::chooseInput( array( 'minor', '!minor', 'bot', '!bot', 'anon', '!anon', wikiFuzz::makeFuzz( 2 ) ) );
+ default : return wikiFuzz::makeFuzz( 2 );
+ }
+ }
+
+ // Entry point.
+ function __construct() {
+ $this->pagePath = "api.php";
+
+ $modes = array ( "help",
+ "login",
+ "opensearch",
+ "feedwatchlist",
+ "query" );
+ $action = wikiFuzz::chooseInput( array_merge ( $modes, array( wikiFuzz::makeFuzz( 2 ) ) ) );
+
+ switch ( $action ) {
+ case "login" : $this->params = self::loginMode();
+ break;
+ case "opensearch" : $this->params = self::opensearchMode();
+ break;
+ case "feedwatchlist" : $this->params = self::feedwatchlistMode();
+ break;
+ case "query" : $this->params = self::queryMode();
+ break;
+ case "help" :
+ default : // Do something random - "Crazy Ivan" mode.
+ $random_mode = wikiFuzz::chooseInput( $modes ) . "Mode";
+ // There is no "helpMode".
+ if ( $random_mode == "helpMode" ) $random_mode = "queryMode";
+ $this->params = self::$random_mode();
+ break;
+ }
+
+ // Save the selected action.
+ $this->params["action"] = $action;
+
+ // Set the cookie:
+ // FIXME: need to get this cookie dynamically set, rather than hard-coded.
+ $this->cookie = "wikidbUserID=10001; wikidbUserName=Test; wikidb_session=178df0fe68c75834643af65dec9ec98a; wikidbToken=1adc6753d62c44aec950c024d7ae0540";
+
+ // Output format
+ $this->params["format"] = wikiFuzz::chooseInput( array( "json", "jsonfm", "php", "phpfm",
+ "wddx", "wddxfm", "xml", "xmlfm",
+ "yaml", "yamlfm", "raw", "rawfm",
+ wikiFuzz::makeFuzz( 2 ) ) );
+
+ // Page does not produce HTML (sometimes).
+ $this->tidyValidate = false;
+ }
}
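The addListParams() helper above builds API parameter names by gluing a module prefix onto each bare element name, so the "uc" prefix plus "limit" becomes "uclimit". A standalone sketch of the same idea, illustrative only; fixed strings stand in for the fuzzed values.

// Illustrative reimplementation of the prefixing idea, with fixed values.
function prefixParams( array $params, $prefix, array $elements, array $values ) {
	foreach ( $elements as $i => $element ) {
		$params[$prefix . $element] = $values[$i];
	}
	return $params;
}
print_r( prefixParams( array( "list" => "usercontribs" ), "uc",
	array( "limit", "user" ), array( "10", "Example" ) ) );
// adds "uclimit" and "ucuser" alongside the original "list" key.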
@@ -2167,152 +2168,152 @@ class api extends pageTest {
** a page test for the GeSHi extension.
*/
class GeSHi_Test extends pageTest {
-
- private function getGeSHiContent() {
- return "<source lang=\"" . $this->getLang() . "\" "
- . (wikiFuzz::randnum(2) == 0 ? "line " : "")
- . (wikiFuzz::randnum(2) == 0 ? "strict " : "")
- . "start=" . wikiFuzz::chooseInput( array(wikiFuzz::randnum(-6000,6000), wikifuzz::makeFuzz(2)) )
- . ">"
- . wikiFuzz::makeFuzz(2)
- . "</source>";
- }
-
- private function getLang() {
+
+ private function getGeSHiContent() {
+ return "<source lang=\"" . $this->getLang() . "\" "
+ . ( wikiFuzz::randnum( 2 ) == 0 ? "line " : "" )
+ . ( wikiFuzz::randnum( 2 ) == 0 ? "strict " : "" )
+ . "start=" . wikiFuzz::chooseInput( array( wikiFuzz::randnum( -6000, 6000 ), wikiFuzz::makeFuzz( 2 ) ) )
+ . ">"
+ . wikiFuzz::makeFuzz( 2 )
+ . "</source>";
+ }
+
+ private function getLang() {
return wikiFuzz::chooseInput( array( "actionscript", "ada", "apache", "applescript", "asm", "asp", "autoit", "bash", "blitzbasic", "bnf", "c", "c_mac", "caddcl", "cadlisp",
- "cfdg", "cfm", "cpp", "cpp-qt", "csharp", "css", "d", "delphi", "diff", "div", "dos", "eiffel", "fortran", "freebasic", "gml", "groovy", "html4strict", "idl",
- "ini", "inno", "io", "java", "java5", "javascript", "latex", "lisp", "lua", "matlab", "mirc", "mpasm", "mysql", "nsis", "objc", "ocaml", "ocaml-brief", "oobas",
- "oracle8", "pascal", "perl", "php", "php-brief", "plsql", "python", "qbasic", "rails", "reg", "robots", "ruby", "sas", "scheme", "sdlbasic", "smalltalk", "smarty",
- "sql", "tcl", "text", "thinbasic", "tsql", "vb", "vbnet", "vhdl", "visualfoxpro", "winbatch", "xml", "xpp", "z80", wikifuzz::makeFuzz(1) ) );
- }
-
- function __construct() {
- $this->pagePath = "index.php?title=WIKIFUZZ";
-
- $this->params = array (
- "action" => "submit",
- "wpMinoredit" => "test",
- "wpPreview" => "test",
- "wpSection" => "test",
- "wpEdittime" => "test",
- "wpSummary" => "test",
- "wpScrolltop" => "test",
- "wpStarttime" => "test",
- "wpAutoSummary" => "test",
- "wpTextbox1" => $this->getGeSHiContent() // the main wiki text, contains fake GeSHi content.
- );
- }
+ "cfdg", "cfm", "cpp", "cpp-qt", "csharp", "css", "d", "delphi", "diff", "div", "dos", "eiffel", "fortran", "freebasic", "gml", "groovy", "html4strict", "idl",
+ "ini", "inno", "io", "java", "java5", "javascript", "latex", "lisp", "lua", "matlab", "mirc", "mpasm", "mysql", "nsis", "objc", "ocaml", "ocaml-brief", "oobas",
+ "oracle8", "pascal", "perl", "php", "php-brief", "plsql", "python", "qbasic", "rails", "reg", "robots", "ruby", "sas", "scheme", "sdlbasic", "smalltalk", "smarty",
+ "sql", "tcl", "text", "thinbasic", "tsql", "vb", "vbnet", "vhdl", "visualfoxpro", "winbatch", "xml", "xpp", "z80", wikiFuzz::makeFuzz( 1 ) ) );
+ }
+
+ function __construct() {
+ $this->pagePath = "index.php?title=WIKIFUZZ";
+
+ $this->params = array (
+ "action" => "submit",
+ "wpMinoredit" => "test",
+ "wpPreview" => "test",
+ "wpSection" => "test",
+ "wpEdittime" => "test",
+ "wpSummary" => "test",
+ "wpScrolltop" => "test",
+ "wpStarttime" => "test",
+ "wpAutoSummary" => "test",
+ "wpTextbox1" => $this->getGeSHiContent() // the main wiki text, contains fake GeSHi content.
+ );
+ }
}
/**
** selects a page test to run.
*/
-function selectPageTest($count) {
-
- // if the user only wants a specific test, then only ever give them that.
- if (defined("SPECIFIC_TEST")) {
- $testType = SPECIFIC_TEST;
- return new $testType ();
- }
-
- // Some of the time we test Special pages, the remaining
- // time we test using the standard edit page.
- switch ($count % 100) {
- case 0 : return new successfulUserLoginTest();
- case 1 : return new listusersTest();
- case 2 : return new searchTest();
- case 3 : return new recentchangesTest();
- case 4 : return new prefixindexTest();
- case 5 : return new mimeSearchTest();
- case 6 : return new specialLogTest();
- case 7 : return new userLoginTest();
- case 8 : return new ipblocklistTest();
- case 9 : return new newImagesTest();
- case 10: return new imagelistTest();
- case 11: return new specialExportTest();
- case 12: return new specialBooksourcesTest();
- case 13: return new specialAllpagesTest();
- case 14: return new pageHistoryTest();
- case 15: return new contributionsTest();
- case 16: return new viewPageTest();
- case 17: return new specialAllmessagesTest();
- case 18: return new specialNewpages();
- case 19: return new searchTest();
- case 20: return new redirectTest();
- case 21: return new confirmEmail();
- case 22: return new watchlistTest();
- case 23: return new specialBlockmeTest();
- case 24: return new specialUndelete();
- case 25: return new specialMovePage();
- case 26: return new specialUnlockdb();
- case 27: return new specialLockdb();
- case 28: return new specialUserrights();
- case 29: return new pageProtectionForm();
- case 30: return new specialBlockip();
- case 31: return new imagepageTest();
- case 32: return new pageDeletion();
- case 33: return new specialRevisionDelete();
- case 34: return new specialImport();
- case 35: return new thumbTest();
- case 36: return new trackbackTest();
- case 37: return new profileInfo();
- case 38: return new specialCite();
- case 39: return new specialFilepath();
- case 40: return new specialMakebot();
- case 41: return new specialMakesysop();
- case 42: return new specialRenameuser();
- case 43: return new specialLinksearch();
- case 44: return new specialCategoryTree();
- case 45: return new api();
- case 45: return new specialChemicalsourcesTest();
- default: return new editPageTest();
- }
-}
-
-
-/////////////////////// SAVING OUTPUT /////////////////////////
+function selectPageTest( $count ) {
+
+ // if the user only wants a specific test, then only ever give them that.
+ if ( defined( "SPECIFIC_TEST" ) ) {
+ $testType = SPECIFIC_TEST;
+ return new $testType ();
+ }
+
+ // Some of the time we test Special pages, the remaining
+ // time we test using the standard edit page.
+ switch ( $count % 100 ) {
+ case 0 : return new successfulUserLoginTest();
+ case 1 : return new listusersTest();
+ case 2 : return new searchTest();
+ case 3 : return new recentchangesTest();
+ case 4 : return new prefixindexTest();
+ case 5 : return new mimeSearchTest();
+ case 6 : return new specialLogTest();
+ case 7 : return new userLoginTest();
+ case 8 : return new ipblocklistTest();
+ case 9 : return new newImagesTest();
+ case 10: return new imagelistTest();
+ case 11: return new specialExportTest();
+ case 12: return new specialBooksourcesTest();
+ case 13: return new specialAllpagesTest();
+ case 14: return new pageHistoryTest();
+ case 15: return new contributionsTest();
+ case 16: return new viewPageTest();
+ case 17: return new specialAllmessagesTest();
+ case 18: return new specialNewpages();
+ case 19: return new searchTest();
+ case 20: return new redirectTest();
+ case 21: return new confirmEmail();
+ case 22: return new watchlistTest();
+ case 23: return new specialBlockmeTest();
+ case 24: return new specialUndelete();
+ case 25: return new specialMovePage();
+ case 26: return new specialUnlockdb();
+ case 27: return new specialLockdb();
+ case 28: return new specialUserrights();
+ case 29: return new pageProtectionForm();
+ case 30: return new specialBlockip();
+ case 31: return new imagepageTest();
+ case 32: return new pageDeletion();
+ case 33: return new specialRevisionDelete();
+ case 34: return new specialImport();
+ case 35: return new thumbTest();
+ case 36: return new trackbackTest();
+ case 37: return new profileInfo();
+ case 38: return new specialCite();
+ case 39: return new specialFilepath();
+ case 40: return new specialMakebot();
+ case 41: return new specialMakesysop();
+ case 42: return new specialRenameuser();
+ case 43: return new specialLinksearch();
+ case 44: return new specialCategoryTree();
+ case 45: return new api();
+		case 46: return new specialChemicalsourcesTest();
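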
+ default: return new editPageTest();
+ }
+}
+
+
+// ///////////////////// SAVING OUTPUT /////////////////////////
/**
** Utility function for saving a file. Currently has no error checking.
*/
-function saveFile($data, $name) {
- file_put_contents($name, $data);
+function saveFile( $data, $name ) {
+ file_put_contents( $name, $data );
}
/**
** Returns a test as an experimental GET-to-POST URL.
- ** This doesn't seem to always work though, and sometimes the output is too long
+ ** This doesn't seem to always work though, and sometimes the output is too long
** to be a valid GET URL, so we also save in other formats.
*/
-function getAsURL(pageTest $test) {
- $used_question_mark = (strpos($test->getPagePath(), "?") !== false);
- $retval = "http://get-to-post.nickj.org/?" . WIKI_BASE_URL . $test->getPagePath();
- foreach ($test->getParams() as $param => $value) {
- if (!$used_question_mark) {
- $retval .= "?";
- $used_question_mark = true;
- }
- else {
- $retval .= "&";
- }
- $retval .= $param . "=" . urlencode($value);
- }
- return $retval;
+function getAsURL( pageTest $test ) {
+ $used_question_mark = ( strpos( $test->getPagePath(), "?" ) !== false );
+ $retval = "http://get-to-post.nickj.org/?" . WIKI_BASE_URL . $test->getPagePath();
+ foreach ( $test->getParams() as $param => $value ) {
+ if ( !$used_question_mark ) {
+ $retval .= "?";
+ $used_question_mark = true;
+ }
+ else {
+ $retval .= "&";
+ }
+ $retval .= $param . "=" . urlencode( $value );
+ }
+ return $retval;
}
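getAsURL() above only has to decide whether the first parameter is joined with "?" or "&", since the page path may already contain a query string. A minimal standalone sketch of that joining rule, with a fixed example input:

// Illustration only: join parameters onto a path that may already contain "?".
function appendParams( $path, array $params ) {
	$sep = ( strpos( $path, "?" ) !== false ) ? "&" : "?";
	foreach ( $params as $param => $value ) {
		$path .= $sep . $param . "=" . urlencode( $value );
		$sep = "&";
	}
	return $path;
}
print appendParams( "index.php?title=Special:Search", array( "search" => "a b" ) ) . "\n";
// prints: index.php?title=Special:Search&search=a+b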
/**
** Saves a plain-text human-readable version of a test.
*/
-function saveTestAsText(pageTest $test, $filename) {
- $str = "Test: " . $test->getPagePath();
- foreach ($test->getParams() as $param => $value) {
- $str .= "\n$param: $value";
- }
- $str .= "\nGet-to-post URL: " . getAsURL($test) . "\n";
- saveFile($str, $filename);
+function saveTestAsText( pageTest $test, $filename ) {
+ $str = "Test: " . $test->getPagePath();
+ foreach ( $test->getParams() as $param => $value ) {
+ $str .= "\n$param: $value";
+ }
+ $str .= "\nGet-to-post URL: " . getAsURL( $test ) . "\n";
+ saveFile( $str, $filename );
}
@@ -2320,37 +2321,37 @@ function saveTestAsText(pageTest $test, $filename) {
** Saves a test as a standalone basic PHP script that shows this one problem.
** Resulting script requires PHP-Curl be installed in order to work.
*/
-function saveTestAsPHP(pageTest $test, $filename) {
- $str = "<?php\n"
- . "\$params = " . var_export(escapeForCurl($test->getParams()), true) . ";\n"
- . "\$ch = curl_init();\n"
- . "curl_setopt(\$ch, CURLOPT_POST, 1);\n"
- . "curl_setopt(\$ch, CURLOPT_POSTFIELDS, \$params );\n"
- . "curl_setopt(\$ch, CURLOPT_URL, " . var_export(WIKI_BASE_URL . $test->getPagePath(), true) . ");\n"
- . "curl_setopt(\$ch, CURLOPT_RETURNTRANSFER,1);\n"
- . ($test->getCookie() ? "curl_setopt(\$ch, CURLOPT_COOKIE, " . var_export($test->getCookie(), true) . ");\n" : "")
- . "\$result=curl_exec(\$ch);\n"
- . "curl_close (\$ch);\n"
- . "print \$result;\n"
- . "?>\n";
- saveFile($str, $filename);
+function saveTestAsPHP( pageTest $test, $filename ) {
+ $str = "<?php\n"
+ . "\$params = " . var_export( escapeForCurl( $test->getParams() ), true ) . ";\n"
+ . "\$ch = curl_init();\n"
+ . "curl_setopt(\$ch, CURLOPT_POST, 1);\n"
+ . "curl_setopt(\$ch, CURLOPT_POSTFIELDS, \$params );\n"
+ . "curl_setopt(\$ch, CURLOPT_URL, " . var_export( WIKI_BASE_URL . $test->getPagePath(), true ) . ");\n"
+ . "curl_setopt(\$ch, CURLOPT_RETURNTRANSFER,1);\n"
+ . ( $test->getCookie() ? "curl_setopt(\$ch, CURLOPT_COOKIE, " . var_export( $test->getCookie(), true ) . ");\n" : "" )
+ . "\$result=curl_exec(\$ch);\n"
+ . "curl_close (\$ch);\n"
+ . "print \$result;\n"
+ . "?>\n";
+ saveFile( $str, $filename );
}
/**
** Escapes a value so that it can be used on the command line by Curl.
- ** Specifically, "<" and "@" need to be escaped if they are the first character,
+ ** Specifically, "<" and "@" need to be escaped if they are the first character,
** otherwise curl interprets these as meaning that we want to insert a file.
*/
-function escapeForCurl(array $input_params) {
- $output_params = array();
- foreach ($input_params as $param => $value) {
- if (strlen($value) > 0 && ( $value[0] == "@" || $value[0] == "<")) {
- $value = "\\" . $value;
- }
- $output_params[$param] = $value;
- }
- return $output_params;
+function escapeForCurl( array $input_params ) {
+ $output_params = array();
+ foreach ( $input_params as $param => $value ) {
+ if ( strlen( $value ) > 0 && ( $value[0] == "@" || $value[0] == "<" ) ) {
+ $value = "\\" . $value;
+ }
+ $output_params[$param] = $value;
+ }
+ return $output_params;
}
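The escaping above exists because curl's -F option (and, in the same spirit, older PHP cURL POST arrays) treats a value starting with "@" or "<" as a request to read the value from a file, which would turn fuzz input into an accidental file upload. A hedged standalone sketch of the same rule:

// Illustration only: neutralise a leading "@" or "<" so curl does not read a file.
function escapeLeadingFileChars( $value ) {
	if ( strlen( $value ) > 0 && ( $value[0] == "@" || $value[0] == "<" ) ) {
		return "\\" . $value;
	}
	return $value;
}
print escapeLeadingFileChars( "@/etc/passwd" ) . "\n"; // prints \@/etc/passwd
print escapeLeadingFileChars( "plain text" ) . "\n";   // unchanged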
@@ -2358,124 +2359,124 @@ function escapeForCurl(array $input_params) {
** Saves a test as a standalone CURL shell script that shows this one problem.
** Resulting script requires standalone Curl be installed in order to work.
*/
-function saveTestAsCurl(pageTest $test, $filename) {
- $str = "#!/bin/bash\n"
- . "curl --silent --include --globoff \\\n"
- . ($test->getCookie() ? " --cookie " . escapeshellarg($test->getCookie()) . " \\\n" : "");
- foreach (escapeForCurl($test->getParams()) as $param => $value) {
- $str .= " -F " . escapeshellarg($param) . "=" . escapeshellarg($value) . " \\\n";
- }
- $str .= " " . escapeshellarg(WIKI_BASE_URL . $test->getPagePath()); // beginning space matters.
- $str .= "\n";
- saveFile($str, $filename);
- chmod($filename, 0755); // make executable
+function saveTestAsCurl( pageTest $test, $filename ) {
+ $str = "#!/bin/bash\n"
+ . "curl --silent --include --globoff \\\n"
+ . ( $test->getCookie() ? " --cookie " . escapeshellarg( $test->getCookie() ) . " \\\n" : "" );
+ foreach ( escapeForCurl( $test->getParams() ) as $param => $value ) {
+ $str .= " -F " . escapeshellarg( $param ) . "=" . escapeshellarg( $value ) . " \\\n";
+ }
+ $str .= " " . escapeshellarg( WIKI_BASE_URL . $test->getPagePath() ); // beginning space matters.
+ $str .= "\n";
+ saveFile( $str, $filename );
+ chmod( $filename, 0755 ); // make executable
}
/**
** Saves the internal data structure to file.
*/
-function saveTestData (pageTest $test, $filename) {
- saveFile(serialize($test), $filename);
+function saveTestData ( pageTest $test, $filename ) {
+ saveFile( serialize( $test ), $filename );
}
/**
** saves a test in the various formats.
*/
-function saveTest(pageTest $test, $testname) {
- $base_name = DIRECTORY . "/" . $testname;
- saveTestAsText($test, $base_name . INFO_FILE);
- saveTestAsPHP ($test, $base_name . PHP_TEST );
- saveTestAsCurl($test, $base_name . CURL_TEST);
- saveTestData ($test, $base_name . DATA_FILE);
+function saveTest( pageTest $test, $testname ) {
+ $base_name = DIRECTORY . "/" . $testname;
+ saveTestAsText( $test, $base_name . INFO_FILE );
+ saveTestAsPHP ( $test, $base_name . PHP_TEST );
+ saveTestAsCurl( $test, $base_name . CURL_TEST );
+ saveTestData ( $test, $base_name . DATA_FILE );
}
-//////////////////// MEDIAWIKI OUTPUT /////////////////////////
+// ////////////////// MEDIAWIKI OUTPUT /////////////////////////
/**
** Asks MediaWiki for the HTML output of a test.
*/
-function wikiTestOutput(pageTest $test) {
+function wikiTestOutput( pageTest $test ) {
- $ch = curl_init();
+ $ch = curl_init();
- // specify the cookie, if required.
- if ($test->getCookie()) curl_setopt($ch, CURLOPT_COOKIE, $test->getCookie());
- curl_setopt($ch, CURLOPT_POST, 1); // save form using a POST
+ // specify the cookie, if required.
+ if ( $test->getCookie() ) curl_setopt( $ch, CURLOPT_COOKIE, $test->getCookie() );
+ curl_setopt( $ch, CURLOPT_POST, 1 ); // save form using a POST
- $params = escapeForCurl($test->getParams());
- curl_setopt($ch, CURLOPT_POSTFIELDS, $params ); // load the POST variables
+ $params = escapeForCurl( $test->getParams() );
+ curl_setopt( $ch, CURLOPT_POSTFIELDS, $params ); // load the POST variables
- curl_setopt($ch, CURLOPT_URL, WIKI_BASE_URL . $test->getPagePath() ); // set url to post to
- curl_setopt($ch, CURLOPT_RETURNTRANSFER,1); // return into a variable
+ curl_setopt( $ch, CURLOPT_URL, WIKI_BASE_URL . $test->getPagePath() ); // set url to post to
+ curl_setopt( $ch, CURLOPT_RETURNTRANSFER, 1 ); // return into a variable
- $result=curl_exec ($ch);
+ $result = curl_exec ( $ch );
- // if we encountered an error, then say so, and return an empty string.
- if (curl_error($ch)) {
- print "\nCurl error #: " . curl_errno($ch) . " - " . curl_error ($ch);
- $result = "";
- }
+ // if we encountered an error, then say so, and return an empty string.
+ if ( curl_error( $ch ) ) {
+ print "\nCurl error #: " . curl_errno( $ch ) . " - " . curl_error ( $ch );
+ $result = "";
+ }
- curl_close ($ch);
+ curl_close ( $ch );
- return $result;
+ return $result;
}
-//////////////////// HTML VALIDATION /////////////////////////
+// ////////////////// HTML VALIDATION /////////////////////////
/*
** Asks the validator whether this is valid HTML, or not.
*/
-function validateHTML($text) {
+function validateHTML( $text ) {
- $params = array ("fragment" => $text);
+ $params = array ( "fragment" => $text );
- $ch = curl_init();
+ $ch = curl_init();
- curl_setopt($ch, CURLOPT_POST, 1); // save form using a POST
- curl_setopt($ch, CURLOPT_POSTFIELDS, $params); // load the POST variables
- curl_setopt($ch, CURLOPT_URL, VALIDATOR_URL); // set url to post to
- curl_setopt($ch, CURLOPT_RETURNTRANSFER,1); // return into a variable
+ curl_setopt( $ch, CURLOPT_POST, 1 ); // save form using a POST
+ curl_setopt( $ch, CURLOPT_POSTFIELDS, $params ); // load the POST variables
+ curl_setopt( $ch, CURLOPT_URL, VALIDATOR_URL ); // set url to post to
+ curl_setopt( $ch, CURLOPT_RETURNTRANSFER, 1 ); // return into a variable
- $result=curl_exec ($ch);
+ $result = curl_exec ( $ch );
- // if we encountered an error, then log it, and exit.
- if (curl_error($ch)) {
- trigger_error("Curl error #: " . curl_errno($ch) . " - " . curl_error ($ch) );
- print "Curl error #: " . curl_errno($ch) . " - " . curl_error ($ch) . " - exiting.\n";
- exit(1);
- }
+ // if we encountered an error, then log it, and exit.
+ if ( curl_error( $ch ) ) {
+ trigger_error( "Curl error #: " . curl_errno( $ch ) . " - " . curl_error ( $ch ) );
+ print "Curl error #: " . curl_errno( $ch ) . " - " . curl_error ( $ch ) . " - exiting.\n";
+ exit( 1 );
+ }
- curl_close ($ch);
+ curl_close ( $ch );
- $valid = (strpos($result, "Failed validation") === false ? true : false);
+ $valid = ( strpos( $result, "Failed validation" ) === false ? true : false );
- return array($valid, $result);
+ return array( $valid, $result );
}
/**
** Get tidy to check for no HTML errors in the output file (e.g. unescaped strings).
*/
-function tidyCheckFile($name) {
- $file = DIRECTORY . "/" . $name;
- $command = PATH_TO_TIDY . " -output /tmp/out.html -quiet $file 2>&1";
- $x = `$command`;
+function tidyCheckFile( $name ) {
+ $file = DIRECTORY . "/" . $name;
+ $command = PATH_TO_TIDY . " -output /tmp/out.html -quiet $file 2>&1";
+ $x = `$command`;
- // Look for the most interesting Tidy errors and warnings.
- if ( strpos($x,"end of file while parsing attributes") !== false
- || strpos($x,"attribute with missing trailing quote mark") !== false
- || strpos($x,"missing '>' for end of tag") !== false
- || strpos($x,"Error:") !== false) {
- print "\nTidy found something - view details with: $command";
- return false;
- } else {
- return true;
- }
+ // Look for the most interesting Tidy errors and warnings.
+ if ( strpos( $x, "end of file while parsing attributes" ) !== false
+ || strpos( $x, "attribute with missing trailing quote mark" ) !== false
+ || strpos( $x, "missing '>' for end of tag" ) !== false
+ || strpos( $x, "Error:" ) !== false ) {
+ print "\nTidy found something - view details with: $command";
+ return false;
+ } else {
+ return true;
+ }
}
@@ -2484,267 +2485,267 @@ function tidyCheckFile($name) {
** the last time this was run. This is used to tell if a test caused a DB error.
*/
function dbErrorLogged() {
- static $filesize;
+ static $filesize;
- // first time running this function
- if (!isset($filesize)) {
- // create log if it does not exist
- if (!file_exists(DB_ERROR_LOG_FILE)) {
- saveFile("", DB_ERROR_LOG_FILE);
- }
- $filesize = filesize(DB_ERROR_LOG_FILE);
- return false;
- }
+ // first time running this function
+ if ( !isset( $filesize ) ) {
+ // create log if it does not exist
+ if ( !file_exists( DB_ERROR_LOG_FILE ) ) {
+ saveFile( "", DB_ERROR_LOG_FILE );
+ }
+ $filesize = filesize( DB_ERROR_LOG_FILE );
+ return false;
+ }
- $newsize = filesize(DB_ERROR_LOG_FILE);
- // if the log has grown, then assume the current test caused it.
- if ($newsize != $filesize) {
- $filesize = $newsize;
- return true;
- }
+ $newsize = filesize( DB_ERROR_LOG_FILE );
+ // if the log has grown, then assume the current test caused it.
+ if ( $newsize != $filesize ) {
+ $filesize = $newsize;
+ return true;
+ }
- return false;
+ return false;
}
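dbErrorLogged() works by remembering the error log's size in a static variable between calls and attributing any growth to the test that has just run. A generic hedged sketch of that grow-check pattern; the clearstatcache() call is an addition here to defeat PHP's stat cache, and the path is whatever log file you want to watch.

// Illustration only: report whether a watched file has grown since the last call.
function fileGrewSinceLastCall( $path ) {
	static $lastSize = null;
	clearstatcache();
	$size = file_exists( $path ) ? filesize( $path ) : 0;
	if ( $lastSize === null ) {
		// first call: just remember the current size.
		$lastSize = $size;
		return false;
	}
	$grew = ( $size != $lastSize );
	$lastSize = $size;
	return $grew;
}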
-////////////////// TOP-LEVEL PROBLEM-FINDING FUNCTION ////////////////////////
+// //////////////// TOP-LEVEL PROBLEM-FINDING FUNCTION ////////////////////////
/**
** takes a page test, and runs it and tests it for problems in the output.
** Returns: False on finding a problem, or True on no problems being found.
*/
-function runWikiTest(pageTest $test, &$testname, $can_overwrite = false) {
-
- // by default don't overwrite a previous test of the same name.
- while ( ! $can_overwrite && file_exists(DIRECTORY . "/" . $testname . DATA_FILE)) {
- $testname .= "-" . mt_rand(0,9);
- }
-
- $filename = DIRECTORY . "/" . $testname . DATA_FILE;
-
- // Store the time before and after, to find slow pages.
- $before = microtime(true);
-
- // Get MediaWiki to give us the output of this test.
- $wiki_preview = wikiTestOutput($test);
-
- $after = microtime(true);
-
- // if we received no response, then that's interesting.
- if ($wiki_preview == "") {
- print "\nNo response received for: $filename";
- return false;
- }
-
- // save output HTML to file.
- $html_file = DIRECTORY . "/" . $testname . HTML_FILE;
- saveFile($wiki_preview, $html_file);
-
- // if there were PHP errors in the output, then that's interesting too.
- if ( strpos($wiki_preview, "<b>Warning</b>: " ) !== false
- || strpos($wiki_preview, "<b>Fatal error</b>: " ) !== false
- || strpos($wiki_preview, "<b>Notice</b>: " ) !== false
- || strpos($wiki_preview, "<b>Error</b>: " ) !== false
- || strpos($wiki_preview, "<b>Strict Standards:</b>") !== false
- ) {
- $error = substr($wiki_preview, strpos($wiki_preview, "</b>:") + 7, 50);
- // Avoid probable PHP bug with bad session ids; http://bugs.php.net/bug.php?id=38224
- if ($error != "Unknown: The session id contains illegal character") {
- print "\nPHP error/warning/notice in HTML output: $html_file ; $error";
- return false;
- }
- }
-
- // if there was a MediaWiki Backtrace message in the output, then that's also interesting.
- if( strpos($wiki_preview, "Backtrace:") !== false ) {
- print "\nInternal MediaWiki error in HTML output: $html_file";
- return false;
- }
-
- // if there was a Parser error comment in the output, then that's potentially interesting.
- if( strpos($wiki_preview, "!-- ERR") !== false ) {
- print "\nParser Error comment in HTML output: $html_file";
- return false;
- }
-
- // if a database error was logged, then that's definitely interesting.
- if( dbErrorLogged() ) {
- print "\nDatabase Error logged for: $filename";
- return false;
- }
-
- // validate result
- $valid = true;
- if( VALIDATE_ON_WEB ) {
- list ($valid, $validator_output) = validateHTML($wiki_preview);
- if (!$valid) print "\nW3C web validation failed - view details with: html2text " . DIRECTORY . "/" . $testname . ".validator_output.html";
- }
-
- // Get tidy to check the page, unless we already know it produces non-XHTML output.
- if( $test->tidyValidate() ) {
- $valid = tidyCheckFile( $testname . HTML_FILE ) && $valid;
- }
-
- // if it took more than 2 seconds to render, then it may be interesting too. (Possible DoS attack?)
- if (($after - $before) >= 2) {
- print "\nParticularly slow to render (" . round($after - $before, 2) . " seconds): $filename";
- return false;
- }
-
- if( $valid ) {
- // Remove temp HTML file if test was valid:
- unlink( $html_file );
- } elseif( VALIDATE_ON_WEB ) {
- saveFile($validator_output, DIRECTORY . "/" . $testname . ".validator_output.html");
- }
-
- return $valid;
-}
-
-
-/////////////////// RERUNNING OLD TESTS ///////////////////
+function runWikiTest( pageTest $test, &$testname, $can_overwrite = false ) {
+
+ // by default don't overwrite a previous test of the same name.
+ while ( ! $can_overwrite && file_exists( DIRECTORY . "/" . $testname . DATA_FILE ) ) {
+ $testname .= "-" . mt_rand( 0, 9 );
+ }
+
+ $filename = DIRECTORY . "/" . $testname . DATA_FILE;
+
+ // Store the time before and after, to find slow pages.
+ $before = microtime( true );
+
+ // Get MediaWiki to give us the output of this test.
+ $wiki_preview = wikiTestOutput( $test );
+
+ $after = microtime( true );
+
+ // if we received no response, then that's interesting.
+ if ( $wiki_preview == "" ) {
+ print "\nNo response received for: $filename";
+ return false;
+ }
+
+ // save output HTML to file.
+ $html_file = DIRECTORY . "/" . $testname . HTML_FILE;
+ saveFile( $wiki_preview, $html_file );
+
+ // if there were PHP errors in the output, then that's interesting too.
+ if ( strpos( $wiki_preview, "<b>Warning</b>: " ) !== false
+ || strpos( $wiki_preview, "<b>Fatal error</b>: " ) !== false
+ || strpos( $wiki_preview, "<b>Notice</b>: " ) !== false
+ || strpos( $wiki_preview, "<b>Error</b>: " ) !== false
+ || strpos( $wiki_preview, "<b>Strict Standards:</b>" ) !== false
+ ) {
+ $error = substr( $wiki_preview, strpos( $wiki_preview, "</b>:" ) + 7, 50 );
+ // Avoid probable PHP bug with bad session ids; http://bugs.php.net/bug.php?id=38224
+ if ( $error != "Unknown: The session id contains illegal character" ) {
+ print "\nPHP error/warning/notice in HTML output: $html_file ; $error";
+ return false;
+ }
+ }
+
+ // if there was a MediaWiki Backtrace message in the output, then that's also interesting.
+ if ( strpos( $wiki_preview, "Backtrace:" ) !== false ) {
+ print "\nInternal MediaWiki error in HTML output: $html_file";
+ return false;
+ }
+
+ // if there was a Parser error comment in the output, then that's potentially interesting.
+ if ( strpos( $wiki_preview, "!-- ERR" ) !== false ) {
+ print "\nParser Error comment in HTML output: $html_file";
+ return false;
+ }
+
+ // if a database error was logged, then that's definitely interesting.
+ if ( dbErrorLogged() ) {
+ print "\nDatabase Error logged for: $filename";
+ return false;
+ }
+
+ // validate result
+ $valid = true;
+ if ( VALIDATE_ON_WEB ) {
+ list ( $valid, $validator_output ) = validateHTML( $wiki_preview );
+ if ( !$valid ) print "\nW3C web validation failed - view details with: html2text " . DIRECTORY . "/" . $testname . ".validator_output.html";
+ }
+
+ // Get tidy to check the page, unless we already know it produces non-XHTML output.
+ if ( $test->tidyValidate() ) {
+ $valid = tidyCheckFile( $testname . HTML_FILE ) && $valid;
+ }
+
+ // if it took more than 2 seconds to render, then it may be interesting too. (Possible DoS attack?)
+ if ( ( $after - $before ) >= 2 ) {
+ print "\nParticularly slow to render (" . round( $after - $before, 2 ) . " seconds): $filename";
+ return false;
+ }
+
+ if ( $valid ) {
+ // Remove temp HTML file if test was valid:
+ unlink( $html_file );
+ } elseif ( VALIDATE_ON_WEB ) {
+ saveFile( $validator_output, DIRECTORY . "/" . $testname . ".validator_output.html" );
+ }
+
+ return $valid;
+}
+
+
+// ///////////////// RERUNNING OLD TESTS ///////////////////
/**
** We keep our failed tests so that they can be rerun.
** This function does that retesting.
*/
function rerunPreviousTests() {
- print "Retesting previously found problems.\n";
+ print "Retesting previously found problems.\n";
- $dir_contents = scandir (DIRECTORY);
+ $dir_contents = scandir ( DIRECTORY );
- // sort file into the order a normal person would use.
- natsort ($dir_contents);
+	// sort the files into the order a normal person would use.
+ natsort ( $dir_contents );
- foreach ($dir_contents as $file) {
+ foreach ( $dir_contents as $file ) {
- // if file is not a test, then skip it.
- // Note we need to escape any periods or will be treated as "any character".
- $matches = array();
- if (!ereg("(.*)" . str_replace(".", "\.", DATA_FILE) . "$", $file, $matches)) continue;
+ // if file is not a test, then skip it.
+		// if the file is not a test, then skip it.
+		// Note: we need to escape any periods or they will be treated as "any character".
+ if ( !preg_match( "/(.*)" . str_replace( ".", "\.", DATA_FILE ) . "$/", $file, $matches ) ) continue;
- // reload the test.
- $full_path = DIRECTORY . "/" . $file;
- $test = unserialize(file_get_contents($full_path));
+ // reload the test.
+ $full_path = DIRECTORY . "/" . $file;
+ $test = unserialize( file_get_contents( $full_path ) );
- // if this is not a valid test, then skip it.
- if (! $test instanceof pageTest) {
- print "\nSkipping invalid test - $full_path";
- continue;
- }
+ // if this is not a valid test, then skip it.
+ if ( ! $test instanceof pageTest ) {
+ print "\nSkipping invalid test - $full_path";
+ continue;
+ }
- // The date format is in Apache log format, which makes it easier to locate
- // which retest caused which error in the Apache logs (only happens usually if
- // apache segfaults).
- if (!QUIET) print "[" . date ("D M d H:i:s Y") . "] Retesting $file (" . get_class($test) . ")";
+ // The date format is in Apache log format, which makes it easier to locate
+		// which retest caused which error in the Apache logs (this usually only
+		// happens if Apache segfaults).
+ if ( !QUIET ) print "[" . date ( "D M d H:i:s Y" ) . "] Retesting $file (" . get_class( $test ) . ")";
- // run test
- $testname = $matches[1];
- $valid = runWikiTest($test, $testname, true);
+ // run test
+ $testname = $matches[1];
+ $valid = runWikiTest( $test, $testname, true );
- if (!$valid) {
- saveTest($test, $testname);
- if (QUIET) {
- print "\nTest: " . get_class($test) . " ; Testname: $testname\n------";
- } else {
- print "\n";
- }
- }
- else {
- if (!QUIET) print "\r";
- if (DELETE_PASSED_RETESTS) {
- $prefix = DIRECTORY . "/" . $testname;
- if (is_file($prefix . DATA_FILE)) unlink($prefix . DATA_FILE);
- if (is_file($prefix . PHP_TEST )) unlink($prefix . PHP_TEST );
- if (is_file($prefix . CURL_TEST)) unlink($prefix . CURL_TEST);
- if (is_file($prefix . INFO_FILE)) unlink($prefix . INFO_FILE);
- }
- }
- }
+ if ( !$valid ) {
+ saveTest( $test, $testname );
+ if ( QUIET ) {
+ print "\nTest: " . get_class( $test ) . " ; Testname: $testname\n------";
+ } else {
+ print "\n";
+ }
+ }
+ else {
+ if ( !QUIET ) print "\r";
+ if ( DELETE_PASSED_RETESTS ) {
+ $prefix = DIRECTORY . "/" . $testname;
+ if ( is_file( $prefix . DATA_FILE ) ) unlink( $prefix . DATA_FILE );
+ if ( is_file( $prefix . PHP_TEST ) ) unlink( $prefix . PHP_TEST );
+ if ( is_file( $prefix . CURL_TEST ) ) unlink( $prefix . CURL_TEST );
+ if ( is_file( $prefix . INFO_FILE ) ) unlink( $prefix . INFO_FILE );
+ }
+ }
+ }
- print "\nDone retesting.\n";
+ print "\nDone retesting.\n";
}
-////////////////////// MAIN LOOP ////////////////////////
+// //////////////////// MAIN LOOP ////////////////////////
// first check whether CURL is installed, because sometimes it's not.
-if( ! function_exists('curl_init') ) {
- die("Could not find 'curl_init' function. Is the curl extension compiled into PHP?\n");
+if ( ! function_exists( 'curl_init' ) ) {
+ die( "Could not find 'curl_init' function. Is the curl extension compiled into PHP?\n" );
}
-// Initialization of types. wikiFuzz doesn't have a constructor because we want to
+// Initialization of types. wikiFuzz doesn't have a constructor because we want to
// access it statically and not have any globals.
-wikiFuzz::$types = array_keys(wikiFuzz::$data);
+wikiFuzz::$types = array_keys( wikiFuzz::$data );
// Make the directory if it doesn't exist
-if (!is_dir(DIRECTORY)) {
- mkdir (DIRECTORY, 0700 );
+if ( !is_dir( DIRECTORY ) ) {
+ mkdir ( DIRECTORY, 0700 );
}
// otherwise, we first retest the things that we have found in previous runs
-else if (RERUN_OLD_TESTS) {
- rerunPreviousTests();
+else if ( RERUN_OLD_TESTS ) {
+ rerunPreviousTests();
}
// main loop.
-$start_time = date("U");
+$start_time = date( "U" );
$num_errors = 0;
-if (!QUIET) {
- print "Beginning main loop. Results are stored in the " . DIRECTORY . " directory.\n";
- print "Press CTRL+C to stop testing.\n";
-}
-
-for ($count=0; true; $count++) {
- if (!QUIET) {
- // spinning progress indicator.
- switch( $count % 4 ) {
- case '0': print "\r/"; break;
- case '1': print "\r-"; break;
- case '2': print "\r\\"; break;
- case '3': print "\r|"; break;
- }
- print " $count";
- }
-
- // generate a page test to run.
- $test = selectPageTest($count);
-
- $mins = ( date("U") - $start_time ) / 60;
- if (!QUIET && $mins > 0) {
- print ". $num_errors poss errors. "
- . floor($mins) . " mins. "
- . round ($count / $mins, 0) . " tests/min. "
- . get_class($test); // includes the current test name.
- }
-
- // run this test against MediaWiki, and see if the output was valid.
- $testname = $count;
- $valid = runWikiTest($test, $testname, false);
-
- // save the failed test
- if ( ! $valid ) {
- if (QUIET) {
- print "\nTest: " . get_class($test) . " ; Testname: $testname\n------";
- } else {
- print "\n";
- }
- saveTest($test, $testname);
- $num_errors += 1;
- } else if ( KEEP_PASSED_TESTS ) {
- // print current time, with microseconds (matches "strace" format), and the test name.
- print " " . date("H:i:s.") . substr(current(explode(" ", microtime())), 2) . " " . $testname;
- saveTest($test, $testname);
- }
-
- // stop if we have reached max number of errors.
- if (defined("MAX_ERRORS") && $num_errors>=MAX_ERRORS) {
- break;
- }
-
- // stop if we have reached max number of mins runtime.
- if (defined("MAX_RUNTIME") && $mins>=MAX_RUNTIME) {
- break;
- }
+if ( !QUIET ) {
+ print "Beginning main loop. Results are stored in the " . DIRECTORY . " directory.\n";
+ print "Press CTRL+C to stop testing.\n";
+}
+
+for ( $count = 0; true; $count++ ) {
+ if ( !QUIET ) {
+ // spinning progress indicator.
+ switch( $count % 4 ) {
+ case '0': print "\r/"; break;
+ case '1': print "\r-"; break;
+ case '2': print "\r\\"; break;
+ case '3': print "\r|"; break;
+ }
+ print " $count";
+ }
+
+ // generate a page test to run.
+ $test = selectPageTest( $count );
+
+ $mins = ( date( "U" ) - $start_time ) / 60;
+ if ( !QUIET && $mins > 0 ) {
+ print ". $num_errors poss errors. "
+ . floor( $mins ) . " mins. "
+ . round ( $count / $mins, 0 ) . " tests/min. "
+ . get_class( $test ); // includes the current test name.
+ }
+
+ // run this test against MediaWiki, and see if the output was valid.
+ $testname = $count;
+ $valid = runWikiTest( $test, $testname, false );
+
+ // save the failed test
+ if ( ! $valid ) {
+ if ( QUIET ) {
+ print "\nTest: " . get_class( $test ) . " ; Testname: $testname\n------";
+ } else {
+ print "\n";
+ }
+ saveTest( $test, $testname );
+ $num_errors += 1;
+ } else if ( KEEP_PASSED_TESTS ) {
+ // print current time, with microseconds (matches "strace" format), and the test name.
+ print " " . date( "H:i:s." ) . substr( current( explode( " ", microtime() ) ), 2 ) . " " . $testname;
+ saveTest( $test, $testname );
+ }
+
+ // stop if we have reached max number of errors.
+ if ( defined( "MAX_ERRORS" ) && $num_errors >= MAX_ERRORS ) {
+ break;
+ }
+
+ // stop if we have reached max number of mins runtime.
+ if ( defined( "MAX_RUNTIME" ) && $mins >= MAX_RUNTIME ) {
+ break;
+ }
}
diff --git a/maintenance/gearman/gearman.inc b/maintenance/gearman/gearman.inc
index 514b9bac..15f80e62 100644
--- a/maintenance/gearman/gearman.inc
+++ b/maintenance/gearman/gearman.inc
@@ -72,7 +72,7 @@ class NonScaryGearmanWorker extends Net_Gearman_Worker {
if (isset($resp['data']['arg']) &&
Net_Gearman_Connection::stringLength($resp['data']['arg'])) {
$arg = json_decode($resp['data']['arg'], true);
- }
+ }
### START MW DIFFERENT BIT
if ( $name != 'mw_job' ) {
diff --git a/maintenance/gearman/gearmanRefreshLinks.php b/maintenance/gearman/gearmanRefreshLinks.php
index eb3104eb..730db96b 100644
--- a/maintenance/gearman/gearmanRefreshLinks.php
+++ b/maintenance/gearman/gearmanRefreshLinks.php
@@ -2,8 +2,8 @@
$optionsWithArgs = array( 'fake-job' );
-require( dirname(__FILE__).'/../commandLine.inc' );
-require( dirname(__FILE__).'/gearman.inc' );
+require( dirname( __FILE__ ) . '/../commandLine.inc' );
+require( dirname( __FILE__ ) . '/gearman.inc' );
if ( !$args ) {
$args = array( 'localhost' );
@@ -15,12 +15,12 @@ $dbr = wfGetDB( DB_SLAVE );
$startId = 0;
$endId = $dbr->selectField( 'page', 'MAX(page_id)', false, __METHOD__ );
while ( true ) {
- $res = $dbr->select(
- 'page',
+ $res = $dbr->select(
+ 'page',
array( 'page_namespace', 'page_title', 'page_id' ),
- array( 'page_id > ' . intval( $startId ) ),
+ array( 'page_id > ' . intval( $startId ) ),
__METHOD__,
- array( 'LIMIT' => $batchSize )
+ array( 'LIMIT' => $batchSize )
);
if ( $res->numRows() == 0 ) {
diff --git a/maintenance/gearman/gearmanWorker.php b/maintenance/gearman/gearmanWorker.php
index d6f3949f..aea126a7 100644
--- a/maintenance/gearman/gearmanWorker.php
+++ b/maintenance/gearman/gearmanWorker.php
@@ -1,10 +1,10 @@
<?php
$optionsWithArgs = array( 'fake-job', 'procs' );
-require( dirname(__FILE__).'/../commandLine.inc' );
-require( dirname(__FILE__).'/gearman.inc' );
+require( dirname( __FILE__ ) . '/../commandLine.inc' );
+require( dirname( __FILE__ ) . '/gearman.inc' );
-ini_set('memory_limit', '150M' );
+ini_set( 'memory_limit', '150M' );
if ( isset( $options['procs'] ) ) {
$procs = $options['procs'];
diff --git a/maintenance/generateSitemap.php b/maintenance/generateSitemap.php
index 04dbbc4d..e483f7c9 100644
--- a/maintenance/generateSitemap.php
+++ b/maintenance/generateSitemap.php
@@ -4,6 +4,9 @@ define( 'GS_TALK', -1 );
/**
* Creates a sitemap for the site
*
+ * Copyright © 2005, Ævar Arnfjörð Bjarmason, Jens Frank <jeluf@gmx.de> and
+ * Brion Vibber <brion@pobox.com>
+ *
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
@@ -19,19 +22,13 @@ define( 'GS_TALK', -1 );
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/gpl.html
*
+ * @file
* @ingroup Maintenance
- *
- * @copyright Copyright © 2005, Ævar Arnfjörð Bjarmason
- * @copyright Copyright © 2005, Jens Frank <jeluf@gmx.de>
- * @copyright Copyright © 2005, Brion Vibber <brion@pobox.com>
- *
* @see http://www.sitemaps.org/
* @see http://www.sitemaps.org/schemas/sitemap/0.9/sitemap.xsd
- *
- * @license http://www.gnu.org/copyleft/gpl.html GNU General Public License 2.0 or later
*/
-require_once( dirname(__FILE__) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class GenerateSitemap extends Maintenance {
/**
@@ -60,11 +57,11 @@ class GenerateSitemap extends Maintenance {
var $fspath;
/**
- * The path to append to the domain name
+ * The URL path to prepend to filenames in the index; should resolve to the same directory as $fspath
*
* @var string
*/
- var $path;
+ var $urlpath;
/**
* Whether or not to use compression
@@ -129,8 +126,8 @@ class GenerateSitemap extends Maintenance {
public function __construct() {
parent::__construct();
$this->mDescription = "Creates a sitemap for the site";
- $this->addOption( 'fspath', 'The file system path to save to, e.g. /tmp/sitemap' .
- "\n\t\tdefaults to current directory", false, true );
+ $this->addOption( 'fspath', 'The file system path to save to, e.g. /tmp/sitemap; defaults to current directory', false, true );
+ $this->addOption( 'urlpath', 'The URL path corresponding to --fspath, prepended to filenames in the index; defaults to an empty string', false, true );
$this->addOption( 'compress', 'Compress the sitemap files, can take value yes|no, default yes', false, true );
}
@@ -138,11 +135,14 @@ class GenerateSitemap extends Maintenance {
* Execute
*/
public function execute() {
- global $wgScriptPath;
$this->setNamespacePriorities();
$this->url_limit = 50000;
$this->size_limit = pow( 2, 20 ) * 10;
$this->fspath = self::init_path( $this->getOption( 'fspath', getcwd() ) );
+ $this->urlpath = $this->getOption( 'urlpath', "" );
+ if ( $this->urlpath !== "" && substr( $this->urlpath, -1 ) !== '/' ) {
+ $this->urlpath .= '/';
+ }
$this->compress = $this->getOption( 'compress', 'yes' ) !== 'no';
$this->dbr = wfGetDB( DB_SLAVE );
$this->generateNamespaces();
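The new --urlpath option introduced in this hunk is only used as a prefix for the sitemap filenames written into the index file, and it is normalised to end in a slash. A small standalone sketch of that normalisation, plus a purely illustrative invocation; the paths are examples, not defaults.

// Illustration only: normalise the urlpath value the way execute() does above.
function normalizeUrlPath( $urlpath ) {
	if ( $urlpath !== "" && substr( $urlpath, -1 ) !== '/' ) {
		$urlpath .= '/';
	}
	return $urlpath;
}
print normalizeUrlPath( "sitemaps" ) . "\n"; // "sitemaps/"
print normalizeUrlPath( "" ) . "\n";         // left empty: index entries stay bare filenames
// Example invocation (illustrative paths):
//   php maintenance/generateSitemap.php --fspath /var/www/wiki/sitemaps --urlpath sitemaps/ --compress yes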
@@ -179,15 +179,15 @@ class GenerateSitemap extends Maintenance {
* Create directory if it does not exist and return pathname with a trailing slash
*/
private static function init_path( $fspath ) {
- if( !isset( $fspath ) ) {
+ if ( !isset( $fspath ) ) {
return null;
}
# Create directory if needed
- if( $fspath && !is_dir( $fspath ) ) {
- wfMkdirParents( $fspath ) or die("Can not create directory $fspath.\n");
+ if ( $fspath && !is_dir( $fspath ) ) {
+		wfMkdirParents( $fspath ) or die( "Cannot create directory $fspath.\n" );
}
- return realpath( $fspath ). DIRECTORY_SEPARATOR ;
+ return realpath( $fspath ) . DIRECTORY_SEPARATOR ;
}
/**
@@ -196,7 +196,7 @@ class GenerateSitemap extends Maintenance {
function generateNamespaces() {
// Only generate for specific namespaces if $wgSitemapNamespaces is an array.
global $wgSitemapNamespaces;
- if( is_array( $wgSitemapNamespaces ) ) {
+ if ( is_array( $wgSitemapNamespaces ) ) {
$this->namespaces = $wgSitemapNamespaces;
return;
}
@@ -218,11 +218,9 @@ class GenerateSitemap extends Maintenance {
/**
* Get the priority of a given namespace
*
- * @param int $namespace The namespace to get the priority for
- +
- * @return string
+ * @param $namespace Integer: the namespace to get the priority for
+ * @return String
*/
-
function priority( $namespace ) {
return isset( $this->priorities[$namespace] ) ? $this->priorities[$namespace] : $this->guessPriority( $namespace );
}
@@ -232,9 +230,8 @@ class GenerateSitemap extends Maintenance {
* default priority for the namespace, varies depending on whether it's
* a talkpage or not.
*
- * @param int $namespace The namespace to get the priority for
- *
- * @return string
+ * @param $namespace Integer: the namespace to get the priority for
+ * @return String
*/
function guessPriority( $namespace ) {
return MWNamespace::isMain( $namespace ) ? $this->priorities[GS_MAIN] : $this->priorities[GS_TALK];
@@ -243,9 +240,8 @@ class GenerateSitemap extends Maintenance {
/**
* Return a database resolution of all the pages in a given namespace
*
- * @param int $namespace Limit the query to this namespace
- *
- * @return resource
+ * @param $namespace Integer: limit the query to this namespace
+ * @return Resource
*/
function getPageRes( $namespace ) {
return $this->dbr->select( 'page',
@@ -261,10 +257,8 @@ class GenerateSitemap extends Maintenance {
/**
* Main loop
- *
- * @access public
*/
- function main() {
+ public function main() {
global $wgContLang;
fwrite( $this->findex, $this->openIndex() );
@@ -298,11 +292,11 @@ class GenerateSitemap extends Maintenance {
$length += strlen( $entry );
$this->write( $this->file, $entry );
// generate pages for language variants
- if($wgContLang->hasVariants()){
+ if ( $wgContLang->hasVariants() ) {
$variants = $wgContLang->getVariants();
- foreach($variants as $vCode){
- if($vCode==$wgContLang->getCode()) continue; // we don't want default variant
- $entry = $this->fileEntry( $title->getFullURL('',$vCode), $date, $this->priority( $namespace ) );
+ foreach ( $variants as $vCode ) {
+ if ( $vCode == $wgContLang->getCode() ) continue; // we don't want default variant
+ $entry = $this->fileEntry( $title->