author     Pierre Schmitz <pierre@archlinux.de>    2012-05-03 13:01:35 +0200
committer  Pierre Schmitz <pierre@archlinux.de>    2012-05-03 13:01:35 +0200
commit     d9022f63880ce039446fba8364f68e656b7bf4cb
tree       16b40fbf17bf7c9ee6f4ead25b16dd192378050a /maintenance
parent     27cf83d177256813e2e802241085fce5dd0f3fb9
Update to MediaWiki 1.19.0
Diffstat (limited to 'maintenance')
-rw-r--r--  maintenance/Doxyfile | 180
-rw-r--r--  maintenance/Maintenance.php | 102
-rw-r--r--  maintenance/Makefile | 7
-rw-r--r--  maintenance/Site.php | 19
-rw-r--r--  maintenance/addwiki.php | 479
-rw-r--r--  maintenance/archives/patch-ar_sha1.sql | 3
-rw-r--r--  maintenance/archives/patch-drop-user_options.sql | 1
-rw-r--r--  maintenance/archives/patch-jobs-add-timestamp.sql | 2
-rw-r--r--  maintenance/archives/patch-logging-type-action-index.sql | 1
-rw-r--r--  maintenance/archives/patch-page_redirect_namespace_len.sql | 6
-rw-r--r--  maintenance/archives/patch-rev_sha1.sql | 3
-rw-r--r--  maintenance/archives/patch-trackbacks.sql | 10
-rw-r--r--  maintenance/archives/patch-ufg_group-length-increase.sql | 2
-rw-r--r--  maintenance/archives/patch-ug_group-length-increase.sql | 2
-rw-r--r--  maintenance/archives/patch-uploadstash_chunk.sql | 3
-rw-r--r--  maintenance/archives/patch-user_former_groups.sql | 2
-rw-r--r--  maintenance/attachLatest.php | 4
-rw-r--r--  maintenance/backup.inc | 14
-rw-r--r--  maintenance/backupPrefetch.inc | 5
-rw-r--r--  maintenance/benchmarks/Benchmarker.php | 3
-rw-r--r--  maintenance/benchmarks/bench_HTTP_HTTPS.php | 4
-rw-r--r--  maintenance/benchmarks/bench_delete_truncate.php | 4
-rw-r--r--  maintenance/benchmarks/bench_if_switch.php | 4
-rw-r--r--  maintenance/benchmarks/bench_strtr_str_replace.php | 4
-rw-r--r--  maintenance/benchmarks/bench_wfIsWindows.php | 4
-rw-r--r--  maintenance/benchmarks/benchmarkHooks.php | 80
-rw-r--r--  maintenance/benchmarks/benchmarkPurge.php | 6
-rw-r--r--  maintenance/changePassword.php | 2
-rw-r--r--  maintenance/checkImages.php | 4
-rw-r--r--  maintenance/checkSyntax.php | 10
-rw-r--r--  maintenance/cleanupCaps.php | 13
-rw-r--r--  maintenance/cleanupImages.php | 17
-rw-r--r--  maintenance/cleanupSpam.php | 28
-rw-r--r--  maintenance/cleanupTable.inc | 2
-rw-r--r--  maintenance/cleanupTitles.php | 3
-rw-r--r--  maintenance/cleanupUploadStash.php | 34
-rw-r--r--  maintenance/clear_stats.php | 3
-rw-r--r--  maintenance/commandLine.inc | 12
-rw-r--r--  maintenance/convertUserOptions.php | 33
-rw-r--r--  maintenance/deleteBatch.php | 39
-rw-r--r--  maintenance/deleteDefaultMessages.php | 18
-rw-r--r--  maintenance/dev/README | 7
-rw-r--r--  maintenance/dev/includes/php.sh | 12
-rw-r--r--  maintenance/dev/includes/require-php.sh | 8
-rw-r--r--  maintenance/dev/includes/router.php | 82
-rw-r--r--  maintenance/dev/install.sh | 8
-rw-r--r--  maintenance/dev/installmw.sh | 18
-rw-r--r--  maintenance/dev/installphp.sh | 57
-rw-r--r--  maintenance/dev/start.sh | 14
-rw-r--r--  maintenance/doMaintenance.php | 22
-rw-r--r--  maintenance/dtrace/counts.d | 23
-rw-r--r--  maintenance/dtrace/tree.d | 26
-rw-r--r--  maintenance/dumpBackup.php | 2
-rw-r--r--  maintenance/dumpHTML.php | 7
-rw-r--r--  maintenance/dumpInterwiki.php | 251
-rw-r--r--  maintenance/dumpLinks.php | 2
-rw-r--r--  maintenance/dumpTextPass.php | 52
-rw-r--r--  maintenance/edit.php | 4
-rw-r--r--  maintenance/eval.php | 13
-rw-r--r--  maintenance/fetchText.php | 2
-rw-r--r--  maintenance/findHooks.php | 34
-rw-r--r--  maintenance/fixExtLinksProtocolRelative.php | 4
-rw-r--r--  maintenance/formatInstallDoc.php | 3
-rw-r--r--  maintenance/fuzz-tester.php | 165
-rw-r--r--  maintenance/gearman/gearman.inc | 104
-rw-r--r--  maintenance/gearman/gearmanRefreshLinks.php | 45
-rw-r--r--  maintenance/gearman/gearmanWorker.php | 43
-rw-r--r--  maintenance/generateSitemap.php | 31
-rw-r--r--  maintenance/getSlaveServer.php | 14
-rw-r--r--  maintenance/hiphop/extra-files | 1
-rw-r--r--  maintenance/hiphop/make | 3
-rw-r--r--  maintenance/ibm_db2/foreignkeys.sql | 5
-rw-r--r--  maintenance/ibm_db2/tables.sql | 984
-rw-r--r--  maintenance/importDump.php | 32
-rw-r--r--  maintenance/importImages.php | 390
-rw-r--r--  maintenance/importSiteScripts.php | 22
-rw-r--r--  maintenance/importTextFile.php | 4
-rw-r--r--  maintenance/importUseModWiki.php | 375
-rw-r--r--  maintenance/importUseModWikipedia.php | 892
-rw-r--r--  maintenance/install.php | 1
-rw-r--r--  maintenance/jsparse.php | 1
-rw-r--r--  maintenance/language/StatOutputs.php | 4
-rw-r--r--  maintenance/language/checkLanguage.inc | 10
-rw-r--r--  maintenance/language/function-list.php | 63
-rw-r--r--  maintenance/language/generateCollationData.php | 23
-rw-r--r--  maintenance/language/languages.inc | 112
-rw-r--r--  maintenance/language/messageTypes.inc | 33
-rw-r--r--  maintenance/language/messages.inc | 280
-rw-r--r--  maintenance/language/transstat.php | 4
-rw-r--r--  maintenance/language/writeMessagesArray.inc | 3
-rw-r--r--  maintenance/locking/LockServerDaemon.php | 617
-rw-r--r--  maintenance/locking/file_locks.sql | 11
-rw-r--r--  maintenance/mctest.php | 5
-rw-r--r--  maintenance/mergeMessageFileList.php | 79
-rw-r--r--  maintenance/moveBatch.php | 3
-rw-r--r--  maintenance/mssql/tables.sql | 10
-rw-r--r--  maintenance/mwdocgen.php | 69
-rw-r--r--  maintenance/namespaceDupes.php | 20
-rw-r--r--  maintenance/nextJobDB.php | 10
-rw-r--r--  maintenance/nukeNS.php | 6
-rw-r--r--  maintenance/oracle/archives/patch-ar_sha1_field.sql | 3
-rw-r--r--  maintenance/oracle/archives/patch-config.sql | 8
-rw-r--r--  maintenance/oracle/archives/patch-job_timestamp_field.sql | 4
-rw-r--r--  maintenance/oracle/archives/patch-job_timestamp_index.sql | 4
-rw-r--r--  maintenance/oracle/archives/patch-logging_type_action_index.sql | 4
-rw-r--r--  maintenance/oracle/archives/patch-page_redirect_namespace_len.sql | 4
-rw-r--r--  maintenance/oracle/archives/patch-rev_sha1_field.sql | 4
-rw-r--r--  maintenance/oracle/archives/patch-ug_group-length-increase.sql | 3
-rw-r--r--  maintenance/oracle/archives/patch-us_chunk_inx_field.sql | 4
-rw-r--r--  maintenance/oracle/archives/patch_fk_rename_deferred.sql | 1
-rw-r--r--  maintenance/oracle/archives/patch_rebuild_dupfunc.sql | 11
-rw-r--r--  maintenance/oracle/archives/patch_remove_not_null_empty_defs2.sql | 3
-rw-r--r--  maintenance/oracle/tables.sql | 75
-rw-r--r--  maintenance/orphans.php | 11
-rw-r--r--  maintenance/ourusers.php | 70
-rw-r--r--  maintenance/parse.php | 110
-rw-r--r--  maintenance/populateCategory.php | 2
-rw-r--r--  maintenance/populateImageSha1.php (renamed from maintenance/populateSha1.php) | 38
-rw-r--r--  maintenance/populateLogSearch.php | 42
-rw-r--r--  maintenance/populateLogUsertext.php | 29
-rw-r--r--  maintenance/populateParentId.php | 39
-rw-r--r--  maintenance/populateRevisionLength.php | 44
-rw-r--r--  maintenance/populateRevisionSha1.php | 185
-rw-r--r--  maintenance/postgres/mediawiki_mysql2postgres.pl | 2
-rw-r--r--  maintenance/postgres/tables.sql | 23
-rw-r--r--  maintenance/protect.php | 25
-rw-r--r--  maintenance/proxy_check.php | 1
-rw-r--r--  maintenance/pruneFileCache.php | 105
-rw-r--r--  maintenance/purgeDeletedFiles.php | 89
-rw-r--r--  maintenance/purgeList.php | 2
-rw-r--r--  maintenance/purgeParserCache.php | 26
-rw-r--r--  maintenance/purgeStaleMemcachedText.php | 4
-rw-r--r--  maintenance/reassignEdits.php | 7
-rw-r--r--  maintenance/rebuildFileCache.php | 67
-rw-r--r--  maintenance/rebuildImages.php | 26
-rw-r--r--  maintenance/rebuildInterwiki.php | 275
-rw-r--r--  maintenance/rebuildrecentchanges.php | 4
-rw-r--r--  maintenance/rebuildtextindex.php | 2
-rw-r--r--  maintenance/refreshImageMetadata.php | 12
-rw-r--r--  maintenance/refreshLinks.php | 15
-rw-r--r--  maintenance/removeUnusedAccounts.php | 2
-rw-r--r--  maintenance/renamewiki.php | 89
-rw-r--r--  maintenance/rollbackEdits.php | 9
-rw-r--r--  maintenance/runBatchedQuery.php | 1
-rw-r--r--  maintenance/runJobs.php | 6
-rw-r--r--  maintenance/showJobs.php | 2
-rw-r--r--  maintenance/showStats.php | 3
-rw-r--r--  maintenance/sql.php | 74
-rw-r--r--  maintenance/sqlite.php | 4
-rw-r--r--  maintenance/sqlite/archives/initial-indexes.sql | 5
-rw-r--r--  maintenance/sqlite/archives/patch-drop-user_options.sql | 31
-rw-r--r--  maintenance/sqlite/archives/patch-jobs-add-timestamp.sql | 2
-rw-r--r--  maintenance/sqlite/archives/patch-page_redirect_namespace_len.sql | 7
-rw-r--r--  maintenance/sqlite/archives/patch-ufg_group-length-increase.sql | 15
-rw-r--r--  maintenance/sqlite/archives/patch-ug_group-length-increase.sql | 15
-rw-r--r--  maintenance/stats.php | 4
-rw-r--r--  maintenance/storage/checkStorage.php | 39
-rw-r--r--  maintenance/storage/compressOld.php | 19
-rw-r--r--  maintenance/storage/fixBug20757.php | 3
-rw-r--r--  maintenance/storage/recompressTracked.php | 7
-rw-r--r--  maintenance/tables.sql | 54
-rw-r--r--  maintenance/term/MWTerm.php | 50
-rw-r--r--  maintenance/undelete.php | 3
-rw-r--r--  maintenance/update.php | 6
-rw-r--r--  maintenance/updateCollation.php | 8
-rw-r--r--  maintenance/upgrade1_5.php | 14
-rw-r--r--  maintenance/userOptions.inc | 34
-rw-r--r--  maintenance/userOptions.php | 2
-rw-r--r--  maintenance/wikipedia-interwiki.sql | 289
-rw-r--r--  maintenance/wiktionary-interwiki.sql | 184
170 files changed, 3875 insertions, 4786 deletions
diff --git a/maintenance/Doxyfile b/maintenance/Doxyfile
index 7d7849c8..b7c1e5e8 100644
--- a/maintenance/Doxyfile
+++ b/maintenance/Doxyfile
@@ -1,4 +1,4 @@
-# Doxyfile 1.5.6
+# Doxyfile 1.7.5.1
#
# Some placeholders have been added for MediaWiki usage:
@@ -8,14 +8,16 @@
# {{SVNSTAT}}
# {{INPUT}}
#
-# A number of MediaWiki-specific aliases are near the end of this file.
# To generate documentation run: php mwdocgen.php --no-extensions
#---------------------------------------------------------------------------
# Project related configuration options
#---------------------------------------------------------------------------
+DOXYFILE_ENCODING = UTF-8
PROJECT_NAME = MediaWiki
PROJECT_NUMBER = {{CURRENT_VERSION}}
+PROJECT_BRIEF =
+PROJECT_LOGO =
OUTPUT_DIRECTORY = {{OUTPUT_DIRECTORY}}
CREATE_SUBDIRS = NO
OUTPUT_LANGUAGE = English
@@ -36,18 +38,47 @@ ALWAYS_DETAILED_SEC = NO
INLINE_INHERITED_MEMB = NO
FULL_PATH_NAMES = YES
STRIP_FROM_PATH = {{STRIP_FROM_PATH}}
-STRIP_FROM_INC_PATH =
+STRIP_FROM_INC_PATH =
SHORT_NAMES = NO
JAVADOC_AUTOBRIEF = YES
+QT_AUTOBRIEF = NO
MULTILINE_CPP_IS_BRIEF = NO
INHERIT_DOCS = YES
SEPARATE_MEMBER_PAGES = NO
TAB_SIZE = 8
+ALIASES = "type{1}=<b> \1 </b>:" \
+ "types{2}=<b> \1 </b> or <b> \2 </b>:" \
+ "types{3}=<b> \1 </b>, <b> \2 </b>, or <b> \3 </b>:" \
+ "arrayof{2}=<b> Array </b> of \2" \
+ "null=\type{Null}" \
+ "boolean=\type{Boolean}" \
+ "bool=\type{Boolean}" \
+ "integer=\type{Integer}" \
+ "int=\type{Integer}" \
+ "string=\type{String}" \
+ "str=\type{String}" \
+ "mixed=\type{Mixed}" \
+ "access=\par Access:\n" \
+ "private=\access private" \
+ "protected=\access protected" \
+ "public=\access public" \
+ "copyright=\note" \
+ "license=\note"
OPTIMIZE_OUTPUT_FOR_C = NO
OPTIMIZE_OUTPUT_JAVA = NO
+OPTIMIZE_FOR_FORTRAN = NO
+OPTIMIZE_OUTPUT_VHDL = NO
+EXTENSION_MAPPING =
BUILTIN_STL_SUPPORT = NO
+CPP_CLI_SUPPORT = NO
+SIP_SUPPORT = NO
+IDL_PROPERTY_SUPPORT = NO
DISTRIBUTE_GROUP_DOC = YES
SUBGROUPING = YES
+INLINE_GROUPED_CLASSES = NO
+INLINE_SIMPLE_STRUCTS = NO
+TYPEDEF_HIDES_STRUCT = NO
+SYMBOL_CACHE_SIZE = 0
#---------------------------------------------------------------------------
# Build related configuration options
#---------------------------------------------------------------------------
@@ -56,6 +87,7 @@ EXTRACT_PRIVATE = YES
EXTRACT_STATIC = YES
EXTRACT_LOCAL_CLASSES = YES
EXTRACT_LOCAL_METHODS = NO
+EXTRACT_ANON_NSPACES = NO
HIDE_UNDOC_MEMBERS = NO
HIDE_UNDOC_CLASSES = NO
HIDE_FRIEND_COMPOUNDS = NO
@@ -64,20 +96,27 @@ INTERNAL_DOCS = NO
CASE_SENSE_NAMES = YES
HIDE_SCOPE_NAMES = NO
SHOW_INCLUDE_FILES = YES
+FORCE_LOCAL_INCLUDES = NO
INLINE_INFO = YES
SORT_MEMBER_DOCS = YES
-SORT_BRIEF_DOCS = NO
+SORT_BRIEF_DOCS = YES
+SORT_MEMBERS_CTORS_1ST = NO
+SORT_GROUP_NAMES = NO
SORT_BY_SCOPE_NAME = NO
+STRICT_PROTO_MATCHING = NO
GENERATE_TODOLIST = YES
GENERATE_TESTLIST = YES
GENERATE_BUGLIST = YES
GENERATE_DEPRECATEDLIST= YES
-ENABLED_SECTIONS =
+ENABLED_SECTIONS =
MAX_INITIALIZER_LINES = 30
SHOW_USED_FILES = YES
-SHOW_DIRECTORIES = NO
+SHOW_DIRECTORIES = YES
+SHOW_FILES = YES
SHOW_NAMESPACES = NO
FILE_VERSION_FILTER = {{SVNSTAT}}
+LAYOUT_FILE =
+CITE_BIB_FILES =
#---------------------------------------------------------------------------
# configuration options related to warning and progress messages
#---------------------------------------------------------------------------
@@ -87,11 +126,12 @@ WARN_IF_UNDOCUMENTED = YES
WARN_IF_DOC_ERROR = YES
WARN_NO_PARAMDOC = NO
WARN_FORMAT = "$file:$line: $text"
-WARN_LOGFILE =
+WARN_LOGFILE =
#---------------------------------------------------------------------------
# configuration options related to the input files
#---------------------------------------------------------------------------
INPUT = {{INPUT}}
+INPUT_ENCODING = UTF-8
FILE_PATTERNS = *.c \
*.cc \
*.cxx \
@@ -134,16 +174,18 @@ FILE_PATTERNS = *.c \
*.MM \
*.PY
RECURSIVE = YES
-EXCLUDE = {{EXCLUDE}}
+EXCLUDE = {{EXCLUDE}}
EXCLUDE_SYMLINKS = YES
EXCLUDE_PATTERNS = LocalSettings.php AdminSettings.php StartProfiler.php .svn */.git/* {{EXCLUDE_PATTERNS}}
-EXAMPLE_PATH =
+EXCLUDE_SYMBOLS =
+EXAMPLE_PATH =
EXAMPLE_PATTERNS = *
EXAMPLE_RECURSIVE = NO
-IMAGE_PATH =
-INPUT_FILTER =
-FILTER_PATTERNS =
+IMAGE_PATH =
+INPUT_FILTER =
+FILTER_PATTERNS =
FILTER_SOURCE_FILES = NO
+FILTER_SOURCE_PATTERNS =
#---------------------------------------------------------------------------
# configuration options related to source browsing
#---------------------------------------------------------------------------
@@ -152,6 +194,7 @@ INLINE_SOURCES = NO
STRIP_CODE_COMMENTS = YES
REFERENCED_BY_RELATION = YES
REFERENCES_RELATION = YES
+REFERENCES_LINK_SOURCE = YES
USE_HTAGS = NO
VERBATIM_HEADERS = YES
#---------------------------------------------------------------------------
@@ -159,27 +202,58 @@ VERBATIM_HEADERS = YES
#---------------------------------------------------------------------------
ALPHABETICAL_INDEX = NO
COLS_IN_ALPHA_INDEX = 5
-IGNORE_PREFIX =
+IGNORE_PREFIX =
#---------------------------------------------------------------------------
# configuration options related to the HTML output
#---------------------------------------------------------------------------
GENERATE_HTML = YES
HTML_OUTPUT = html
HTML_FILE_EXTENSION = .html
-HTML_HEADER =
-HTML_FOOTER =
-HTML_STYLESHEET =
+HTML_HEADER =
+HTML_FOOTER =
+HTML_STYLESHEET =
+HTML_EXTRA_FILES =
+HTML_COLORSTYLE_HUE = 220
+HTML_COLORSTYLE_SAT = 100
+HTML_COLORSTYLE_GAMMA = 80
+HTML_TIMESTAMP = YES
HTML_ALIGN_MEMBERS = YES
+HTML_DYNAMIC_SECTIONS = NO
+GENERATE_DOCSET = NO
+DOCSET_FEEDNAME = "Doxygen generated docs"
+DOCSET_BUNDLE_ID = org.doxygen.Project
+DOCSET_PUBLISHER_ID = org.doxygen.Publisher
+DOCSET_PUBLISHER_NAME = Publisher
GENERATE_HTMLHELP = NO
-CHM_FILE =
-HHC_LOCATION =
+CHM_FILE =
+HHC_LOCATION =
GENERATE_CHI = NO
+CHM_INDEX_ENCODING =
BINARY_TOC = NO
TOC_EXPAND = YES
+GENERATE_QHP = NO
+QCH_FILE =
+QHP_NAMESPACE = org.doxygen.Project
+QHP_VIRTUAL_FOLDER = doc
+QHP_CUST_FILTER_NAME =
+QHP_CUST_FILTER_ATTRS =
+QHP_SECT_FILTER_ATTRS =
+QHG_LOCATION =
+GENERATE_ECLIPSEHELP = NO
+ECLIPSE_DOC_ID = org.doxygen.Project
DISABLE_INDEX = NO
ENUM_VALUES_PER_LINE = 4
GENERATE_TREEVIEW = YES
+USE_INLINE_TREES = YES
TREEVIEW_WIDTH = 250
+EXT_LINKS_IN_WINDOW = NO
+FORMULA_FONTSIZE = 10
+FORMULA_TRANSPARENT = YES
+USE_MATHJAX = NO
+MATHJAX_RELPATH = http://www.mathjax.org/mathjax
+MATHJAX_EXTENSIONS =
+SEARCHENGINE = YES
+SERVER_BASED_SEARCH = YES
#---------------------------------------------------------------------------
# configuration options related to the LaTeX output
#---------------------------------------------------------------------------
@@ -189,12 +263,15 @@ LATEX_CMD_NAME = latex
MAKEINDEX_CMD_NAME = makeindex
COMPACT_LATEX = NO
PAPER_TYPE = a4wide
-EXTRA_PACKAGES =
-LATEX_HEADER =
+EXTRA_PACKAGES =
+LATEX_HEADER =
+LATEX_FOOTER =
PDF_HYPERLINKS = YES
USE_PDFLATEX = YES
LATEX_BATCHMODE = NO
LATEX_HIDE_INDICES = NO
+LATEX_SOURCE_CODE = NO
+LATEX_BIB_STYLE = plain
#---------------------------------------------------------------------------
# configuration options related to the RTF output
#---------------------------------------------------------------------------
@@ -202,12 +279,12 @@ GENERATE_RTF = NO
RTF_OUTPUT = rtf
COMPACT_RTF = NO
RTF_HYPERLINKS = NO
-RTF_STYLESHEET_FILE =
-RTF_EXTENSIONS_FILE =
+RTF_STYLESHEET_FILE =
+RTF_EXTENSIONS_FILE =
#---------------------------------------------------------------------------
# configuration options related to the man page output
#---------------------------------------------------------------------------
-GENERATE_MAN = NO
+GENERATE_MAN = {{GENERATE_MAN}}
MAN_OUTPUT = man
MAN_EXTENSION = .3
MAN_LINKS = NO
@@ -216,8 +293,8 @@ MAN_LINKS = NO
#---------------------------------------------------------------------------
GENERATE_XML = NO
XML_OUTPUT = xml
-XML_SCHEMA =
-XML_DTD =
+XML_SCHEMA =
+XML_DTD =
XML_PROGRAMLISTING = YES
#---------------------------------------------------------------------------
# configuration options for the AutoGen Definitions output
@@ -229,33 +306,38 @@ GENERATE_AUTOGEN_DEF = NO
GENERATE_PERLMOD = NO
PERLMOD_LATEX = NO
PERLMOD_PRETTY = YES
-PERLMOD_MAKEVAR_PREFIX =
+PERLMOD_MAKEVAR_PREFIX =
#---------------------------------------------------------------------------
-# Configuration options related to the preprocessor
+# Configuration options related to the preprocessor
#---------------------------------------------------------------------------
ENABLE_PREPROCESSING = YES
MACRO_EXPANSION = NO
EXPAND_ONLY_PREDEF = NO
SEARCH_INCLUDES = YES
-INCLUDE_PATH =
-INCLUDE_FILE_PATTERNS =
-PREDEFINED =
-EXPAND_AS_DEFINED =
+INCLUDE_PATH =
+INCLUDE_FILE_PATTERNS =
+PREDEFINED =
+EXPAND_AS_DEFINED =
SKIP_FUNCTION_MACROS = YES
#---------------------------------------------------------------------------
-# Configuration::additions related to external references
+# Configuration::additions related to external references
#---------------------------------------------------------------------------
-TAGFILES =
+TAGFILES =
GENERATE_TAGFILE = {{OUTPUT_DIRECTORY}}/html/tagfile.xml
ALLEXTERNALS = NO
EXTERNAL_GROUPS = YES
PERL_PATH = /usr/bin/perl
#---------------------------------------------------------------------------
-# Configuration options related to the dot tool
+# Configuration options related to the dot tool
#---------------------------------------------------------------------------
CLASS_DIAGRAMS = NO
+MSCGEN_PATH =
HIDE_UNDOC_RELATIONS = YES
HAVE_DOT = {{HAVE_DOT}}
+DOT_NUM_THREADS = 0
+DOT_FONTNAME = Helvetica
+DOT_FONTSIZE = 10
+DOT_FONTPATH =
CLASS_GRAPH = YES
COLLABORATION_GRAPH = YES
GROUP_GRAPHS = YES
@@ -268,34 +350,14 @@ CALLER_GRAPH = YES
GRAPHICAL_HIERARCHY = YES
DIRECTORY_GRAPH = YES
DOT_IMAGE_FORMAT = png
-DOT_PATH =
-DOTFILE_DIRS =
+INTERACTIVE_SVG = NO
+DOT_PATH =
+DOTFILE_DIRS =
+MSCFILE_DIRS =
+DOT_GRAPH_MAX_NODES = 50
MAX_DOT_GRAPH_DEPTH = 1000
DOT_TRANSPARENT = NO
DOT_MULTI_TARGETS = NO
GENERATE_LEGEND = YES
DOT_CLEANUP = YES
-#---------------------------------------------------------------------------
-# Configuration::additions related to the search engine
-#---------------------------------------------------------------------------
-SEARCHENGINE = NO
-
-ALIASES = "type{1}=<b> \1 </b>:" \
- "types{2}=<b> \1 </b> or <b> \2 </b>:" \
- "types{3}=<b> \1 </b>, <b> \2 </b>, or <b> \3 </b>:" \
- "arrayof{2}=<b> Array </b> of \2" \
- "null=\type{Null}" \
- "boolean=\type{Boolean}" \
- "bool=\type{Boolean}" \
- "integer=\type{Integer}" \
- "int=\type{Integer}" \
- "string=\type{String}" \
- "str=\type{String}" \
- "mixed=\type{Mixed}" \
- "access=\par Access:\n" \
- "private=\access private" \
- "protected=\access protected" \
- "public=\access public" \
- "copyright=\note" \
- "license=\note"
diff --git a/maintenance/Maintenance.php b/maintenance/Maintenance.php
index 3618515a..082cf8be 100644
--- a/maintenance/Maintenance.php
+++ b/maintenance/Maintenance.php
@@ -20,6 +20,11 @@
* @defgroup Maintenance Maintenance
*/
+/**
+ * @defgroup MaintenanceArchive Maintenance archives
+ * @ingroup Maintenance
+ */
+
// Define this so scripts can easily find doMaintenance.php
define( 'RUN_MAINTENANCE_IF_MAIN', dirname( __FILE__ ) . '/doMaintenance.php' );
define( 'DO_MAINTENANCE', RUN_MAINTENANCE_IF_MAIN ); // original name, harmless
@@ -130,14 +135,15 @@ abstract class Maintenance {
*/
public static function shouldExecute() {
$bt = debug_backtrace();
- if ( count( $bt ) < 2 ) {
+ $count = count( $bt );
+ if ( $count < 2 ) {
return false; // sanity
}
if ( $bt[0]['class'] !== 'Maintenance' || $bt[0]['function'] !== 'shouldExecute' ) {
return false; // last call should be to this function
}
- $includeFuncs = array( 'require_once', 'require', 'include' );
- for( $i=1; $i < count( $bt ); $i++ ) {
+ $includeFuncs = array( 'require_once', 'require', 'include', 'include_once' );
+ for( $i=1; $i < $count; $i++ ) {
if ( !in_array( $bt[$i]['function'], $includeFuncs ) ) {
return false; // previous calls should all be "requires"
}
@@ -247,6 +253,20 @@ abstract class Maintenance {
*/
protected function setBatchSize( $s = 0 ) {
$this->mBatchSize = $s;
+
+ // If we support $mBatchSize, show the option.
+ // Used to be in addDefaultParams, but in order for that to
+ // work, subclasses would have to call this function in the constructor
+ // before they called parent::__construct which is just weird
+ // (and really wasn't done).
+ if ( $this->mBatchSize ) {
+ $this->addOption( 'batch-size', 'Run this many operations ' .
+ 'per batch, default: ' . $this->mBatchSize, false, true );
+ if ( isset( $this->mParams['batch-size'] ) ) {
+ // This seems a little ugly...
+ $this->mDependantParameters['batch-size'] = $this->mParams['batch-size'];
+ }
+ }
}
/**
@@ -299,8 +319,7 @@ abstract class Maintenance {
} else {
print( $out );
}
- }
- else {
+ } else {
$out = preg_replace( '/\n\z/', '', $out );
$this->outputChanneled( $out, $channel );
}
@@ -427,11 +446,7 @@ abstract class Maintenance {
$this->addOption( 'dbuser', 'The DB user to use for this script', false, true );
$this->addOption( 'dbpass', 'The password to use for this script', false, true );
}
- // If we support $mBatchSize, show the option
- if ( $this->mBatchSize ) {
- $this->addOption( 'batch-size', 'Run this many operations ' .
- 'per batch, default: ' . $this->mBatchSize, false, true );
- }
+
# Save additional script dependant options to display
# them separately in help
$this->mDependantParameters = array_diff_key( $this->mParams, $this->mGenericParameters );
@@ -455,6 +470,9 @@ abstract class Maintenance {
}
}
+ /**
+ * @var $child Maintenance
+ */
$child = new $maintClass();
$child->loadParamsAndArgs( $this->mSelf, $this->mOptions, $this->mArgs );
if ( !is_null( $this->mDb ) ) {
@@ -525,6 +543,7 @@ abstract class Maintenance {
* to allow sysadmins to explicitly set one if they'd prefer to override
* defaults (or for people using Suhosin which yells at you for trying
* to disable the limits)
+ * @return string
*/
public function memoryLimit() {
$limit = $this->getOption( 'memory-limit', 'max' );
@@ -851,6 +870,9 @@ abstract class Maintenance {
$wgDBpassword = $wgDBadminpassword;
if ( $wgDBservers ) {
+ /**
+ * @var $wgDBservers array
+ */
foreach ( $wgDBservers as $i => $server ) {
$wgDBservers[$i]['user'] = $wgDBuser;
$wgDBservers[$i]['password'] = $wgDBpassword;
@@ -890,57 +912,6 @@ abstract class Maintenance {
}
/**
- * Do setup specific to WMF
- */
- public function loadWikimediaSettings() {
- global $IP, $wgNoDBParam, $wgUseNormalUser, $wgConf, $site, $lang;
-
- if ( empty( $wgNoDBParam ) ) {
- # Check if we were passed a db name
- if ( isset( $this->mOptions['wiki'] ) ) {
- $db = $this->mOptions['wiki'];
- } else {
- $db = array_shift( $this->mArgs );
- }
- list( $site, $lang ) = $wgConf->siteFromDB( $db );
-
- # If not, work out the language and site the old way
- if ( is_null( $site ) || is_null( $lang ) ) {
- if ( !$db ) {
- $lang = 'aa';
- } else {
- $lang = $db;
- }
- if ( isset( $this->mArgs[0] ) ) {
- $site = array_shift( $this->mArgs );
- } else {
- $site = 'wikipedia';
- }
- }
- } else {
- $lang = 'aa';
- $site = 'wikipedia';
- }
-
- # This is for the IRC scripts, which now run as the apache user
- # The apache user doesn't have access to the wikiadmin_pass command
- if ( $_ENV['USER'] == 'apache' ) {
- # if ( posix_geteuid() == 48 ) {
- $wgUseNormalUser = true;
- }
-
- putenv( 'wikilang=' . $lang );
-
- ini_set( 'include_path', ".:$IP:$IP/includes:$IP/languages:$IP/maintenance" );
-
- if ( $lang == 'test' && $site == 'wikipedia' ) {
- if ( !defined( 'TESTWIKI' ) ) {
- define( 'TESTWIKI', 1 );
- }
- }
- }
-
- /**
* Generic setup for most installs. Returns the location of LocalSettings
* @return String
*/
@@ -1030,6 +1001,7 @@ abstract class Maintenance {
/**
* Get the maintenance directory.
+ * @return string
*/
protected function getDir() {
return dirname( __FILE__ );
@@ -1177,6 +1149,7 @@ abstract class Maintenance {
* Update the searchindex table for a given pageid
* @param $dbw Database: a database write handle
* @param $pageId Integer: the page ID to update.
+ * @return null|string
*/
public function updateSearchIndexForPage( $dbw, $pageId ) {
// Get current revision
@@ -1208,7 +1181,7 @@ abstract class Maintenance {
} else {
return posix_isatty( $fd );
}
-}
+ }
/**
* Prompt the console for input
@@ -1274,6 +1247,9 @@ abstract class Maintenance {
}
}
+/**
+ * Fake maintenance wrapper, mostly used for the web installer/updater
+ */
class FakeMaintenance extends Maintenance {
protected $mSelf = "FakeMaintenanceScript";
public function execute() {
@@ -1347,4 +1323,4 @@ abstract class LoggedUpdateMaintenance extends Maintenance {
* @return String
*/
abstract protected function getUpdateKey();
-} \ No newline at end of file
+}
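
The hunk above moves the --batch-size registration out of addDefaultParams() and into setBatchSize(), so a subclass now gets the option simply by calling setBatchSize() from its own constructor. A minimal sketch of a script using that mechanism follows; the class name, the default of 100 and the output text are illustrative only, not part of this release, and only APIs shown elsewhere in this diff are used.

<?php
// Hypothetical example script, not shipped with MediaWiki 1.19.0.
require_once( dirname( __FILE__ ) . '/Maintenance.php' );

class ExampleBatchScript extends Maintenance {
	public function __construct() {
		parent::__construct();
		$this->mDescription = "Example of a batched maintenance script";
		// With the change above, this call also registers the --batch-size option.
		$this->setBatchSize( 100 );
	}

	public function execute() {
		// Use the command-line value if given, otherwise the constructor default.
		$batchSize = intval( $this->getOption( 'batch-size', $this->mBatchSize ) );
		$this->output( "Would process rows in batches of $batchSize\n" );
	}
}

$maintClass = 'ExampleBatchScript';
require_once( RUN_MAINTENANCE_IF_MAIN );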
diff --git a/maintenance/Makefile b/maintenance/Makefile
index a92751c9..30b568dc 100644
--- a/maintenance/Makefile
+++ b/maintenance/Makefile
@@ -1,6 +1,7 @@
help:
@echo "Run 'make test' to run the parser tests."
@echo "Run 'make doc' to run the doxygen generation."
+ @echo "Run 'make man' to run the doxygen generation with man pages."
test:
php tests/parserTests.php --quiet
@@ -8,3 +9,9 @@ test:
doc:
php mwdocgen.php --all
@echo 'Doc generation done. Look at ./docs/html/'
+
+man:
+ php mwdocgen.php --all --generate-man
+ @echo 'Doc generation done. Look at ./docs/html/ and ./docs/man'
+ @echo 'You might want to update your MANPATH currently:'
+ @echo 'MANPATH: $(MANPATH)'
diff --git a/maintenance/Site.php b/maintenance/Site.php
deleted file mode 100644
index 87d637b4..00000000
--- a/maintenance/Site.php
+++ /dev/null
@@ -1,19 +0,0 @@
-<?php
-/**
- * @todo document
- * @ingroup Maintenance
- */
-class Site {
- var $suffix, $lateral, $url;
-
- function __construct( $s, $l, $u ) {
- $this->suffix = $s;
- $this->lateral = $l;
- $this->url = $u;
- }
-
- function getURL( $lang, $urlprotocol ) {
- $xlang = str_replace( '_', '-', $lang );
- return "$urlprotocol//$xlang.{$this->url}/wiki/\$1";
- }
-}
diff --git a/maintenance/addwiki.php b/maintenance/addwiki.php
deleted file mode 100644
index 43f42be5..00000000
--- a/maintenance/addwiki.php
+++ /dev/null
@@ -1,479 +0,0 @@
-<?php
-/**
- * @defgroup Wikimedia Wikimedia
- */
-
-/**
- * Add a new wiki
- * Wikimedia specific!
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; either version 2 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License along
- * with this program; if not, write to the Free Software Foundation, Inc.,
- * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- * http://www.gnu.org/copyleft/gpl.html
- *
- * @file
- * @ingroup Maintenance
- * @ingroup Wikimedia
- */
-
-require_once( dirname( __FILE__ ) . '/Maintenance.php' );
-
-class AddWiki extends Maintenance {
- public function __construct() {
- global $wgNoDBParam;
-
- parent::__construct();
- $this->mDescription = "Add a new wiki to the family. Wikimedia specific!";
- $this->addArg( 'language', 'Language code of new site, e.g. en' );
- $this->addArg( 'site', 'Type of site, e.g. wikipedia' );
- $this->addArg( 'dbname', 'Name of database to create, e.g. enwiki' );
- $this->addArg( 'domain', 'Domain name of the wiki, e.g. en.wikipedia.org' );
-
- $wgNoDBParam = true;
- }
-
- public function getDbType() {
- return Maintenance::DB_ADMIN;
- }
-
- public function execute() {
- global $IP, $wgDefaultExternalStore, $wgVersionNumber;
- if ( !$wgVersionNumber ) { // set in CommonSettings.php
- $this->error( '$wgVersionNumber is not set, please use MWScript.php wrapper.', true );
- }
-
- $lang = $this->getArg( 0 );
- $site = $this->getArg( 1 );
- $dbName = $this->getArg( 2 );
- $domain = $this->getArg( 3 );
- $languageNames = Language::getLanguageNames();
-
- if ( !isset( $languageNames[$lang] ) ) {
- $this->error( "Language $lang not found in Names.php", true );
- }
- $name = $languageNames[$lang];
-
- $dbw = wfGetDB( DB_MASTER );
- $common = "/home/wikipedia/common";
-
- $this->output( "Creating database $dbName for $lang.$site ($name)\n" );
-
- # Set up the database
- $dbw->query( "SET table_type=Innodb" );
- $dbw->query( "CREATE DATABASE $dbName" );
- $dbw->selectDB( $dbName );
-
- $this->output( "Initialising tables\n" );
- $dbw->sourceFile( $this->getDir() . '/tables.sql' );
- $dbw->sourceFile( "$IP/extensions/OAI/update_table.sql" );
- $dbw->sourceFile( "$IP/extensions/AntiSpoof/sql/patch-antispoof.mysql.sql" );
- $dbw->sourceFile( "$IP/extensions/CheckUser/cu_changes.sql" );
- $dbw->sourceFile( "$IP/extensions/CheckUser/cu_log.sql" );
- $dbw->sourceFile( "$IP/extensions/TitleKey/titlekey.sql" );
- $dbw->sourceFile( "$IP/extensions/Oversight/hidden.sql" );
- $dbw->sourceFile( "$IP/extensions/GlobalBlocking/localdb_patches/setup-global_block_whitelist.sql" );
- $dbw->sourceFile( "$IP/extensions/AbuseFilter/abusefilter.tables.sql" );
- $dbw->sourceFile( "$IP/extensions/PrefStats/patches/PrefStats.sql" );
- $dbw->sourceFile( "$IP/extensions/ProofreadPage/ProofreadPage.sql" );
- $dbw->sourceFile( "$IP/extensions/ClickTracking/patches/ClickTrackingEvents.sql" );
- $dbw->sourceFile( "$IP/extensions/ClickTracking/patches/ClickTracking.sql" );
- $dbw->sourceFile( "$IP/extensions/UserDailyContribs/patches/UserDailyContribs.sql" );
-
- $dbw->query( "INSERT INTO site_stats(ss_row_id) VALUES (1)" );
-
- # Initialise external storage
- if ( is_array( $wgDefaultExternalStore ) ) {
- $stores = $wgDefaultExternalStore;
- } elseif ( $wgDefaultExternalStore ) {
- $stores = array( $wgDefaultExternalStore );
- } else {
- $stores = array();
- }
- if ( count( $stores ) ) {
- global $wgDBuser, $wgDBpassword, $wgExternalServers;
- foreach ( $stores as $storeURL ) {
- $m = array();
- if ( !preg_match( '!^DB://(.*)$!', $storeURL, $m ) ) {
- continue;
- }
-
- $cluster = $m[1];
- $this->output( "Initialising external storage $cluster...\n" );
-
- # Hack
- $wgExternalServers[$cluster][0]['user'] = $wgDBuser;
- $wgExternalServers[$cluster][0]['password'] = $wgDBpassword;
-
- $store = new ExternalStoreDB;
- $extdb = $store->getMaster( $cluster );
- $extdb->query( "SET table_type=InnoDB" );
- $extdb->query( "CREATE DATABASE $dbName" );
- $extdb->selectDB( $dbName );
-
- # Hack x2
- $blobsTable = $store->getTable( $extdb );
- $sedCmd = "sed s/blobs\\\\\\>/$blobsTable/ " . $this->getDir() . "/storage/blobs.sql";
- $blobsFile = popen( $sedCmd, 'r' );
- $extdb->sourceStream( $blobsFile );
- pclose( $blobsFile );
- $extdb->commit();
- }
- }
-
- $title = Title::newFromText( wfMessage( 'mainpage' )->inLanguage( $lang )->useDatabase( false )->plain() );
- $this->output( "Writing main page to " . $title->getPrefixedDBkey() . "\n" );
- $article = new Article( $title );
- $ucsite = ucfirst( $site );
-
- $article->doEdit( $this->getFirstArticle( $ucsite, $name ), '', EDIT_NEW | EDIT_AUTOSUMMARY );
-
- $this->output( "Adding to dblists\n" );
-
- # Add to dblist
- $file = fopen( "$common/all.dblist", "a" );
- fwrite( $file, "$dbName\n" );
- fclose( $file );
-
- # Update the sublists
- shell_exec( "cd $common && ./refresh-dblist" );
-
- # Add to wikiversions.dat
- $file = fopen( "$common/wikiversions.dat", "a" );
- fwrite( $file, "$dbName php-$wgVersionNumber\n" );
- fclose( $file );
- # Rebuild wikiversions.cdb
- shell_exec( "cd $common/multiversion && ./refreshWikiversionsCDB" );
-
- # print "Constructing interwiki SQL\n";
- # Rebuild interwiki tables
- # passthru( '/home/wikipedia/conf/interwiki/update' );
-
- $time = wfTimestamp( TS_RFC2822 );
- // These arguments need to be escaped twice: once for echo and once for at
- $escDbName = wfEscapeShellArg( wfEscapeShellArg( $dbName ) );
- $escTime = wfEscapeShellArg( wfEscapeShellArg( $time ) );
- $escUcsite = wfEscapeShellArg( wfEscapeShellArg( $ucsite ) );
- $escName = wfEscapeShellArg( wfEscapeShellArg( $name ) );
- $escLang = wfEscapeShellArg( wfEscapeShellArg( $lang ) );
- $escDomain = wfEscapeShellArg( wfEscapeShellArg( $domain ) );
- shell_exec( "echo notifyNewProjects $escDbName $escTime $escUcsite $escName $escLang $escDomain | at now + 15 minutes" );
-
- $this->output( "Script ended. You still have to:
- * Add any required settings in InitialiseSettings.php
- * Run sync-common-all
- * Run /home/wikipedia/conf/interwiki/update
- " );
- }
-
- private function getFirstArticle( $ucsite, $name ) {
- return <<<EOT
-==This subdomain is reserved for the creation of a [[wikimedia:Our projects|$ucsite]] in '''[[w:en:{$name}|{$name}]]''' language==
-
-* Please '''do not start editing''' this new site. This site has a test project on the [[incubator:|Wikimedia Incubator]] (or on the [[betawikiversity:|BetaWikiversity]] or on the [[oldwikisource:|Old Wikisource]]) and it will be imported to here.
-
-* If you would like to help translating the interface to this language, please do not translate here, but go to [[translatewiki:|translatewiki]], a special wiki for translating the interface. That way everyone can use it on every wiki using the [[mw:|same software]].
-
-* For information about how to edit and for other general help, see [[m:Help:Contents|Help on Wikimedia's Meta-Wiki]] or [[mw:Help:Contents|Help on MediaWiki.org]].
-
-== Sister projects ==
-<span class="plainlinks">
-[http://www.wikipedia.org Wikipedia] |
-[http://www.wiktionary.org Wiktonary] |
-[http://www.wikibooks.org Wikibooks] |
-[http://www.wikinews.org Wikinews] |
-[http://www.wikiquote.org Wikiquote] |
-[http://www.wikisource.org Wikisource]
-[http://www.wikiversity.org Wikiversity]
-</span>
-
-See Wikimedia's [[m:|Meta-Wiki]] for the coordination of these projects.
-
-[[aa:]]
-[[ab:]]
-[[ace:]]
-[[af:]]
-[[ak:]]
-[[als:]]
-[[am:]]
-[[an:]]
-[[ang:]]
-[[ar:]]
-[[arc:]]
-[[arz:]]
-[[as:]]
-[[ast:]]
-[[av:]]
-[[ay:]]
-[[az:]]
-[[ba:]]
-[[bar:]]
-[[bat-smg:]]
-[[bcl:]]
-[[be:]]
-[[be-x-old:]]
-[[bg:]]
-[[bh:]]
-[[bi:]]
-[[bm:]]
-[[bn:]]
-[[bo:]]
-[[bpy:]]
-[[br:]]
-[[bs:]]
-[[bug:]]
-[[bxr:]]
-[[ca:]]
-[[cbk-zam:]]
-[[cdo:]]
-[[ce:]]
-[[ceb:]]
-[[ch:]]
-[[cho:]]
-[[chr:]]
-[[chy:]]
-[[ckb:]]
-[[co:]]
-[[cr:]]
-[[crh:]]
-[[cs:]]
-[[csb:]]
-[[cu:]]
-[[cv:]]
-[[cy:]]
-[[da:]]
-[[de:]]
-[[diq:]]
-[[dk:]]
-[[dsb:]]
-[[dv:]]
-[[dz:]]
-[[ee:]]
-[[el:]]
-[[eml:]]
-[[en:]]
-[[eo:]]
-[[es:]]
-[[et:]]
-[[eu:]]
-[[ext:]]
-[[fa:]]
-[[ff:]]
-[[fi:]]
-[[fiu-vro:]]
-[[fj:]]
-[[fo:]]
-[[fr:]]
-[[frp:]]
-[[fur:]]
-[[fy:]]
-[[ga:]]
-[[gan:]]
-[[gd:]]
-[[gl:]]
-[[glk:]]
-[[gn:]]
-[[got:]]
-[[gu:]]
-[[gv:]]
-[[ha:]]
-[[hak:]]
-[[haw:]]
-[[he:]]
-[[hi:]]
-[[hif:]]
-[[ho:]]
-[[hr:]]
-[[hsb:]]
-[[ht:]]
-[[hu:]]
-[[hy:]]
-[[hz:]]
-[[ia:]]
-[[id:]]
-[[ie:]]
-[[ig:]]
-[[ii:]]
-[[ik:]]
-[[ilo:]]
-[[io:]]
-[[is:]]
-[[it:]]
-[[iu:]]
-[[ja:]]
-[[jbo:]]
-[[jv:]]
-[[ka:]]
-[[kaa:]]
-[[kab:]]
-[[kg:]]
-[[ki:]]
-[[kj:]]
-[[kk:]]
-[[kl:]]
-[[km:]]
-[[kn:]]
-[[ko:]]
-[[kr:]]
-[[ks:]]
-[[ksh:]]
-[[ku:]]
-[[kv:]]
-[[kw:]]
-[[ky:]]
-[[la:]]
-[[lad:]]
-[[lb:]]
-[[lbe:]]
-[[lg:]]
-[[li:]]
-[[lij:]]
-[[lmo:]]
-[[ln:]]
-[[lo:]]
-[[lt:]]
-[[lv:]]
-[[map-bms:]]
-[[mdf:]]
-[[mg:]]
-[[mh:]]
-[[mhr:]]
-[[mi:]]
-[[mk:]]
-[[ml:]]
-[[mn:]]
-[[mo:]]
-[[mr:]]
-[[ms:]]
-[[mt:]]
-[[mus:]]
-[[mwl:]]
-[[my:]]
-[[myv:]]
-[[mzn:]]
-[[na:]]
-[[nan:]]
-[[nap:]]
-[[nds:]]
-[[nds-nl:]]
-[[ne:]]
-[[new:]]
-[[ng:]]
-[[nl:]]
-[[nn:]]
-[[no:]]
-[[nov:]]
-[[nrm:]]
-[[nv:]]
-[[ny:]]
-[[oc:]]
-[[om:]]
-[[or:]]
-[[os:]]
-[[pa:]]
-[[pag:]]
-[[pam:]]
-[[pap:]]
-[[pdc:]]
-[[pi:]]
-[[pih:]]
-[[pl:]]
-[[pms:]]
-[[pnt:]]
-[[pnb:]]
-[[ps:]]
-[[pt:]]
-[[qu:]]
-[[rm:]]
-[[rmy:]]
-[[rn:]]
-[[ro:]]
-[[roa-tara:]]
-[[ru:]]
-[[rup:]]
-[[rw:]]
-[[sa:]]
-[[sah:]]
-[[sc:]]
-[[scn:]]
-[[sco:]]
-[[sd:]]
-[[se:]]
-[[sg:]]
-[[sh:]]
-[[si:]]
-[[simple:]]
-[[sk:]]
-[[sl:]]
-[[sm:]]
-[[sn:]]
-[[so:]]
-[[sq:]]
-[[sr:]]
-[[srn:]]
-[[ss:]]
-[[st:]]
-[[stq:]]
-[[su:]]
-[[sv:]]
-[[sw:]]
-[[szl:]]
-[[ta:]]
-[[te:]]
-[[tet:]]
-[[tg:]]
-[[th:]]
-[[ti:]]
-[[tk:]]
-[[tl:]]
-[[tn:]]
-[[to:]]
-[[tpi:]]
-[[tr:]]
-[[ts:]]
-[[tt:]]
-[[tum:]]
-[[tw:]]
-[[ty:]]
-[[udm:]]
-[[ug:]]
-[[uk:]]
-[[ur:]]
-[[uz:]]
-[[ve:]]
-[[vec:]]
-[[vi:]]
-[[vls:]]
-[[vo:]]
-[[wa:]]
-[[war:]]
-[[wo:]]
-[[wuu:]]
-[[xal:]]
-[[xh:]]
-[[yi:]]
-[[yo:]]
-[[za:]]
-[[zea:]]
-[[zh:]]
-[[zh-classical:]]
-[[zh-min-nan:]]
-[[zh-yue:]]
-[[zu:]]
-
-EOT;
- }
-}
-
-$maintClass = "AddWiki";
-require_once( RUN_MAINTENANCE_IF_MAIN );
diff --git a/maintenance/archives/patch-ar_sha1.sql b/maintenance/archives/patch-ar_sha1.sql
new file mode 100644
index 00000000..1c7d8e91
--- /dev/null
+++ b/maintenance/archives/patch-ar_sha1.sql
@@ -0,0 +1,3 @@
+-- Adding ar_sha1 field
+ALTER TABLE /*$wgDBprefix*/archive
+ ADD ar_sha1 varbinary(32) NOT NULL default '';
diff --git a/maintenance/archives/patch-drop-user_options.sql b/maintenance/archives/patch-drop-user_options.sql
new file mode 100644
index 00000000..15b7d278
--- /dev/null
+++ b/maintenance/archives/patch-drop-user_options.sql
@@ -0,0 +1 @@
+ALTER TABLE /*_*/user DROP COLUMN user_options; \ No newline at end of file
diff --git a/maintenance/archives/patch-jobs-add-timestamp.sql b/maintenance/archives/patch-jobs-add-timestamp.sql
new file mode 100644
index 00000000..c5e6e711
--- /dev/null
+++ b/maintenance/archives/patch-jobs-add-timestamp.sql
@@ -0,0 +1,2 @@
+ALTER TABLE /*_*/job ADD COLUMN job_timestamp varbinary(14) NULL default NULL;
+CREATE INDEX /*i*/job_timestamp ON /*_*/job(job_timestamp);
diff --git a/maintenance/archives/patch-logging-type-action-index.sql b/maintenance/archives/patch-logging-type-action-index.sql
new file mode 100644
index 00000000..5edc61a5
--- /dev/null
+++ b/maintenance/archives/patch-logging-type-action-index.sql
@@ -0,0 +1 @@
+CREATE INDEX /*i*/type_action ON /*_*/logging(log_type, log_action, log_timestamp);
diff --git a/maintenance/archives/patch-page_redirect_namespace_len.sql b/maintenance/archives/patch-page_redirect_namespace_len.sql
new file mode 100644
index 00000000..392945fb
--- /dev/null
+++ b/maintenance/archives/patch-page_redirect_namespace_len.sql
@@ -0,0 +1,6 @@
+--
+-- Add the page_redirect_namespace_len index
+--
+
+CREATE INDEX /*i*/page_redirect_namespace_len ON /*_*/page (page_is_redirect, page_namespace, page_len);
+
diff --git a/maintenance/archives/patch-rev_sha1.sql b/maintenance/archives/patch-rev_sha1.sql
new file mode 100644
index 00000000..0100c365
--- /dev/null
+++ b/maintenance/archives/patch-rev_sha1.sql
@@ -0,0 +1,3 @@
+-- Adding rev_sha1 field
+ALTER TABLE /*$wgDBprefix*/revision
+ ADD rev_sha1 varbinary(32) NOT NULL default '';
diff --git a/maintenance/archives/patch-trackbacks.sql b/maintenance/archives/patch-trackbacks.sql
deleted file mode 100644
index e3a47aae..00000000
--- a/maintenance/archives/patch-trackbacks.sql
+++ /dev/null
@@ -1,10 +0,0 @@
-CREATE TABLE /*$wgDBprefix*/trackbacks (
- tb_id INTEGER AUTO_INCREMENT PRIMARY KEY,
- tb_page INTEGER REFERENCES page(page_id) ON DELETE CASCADE,
- tb_title VARCHAR(255) NOT NULL,
- tb_url BLOB NOT NULL,
- tb_ex TEXT,
- tb_name VARCHAR(255),
-
- INDEX (tb_page)
-);
diff --git a/maintenance/archives/patch-ufg_group-length-increase.sql b/maintenance/archives/patch-ufg_group-length-increase.sql
new file mode 100644
index 00000000..e24cba02
--- /dev/null
+++ b/maintenance/archives/patch-ufg_group-length-increase.sql
@@ -0,0 +1,2 @@
+ALTER TABLE /*_*/user_former_groups
+ MODIFY COLUMN ufg_group varbinary(32) NOT NULL default '';
diff --git a/maintenance/archives/patch-ug_group-length-increase.sql b/maintenance/archives/patch-ug_group-length-increase.sql
new file mode 100644
index 00000000..e944a858
--- /dev/null
+++ b/maintenance/archives/patch-ug_group-length-increase.sql
@@ -0,0 +1,2 @@
+ALTER TABLE /*_*/user_groups
+ MODIFY COLUMN ug_group varbinary(32) NOT NULL default '';
diff --git a/maintenance/archives/patch-uploadstash_chunk.sql b/maintenance/archives/patch-uploadstash_chunk.sql
new file mode 100644
index 00000000..29e41870
--- /dev/null
+++ b/maintenance/archives/patch-uploadstash_chunk.sql
@@ -0,0 +1,3 @@
+-- Adding us_chunk_inx field
+ALTER TABLE /*$wgDBprefix*/uploadstash
+ ADD us_chunk_inx int unsigned NULL;
diff --git a/maintenance/archives/patch-user_former_groups.sql b/maintenance/archives/patch-user_former_groups.sql
index ed18b2b6..ef56db06 100644
--- a/maintenance/archives/patch-user_former_groups.sql
+++ b/maintenance/archives/patch-user_former_groups.sql
@@ -3,7 +3,7 @@
CREATE TABLE /*_*/user_former_groups (
-- Key to user_id
ufg_user int unsigned NOT NULL default 0,
- ufg_group varbinary(16) NOT NULL default ''
+ ufg_group varbinary(32) NOT NULL default ''
) /*$wgDBTableOptions*/;
CREATE UNIQUE INDEX /*i*/ufg_user_group ON /*_*/user_former_groups (ufg_user,ufg_group);
diff --git a/maintenance/attachLatest.php b/maintenance/attachLatest.php
index e6287f43..6e09671a 100644
--- a/maintenance/attachLatest.php
+++ b/maintenance/attachLatest.php
@@ -65,8 +65,8 @@ class AttachLatest extends Maintenance {
$id = $revision->getId();
$this->output( wfWikiID() . " $pageId [[$name]] latest time $latestTime, rev id $id\n" );
if ( $this->hasOption( 'fix' ) ) {
- $article = new Article( $title );
- $article->updateRevisionOn( $dbw, $revision );
+ $page = WikiPage::factory( $title );
+ $page->updateRevisionOn( $dbw, $revision );
}
$n++;
}
diff --git a/maintenance/backup.inc b/maintenance/backup.inc
index 4cb9c58a..9f67a1ac 100644
--- a/maintenance/backup.inc
+++ b/maintenance/backup.inc
@@ -47,6 +47,8 @@ class BackupDumper {
var $skipFooter = false; // don't output </mediawiki>
var $startId = 0;
var $endId = 0;
+ var $revStartId = 0;
+ var $revEndId = 0;
var $sink = null; // Output filters
var $stubText = false; // include rev_text_id instead of text; for 2-pass dump
var $dumpUploads = false;
@@ -56,7 +58,14 @@ class BackupDumper {
var $revCountLast = 0;
var $ID = 0;
- function BackupDumper( $args ) {
+ var $outputTypes = array(), $filterTypes = array();
+
+ /**
+ * @var LoadBalancer
+ */
+ protected $lb;
+
+ function __construct( $args ) {
$this->stderr = fopen( "php://stderr", "wt" );
// Built-in output and filter plugins
@@ -247,6 +256,7 @@ class BackupDumper {
* @todo Fixme: the --server parameter is currently not respected, as it
* doesn't seem terribly easy to ask the load balancer for a particular
* connection by name.
+ * @return DatabaseBase
*/
function backupDb() {
$this->lb = wfGetLBFactory()->newMainLB();
@@ -332,7 +342,7 @@ class BackupDumper {
}
class ExportProgressFilter extends DumpFilter {
- function ExportProgressFilter( &$sink, &$progress ) {
+ function __construct( &$sink, &$progress ) {
parent::__construct( $sink );
$this->progress = $progress;
}
diff --git a/maintenance/backupPrefetch.inc b/maintenance/backupPrefetch.inc
index 93f75c65..0e12a1ce 100644
--- a/maintenance/backupPrefetch.inc
+++ b/maintenance/backupPrefetch.inc
@@ -133,6 +133,7 @@ class BaseDump {
/**
* @access private
+ * @return string
*/
function nextText() {
$this->skipTo( 'text' );
@@ -141,6 +142,9 @@ class BaseDump {
/**
* @access private
+ * @param $name string
+ * @param $parent string
+ * @return bool|null
*/
function skipTo( $name, $parent = 'page' ) {
if ( $this->atEnd ) {
@@ -192,6 +196,7 @@ class BaseDump {
/**
* @access private
+ * @return null
*/
function close() {
$this->reader->close();
diff --git a/maintenance/benchmarks/Benchmarker.php b/maintenance/benchmarks/Benchmarker.php
index 57fb8759..0056c3c7 100644
--- a/maintenance/benchmarks/Benchmarker.php
+++ b/maintenance/benchmarks/Benchmarker.php
@@ -1,6 +1,7 @@
<?php
/**
* @defgroup Benchmark Benchmark
+ * @ingroup Maintenance
*/
/**
@@ -23,7 +24,7 @@
*
* @todo Report PHP version, OS ..
* @file
- * @ingroup Maintenance Benchmark
+ * @ingroup Benchmark
*/
require_once( dirname( __FILE__ ) . '/../Maintenance.php' );
diff --git a/maintenance/benchmarks/bench_HTTP_HTTPS.php b/maintenance/benchmarks/bench_HTTP_HTTPS.php
index 0038b2d1..cf62aadb 100644
--- a/maintenance/benchmarks/bench_HTTP_HTTPS.php
+++ b/maintenance/benchmarks/bench_HTTP_HTTPS.php
@@ -18,8 +18,8 @@
* http://www.gnu.org/copyleft/gpl.html
*
* @file
- * @ingroup Maintenance
- * @author Platonides
+ * @ingroup Benchmark
+ * @author Platonides
*/
require_once( dirname( __FILE__ ) . '/Benchmarker.php' );
diff --git a/maintenance/benchmarks/bench_delete_truncate.php b/maintenance/benchmarks/bench_delete_truncate.php
index 9fe9bea9..71385520 100644
--- a/maintenance/benchmarks/bench_delete_truncate.php
+++ b/maintenance/benchmarks/bench_delete_truncate.php
@@ -1,4 +1,8 @@
<?php
+/**
+ * @file
+ * @ingroup Benchmark
+ */
require_once( dirname( __FILE__ ) . '/Benchmarker.php' );
diff --git a/maintenance/benchmarks/bench_if_switch.php b/maintenance/benchmarks/bench_if_switch.php
index 11c00b3c..dafce050 100644
--- a/maintenance/benchmarks/bench_if_switch.php
+++ b/maintenance/benchmarks/bench_if_switch.php
@@ -18,8 +18,8 @@
* http://www.gnu.org/copyleft/gpl.html
*
* @file
- * @ingroup Maintenance
- * @author Platonides
+ * @ingroup Benchmark
+ * @author Platonides
*/
require_once( dirname( __FILE__ ) . '/Benchmarker.php' );
diff --git a/maintenance/benchmarks/bench_strtr_str_replace.php b/maintenance/benchmarks/bench_strtr_str_replace.php
index ae576981..73ace2bd 100644
--- a/maintenance/benchmarks/bench_strtr_str_replace.php
+++ b/maintenance/benchmarks/bench_strtr_str_replace.php
@@ -1,4 +1,8 @@
<?php
+/**
+ * @file
+ * @ingroup Benchmark
+ */
require_once( dirname( __FILE__ ) . '/Benchmarker.php' );
diff --git a/maintenance/benchmarks/bench_wfIsWindows.php b/maintenance/benchmarks/bench_wfIsWindows.php
index 4c35221d..4caebc5e 100644
--- a/maintenance/benchmarks/bench_wfIsWindows.php
+++ b/maintenance/benchmarks/bench_wfIsWindows.php
@@ -18,8 +18,8 @@
* http://www.gnu.org/copyleft/gpl.html
*
* @file
- * @ingroup Maintenance
- * @author Platonides
+ * @ingroup Benchmark
+ * @author Platonides
*/
require_once( dirname( __FILE__ ) . '/Benchmarker.php' );
diff --git a/maintenance/benchmarks/benchmarkHooks.php b/maintenance/benchmarks/benchmarkHooks.php
new file mode 100644
index 00000000..4ec26168
--- /dev/null
+++ b/maintenance/benchmarks/benchmarkHooks.php
@@ -0,0 +1,80 @@
+<?php
+/**
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License along
+ * with this program; if not, write to the Free Software Foundation, Inc.,
+ * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ * http://www.gnu.org/copyleft/gpl.html
+ *
+ * @file
+ * @ingroup Benchmark
+ */
+
+require_once( dirname( __FILE__ ) . '/Benchmarker.php' );
+
+class BenchmarkHooks extends Benchmarker {
+
+ public function __construct() {
+ parent::__construct();
+ $this->mDescription = 'Benchmark MediaWiki Hooks.';
+ }
+
+ public function execute() {
+ global $wgHooks;
+ $wgHooks['Test'] = array();
+
+ $time = $this->benchHooks();
+ $this->output( 'Empty hook: ' . $time . "\n" );
+
+ $wgHooks['Test'][] = array( $this, 'test' );
+ $time = $this->benchHooks();
+ $this->output( 'Loaded (one) hook: ' . $time . "\n" );
+
+ for( $i = 0; $i < 9; $i++ ) {
+ $wgHooks['Test'][] = array( $this, 'test' );
+ }
+ $time = $this->benchHooks();
+ $this->output( 'Loaded (ten) hook: ' . $time . "\n" );
+
+ for( $i = 0; $i < 90; $i++ ) {
+ $wgHooks['Test'][] = array( $this, 'test' );
+ }
+ $time = $this->benchHooks();
+ $this->output( 'Loaded (one hundred) hook: ' . $time . "\n" );
+ $this->output( "\n" );
+ }
+
+ /**
+ * @param $trials int
+ * @return string
+ */
+ private function benchHooks( $trials = 10 ) {
+ $start = wfTime();
+ for ( $i = 0; $i < $trials; $i++ ) {
+ wfRunHooks( 'Test' );
+ }
+ $delta = wfTime() - $start;
+ $pertrial = $delta / $trials;
+ return sprintf( "Took %6.2fs",
+ $pertrial );
+ }
+
+ /**
+ * @return bool
+ */
+ public function test() {
+ return true;
+ }
+}
+
+$maintClass = 'BenchmarkHooks';
+require_once( RUN_MAINTENANCE_IF_MAIN );
diff --git a/maintenance/benchmarks/benchmarkPurge.php b/maintenance/benchmarks/benchmarkPurge.php
index 4ab7aa10..e9d2ec7a 100644
--- a/maintenance/benchmarks/benchmarkPurge.php
+++ b/maintenance/benchmarks/benchmarkPurge.php
@@ -18,7 +18,7 @@
* http://www.gnu.org/copyleft/gpl.html
*
* @file
- * @ingroup Maintenance
+ * @ingroup Benchmark
*/
require_once( dirname( __FILE__ ) . '/Benchmarker.php' );
@@ -54,6 +54,7 @@ class BenchmarkPurge extends Benchmarker {
* to benchmark Squid response times.
* @param $urls array A bunch of URLs to purge
* @param $trials int How many times to run the test?
+ * @return string
*/
private function benchSquid( $urls, $trials = 1 ) {
$start = wfTime();
@@ -70,6 +71,7 @@ class BenchmarkPurge extends Benchmarker {
/**
* Get an array of randomUrl()'s.
* @param $length int How many urls to add to the array
+ * @return array
*/
private function randomUrlList( $length ) {
$list = array();
@@ -82,6 +84,7 @@ class BenchmarkPurge extends Benchmarker {
/**
* Return a random URL of the wiki. Not necessarily an actual title in the
* database, but at least a URL that looks like one.
+ * @return string
*/
private function randomUrl() {
global $wgServer, $wgArticlePath;
@@ -91,6 +94,7 @@ class BenchmarkPurge extends Benchmarker {
/**
* Create a random title string (not necessarily a Title object).
* For use with randomUrl().
+ * @return string
*/
private function randomTitle() {
$str = '';
diff --git a/maintenance/changePassword.php b/maintenance/changePassword.php
index ef87dfbd..cfcac406 100644
--- a/maintenance/changePassword.php
+++ b/maintenance/changePassword.php
@@ -43,7 +43,7 @@ class ChangePassword extends Maintenance {
} else {
$this->error( "A \"user\" or \"userid\" must be set to change the password for" , true );
}
- if ( !$user->getId() ) {
+ if ( !$user || !$user->getId() ) {
$this->error( "No such user: " . $this->getOption( 'user' ), true );
}
try {
diff --git a/maintenance/checkImages.php b/maintenance/checkImages.php
index 96b93f22..484217d9 100644
--- a/maintenance/checkImages.php
+++ b/maintenance/checkImages.php
@@ -49,7 +49,9 @@ class CheckImages extends Maintenance {
$this->output( "{$row->img_name}: not locally accessible\n" );
continue;
}
- $stat = @stat( $file->getPath() );
+ wfSuppressWarnings();
+ $stat = stat( $file->getPath() );
+ wfRestoreWarnings();
if ( !$stat ) {
$this->output( "{$row->img_name}: missing\n" );
continue;
diff --git a/maintenance/checkSyntax.php b/maintenance/checkSyntax.php
index 83f73be5..cc4e5af5 100644
--- a/maintenance/checkSyntax.php
+++ b/maintenance/checkSyntax.php
@@ -165,6 +165,8 @@ class CheckSyntax extends Maintenance {
/**
* Returns true if $file is of a type we can check
+ * @param $file string
+ * @return bool
*/
private function isSuitableFile( $file ) {
$file = str_replace( '\\', '/', $file );
@@ -181,6 +183,8 @@ class CheckSyntax extends Maintenance {
/**
* Add given path to file list, searching it in include path if needed
+ * @param $path string
+ * @return bool
*/
private function addPath( $path ) {
global $IP;
@@ -188,8 +192,10 @@ class CheckSyntax extends Maintenance {
}
/**
- * Add given file to file list, or, if it's a directory, add its content
- */
+ * Add given file to file list, or, if it's a directory, add its content
+ * @param $path string
+ * @return bool
+ */
private function addFileOrDir( $path ) {
if ( is_dir( $path ) ) {
$this->addDirectoryContent( $path );
diff --git a/maintenance/cleanupCaps.php b/maintenance/cleanupCaps.php
index 2d945a52..6f8e180c 100644
--- a/maintenance/cleanupCaps.php
+++ b/maintenance/cleanupCaps.php
@@ -40,11 +40,15 @@ class CapsCleanup extends TableCleanup {
public function execute() {
global $wgCapitalLinks, $wgUser;
+
+ if ( $wgCapitalLinks ) {
+ $this->error( "\$wgCapitalLinks is on -- no need for caps links cleanup.", true );
+ }
+
+ $wgUser = User::newFromName( 'Conversion script' );
+
$this->namespace = intval( $this->getOption( 'namespace', 0 ) );
$this->dryrun = $this->hasOption( 'dry-run' );
- $wgUser->setName( 'Conversion script' );
- if ( $wgCapitalLinks )
- $this->error( "\$wgCapitalLinks is on -- no need for caps links cleanup.", true );
$this->runTable( array(
'table' => 'page',
@@ -88,9 +92,8 @@ class CapsCleanup extends TableCleanup {
return $this->processRow( $row );
}
}
- } else {
- $this->progress( 0 );
}
+ return $this->progress( 0 );
}
}
diff --git a/maintenance/cleanupImages.php b/maintenance/cleanupImages.php
index c8e90958..81d1c85b 100644
--- a/maintenance/cleanupImages.php
+++ b/maintenance/cleanupImages.php
@@ -73,8 +73,9 @@ class ImageCleanup extends TableCleanup {
if ( is_null( $title ) ) {
$this->output( "page $source ($cleaned) is illegal.\n" );
$safe = $this->buildSafeTitle( $cleaned );
- if ( $safe === false )
+ if ( $safe === false ) {
return $this->progress( 0 );
+ }
$this->pokeFile( $source, $safe );
return $this->progress( 1 );
}
@@ -86,9 +87,12 @@ class ImageCleanup extends TableCleanup {
return $this->progress( 1 );
}
- $this->progress( 0 );
+ return $this->progress( 0 );
}
+ /**
+ * @param $name string
+ */
private function killRow( $name ) {
if ( $this->dryrun ) {
$this->output( "DRY RUN: would delete bogus row '$name'\n" );
@@ -120,7 +124,8 @@ class ImageCleanup extends TableCleanup {
$path = $this->filePath( $orig );
if ( !file_exists( $path ) ) {
$this->output( "missing file: $path\n" );
- return $this->killRow( $orig );
+ $this->killRow( $orig );
+ return;
}
$db = wfGetDB( DB_MASTER );
@@ -135,7 +140,7 @@ class ImageCleanup extends TableCleanup {
$version = 0;
$final = $new;
$conflict = ( $this->imageExists( $final, $db ) ||
- ( $this->pageExists( $orig, $db ) && $this->pageExists( $final, $db ) ) );
+ ( $this->pageExists( $orig, $db ) && $this->pageExists( $final, $db ) ) );
while ( $conflict ) {
$this->output( "Rename conflicts with '$final'...\n" );
@@ -166,8 +171,8 @@ class ImageCleanup extends TableCleanup {
__METHOD__ );
$dir = dirname( $finalPath );
if ( !file_exists( $dir ) ) {
- if ( !wfMkdirParents( $dir ) ) {
- $this->log( "RENAME FAILED, COULD NOT CREATE $dir" );
+ if ( !wfMkdirParents( $dir, null, __METHOD__ ) ) {
+ $this->output( "RENAME FAILED, COULD NOT CREATE $dir" );
$db->rollback();
return;
}
diff --git a/maintenance/cleanupSpam.php b/maintenance/cleanupSpam.php
index 8561281d..ca1e302b 100644
--- a/maintenance/cleanupSpam.php
+++ b/maintenance/cleanupSpam.php
@@ -36,6 +36,9 @@ class CleanupSpam extends Maintenance {
$username = wfMsg( 'spambot_username' );
$wgUser = User::newFromName( $username );
+ if ( !$wgUser ) {
+ $this->error( "Invalid username", true );
+ }
// Create the user if necessary
if ( !$wgUser->getId() ) {
$wgUser->addToDatabase();
@@ -91,36 +94,29 @@ class CleanupSpam extends Maintenance {
$this->output( $title->getPrefixedDBkey() . " ..." );
$rev = Revision::newFromTitle( $title );
- $revId = $rev->getId();
- $currentRevId = $revId;
+ $currentRevId = $rev->getId();
- while ( $rev && LinkFilter::matchEntry( $rev->getText() , $domain ) ) {
- # Revision::getPrevious can't be used in this way before MW 1.6 (Revision.php 1.26)
- # $rev = $rev->getPrevious();
- $revId = $title->getPreviousRevisionID( $revId );
- if ( $revId ) {
- $rev = Revision::newFromTitle( $title, $revId );
- } else {
- $rev = false;
- }
+ while ( $rev && ( $rev->isDeleted( Revision::DELETED_TEXT ) || LinkFilter::matchEntry( $rev->getText() , $domain ) ) ) {
+ $rev = $rev->getPrevious();
}
- if ( $revId == $currentRevId ) {
+
+ if ( $rev && $rev->getId() == $currentRevId ) {
// The regex didn't match the current article text
// This happens e.g. when a link comes from a template rather than the page itself
$this->output( "False match\n" );
} else {
$dbw = wfGetDB( DB_MASTER );
$dbw->begin();
+ $page = WikiPage::factory( $title );
if ( !$rev ) {
// Didn't find a non-spammy revision, blank the page
$this->output( "blanking\n" );
- $article = new Article( $title );
- $article->doEdit( '', wfMsg( 'spam_blanking', $domain ) );
+ $page->doEdit( '', wfMsgForContent( 'spam_blanking', $domain ) );
} else {
// Revert to this revision
$this->output( "reverting\n" );
- $article = new Article( $title );
- $article->doEdit( $rev->getText(), wfMsg( 'spam_reverting', $domain ), EDIT_UPDATE );
+ $page->doEdit( $rev->getText(), wfMsgForContent( 'spam_reverting', $domain ),
+ EDIT_UPDATE, $rev->getId() );
}
$dbw->commit();
}
diff --git a/maintenance/cleanupTable.inc b/maintenance/cleanupTable.inc
index f63c6d74..1c279762 100644
--- a/maintenance/cleanupTable.inc
+++ b/maintenance/cleanupTable.inc
@@ -43,7 +43,7 @@ class TableCleanup extends Maintenance {
public function execute() {
global $wgUser;
- $wgUser->setName( 'Conversion script' );
+ $wgUser = User::newFromName( 'Conversion script' );
$this->dryrun = $this->hasOption( 'dry-run' );
if ( $this->dryrun ) {
$this->output( "Checking for bad titles...\n" );
diff --git a/maintenance/cleanupTitles.php b/maintenance/cleanupTitles.php
index f03b7957..4fc6415e 100644
--- a/maintenance/cleanupTitles.php
+++ b/maintenance/cleanupTitles.php
@@ -145,8 +145,7 @@ class TitleCleanup extends TableCleanup {
),
array( 'page_id' => $row->page_id ),
__METHOD__ );
- $linkCache = LinkCache::singleton();
- $linkCache->clear();
+ LinkCache::singleton()->clear();
}
}
}
diff --git a/maintenance/cleanupUploadStash.php b/maintenance/cleanupUploadStash.php
index 1926c05a..5f57ffdf 100644
--- a/maintenance/cleanupUploadStash.php
+++ b/maintenance/cleanupUploadStash.php
@@ -32,44 +32,52 @@ class UploadStashCleanup extends Maintenance {
public function __construct() {
parent::__construct();
$this->mDescription = "Clean up abandoned files in temporary uploaded file stash";
- }
+ }
- public function execute() {
+ public function execute() {
$repo = RepoGroup::singleton()->getLocalRepo();
-
+
$dbr = $repo->getSlaveDb();
-
+
+ // how far back should this look for files to delete?
+ global $wgUploadStashMaxAge;
+
$this->output( "Getting list of files to clean up...\n" );
$res = $dbr->select(
'uploadstash',
'us_key',
- 'us_timestamp < ' . $dbr->addQuotes( $dbr->timestamp( time() - UploadStash::REPO_AGE * 3600 ) ),
+ 'us_timestamp < ' . $dbr->addQuotes( $dbr->timestamp( time() - $wgUploadStashMaxAge ) ),
__METHOD__
);
-
+
if( !is_object( $res ) || $res->numRows() == 0 ) {
+ $this->output( "No files to cleanup!\n" );
// nothing to do.
- return false;
+ return;
}
// finish the read before starting writes.
$keys = array();
- foreach($res as $row) {
+ foreach( $res as $row ) {
array_push( $keys, $row->us_key );
}
-
+
$this->output( 'Removing ' . count($keys) . " file(s)...\n" );
// this could be done some other, more direct/efficient way, but using
// UploadStash's own methods means it's less likely to fall accidentally
// out-of-date someday
$stash = new UploadStash( $repo );
-
+
foreach( $keys as $key ) {
- $stash->getFile( $key, true );
- $stash->removeFileNoAuth( $key );
+ try {
+ $stash->getFile( $key, true );
+ $stash->removeFileNoAuth( $key );
+ } catch ( UploadStashBadPathException $ex ) {
+ $this->output( "Failed removing stashed upload with key: $key\n" );
+ }
}
}
}
$maintClass = "UploadStashCleanup";
-require_once( RUN_MAINTENANCE_IF_MAIN ); \ No newline at end of file
+require_once( RUN_MAINTENANCE_IF_MAIN );
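The cleanup above deliberately goes through UploadStash's own accessors instead of deleting rows and files by hand, so the expiry logic is less likely to fall out of sync with the stash implementation, and rows whose backing file is already gone are skipped via the exception handler. A minimal sketch of the same flow, assuming $repo is the local repo:

global $wgUploadStashMaxAge;
$dbr = $repo->getSlaveDb();
$cutoff = $dbr->timestamp( time() - $wgUploadStashMaxAge );
$res = $dbr->select( 'uploadstash', 'us_key',
	'us_timestamp < ' . $dbr->addQuotes( $cutoff ), __METHOD__ );

$stash = new UploadStash( $repo );
foreach ( $res as $row ) {
	try {
		$stash->getFile( $row->us_key, true );    // load without permission checks
		$stash->removeFileNoAuth( $row->us_key );
	} catch ( UploadStashBadPathException $ex ) {
		// Row refers to a file that no longer exists on disk; skip it.
	}
}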
diff --git a/maintenance/clear_stats.php b/maintenance/clear_stats.php
index 8f91864e..61314e67 100644
--- a/maintenance/clear_stats.php
+++ b/maintenance/clear_stats.php
@@ -36,7 +36,6 @@ class clear_stats extends Maintenance {
$wgMemc->delete( "$db:stats:request_with_session" );
$wgMemc->delete( "$db:stats:request_without_session" );
$wgMemc->delete( "$db:stats:pcache_hit" );
- $wgMemc->delete( "$db:stats:pcache_miss_invalid" );
$wgMemc->delete( "$db:stats:pcache_miss_expired" );
$wgMemc->delete( "$db:stats:pcache_miss_absent" );
$wgMemc->delete( "$db:stats:pcache_miss_stub" );
@@ -46,6 +45,8 @@ class clear_stats extends Maintenance {
$wgMemc->delete( "$db:stats:diff_cache_hit" );
$wgMemc->delete( "$db:stats:diff_cache_miss" );
$wgMemc->delete( "$db:stats:diff_uncacheable" );
+ $wgMemc->delete( "$db:stats:job-insert" );
+ $wgMemc->delete( "$db:stats:job-pop" );
}
}
}
diff --git a/maintenance/commandLine.inc b/maintenance/commandLine.inc
index f57c0b67..c7adbfbc 100644
--- a/maintenance/commandLine.inc
+++ b/maintenance/commandLine.inc
@@ -21,7 +21,7 @@
* @ingroup Maintenance
*/
-require( dirname( __FILE__ ) . '/Maintenance.php' );
+require_once( dirname( __FILE__ ) . '/Maintenance.php' );
global $optionsWithArgs;
if ( !isset( $optionsWithArgs ) ) {
@@ -37,19 +37,13 @@ class CommandLineInc extends Maintenance {
}
}
- public function getDbType() {
- global $wgUseNormalUser;
-
- return ( isset( $wgUseNormalUser ) && $wgUseNormalUser ) ?
- Maintenance::DB_STD : Maintenance::DB_ADMIN;
- }
-
/**
* No help, it would just be misleading since it misses custom options
*/
protected function maybeHelp( $force = false ) {
- if ( !$force )
+ if ( !$force ) {
return;
+ }
parent::maybeHelp( true );
}
diff --git a/maintenance/convertUserOptions.php b/maintenance/convertUserOptions.php
index f46f710d..da6ff9b6 100644
--- a/maintenance/convertUserOptions.php
+++ b/maintenance/convertUserOptions.php
@@ -33,27 +33,38 @@ class ConvertUserOptions extends Maintenance {
}
public function execute() {
- $this->output( "Beginning batch conversion of user options.\n" );
+ $this->output( "...batch conversion of user_options: " );
$id = 0;
$dbw = wfGetDB( DB_MASTER );
+ if ( !$dbw->fieldExists( 'user', 'user_options', __METHOD__ ) ) {
+ $this->output( "nothing to migrate. " );
+ return;
+ }
while ( $id !== null ) {
- $idCond = 'user_id>' . $dbw->addQuotes( $id );
- $optCond = "user_options!=" . $dbw->addQuotes( '' ); // For compatibility
+ $idCond = 'user_id > ' . $dbw->addQuotes( $id );
+ $optCond = "user_options != " . $dbw->addQuotes( '' ); // For compatibility
$res = $dbw->select( 'user', '*',
- array( $optCond, $idCond ), __METHOD__,
- array( 'LIMIT' => 50, 'FOR UPDATE' ) );
+ array( $optCond, $idCond ), __METHOD__,
+ array( 'LIMIT' => 50, 'FOR UPDATE' )
+ );
$id = $this->convertOptionBatch( $res, $dbw );
$dbw->commit();
wfWaitForSlaves();
- if ( $id )
+ if ( $id ) {
$this->output( "--Converted to ID $id\n" );
+ }
}
- $this->output( "Conversion done. Converted " . $this->mConversionCount . " user records.\n" );
+ $this->output( "done. Converted " . $this->mConversionCount . " user records.\n" );
}
+ /**
+ * @param $res
+ * @param $dbw DatabaseBase
+ * @return null|int
+ */
function convertOptionBatch( $res, $dbw ) {
$id = null;
foreach ( $res as $row ) {
@@ -62,6 +73,14 @@ class ConvertUserOptions extends Maintenance {
$u = User::newFromRow( $row );
$u->saveSettings();
+
+ // Do this here as saveSettings() doesn't set user_options to '' anymore!
+ $dbw->update(
+ 'user',
+ array( 'user_options' => '' ),
+ array( 'user_id' => $row->user_id ),
+ __METHOD__
+ );
$id = $row->user_id;
}
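The conversion works in small locked batches so the user table is never locked for long and replication lag stays bounded: select up to 50 rows past the last converted user_id with FOR UPDATE, convert them, commit, then wait for the slaves before the next round. A rough sketch of that batching shape, assuming $dbw is the master connection:

$id = 0;
while ( $id !== null ) {
	$res = $dbw->select( 'user', '*',
		array(
			"user_options != " . $dbw->addQuotes( '' ),
			'user_id > ' . $dbw->addQuotes( $id )
		),
		__METHOD__,
		array( 'LIMIT' => 50, 'FOR UPDATE' )
	);

	$id = null; // stays null when the batch is empty, which ends the loop
	foreach ( $res as $row ) {
		// ... convert one user's options here ...
		$id = $row->user_id;
	}

	$dbw->commit();
	wfWaitForSlaves();
}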
diff --git a/maintenance/deleteBatch.php b/maintenance/deleteBatch.php
index eb2e1f34..56fe13a4 100644
--- a/maintenance/deleteBatch.php
+++ b/maintenance/deleteBatch.php
@@ -49,9 +49,16 @@ class DeleteBatch extends Maintenance {
chdir( $oldCwd );
# Options processing
- $user = $this->getOption( 'u', 'Delete page script' );
+ $username = $this->getOption( 'u', 'Delete page script' );
$reason = $this->getOption( 'r', '' );
$interval = $this->getOption( 'i', 0 );
+
+ $user = User::newFromName( $username );
+ if ( !$user ) {
+ $this->error( "Invalid username", true );
+ }
+ $wgUser = $user;
+
if ( $this->hasArg() ) {
$file = fopen( $this->getArg(), 'r' );
} else {
@@ -62,7 +69,7 @@ class DeleteBatch extends Maintenance {
if ( !$file ) {
$this->error( "Unable to read file, exiting", true );
}
- $wgUser = User::newFromName( $user );
+
$dbw = wfGetDB( DB_MASTER );
# Handle each entry
@@ -71,31 +78,27 @@ class DeleteBatch extends Maintenance {
if ( $line == '' ) {
continue;
}
- $page = Title::newFromText( $line );
- if ( is_null( $page ) ) {
+ $title = Title::newFromText( $line );
+ if ( is_null( $title ) ) {
$this->output( "Invalid title '$line' on line $linenum\n" );
continue;
}
- if ( !$page->exists() ) {
+ if ( !$title->exists() ) {
$this->output( "Skipping nonexistent page '$line'\n" );
continue;
}
-
- $this->output( $page->getPrefixedText() );
+ $this->output( $title->getPrefixedText() );
$dbw->begin();
- if ( $page->getNamespace() == NS_FILE ) {
- $art = new ImagePage( $page );
- $img = wfFindFile( $art->mTitle );
- if ( !$img
- || !$img->isLocal()
- || !$img->delete( $reason ) ) {
- $this->output( " FAILED to delete image file... " );
+ if ( $title->getNamespace() == NS_FILE ) {
+ $img = wfFindFile( $title );
+ if ( $img && $img->isLocal() && !$img->delete( $reason ) ) {
+ $this->output( " FAILED to delete associated file... " );
}
- } else {
- $art = new Article( $page );
}
- $success = $art->doDeleteArticle( $reason );
+ $page = WikiPage::factory( $title );
+ $error = '';
+ $success = $page->doDeleteArticle( $reason, false, 0, false, $error, $user );
$dbw->commit();
if ( $success ) {
$this->output( " Deleted!\n" );
@@ -107,7 +110,7 @@ class DeleteBatch extends Maintenance {
sleep( $interval );
}
wfWaitForSlaves();
-}
+ }
}
}
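Deletion now goes through WikiPage::factory() for every namespace, with file pages deleting the backing file first, and doDeleteArticle() is handed the acting user plus a by-reference error string. A minimal per-title sketch, assuming $title, $reason, $dbw and $user are set up as in the script:

$dbw->begin();
if ( $title->getNamespace() == NS_FILE ) {
	// Remove the stored file itself before the description page.
	$img = wfFindFile( $title );
	if ( $img && $img->isLocal() && !$img->delete( $reason ) ) {
		// File deletion failed; the page row is still handled below.
	}
}
$page = WikiPage::factory( $title );
$error = ''; // filled by reference if the deletion fails
$success = $page->doDeleteArticle( $reason, false, 0, false, $error, $user );
$dbw->commit();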
diff --git a/maintenance/deleteDefaultMessages.php b/maintenance/deleteDefaultMessages.php
index a33921b1..21d7755f 100644
--- a/maintenance/deleteDefaultMessages.php
+++ b/maintenance/deleteDefaultMessages.php
@@ -31,9 +31,7 @@ class DeleteDefaultMessages extends Maintenance {
}
public function execute() {
-
- $user = 'MediaWiki default';
- $reason = 'No longer required';
+ global $wgUser;
$this->output( "Checking existence of old default messages..." );
$dbr = wfGetDB( DB_SLAVE );
@@ -54,9 +52,12 @@ class DeleteDefaultMessages extends Maintenance {
# Deletions will be made by $user temporarily added to the bot group
# in order to hide it in RecentChanges.
- global $wgUser;
- $wgUser = User::newFromName( $user );
- $wgUser->addGroup( 'bot' );
+ $user = User::newFromName( 'MediaWiki default' );
+ if ( !$user ) {
+ $this->error( "Invalid username", true );
+ }
+ $user->addGroup( 'bot' );
+ $wgUser = $user;
# Handle deletion
$this->output( "\n...deleting old default messages (this may take a long time!)...", 'msg' );
@@ -66,9 +67,10 @@ class DeleteDefaultMessages extends Maintenance {
wfWaitForSlaves();
$dbw->ping();
$title = Title::makeTitle( $row->page_namespace, $row->page_title );
- $article = new Article( $title );
+ $page = WikiPage::factory( $title );
$dbw->begin();
- $article->doDeleteArticle( $reason );
+ $error = ''; // Passed by ref
+ $page->doDeleteArticle( 'No longer required', false, 0, false, $error, $user );
$dbw->commit();
}
diff --git a/maintenance/dev/README b/maintenance/dev/README
new file mode 100644
index 00000000..ca47d136
--- /dev/null
+++ b/maintenance/dev/README
@@ -0,0 +1,7 @@
+maintenance/dev/ scripts can help quickly set up a local MediaWiki for development purposes.
+
+Wikis set up in this way are NOT meant to be publicly available. They use a development database that is not acceptable for use in production, place a sqlite database in an unsafe location that a real wiki should never use, and use predictable default logins for the initial administrator user.
+
+Running maintenance/dev/install.sh will download and install a local copy of php 5.4, install a sqlite-powered instance of MW for development, and then start up a local webserver to view the wiki.
+
+After installation, you can bring the webserver back up at any time with maintenance/dev/start.sh
diff --git a/maintenance/dev/includes/php.sh b/maintenance/dev/includes/php.sh
new file mode 100644
index 00000000..3021b93b
--- /dev/null
+++ b/maintenance/dev/includes/php.sh
@@ -0,0 +1,12 @@
+# Include-able script to determine the location of our php if any
+
+if [ -d "$DEV/php" -a -x "$DEV/php/bin/php" ]; then
+ # Quick local copy
+ PHP="$DEV/php/bin/php"
+elif [ -d "$HOME/.mediawiki/php" -a -x "$HOME/.mediawiki/php/bin/php" ]; then
+ # Previous home directory location to install php in
+ PHP="$HOME/.mediawiki/php/bin/php"
+elif [ -d "$HOME/.mwphp" -a -x "$HOME/.mwphp/bin/php" ]; then
+ # Previous home directory location to install php in
+ PHP="$HOME/.mwphp/bin/php"
+fi
diff --git a/maintenance/dev/includes/require-php.sh b/maintenance/dev/includes/require-php.sh
new file mode 100644
index 00000000..470e6eb8
--- /dev/null
+++ b/maintenance/dev/includes/require-php.sh
@@ -0,0 +1,8 @@
+# Include-able script to require that we have a known php binary we can execute
+
+. "$DEV/includes/php.sh"
+
+if [ "x$PHP" == "x" -o ! -x "$PHP" ]; then
+ echo "Local copy of PHP is not installed"
+ exit 1
+fi
diff --git a/maintenance/dev/includes/router.php b/maintenance/dev/includes/router.php
new file mode 100644
index 00000000..f6a062b6
--- /dev/null
+++ b/maintenance/dev/includes/router.php
@@ -0,0 +1,82 @@
+<?php
+
+# Router for the php cli-server built-in webserver
+# http://ca2.php.net/manual/en/features.commandline.webserver.php
+
+if ( php_sapi_name() != 'cli-server' ) {
+ die( "This script can only be run by php's cli-server sapi." );
+}
+
+ini_set('display_errors', 1);
+error_reporting(E_ALL);
+
+if ( isset( $_SERVER["SCRIPT_FILENAME"] ) ) {
+ # Known resource, sometimes a script sometimes a file
+ $file = $_SERVER["SCRIPT_FILENAME"];
+} elseif ( isset( $_SERVER["SCRIPT_NAME"] ) ) {
+ # Usually unknown, document root relative rather than absolute
+ # Happens with some cases like /wiki/File:Image.png
+ if ( is_readable( $_SERVER['DOCUMENT_ROOT'] . $_SERVER["SCRIPT_NAME"] ) ) {
+ # Just in case this actually IS a file, set it here
+ $file = $_SERVER['DOCUMENT_ROOT'] . $_SERVER["SCRIPT_NAME"];
+ } else {
+ # Otherwise let's pretend that this is supposed to go to index.php
+ $file = $_SERVER['DOCUMENT_ROOT'] . '/index.php';
+ }
+} else {
+ # Meh, we'll just give up
+ return false;
+}
+
+# And now do handling for that $file
+
+if ( !is_readable( $file ) ) {
+ # Let the server throw the error if it doesn't exist
+ return false;
+}
+$ext = pathinfo( $file, PATHINFO_EXTENSION );
+if ( $ext == 'php' || $ext == 'php5' ) {
+ # Execute php files
+ # We use require and return true here because when you return false
+ # the php webserver will discard post data and things like login
+ # will not function in the dev environment.
+ require( $file );
+ return true;
+}
+$mime = false;
+$lines = explode( "\n", file_get_contents( "includes/mime.types" ) );
+foreach ( $lines as $line ) {
+ $exts = explode( " ", $line );
+ $mime = array_shift( $exts );
+ if ( in_array( $ext, $exts ) ) {
+ break; # this is the right value for $mime
+ }
+ $mime = false;
+}
+if ( !$mime ) {
+ $basename = basename( $file );
+ if ( $basename == strtoupper( $basename ) ) {
+ # If it's something like README, serve it as text
+ $mime = "text/plain";
+ }
+}
+if ( $mime ) {
+ # Use custom handling to serve files with a known mime type
+ # This way we can serve things like .svg files that the built-in
+ # PHP webserver doesn't understand.
+ # ;) Nicely enough we just happen to bundle a mime.types file
+ $f = fopen($file, 'rb');
+ if ( preg_match( '#^text/#', $mime ) ) {
+ # Text should have a charset=UTF-8 (php's webserver does this too)
+ header("Content-Type: $mime; charset=UTF-8");
+ } else {
+ header("Content-Type: $mime");
+ }
+ header("Content-Length: " . filesize($file));
+ // Stream that out to the browser
+ fpassthru($f);
+ return true;
+}
+
+# Let the php server handle things on its own otherwise
+return false;
diff --git a/maintenance/dev/install.sh b/maintenance/dev/install.sh
new file mode 100644
index 00000000..2219894d
--- /dev/null
+++ b/maintenance/dev/install.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+
+if [ "x$BASH_SOURCE" == "x" ]; then echo '$BASH_SOURCE not set'; exit 1; fi
+DEV=$(cd -P "$(dirname "${BASH_SOURCE[0]}" )" && pwd)
+
+"$DEV/installphp.sh"
+"$DEV/installmw.sh"
+"$DEV/start.sh"
diff --git a/maintenance/dev/installmw.sh b/maintenance/dev/installmw.sh
new file mode 100644
index 00000000..9ae3c593
--- /dev/null
+++ b/maintenance/dev/installmw.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+
+if [ "x$BASH_SOURCE" == "x" ]; then echo '$BASH_SOURCE not set'; exit 1; fi
+DEV=$(cd -P "$(dirname "${BASH_SOURCE[0]}" )" && pwd)
+
+. "$DEV/includes/require-php.sh"
+
+set -e
+
+PORT=4881
+
+cd "$DEV/../../"; # $IP
+
+mkdir -p "$DEV/data"
+"$PHP" maintenance/install.php --server="http://localhost:$PORT" --scriptpath="" --dbtype=sqlite --dbpath="$DEV/data" --pass=admin "Trunk Test" "$USER"
+echo ""
+echo "Development wiki created with admin user $USER and password 'admin'."
+echo ""
diff --git a/maintenance/dev/installphp.sh b/maintenance/dev/installphp.sh
new file mode 100644
index 00000000..d26ffa67
--- /dev/null
+++ b/maintenance/dev/installphp.sh
@@ -0,0 +1,57 @@
+#!/bin/bash
+
+if [ "x$BASH_SOURCE" == "x" ]; then echo '$BASH_SOURCE not set'; exit 1; fi
+DEV=$(cd -P "$(dirname "${BASH_SOURCE[0]}" )" && pwd)
+
+set -e # DO NOT USE PIPES unless this is rewritten
+
+. "$DEV/includes/php.sh"
+
+if [ "x$PHP" != "x" -a -x "$PHP" ]; then
+ echo "PHP is already installed"
+ exit 0
+fi
+
+TAR=php5.4-latest.tar.gz
+PHPURL="http://snaps.php.net/$TAR"
+
+cd "$DEV"
+
+echo "Preparing to download and install a local copy of PHP 5.4, note that this can take some time to do."
+echo "If you wish to avoid re-doing this for uture dev installations of MediaWiki we suggest installing php in ~/.mediawiki/php"
+echo -n "Install PHP in ~/.mediawiki/php [y/N]: "
+read INSTALLINHOME
+
+case "$INSTALLINHOME" in
+ [Yy] | [Yy][Ee][Ss] )
+ PREFIX="$HOME/.mediawiki/php"
+ ;;
+ *)
+ PREFIX="$DEV/php/"
+ ;;
+esac
+
+# Some Debian-like systems bundle wget but not curl, while other systems
+# like OS X bundle curl but not wget... use whatever is available
+echo -n "Downloading PHP 5.4"
+if command -v wget &>/dev/null; then
+ echo "- using wget"
+ wget "$PHPURL"
+elif command -v curl &>/dev/null; then
+ echo "- using curl"
+ curl -O "$PHPURL"
+else
+ echo "- aborting"
+ echo "Could not find curl or wget." >&2;
+ exit 1;
+fi
+
+echo "Extracting php 5.4"
+tar -xzf "$TAR"
+
+cd php5.4-*/
+
+echo "Configuring and installing php 5.4 in $PREFIX"
+./configure --prefix="$PREFIX"
+make
+make install
diff --git a/maintenance/dev/start.sh b/maintenance/dev/start.sh
new file mode 100644
index 00000000..dd7363a8
--- /dev/null
+++ b/maintenance/dev/start.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+if [ "x$BASH_SOURCE" == "x" ]; then echo '$BASH_SOURCE not set'; exit 1; fi
+DEV=$(cd -P "$(dirname "${BASH_SOURCE[0]}" )" && pwd)
+
+. "$DEV/includes/require-php.sh"
+
+PORT=4881
+
+echo "Starting up MediaWiki at http://localhost:$PORT/"
+echo ""
+
+cd "$DEV/../../"; # $IP
+"$PHP" -S "localhost:$PORT" "$DEV/includes/router.php"
diff --git a/maintenance/doMaintenance.php b/maintenance/doMaintenance.php
index 44e00032..6b29c5fd 100644
--- a/maintenance/doMaintenance.php
+++ b/maintenance/doMaintenance.php
@@ -74,18 +74,18 @@ require_once( MWInit::compiledPath( 'includes/DefaultSettings.php' ) );
if ( defined( 'MW_CONFIG_CALLBACK' ) ) {
# Use a callback function to configure MediaWiki
MWFunction::call( MW_CONFIG_CALLBACK );
-} elseif ( file_exists( "$IP/../wmf-config/wikimedia-mode" ) ) {
- // Load settings, using wikimedia-mode if needed
- // @todo FIXME: Replace this hack with general farm-friendly code
- # @todo FIXME: Wikimedia-specific stuff needs to go away to an ext
- # Maybe a hook?
- global $cluster;
- $cluster = 'pmtpa';
- require( MWInit::interpretedPath( '../wmf-config/wgConf.php' ) );
- $maintenance->loadWikimediaSettings();
- require( MWInit::interpretedPath( '../wmf-config/CommonSettings.php' ) );
} else {
- require_once( $maintenance->loadSettings() );
+ if ( file_exists( "$IP/../wmf-config/wikimedia-mode" ) ) {
+ // Load settings, using wikimedia-mode if needed
+ // @todo FIXME: Replace this hack with general farm-friendly code
+ # @todo FIXME: Wikimedia-specific stuff needs to go away to an ext
+ # Maybe a hook?
+ global $cluster;
+ $cluster = 'pmtpa';
+ require( MWInit::interpretedPath( '../wmf-config/wgConf.php' ) );
+ }
+ // Require the configuration (probably LocalSettings.php)
+ require( $maintenance->loadSettings() );
}
if ( $maintenance->getDbType() === Maintenance::DB_ADMIN &&
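When MW_CONFIG_CALLBACK is defined, the block above skips LocalSettings.php entirely and lets that callback populate configuration, which is how a wiki farm can plug in its own settings loader. A hypothetical sketch (the class name and settings below are invented for illustration):

// Defined by the farm's entry point before doMaintenance.php runs.
define( 'MW_CONFIG_CALLBACK', 'MyFarmConfig::load' );

class MyFarmConfig {
	public static function load() {
		global $wgDBname, $wgSitename;
		// Pick per-wiki settings from the environment, a dblist, etc.
		$db = getenv( 'FARM_DB' );
		$wgDBname = $db ? $db : 'examplewiki';
		$wgSitename = 'Example Wiki';
	}
}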
diff --git a/maintenance/dtrace/counts.d b/maintenance/dtrace/counts.d
deleted file mode 100644
index bedb4547..00000000
--- a/maintenance/dtrace/counts.d
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
- * This software is in the public domain.
- *
- * $Id$
- */
-
-#pragma D option quiet
-
-self int tottime;
-BEGIN {
- tottime = timestamp;
-}
-
-php$target:::function-entry
-{
- @counts[copyinstr(arg0)] = count();
-}
-
-END {
- printf("Total time: %dus\n", (timestamp - tottime) / 1000);
- printf("# calls by function:\n");
- printa("%-40s %@d\n", @counts);
-}
-
diff --git a/maintenance/dtrace/tree.d b/maintenance/dtrace/tree.d
deleted file mode 100644
index a799cb12..00000000
--- a/maintenance/dtrace/tree.d
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * This software is in the public domain.
- *
- * $Id$
- */
-
-#pragma D option quiet
-
-self int indent;
-self int times[int];
-
-php$target:::function-entry
-{
- @counts[copyinstr(arg0)] = count();
- printf("%*s", self->indent, "");
- printf("-> %s\n", copyinstr(arg0));
- self->times[self->indent] = timestamp;
- self->indent += 2;
-}
-
-php$target:::function-return
-{
- self->indent -= 2;
- printf("%*s", self->indent, "");
- printf("<- %s %dus\n", copyinstr(arg0), (timestamp - self->times[self->indent]) / 1000);
-}
diff --git a/maintenance/dumpBackup.php b/maintenance/dumpBackup.php
index 15189261..c49a2963 100644
--- a/maintenance/dumpBackup.php
+++ b/maintenance/dumpBackup.php
@@ -119,7 +119,7 @@ Options:
Fancy stuff: (Works? Add examples please.)
--plugin=<class>[:<file>] Load a dump plugin class
--output=<type>:<file> Begin a filtered output stream;
- <type>s: file, gzip, bzip2, 7zip
+ <type>s: file, gzip, bzip2, 7zip
--filter=<type>[:<options>] Add a filter on an output branch
ENDS
diff --git a/maintenance/dumpHTML.php b/maintenance/dumpHTML.php
deleted file mode 100644
index bd94958e..00000000
--- a/maintenance/dumpHTML.php
+++ /dev/null
@@ -1,7 +0,0 @@
-dumpHTML has moved to the DumpHTML extension.
-
-WebDAV/SVN:
-http://svn.wikimedia.org/svnroot/mediawiki/trunk/extensions/DumpHTML/
-
-Web:
-http://svn.wikimedia.org/viewvc/mediawiki/trunk/extensions/DumpHTML/
diff --git a/maintenance/dumpInterwiki.php b/maintenance/dumpInterwiki.php
deleted file mode 100644
index 217afd88..00000000
--- a/maintenance/dumpInterwiki.php
+++ /dev/null
@@ -1,251 +0,0 @@
-<?php
-/**
- * Build constant slightly compact database of interwiki prefixes
- * Wikimedia specific!
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; either version 2 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License along
- * with this program; if not, write to the Free Software Foundation, Inc.,
- * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- * http://www.gnu.org/copyleft/gpl.html
- *
- * @file
- * @todo document
- * @ingroup Maintenance
- * @ingroup Wikimedia
- */
-
-require_once( dirname( __FILE__ ) . '/Site.php' );
-
-require_once( dirname( __FILE__ ) . '/Maintenance.php' );
-
-class DumpInterwiki extends Maintenance {
-
- public function __construct() {
- parent::__construct();
- $this->mDescription = "Build constant slightly compact database of interwiki prefixes.";
- $this->addOption( 'langlist', 'File with one language code per line', false, true );
- $this->addOption( 'dblist', 'File with one db per line', false, true );
- $this->addOption( 'specialdbs', "File with one 'special' db per line", false, true );
- $this->addOption( 'o', 'Cdb output file', false, true );
- $this->addOption( 'protocolrelative', 'Output wikimedia interwiki urls as protocol relative', false, false );
- }
-
- function execute() {
- # List of language prefixes likely to be found in multi-language sites
- $this->langlist = array_map( "trim", file( $this->getOption( 'langlist', "/home/wikipedia/common/langlist" ) ) );
-
- # List of all database names
- $this->dblist = array_map( "trim", file( $this->getOption( 'dblist', "/home/wikipedia/common/all.dblist" ) ) );
-
- # Special-case databases
- $this->specials = array_flip( array_map( "trim", file( $this->getOption( 'specialdbs', "/home/wikipedia/common/special.dblist" ) ) ) );
-
- if ( $this->hasOption( 'o' ) ) {
- $this->dbFile = CdbWriter::open( $this->getOption( 'o' ) ) ;
- } else {
- $this->dbFile = false;
- }
-
- if ( $this->hasOption( 'protocolrelative' ) ) {
- $this->urlprotocol = '';
- } else {
- $this->urlprotocol = 'http:';
- }
-
- $this->getRebuildInterwikiDump();
- }
-
- function getRebuildInterwikiDump() {
- global $wgContLang;
-
- # Multi-language sites
- # db suffix => db suffix, iw prefix, hostname
- $sites = array(
- 'wiki' => new Site( 'wiki', 'w', 'wikipedia.org' ),
- 'wiktionary' => new Site( 'wiktionary', 'wikt', 'wiktionary.org' ),
- 'wikiquote' => new Site( 'wikiquote', 'q', 'wikiquote.org' ),
- 'wikibooks' => new Site( 'wikibooks', 'b', 'wikibooks.org' ),
- 'wikinews' => new Site( 'wikinews', 'n', 'wikinews.org' ),
- 'wikisource' => new Site( 'wikisource', 's', 'wikisource.org' ),
- 'wikimedia' => new Site( 'wikimedia', 'chapter', 'wikimedia.org' ),
- 'wikiversity' => new Site( 'wikiversity', 'v', 'wikiversity.org' ),
- );
-
- # Extra interwiki links that can't be in the intermap for some reason
- $extraLinks = array(
- array( 'm', $this->urlprotocol . '//meta.wikimedia.org/wiki/$1', 1 ),
- array( 'meta', $this->urlprotocol . '//meta.wikimedia.org/wiki/$1', 1 ),
- array( 'sep11', $this->urlprotocol . '//sep11.wikipedia.org/wiki/$1', 1 ),
- );
-
- # Language aliases, usually configured as redirects to the real wiki in apache
- # Interlanguage links are made directly to the real wiki
- # Something horrible happens if you forget to list an alias here, I can't
- # remember what
- $this->languageAliases = array(
- 'zh-cn' => 'zh',
- 'zh-tw' => 'zh',
- 'dk' => 'da',
- 'nb' => 'no',
- );
-
- # Special case prefix rewrites, for the benefit of Swedish which uses s:t
- # as an abbreviation for saint
- $this->prefixRewrites = array(
- 'svwiki' => array( 's' => 'src' ),
- );
-
- # Construct a list of reserved prefixes
- $reserved = array();
- foreach ( $this->langlist as $lang ) {
- $reserved[$lang] = 1;
- }
- foreach ( $this->languageAliases as $alias => $lang ) {
- $reserved[$alias] = 1;
- }
- foreach ( $sites as $site ) {
- $reserved[$site->lateral] = 1;
- }
-
- # Extract the intermap from meta
- $intermap = Http::get( 'http://meta.wikimedia.org/w/index.php?title=Interwiki_map&action=raw', 30 );
- $lines = array_map( 'trim', explode( "\n", trim( $intermap ) ) );
-
- if ( !$lines || count( $lines ) < 2 ) {
- $this->error( "m:Interwiki_map not found", true );
- }
-
- # Global iterwiki map
- foreach ( $lines as $line ) {
- if ( preg_match( '/^\|\s*(.*?)\s*\|\|\s*(.*?)\s*$/', $line, $matches ) ) {
- $prefix = $wgContLang->lc( $matches[1] );
- $prefix = str_replace( ' ', '_', $prefix );
-
- $url = $matches[2];
- if ( preg_match( '/(wikipedia|wiktionary|wikisource|wikiquote|wikibooks|wikimedia)\.org/', $url ) ) {
- if ( $this->hasOption( 'protocolrelative' ) ) {
- if ( substr( $url, 0, 5 ) == 'http:' ) {
- $url = substr( $url, 5 );
- } else if ( substr( $url, 0, 6 ) == 'https:' ) {
- $url = substr( $url, 6 );
- }
- }
- $local = 1;
- } else {
- $local = 0;
- }
-
- if ( empty( $reserved[$prefix] ) ) {
- $imap = array( "iw_prefix" => $prefix, "iw_url" => $url, "iw_local" => $local );
- $this->makeLink ( $imap, "__global" );
- }
- }
- }
-
- # Exclude Wikipedia for Wikipedia
- $this->makeLink ( array ( 'iw_prefix' => 'wikipedia', 'is_url' => null ), "_wiki" );
-
- # Multilanguage sites
- foreach ( $sites as $site ) {
- $this->makeLanguageLinks ( $site, "_" . $site->suffix );
- }
-
- foreach ( $this->dblist as $db ) {
- if ( isset( $this->specials[$db] ) ) {
- # Special wiki
- # Has interwiki links and interlanguage links to wikipedia
-
- $this->makeLink( array( 'iw_prefix' => $db, 'iw_url' => "wiki" ), "__sites" );
- # Links to multilanguage sites
- foreach ( $sites as $targetSite ) {
- $this->makeLink( array( 'iw_prefix' => $targetSite->lateral,
- 'iw_url' => $targetSite->getURL( 'en', $this->urlprotocol ),
- 'iw_local' => 1 ), $db );
- }
- } else {
- # Find out which site this DB belongs to
- $site = false;
- foreach ( $sites as $candidateSite ) {
- $suffix = $candidateSite->suffix;
- if ( preg_match( "/(.*)$suffix$/", $db, $matches ) ) {
- $site = $candidateSite;
- break;
- }
- }
- $this->makeLink( array( 'iw_prefix' => $db, 'iw_url' => $site->suffix ), "__sites" );
- if ( !$site ) {
- $this->error( "Invalid database $db\n" );
- continue;
- }
- $lang = $matches[1];
-
- # Lateral links
- foreach ( $sites as $targetSite ) {
- if ( $targetSite->suffix != $site->suffix ) {
- $this->makeLink( array( 'iw_prefix' => $targetSite->lateral,
- 'iw_url' => $targetSite->getURL( $lang, $this->urlprotocol ),
- 'iw_local' => 1 ), $db );
- }
- }
-
- if ( $site->suffix == "wiki" ) {
- $this->makeLink( array( 'iw_prefix' => 'w',
- 'iw_url' => $this->urlprotocol . "//en.wikipedia.org/wiki/$1",
- 'iw_local' => 1 ), $db );
- }
-
- }
- }
- foreach ( $extraLinks as $link ) {
- $this->makeLink( $link, "__global" );
- }
- }
-
- # ------------------------------------------------------------------------------------------
-
- # Executes part of an INSERT statement, corresponding to all interlanguage links to a particular site
- function makeLanguageLinks( &$site, $source ) {
- # Actual languages with their own databases
- foreach ( $this->langlist as $targetLang ) {
- $this->makeLink( array( $targetLang, $site->getURL( $targetLang, $this->urlprotocol ), 1 ), $source );
- }
-
- # Language aliases
- foreach ( $this->languageAliases as $alias => $lang ) {
- $this->makeLink( array( $alias, $site->getURL( $lang, $this->urlprotocol ), 1 ), $source );
- }
- }
-
- function makeLink( $entry, $source ) {
- if ( isset( $this->prefixRewrites[$source] ) && isset( $this->prefixRewrites[$source][$entry[0]] ) )
- $entry[0] = $this->prefixRewrites[$source][$entry[0]];
-
- if ( !array_key_exists( "iw_prefix", $entry ) ) {
- $entry = array( "iw_prefix" => $entry[0], "iw_url" => $entry[1], "iw_local" => $entry[2] );
- }
- if ( array_key_exists( $source, $this->prefixRewrites ) &&
- array_key_exists( $entry['iw_prefix'], $this->prefixRewrites[$source] ) ) {
- $entry['iw_prefix'] = $this->prefixRewrites[$source][$entry['iw_prefix']];
- }
-
- if ( $this->dbFile ) {
- $this->dbFile->set( "{$source}:{$entry['iw_prefix']}", trim( "{$entry['iw_local']} {$entry['iw_url']}" ) );
- } else {
- $this->output( "{$source}:{$entry['iw_prefix']} {$entry['iw_url']} {$entry['iw_local']}\n" );
- }
- }
-}
-
-$maintClass = "DumpInterwiki";
-require_once( RUN_MAINTENANCE_IF_MAIN );
-
diff --git a/maintenance/dumpLinks.php b/maintenance/dumpLinks.php
index 39a9e955..0101dc8d 100644
--- a/maintenance/dumpLinks.php
+++ b/maintenance/dumpLinks.php
@@ -26,7 +26,7 @@
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/gpl.html
*
- * @ingroup Mainatenance
+ * @ingroup Maintenance
*/
require_once( dirname( __FILE__ ) . '/Maintenance.php' );
diff --git a/maintenance/dumpTextPass.php b/maintenance/dumpTextPass.php
index 4e85e64e..0fed29fc 100644
--- a/maintenance/dumpTextPass.php
+++ b/maintenance/dumpTextPass.php
@@ -2,7 +2,7 @@
/**
* Script that postprocesses XML dumps from dumpBackup.php to add page text
*
- * Copyright © 2005 Brion Vibber <brion@pobox.com>, 2010 Alexandre Emsenhuber
+ * Copyright (C) 2005 Brion Vibber <brion@pobox.com>
* http://www.mediawiki.org/
*
* This program is free software; you can redistribute it and/or modify
@@ -66,6 +66,11 @@ class TextPassDumper extends BackupDumper {
var $checkpointJustWritten = false;
var $checkpointFiles = array();
+ /**
+ * @var DatabaseBase
+ */
+ protected $db;
+
function initProgress( $history ) {
parent::initProgress();
$this->timeOfCheckpoint = $this->startTime;
@@ -169,7 +174,8 @@ class TextPassDumper extends BackupDumper {
*/
function showReport() {
if ( !$this->prefetch ) {
- return parent::showReport();
+ parent::showReport();
+ return;
}
if ( $this->reporting ) {
@@ -186,8 +192,7 @@ class TextPassDumper extends BackupDumper {
$etats = wfTimestamp( TS_DB, intval( $eta ) );
if ( $this->fetchCount ) {
$fetchRate = 100.0 * $this->prefetchCount / $this->fetchCount;
- }
- else {
+ } else {
$fetchRate = '-';
}
$pageRate = $this->pageCount / $deltaAll;
@@ -201,8 +206,7 @@ class TextPassDumper extends BackupDumper {
if ( $deltaPart ) {
if ( $this->fetchCountLast ) {
$fetchRatePart = 100.0 * $this->prefetchCountLast / $this->fetchCountLast;
- }
- else {
+ } else {
$fetchRatePart = '-';
}
$pageRatePart = $this->pageCountPart / $deltaPart;
@@ -228,9 +232,9 @@ class TextPassDumper extends BackupDumper {
function checkIfTimeExceeded() {
if ( $this->maxTimeAllowed && ( $this->lastTime - $this->timeOfCheckpoint > $this->maxTimeAllowed ) ) {
- return True;
+ return true;
}
- return False;
+ return false;
}
function finalOptionCheck() {
@@ -286,7 +290,7 @@ class TextPassDumper extends BackupDumper {
// we wrote some stuff after last checkpoint that needs renamed
if (file_exists($filenameList[0])) {
$newFilenames = array();
- # we might have just written the header and footer and had no
+ # we might have just written the header and footer and had no
# pages or revisions written... perhaps they were all deleted
# there's no pageID 0 so we use that. the caller is responsible
# for deciding what to do with a file containing only the
@@ -332,7 +336,6 @@ class TextPassDumper extends BackupDumper {
}
private function doGetText( $id ) {
-
$id = intval( $id );
$this->failures = 0;
$ex = new MWException( "Graceful storage failure" );
@@ -345,9 +348,9 @@ class TextPassDumper extends BackupDumper {
$this->closeSpawn();
$this->openSpawn();
}
- $text = $this->getTextSpawned( $id );
+ $text = $this->getTextSpawned( $id );
} else {
- $text = $this->getTextDbSafe( $id );
+ $text = $this->getTextDbSafe( $id );
}
if ( $text === false ) {
$this->failures++;
@@ -359,11 +362,10 @@ class TextPassDumper extends BackupDumper {
$this->failedTextRetrievals++;
if ($this->failedTextRetrievals > $this->maxConsecutiveFailedTextRetrievals) {
throw $ex;
- }
- else {
+ } else {
// would be nice to return something better to the caller someday,
// log what we know about the failure and about the revision
- return("");
+ return "";
}
} else {
$this->progress( "Error $this->failures " .
@@ -373,16 +375,18 @@ class TextPassDumper extends BackupDumper {
}
} else {
$this->failedTextRetrievals= 0;
- return( $text );
+ return $text;
}
}
-
+ return '';
}
/**
* Fetch a text revision from the database, retrying in case of failure.
* This may survive some transitory errors by reconnecting, but
* may not survive a long-term server outage.
+ *
+ * FIXME: WTF? Why is it using a loop and then returning unconditionally?
*/
private function getTextDbSafe( $id ) {
while ( true ) {
@@ -397,6 +401,8 @@ class TextPassDumper extends BackupDumper {
/**
* May throw a database error if, say, the server dies during query.
+ * @param $id
+ * @return bool|string
*/
private function getTextDb( $id ) {
global $wgContLang;
@@ -584,15 +590,15 @@ class TextPassDumper extends BackupDumper {
$this->egress->writeClosePage( $this->buffer );
// nasty hack, we can't just write the chardata after the
// page tag, it will include leading blanks from the next line
- $this->egress->sink->write("\n");
-
+ $this->egress->sink->write("\n");
+
$this->buffer = $this->xmlwriterobj->closeStream();
$this->egress->writeCloseStream( $this->buffer );
$this->buffer = "";
$this->thisPage = "";
// this could be more than one file if we had more than one output arg
- $checkpointFilenames = array();
+
$filenameList = (array)$this->egress->getFilenames();
$newFilenames = array();
$firstPageID = str_pad($this->firstPageWritten,9,"0",STR_PAD_LEFT);
@@ -669,10 +675,10 @@ Options:
pressure on the database.
(Requires the XMLReader extension)
--maxtime=<minutes> Write out checkpoint file after this many minutes (writing
- out complete page, closing xml file properly, and opening new one
+ out complete page, closing xml file properly, and opening new one
with header). This option requires the checkpointfile option.
--checkpointfile=<filenamepattern> Use this string for checkpoint filenames,
- substituting first pageid written for the first %s (required) and the
+ substituting first pageid written for the first %s (required) and the
last pageid written for the second %s if it exists.
--quiet Don't dump status reports to stderr.
--report=n Report position and speed after every n pages processed.
@@ -684,3 +690,5 @@ Options:
ENDS
);
}
+
+
diff --git a/maintenance/edit.php b/maintenance/edit.php
index fb462a40..88573714 100644
--- a/maintenance/edit.php
+++ b/maintenance/edit.php
@@ -58,14 +58,14 @@ class EditCLI extends Maintenance {
$this->error( "Invalid title", true );
}
- $article = new Article( $wgTitle );
+ $page = WikiPage::factory( $wgTitle );
# Read the text
$text = $this->getStdin( Maintenance::STDIN_ALL );
# Do the edit
$this->output( "Saving... " );
- $status = $article->doEdit( $text, $summary,
+ $status = $page->doEdit( $text, $summary,
( $minor ? EDIT_MINOR : 0 ) |
( $bot ? EDIT_FORCE_BOT : 0 ) |
( $autoSummary ? EDIT_AUTOSUMMARY : 0 ) |
diff --git a/maintenance/eval.php b/maintenance/eval.php
index 1502ad3e..3bd164fd 100644
--- a/maintenance/eval.php
+++ b/maintenance/eval.php
@@ -31,12 +31,10 @@
* @ingroup Maintenance
*/
-$wgUseNormalUser = (bool)getenv( 'MW_WIKIUSER' );
-
$optionsWithArgs = array( 'd' );
/** */
-require_once( "commandLine.inc" );
+require_once( dirname( __FILE__ ) . "/commandLine.inc" );
if ( isset( $options['d'] ) ) {
$d = $options['d'];
@@ -57,13 +55,8 @@ if ( isset( $options['d'] ) ) {
}
}
-if ( function_exists( 'readline_add_history' )
- && Maintenance::posix_isatty( 0 /*STDIN*/ ) )
-{
- $useReadline = true;
-} else {
- $useReadline = false;
-}
+$useReadline = function_exists( 'readline_add_history' )
+ && Maintenance::posix_isatty( 0 /*STDIN*/ );
if ( $useReadline ) {
$historyFile = isset( $_ENV['HOME'] ) ?
diff --git a/maintenance/fetchText.php b/maintenance/fetchText.php
index 067ffe45..3b43bcd5 100644
--- a/maintenance/fetchText.php
+++ b/maintenance/fetchText.php
@@ -29,7 +29,7 @@ class FetchText extends Maintenance {
$this->mDescription = "Fetch the revision text from an old_id";
}
- /*
+ /**
* returns a string containing the following in order:
* textid
* \n
diff --git a/maintenance/findHooks.php b/maintenance/findHooks.php
index 5996fd3a..cb582857 100644
--- a/maintenance/findHooks.php
+++ b/maintenance/findHooks.php
@@ -12,7 +12,7 @@
*
* Any instance of wfRunHooks that doesn't meet these parameters will be noted.
*
- * Copyright © Ashar Voultoiz
+ * Copyright © Antoine Musso
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
@@ -31,7 +31,7 @@
*
* @file
* @ingroup Maintenance
- * @author Ashar Voultoiz <hashar at free dot fr>
+ * @author Antoine Musso <hashar at free dot fr>
*/
require_once( dirname( __FILE__ ) . '/Maintenance.php' );
@@ -59,6 +59,7 @@ class FindHooks extends Maintenance {
$IP . '/includes/actions/',
$IP . '/includes/api/',
$IP . '/includes/cache/',
+ $IP . '/includes/context/',
$IP . '/includes/db/',
$IP . '/includes/diff/',
$IP . '/includes/filerepo/',
@@ -106,6 +107,29 @@ class FindHooks extends Maintenance {
*/
private function getHooksFromDoc( $doc ) {
if ( $this->hasOption( 'online' ) ) {
+ return $this->getHooksFromOnlineDoc( );
+ } else {
+ return $this->getHooksFromLocalDoc( $doc );
+ }
+ }
+
+ /**
+ * Get hooks from a local file (for example docs/hooks.txt)
+ * @param $doc string: filename to look in
+ * @return array of documented hooks
+ */
+ private function getHooksFromLocalDoc( $doc ) {
+ $m = array();
+ $content = file_get_contents( $doc );
+ preg_match_all( "/\n'(.*?)'/", $content, $m );
+ return array_unique( $m[1] );
+ }
+
+ /**
+ * Get hooks from www.mediawiki.org using the API
+ * @return array of documented hooks
+ */
+ private function getHooksFromOnlineDoc( ) {
// All hooks
$allhookdata = Http::get( 'http://www.mediawiki.org/w/api.php?action=query&list=categorymembers&cmtitle=Category:MediaWiki_hooks&cmlimit=500&format=php' );
$allhookdata = unserialize( $allhookdata );
@@ -129,12 +153,6 @@ class FindHooks extends Maintenance {
}
}
return array_diff( $allhooks, $removed );
- } else {
- $m = array();
- $content = file_get_contents( $doc );
- preg_match_all( "/\n'(.*?)'/", $content, $m );
- return array_unique( $m[1] );
- }
}
/**
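getHooksFromLocalDoc() relies on the convention that docs/hooks.txt introduces every documented hook as a quoted name at the start of a line, so a single regex pass collects them all. A minimal sketch of that extraction, assuming $doc points at hooks.txt:

$content = file_get_contents( $doc );
$m = array();
preg_match_all( "/\n'(.*?)'/", $content, $m );
$documented = array_unique( $m[1] );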
diff --git a/maintenance/fixExtLinksProtocolRelative.php b/maintenance/fixExtLinksProtocolRelative.php
index 1a7025ad..0cabe816 100644
--- a/maintenance/fixExtLinksProtocolRelative.php
+++ b/maintenance/fixExtLinksProtocolRelative.php
@@ -37,7 +37,7 @@ class FixExtLinksProtocolRelative extends LoggedUpdateMaintenance {
protected function updateSkippedMessage() {
return 'protocol-relative URLs in externallinks table already fixed.';
}
-
+
protected function doDBUpdates() {
$db = wfGetDB( DB_MASTER );
if ( !$db->tableExists( 'externallinks' ) ) {
@@ -53,7 +53,7 @@ class FixExtLinksProtocolRelative extends LoggedUpdateMaintenance {
foreach ( $res as $row ) {
$count++;
if ( $count % 100 == 0 ) {
- $this->output( $count );
+ $this->output( $count . "\n" );
wfWaitForSlaves();
}
$db->insert( 'externallinks',
diff --git a/maintenance/formatInstallDoc.php b/maintenance/formatInstallDoc.php
index 9acc16a7..b3bb50ca 100644
--- a/maintenance/formatInstallDoc.php
+++ b/maintenance/formatInstallDoc.php
@@ -1,4 +1,7 @@
<?php
+/**
+ * @ingroup Maintenance
+ */
require_once( dirname( __FILE__ ) .'/Maintenance.php' );
diff --git a/maintenance/fuzz-tester.php b/maintenance/fuzz-tester.php
index 18af4de4..d87d6281 100644
--- a/maintenance/fuzz-tester.php
+++ b/maintenance/fuzz-tester.php
@@ -108,7 +108,6 @@ Wiki configuration for testing:
$wgGroupPermissions['*']['reupload-shared'] = true;
$wgGroupPermissions['*']['rollback'] = true;
$wgGroupPermissions['*']['siteadmin'] = true;
- $wgGroupPermissions['*']['trackback'] = true;
$wgGroupPermissions['*']['unwatchedpages'] = true;
$wgGroupPermissions['*']['upload'] = true;
$wgGroupPermissions['*']['userrights'] = true;
@@ -121,7 +120,6 @@ Wiki configuration for testing:
error_reporting (E_ALL); // At a later date could be increased to E_ALL | E_STRICT
$wgBlockOpenProxies = true; // Some block pages require this to be true in order to test.
$wgEnableUploads = true; // enable uploads.
- //$wgUseTrackbacks = true; // enable trackbacks; However this breaks the viewPageTest, so currently disabled.
$wgDBerrorLog = "/root/mediawiki-db-error-log.txt"; // log DB errors, replace with suitable path.
$wgShowSQLErrors = true; // Show SQL errors (instead of saying the query was hidden).
$wgShowExceptionDetails = true; // want backtraces.
@@ -139,11 +137,7 @@ Wiki configuration for testing:
// Install & enable Special Page extensions to increase code coverage. E.g.:
require_once("extensions/Cite/SpecialCite.php");
- require_once("extensions/Filepath/SpecialFilepath.php");
- require_once("extensions/Makebot/Makebot.php");
- require_once("extensions/Makesysop/SpecialMakesysop.php");
require_once("extensions/Renameuser/SpecialRenameuser.php");
- require_once("extensions/LinkSearch/LinkSearch.php");
// --------- End ---------
If you want to try E_STRICT error logging, add this to the above:
@@ -763,14 +757,18 @@ class wikiFuzz {
static private $maxparams = 10;
/**
- ** Returns random number between finish and start.
+ * Returns random number between finish and start.
+ * @param $finish
+ * @param $start int
+ * @return int
*/
static public function randnum( $finish, $start = 0 ) {
return mt_rand( $start, $finish );
}
/**
- ** Returns a mix of random text and random wiki syntax.
+ * Returns a mix of random text and random wiki syntax.
+ * @return string
*/
static private function randstring() {
$thestring = "";
@@ -800,18 +798,17 @@ class wikiFuzz {
}
/**
- ** Returns either random text, or random wiki syntax, or random data from "ints",
- ** or random data from "other".
+ * Returns either random text, or random wiki syntax, or random data from "ints",
+ * or random data from "other".
+ * @return string
*/
static private function makestring() {
$what = wikiFuzz::randnum( 2 );
if ( $what == 0 ) {
return wikiFuzz::randstring();
- }
- elseif ( $what == 1 ) {
+ } elseif ( $what == 1 ) {
return wikiFuzz::$ints[wikiFuzz::randnum( count( wikiFuzz::$ints ) - 1 )];
- }
- else {
+ } else {
return wikiFuzz::$other[wikiFuzz::randnum( count( wikiFuzz::$other ) - 1 )];
}
}
@@ -819,6 +816,8 @@ class wikiFuzz {
/**
* Returns the matched character slash-escaped as in a C string
* Helper for makeTitleSafe callback
+ * @param $matches
+ * @return string
*/
static private function stringEscape( $matches ) {
return sprintf( "\\x%02x", ord( $matches[1] ) );
@@ -827,6 +826,8 @@ class wikiFuzz {
/**
** Strips out the stuff that Mediawiki balks at in a page's title.
** Implementation copied/pasted from cleanupTable.inc & cleanupImages.php
+ * @param $str string
+ * @return string
*/
static public function makeTitleSafe( $str ) {
$legalTitleChars = " %!\"$&'()*,\\-.\\/0-9:;=?@A-Z\\\\^_`a-z~\\x80-\\xFF";
@@ -837,6 +838,7 @@ class wikiFuzz {
/**
** Returns a string of fuzz text.
+ * @return string
*/
static private function loop() {
switch ( wikiFuzz::randnum( 3 ) ) {
@@ -864,7 +866,8 @@ class wikiFuzz {
}
/**
- ** Returns one of the three styles of random quote: ', ", and nothing.
+ * Returns one of the three styles of random quote: ', ", and nothing.
+ * @return string
*/
static private function getRandQuote() {
switch ( wikiFuzz::randnum( 3 ) ) {
@@ -876,6 +879,8 @@ class wikiFuzz {
/**
** Returns fuzz text, with the parameter indicating approximately how many lines of text you want.
+ * @param $maxtypes int
+ * @return string
*/
static public function makeFuzz( $maxtypes = 2 ) {
$page = "";
@@ -1812,32 +1817,6 @@ class thumbTest extends pageTest {
}
}
-
-/**
- ** a test for trackback.php
- */
-class trackbackTest extends pageTest {
- function __construct() {
- $this->pagePath = "trackback.php";
-
- $this->params = array (
- "url" => wikiFuzz::makeFuzz( 2 ),
- "blog_name" => wikiFuzz::chooseInput( array( "80", wikiFuzz::randnum( 6000, -200 ), wikiFuzz::makeFuzz( 2 ) ) ),
- "article" => wikiFuzz::chooseInput( array( "Main Page", wikiFuzz::makeFuzz( 2 ) ) ),
- "title" => wikiFuzz::chooseInput( array( "Main Page", wikiFuzz::makeFuzz( 2 ) ) ),
- "excerpt" => wikiFuzz::makeFuzz( 2 ),
- );
-
- // sometimes we don't want to specify certain parameters.
- if ( wikiFuzz::randnum( 3 ) == 0 ) unset( $this->params["title"] );
- if ( wikiFuzz::randnum( 3 ) == 0 ) unset( $this->params["excerpt"] );
-
- // page does not produce HTML.
- $this->tidyValidate = false;
- }
-}
-
-
/**
** a test for profileinfo.php
*/
@@ -1892,52 +1871,6 @@ class specialFilepathPageTest extends pageTest {
/**
- ** a test for Special:Makebot (extension Special page).
- */
-class specialMakebot extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Makebot";
-
- $this->params = array (
- "username" => wikiFuzz::chooseInput( array( "Nickj2", "192.168.0.2", wikiFuzz::makeFuzz( 1 ) ) ),
- "dosearch" => wikiFuzz::chooseInput( array( "0", "1", "++--34234", wikiFuzz::makeFuzz( 2 ) ) ),
- "grant" => wikiFuzz::chooseInput( array( "0", "1", "++--34234", wikiFuzz::makeFuzz( 2 ) ) ),
- "comment" => wikiFuzz::chooseInput( array( "20398702394", "", wikiFuzz::makeFuzz( 2 ) ) ),
- "token" => wikiFuzz::chooseInput( array( "20398702394", "", wikiFuzz::makeFuzz( 2 ) ) ),
- );
-
- // sometimes we don't want to specify certain parameters.
- if ( wikiFuzz::randnum( 2 ) == 0 ) unset( $this->params["dosearch"] );
- if ( wikiFuzz::randnum( 2 ) == 0 ) unset( $this->params["grant"] );
- if ( wikiFuzz::randnum( 5 ) == 0 ) unset( $this->params["token"] );
- }
-}
-
-
-/**
- ** a test for Special:Makesysop (extension Special page).
- */
-class specialMakesysop extends pageTest {
- function __construct() {
- $this->pagePath = "index.php?title=Special:Makesysop";
-
- $this->params = array (
- "wpMakesysopUser" => wikiFuzz::chooseInput( array( "Nickj2", "192.168.0.2", wikiFuzz::makeFuzz( 1 ) ) ),
- "action" => wikiFuzz::chooseInput( array( "0", "1", "++--34234", wikiFuzz::makeFuzz( 2 ) ) ),
- "wpMakesysopSubmit" => wikiFuzz::chooseInput( array( "0", "1", "++--34234", wikiFuzz::makeFuzz( 2 ) ) ),
- "wpEditToken" => wikiFuzz::chooseInput( array( "20398702394", "", wikiFuzz::makeFuzz( 2 ) ) ),
- "wpSetBureaucrat" => wikiFuzz::chooseInput( array( "20398702394", "", wikiFuzz::makeFuzz( 2 ) ) ),
- );
-
- // sometimes we don't want to specify certain parameters.
- if ( wikiFuzz::randnum( 3 ) == 0 ) unset( $this->params["wpMakesysopSubmit"] );
- if ( wikiFuzz::randnum( 3 ) == 0 ) unset( $this->params["wpEditToken"] );
- if ( wikiFuzz::randnum( 3 ) == 0 ) unset( $this->params["wpSetBureaucrat"] );
- }
-}
-
-
-/**
** a test for Special:Renameuser (extension Special page).
*/
class specialRenameuserPageTest extends pageTest {
@@ -2224,9 +2157,10 @@ class GeSHi_Test extends pageTest {
}
}
-
/**
** selects a page test to run.
+ * @param $count
+ * @return \api|\confirmEmail|\contributionsTest|\editPageTest|\imagelistTest|\imagepageTest|\ipblocklistTest|\listusersTest|\mimeSearchTest|\newImagesTest|\pageDeletion|\pageHistoryTest|\pageProtectionForm|\prefixindexTest|\profileInfo|\recentchangesTest|\redirectTest|\searchTest|\specialAllmessagesTest|\specialAllpagesTest|\specialBlockip|\specialBlockmeTest|\specialBooksourcesTest|\specialCategoryTree|\specialChemicalsourcesTest|\specialCitePageTest|\specialExportTest|\specialFilepathPageTest|\specialImportPageTest|\specialLinksearch|\specialLockdbPageTest|\specialLogTest|\specialMovePage|\specialNewpagesPageTest|\specialRenameuserPageTest|\specialRevisionDeletePageTest|\specialUndeletePageTest|\specialUnlockdbPageTest|\specialUserrights|\successfulUserLoginTest|\thumbTest|\userLoginTest|\viewPageTest|\watchlistTest
*/
function selectPageTest( $count ) {
@@ -2275,17 +2209,14 @@ function selectPageTest( $count ) {
case 33: return new specialRevisionDeletePageTest();
case 34: return new specialImportPageTest();
case 35: return new thumbTest();
- case 36: return new trackbackTest();
case 37: return new profileInfo();
case 38: return new specialCitePageTest();
case 39: return new specialFilepathPageTest();
- case 40: return new specialMakebot();
- case 41: return new specialMakesysop();
- case 42: return new specialRenameuserPageTest();
- case 43: return new specialLinksearch();
- case 44: return new specialCategoryTree();
- case 45: return new api();
- case 45: return new specialChemicalsourcesTest();
+ case 40: return new specialRenameuserPageTest();
+ case 41: return new specialLinksearch();
+ case 42: return new specialCategoryTree();
+ case 43: return new api();
+ case 44: return new specialChemicalsourcesTest();
default: return new editPageTest();
}
}
@@ -2300,11 +2231,12 @@ function saveFile( $data, $name ) {
file_put_contents( $name, $data );
}
-
/**
** Returns a test as an experimental GET-to-POST URL.
** This doesn't seem to always work though, and sometimes the output is too long
** to be a valid GET URL, so we also save in other formats.
+ * @param $test pageTest
+ * @return string
*/
function getAsURL( pageTest $test ) {
$used_question_mark = ( strpos( $test->getPagePath(), "?" ) !== false );
@@ -2356,11 +2288,12 @@ function saveTestAsPHP( pageTest $test, $filename ) {
saveFile( $str, $filename );
}
-
/**
- ** Escapes a value so that it can be used on the command line by Curl.
- ** Specifically, "<" and "@" need to be escaped if they are the first character,
- ** otherwise curl interprets these as meaning that we want to insert a file.
+ * Escapes a value so that it can be used on the command line by Curl.
+ * Specifically, "<" and "@" need to be escaped if they are the first character,
+ * otherwise curl interprets these as meaning that we want to insert a file.
+ * @param $input_params array
+ * @return array
*/
function escapeForCurl( array $input_params ) {
$output_params = array();
@@ -2411,18 +2344,21 @@ function saveTest( pageTest $test, $testname ) {
saveTestData ( $test, $base_name . DATA_FILE );
}
-
// ////////////////// MEDIAWIKI OUTPUT /////////////////////////
/**
- ** Asks MediaWiki for the HTML output of a test.
+ * Asks MediaWiki for the HTML output of a test.
+ * @param $test pageTest
+ * @return string
*/
function wikiTestOutput( pageTest $test ) {
$ch = curl_init();
// specify the cookie, if required.
- if ( $test->getCookie() ) curl_setopt( $ch, CURLOPT_COOKIE, $test->getCookie() );
+ if ( $test->getCookie() ) {
+ curl_setopt( $ch, CURLOPT_COOKIE, $test->getCookie() );
+ }
curl_setopt( $ch, CURLOPT_POST, 1 ); // save form using a POST
$params = escapeForCurl( $test->getParams() );
@@ -2447,8 +2383,10 @@ function wikiTestOutput( pageTest $test ) {
// ////////////////// HTML VALIDATION /////////////////////////
-/*
- ** Asks the validator whether this is valid HTML, or not.
+/**
+ * Asks the validator whether this is valid HTML, or not.
+ * @param $text string
+ * @return array
*/
function validateHTML( $text ) {
@@ -2477,9 +2415,10 @@ function validateHTML( $text ) {
return array( $valid, $result );
}
-
/**
- ** Get tidy to check for no HTML errors in the output file (e.g. unescaped strings).
+ * Get tidy to check for no HTML errors in the output file (e.g. unescaped strings).
+ * @param $name
+ * @return bool
*/
function tidyCheckFile( $name ) {
$file = DIRECTORY . "/" . $name;
@@ -2498,10 +2437,10 @@ function tidyCheckFile( $name ) {
}
}
-
/**
** Returns whether or not an database error log file has changed in size since
** the last time this was run. This is used to tell if a test caused a DB error.
+ * @return bool
*/
function dbErrorLogged() {
static $filesize;
@@ -2529,8 +2468,12 @@ function dbErrorLogged() {
// //////////////// TOP-LEVEL PROBLEM-FINDING FUNCTION ////////////////////////
/**
- ** takes a page test, and runs it and tests it for problems in the output.
- ** Returns: False on finding a problem, or True on no problems being found.
+ * takes a page test, and runs it and tests it for problems in the output.
+ * Returns: False on finding a problem, or True on no problems being found.
+ * @param $test pageTest
+ * @param $testname
+ * @param $can_overwrite bool
+ * @return bool
*/
function runWikiTest( pageTest $test, &$testname, $can_overwrite = false ) {
diff --git a/maintenance/gearman/gearman.inc b/maintenance/gearman/gearman.inc
deleted file mode 100644
index 15f80e62..00000000
--- a/maintenance/gearman/gearman.inc
+++ /dev/null
@@ -1,104 +0,0 @@
-<?php
-
-require( 'Net/Gearman/Client.php' );
-require( 'Net/Gearman/Worker.php' );
-
-class MWGearmanJob extends Net_Gearman_Job_Common {
- function switchWiki( $wiki, $params ) {
- echo "Switching to $wiki\n";
-
- # Pretend that we have completed it right now, because the new process won't do it
- $this->complete( array( 'result' => true ) );
- socket_close( $this->conn );
-
- # Close some more sockets
- LBFactory::destroyInstance();
- global $wgMemc;
- $wgMemc->disconnect_all();
-
- # Find PHP
- $php = readlink( '/proc/' . posix_getpid() . '/exe' );
-
- # Run the worker script
- $args = array( $_SERVER['PHP_SELF'],
- '--wiki', $wiki,
- '--fake-job', serialize( $params ) );
- $args = array_merge( $args, $GLOBALS['args'] );
- pcntl_exec( $php, $args, $_ENV );
- echo "Error running exec\n";
- }
-
- function run( $params ) {
- if ( wfWikiID() !== $params['wiki'] ) {
- $this->switchWiki( $params['wiki'], $params );
- }
- return self::runNoSwitch( $params );
- }
-
- static function runNoSwitch( $params ) {
- echo implode( ' ', $params ) . "\n";
- $title = Title::newFromText( $params['title'] );
- $mwJob = Job::factory( $params['command'], $title, $params['params'] );
- return $mwJob->run();
- }
-}
-
-class NonScaryGearmanWorker extends Net_Gearman_Worker {
-
- /**
- * Copied from Net_Gearman_Worker but with the scary "run any PHP file in
- * the filesystem" feature removed.
- */
- protected function doWork($socket) {
- Net_Gearman_Connection::send($socket, 'grab_job');
-
- $resp = array('function' => 'noop');
- while (count($resp) && $resp['function'] == 'noop') {
- $resp = Net_Gearman_Connection::blockingRead($socket);
- }
-
- if (in_array($resp['function'], array('noop', 'no_job'))) {
- return false;
- }
-
- if ($resp['function'] != 'job_assign') {
- throw new Net_Gearman_Exception('Holy Cow! What are you doing?!');
- }
-
- $name = $resp['data']['func'];
- $handle = $resp['data']['handle'];
- $arg = array();
-
- if (isset($resp['data']['arg']) &&
- Net_Gearman_Connection::stringLength($resp['data']['arg'])) {
- $arg = json_decode($resp['data']['arg'], true);
- }
-
- ### START MW DIFFERENT BIT
- if ( $name != 'mw_job' ) {
- throw new Net_Gearman_Job_Exception('Invalid function');
- }
- $job = new MWGearmanJob($socket, $handle);
- ### END MW DIFFERENT BIT
-
- try {
- $this->start($handle, $name, $arg);
- $res = $job->run($arg);
- if (!is_array($res)) {
- $res = array('result' => $res);
- }
-
- $job->complete($res);
- $this->complete($handle, $name, $res);
- } catch (Net_Gearman_Job_Exception $e) {
- $job->fail();
- $this->fail($handle, $name, $e);
- }
-
- // Force the job's destructor to run
- $job = null;
-
- return true;
- }
-}
-
diff --git a/maintenance/gearman/gearmanRefreshLinks.php b/maintenance/gearman/gearmanRefreshLinks.php
deleted file mode 100644
index 730db96b..00000000
--- a/maintenance/gearman/gearmanRefreshLinks.php
+++ /dev/null
@@ -1,45 +0,0 @@
-<?php
-
-$optionsWithArgs = array( 'fake-job' );
-
-require( dirname( __FILE__ ) . '/../commandLine.inc' );
-require( dirname( __FILE__ ) . '/gearman.inc' );
-
-if ( !$args ) {
- $args = array( 'localhost' );
-}
-$client = new Net_Gearman_Client( $args );
-$batchSize = 1000;
-
-$dbr = wfGetDB( DB_SLAVE );
-$startId = 0;
-$endId = $dbr->selectField( 'page', 'MAX(page_id)', false, __METHOD__ );
-while ( true ) {
- $res = $dbr->select(
- 'page',
- array( 'page_namespace', 'page_title', 'page_id' ),
- array( 'page_id > ' . intval( $startId ) ),
- __METHOD__,
- array( 'LIMIT' => $batchSize )
- );
-
- if ( $res->numRows() == 0 ) {
- break;
- }
- $set = new Net_Gearman_Set;
- foreach ( $res as $row ) {
- $startId = $row->page_id;
- $title = Title::makeTitle( $row->page_namespace, $row->page_title );
- $params = array(
- 'wiki' => wfWikiID(),
- 'title' => $title->getPrefixedDBkey(),
- 'command' => 'refreshLinks',
- 'params' => false,
- );
- $task = new Net_Gearman_Task( 'mw_job', $params );
- $set->addTask( $task );
- }
- $client->runSet( $set );
- print "$startId / $endId\n";
-}
-
diff --git a/maintenance/gearman/gearmanWorker.php b/maintenance/gearman/gearmanWorker.php
deleted file mode 100644
index 3ea10081..00000000
--- a/maintenance/gearman/gearmanWorker.php
+++ /dev/null
@@ -1,43 +0,0 @@
-<?php
-
-$optionsWithArgs = array( 'fake-job', 'procs' );
-require( dirname( __FILE__ ) . '/../commandLine.inc' );
-require( dirname( __FILE__ ) . '/gearman.inc' );
-
-ini_set( 'memory_limit', '150M' );
-
-if ( isset( $options['procs'] ) ) {
- $procs = $options['procs'];
- if ( $procs < 1 || $procs > 1000 ) {
- echo "Invalid number of processes, please specify a number between 1 and 1000\n";
- exit( 1 );
- }
- $fc = new ForkController( $procs, ForkController::RESTART_ON_ERROR );
- if ( $fc->start() != 'child' ) {
- exit( 0 );
- }
-}
-
-if ( !$args ) {
- $args = array( 'localhost' );
-}
-
-if ( isset( $options['fake-job'] ) ) {
- $params = unserialize( $options['fake-job'] );
- MWGearmanJob::runNoSwitch( $params );
-}
-
-$worker = new NonScaryGearmanWorker( $args );
-$worker->addAbility( 'mw_job' );
-$worker->beginWork( 'wfGearmanMonitor' );
-
-function wfGearmanMonitor( $idle, $lastJob ) {
- static $lastSleep = 0;
- $interval = 5;
- $now = time();
- if ( $now - $lastSleep >= $interval ) {
- wfWaitForSlaves();
- $lastSleep = $now;
- }
- return false;
-}
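Note: gearmanWorker.php was the entry point for this experimental Gearman-based job runner. It registered the 'mw_job' ability with Net_Gearman and wrapped each incoming task in an MWGearmanJob, which re-creates the MediaWiki job via Job::factory() and runs it, switching wikis first when necessary. Jobs were enqueued by gearmanRefreshLinks.php, which walks the page table in batches of 1000 and submits one 'mw_job' task per page. A worker was typically started along these lines (the host and process count are illustrative):

    php maintenance/gearman/gearmanWorker.php --procs 4 localhost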
diff --git a/maintenance/generateSitemap.php b/maintenance/generateSitemap.php
index 403e5a24..80d31f97 100644
--- a/maintenance/generateSitemap.php
+++ b/maintenance/generateSitemap.php
@@ -162,6 +162,8 @@ class GenerateSitemap extends Maintenance {
}
private function setNamespacePriorities() {
+ global $wgSitemapNamespacesPriorities;
+
// Custom main namespaces
$this->priorities[self::GS_MAIN] = '0.5';
// Custom talk namespaces
@@ -183,10 +185,28 @@ class GenerateSitemap extends Maintenance {
$this->priorities[NS_HELP_TALK] = '0.1';
$this->priorities[NS_CATEGORY] = '0.5';
$this->priorities[NS_CATEGORY_TALK] = '0.1';
+
+ // Custom priorities
+ if ( $wgSitemapNamespacesPriorities !== false ) {
+ /**
+ * @var $wgSitemapNamespacesPriorities array
+ */
+ foreach ( $wgSitemapNamespacesPriorities as $namespace => $priority ) {
+ $float = floatval( $priority );
+ if ( $float > 1.0 ) {
+ $priority = '1.0';
+ } elseif ( $float < 0.0 ) {
+ $priority = '0.0';
+ }
+ $this->priorities[$namespace] = $priority;
+ }
+ }
}
/**
* Create directory if it does not exist and return pathname with a trailing slash
+ * @param $fspath string
+ * @return null|string
*/
private static function init_path( $fspath ) {
if ( !isset( $fspath ) ) {
@@ -194,7 +214,7 @@ class GenerateSitemap extends Maintenance {
}
# Create directory if needed
if ( $fspath && !is_dir( $fspath ) ) {
- wfMkdirParents( $fspath ) or die( "Can not create directory $fspath.\n" );
+ wfMkdirParents( $fspath, null, __METHOD__ ) or die( "Can not create directory $fspath.\n" );
}
return realpath( $fspath ) . DIRECTORY_SEPARATOR ;
@@ -327,13 +347,20 @@ class GenerateSitemap extends Maintenance {
* @return Resource
*/
function open( $file, $flags ) {
- return $this->compress ? gzopen( $file, $flags ) : fopen( $file, $flags );
+ $resource = $this->compress ? gzopen( $file, $flags ) : fopen( $file, $flags );
+ if( $resource === false ) {
+ wfDebugDieBacktrace( __METHOD__ . " error opening file $file with flags $flags. Check permissions?" );
+ }
+ return $resource;
}
/**
* gzwrite() / fwrite() wrapper
*/
function write( &$handle, $str ) {
+ if( $handle === true || $handle === false ) {
+ wfDebugDieBacktrace( __METHOD__ . " was passed a boolean as a file handle.\n" );
+ }
if ( $this->compress )
gzwrite( $handle, $str );
else
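The block added to setNamespacePriorities() lets $wgSitemapNamespacesPriorities override the built-in per-namespace priorities, clamping anything outside the 0.0 to 1.0 range. A minimal LocalSettings.php sketch (the namespaces and values are illustrative, not defaults):

    // Values outside 0.0 to 1.0 are clamped by setNamespacePriorities().
    $wgSitemapNamespacesPriorities = array(
        NS_MAIN => '0.8', // boost article pages
        NS_TALK => '0.2', // lower priority for talk pages
        NS_HELP => '1.5', // clamped down to '1.0'
    );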
diff --git a/maintenance/getSlaveServer.php b/maintenance/getSlaveServer.php
index a9d93f1d..3d13bc4e 100644
--- a/maintenance/getSlaveServer.php
+++ b/maintenance/getSlaveServer.php
@@ -32,15 +32,13 @@ class GetSlaveServer extends Maintenance {
global $wgAllDBsAreLocalhost;
if ( $wgAllDBsAreLocalhost ) {
$host = 'localhost';
+ } elseif ( $this->hasOption( 'group' ) ) {
+ $db = wfGetDB( DB_SLAVE, $this->getOption( 'group' ) );
+ $host = $db->getServer();
} else {
- if ( $this->hasOption( 'group' ) ) {
- $db = wfGetDB( DB_SLAVE, $this->getOption( 'group' ) );
- $host = $db->getServer();
- } else {
- $lb = wfGetLB();
- $i = $lb->getReaderIndex();
- $host = $lb->getServerName( $i );
- }
+ $lb = wfGetLB();
+ $i = $lb->getReaderIndex();
+ $host = $lb->getServerName( $i );
}
$this->output( "$host\n" );
}
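getSlaveServer.php simply prints the host name of a slave database server; with --group it asks for a slave belonging to that query group instead of the generic reader. A hedged usage example (the group name is illustrative):

    php maintenance/getSlaveServer.php --group dump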
diff --git a/maintenance/hiphop/extra-files b/maintenance/hiphop/extra-files
index 15f48577..f07f7c7c 100644
--- a/maintenance/hiphop/extra-files
+++ b/maintenance/hiphop/extra-files
@@ -1,5 +1,4 @@
img_auth.php
-includes/AjaxFunctions.php
includes/AutoLoader.php
includes/DefaultSettings.php
includes/Defines.php
diff --git a/maintenance/hiphop/make b/maintenance/hiphop/make
index e792e08b..2bb9951f 100644
--- a/maintenance/hiphop/make
+++ b/maintenance/hiphop/make
@@ -1,9 +1,12 @@
#!/usr/bin/hphpi -f
<?php
+define( 'MW_CONFIG_CALLBACK', 'MakeHipHop::noConfigNeeded' );
require( dirname( __FILE__ ) . '/../Maintenance.php' );
class MakeHipHop extends Maintenance {
+ function noConfigNeeded() {}
+
function execute() {
global $wgHipHopBuildDirectory;
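Defining MW_CONFIG_CALLBACK before Maintenance.php is pulled in lets this build script run without a working LocalSettings.php: the maintenance framework calls the named callback (here the empty MakeHipHop::noConfigNeeded()) instead of loading the normal configuration. A sketch of the same pattern for a hypothetical script (class name and path are illustrative):

    <?php
    // Skip LocalSettings.php by supplying a no-op configuration callback.
    define( 'MW_CONFIG_CALLBACK', 'MyBuildTool::noConfigNeeded' );
    require( dirname( __FILE__ ) . '/../Maintenance.php' );

    class MyBuildTool extends Maintenance {
        public static function noConfigNeeded() {}
        public function execute() {
            // build steps would go here
        }
    }

    $maintClass = 'MyBuildTool';
    require_once( RUN_MAINTENANCE_IF_MAIN );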
diff --git a/maintenance/ibm_db2/foreignkeys.sql b/maintenance/ibm_db2/foreignkeys.sql
index 81a88eb9..4f1450d9 100644
--- a/maintenance/ibm_db2/foreignkeys.sql
+++ b/maintenance/ibm_db2/foreignkeys.sql
@@ -93,11 +93,6 @@ ALTER TABLE oldimage ADD CONSTRAINT OLDIMAGE_NAME_FK FOREIGN KEY (oi_name) REFER
ALTER TABLE watchlist ADD CONSTRAINT WATCHLIST_USER_FK FOREIGN KEY (wl_user) REFERENCES user(user_id) ON DELETE CASCADE
;
--- good
--- already in MySQL schema
-ALTER TABLE trackbacks ADD CONSTRAINT TRACKBACKS_PAGE_FK FOREIGN KEY (tb_page) REFERENCES page(page_id) ON DELETE CASCADE
-;
-
-- cannot contain null values
-- ALTER TABLE protected_titles ADD CONSTRAINT PROTECTED_TITLES_USER_FK FOREIGN KEY (pt_user) REFERENCES user(user_id) ON DELETE SET NULL
--;
diff --git a/maintenance/ibm_db2/tables.sql b/maintenance/ibm_db2/tables.sql
index 261a3a2b..66fc6564 100644
--- a/maintenance/ibm_db2/tables.sql
+++ b/maintenance/ibm_db2/tables.sql
@@ -1,16 +1,20 @@
--- DB2
+-- IBM DB2
-- SQL to create the initial tables for the MediaWiki database.
-- This is read and executed by the install script; you should
-- not have to run it by itself unless doing a manual install.
--- This is the IBM DB2 version.
--- For information about each table, please see the notes in maintenance/tables.sql
+
+-- Notes:
+-- * DB2 will convert all table and column names to all caps internally.
+-- * DB2 has a 32k limit on SQL filesize, so it may be necessary
+-- to split this into two files soon.
CREATE TABLE user (
-- Needs to start with 0
- user_id BIGINT PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (START WITH 0),
- user_name VARCHAR(255) NOT NULL UNIQUE,
+ user_id BIGINT
+ PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (START WITH 0),
+ user_name VARCHAR(255) NOT NULL UNIQUE,
user_real_name VARCHAR(255),
user_password VARCHAR(1024),
user_newpassword VARCHAR(1024),
@@ -21,225 +25,277 @@ CREATE TABLE user (
user_email_token_expires TIMESTAMP(3),
user_email_authenticated TIMESTAMP(3),
-- obsolete, replace by user_properties table
- user_options CLOB(64K) INLINE LENGTH 4096,
+ -- user_options CLOB(64K) INLINE LENGTH 4096,
user_touched TIMESTAMP(3),
user_registration TIMESTAMP(3),
user_editcount INTEGER
);
-CREATE INDEX user_email_token_idx ON user (user_email_token);
---leonsp:
+CREATE INDEX user_email_token_idx
+ ON user (user_email_token);
CREATE UNIQUE INDEX user_include_idx
- ON user(user_id)
- INCLUDE (user_name, user_real_name, user_password, user_newpassword, user_newpass_time, user_token,
- user_email, user_email_token, user_email_token_expires, user_email_authenticated,
- user_touched, user_registration, user_editcount);
+ ON user (user_id)
+ INCLUDE (user_name, user_real_name, user_password, user_newpassword,
+ user_newpass_time, user_token,
+ user_email, user_email_token, user_email_token_expires,
+ user_email_authenticated,
+ user_touched, user_registration, user_editcount);
+CREATE UNIQUE INDEX user_email
+ ON user (user_email);
+
+
-- Create a dummy user to satisfy fk constraints, especially with revisions
INSERT INTO user(
-user_name, user_real_name, user_password, user_newpassword, user_newpass_time,
-user_email, user_email_authenticated, user_options, user_token, user_registration, user_editcount)
+ user_name, user_real_name, user_password, user_newpassword, user_newpass_time,
+ user_email, user_email_authenticated, user_token, user_registration, user_editcount
+)
VALUES (
-'Anonymous','', NULL, NULL, CURRENT_TIMESTAMP,
-NULL, NULL, NULL, NULL, CURRENT_timestamp, 0);
+ 'Anonymous', '', NULL, NULL, CURRENT_TIMESTAMP,
+ NULL, NULL, NULL, CURRENT_TIMESTAMP, 0
+);
+
CREATE TABLE user_groups (
ug_user BIGINT NOT NULL DEFAULT 0,
-- REFERENCES user(user_id) ON DELETE CASCADE,
- ug_group VARCHAR(255) NOT NULL
+ ug_group VARCHAR(255) NOT NULL
);
-CREATE INDEX user_groups_unique ON user_groups (ug_user, ug_group);
+CREATE INDEX user_groups_unique
+ ON user_groups (ug_user, ug_group);
+
CREATE TABLE user_newtalk (
-- registered users key
- user_id BIGINT NOT NULL DEFAULT 0,
+ user_id BIGINT NOT NULL DEFAULT 0,
-- REFERENCES user(user_id) ON DELETE CASCADE,
-- anonymous users key
user_ip VARCHAR(40),
user_last_timestamp TIMESTAMP(3)
);
-CREATE INDEX user_newtalk_id_idx ON user_newtalk (user_id);
-CREATE INDEX user_newtalk_ip_idx ON user_newtalk (user_ip);
---leonsp:
+CREATE INDEX user_newtalk_id_idx
+ ON user_newtalk (user_id);
+CREATE INDEX user_newtalk_ip_idx
+ ON user_newtalk (user_ip);
CREATE UNIQUE INDEX user_newtalk_include_idx
- ON user_newtalk(user_id, user_ip)
- INCLUDE (user_last_timestamp);
+ ON user_newtalk (user_id, user_ip)
+ INCLUDE (user_last_timestamp);
+
CREATE TABLE page (
- page_id BIGINT PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (START WITH 1),
- page_namespace SMALLINT NOT NULL,
- page_title VARCHAR(255) NOT NULL,
+ page_id BIGINT
+ PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (START WITH 1),
+ page_namespace SMALLINT NOT NULL,
+ page_title VARCHAR(255) NOT NULL,
page_restrictions VARCHAR(1024),
- page_counter BIGINT NOT NULL DEFAULT 0,
- page_is_redirect SMALLINT NOT NULL DEFAULT 0,
- page_is_new SMALLINT NOT NULL DEFAULT 0,
+ page_counter BIGINT NOT NULL DEFAULT 0,
+ page_is_redirect SMALLINT NOT NULL DEFAULT 0,
+ page_is_new SMALLINT NOT NULL DEFAULT 0,
page_random NUMERIC(15,14) NOT NULL,
page_touched TIMESTAMP(3),
- page_latest BIGINT NOT NULL, -- FK?
- page_len BIGINT NOT NULL
-);
-CREATE UNIQUE INDEX page_unique_name ON page (page_namespace, page_title);
-CREATE INDEX page_random_idx ON page (page_random);
-CREATE INDEX page_len_idx ON page (page_len);
---leonsp:
+ page_latest BIGINT NOT NULL, -- FK?
+ page_len BIGINT NOT NULL
+);
+CREATE UNIQUE INDEX page_unique_name
+ ON page (page_namespace, page_title);
+CREATE INDEX page_random_idx
+ ON page (page_random);
+CREATE INDEX page_len_idx
+ ON page (page_len);
CREATE UNIQUE INDEX page_id_include
- ON page (page_id)
- INCLUDE (page_namespace, page_title, page_restrictions, page_counter, page_is_redirect, page_is_new, page_random, page_touched, page_latest, page_len);
+ ON page (page_id)
+ INCLUDE (page_namespace, page_title, page_restrictions, page_counter, page_is_redirect, page_is_new, page_random, page_touched, page_latest, page_len);
CREATE UNIQUE INDEX page_name_include
- ON page (page_namespace, page_title)
- INCLUDE (page_id, page_restrictions, page_counter, page_is_redirect, page_is_new, page_random, page_touched, page_latest, page_len);
+ ON page (page_namespace, page_title)
+ INCLUDE (page_id, page_restrictions, page_counter, page_is_redirect, page_is_new, page_random, page_touched, page_latest, page_len);
+
CREATE TABLE revision (
- rev_id BIGINT PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (START WITH 1),
- rev_page BIGINT NOT NULL DEFAULT 0,
+ rev_id BIGINT
+ PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (START WITH 1),
+ rev_page BIGINT NOT NULL DEFAULT 0,
-- REFERENCES page (page_id) ON DELETE CASCADE,
- rev_text_id BIGINT, -- FK
- rev_comment VARCHAR(1024),
- rev_user BIGINT NOT NULL DEFAULT 0,
+ rev_text_id BIGINT, -- FK
+ rev_comment VARCHAR(1024),
+ rev_user BIGINT NOT NULL DEFAULT 0,
-- REFERENCES user(user_id) ON DELETE RESTRICT,
- rev_user_text VARCHAR(255) NOT NULL,
- rev_timestamp TIMESTAMP(3) NOT NULL,
- rev_minor_edit SMALLINT NOT NULL DEFAULT 0,
- rev_deleted SMALLINT NOT NULL DEFAULT 0,
- rev_len BIGINT,
- rev_parent_id BIGINT DEFAULT NULL
-);
-CREATE UNIQUE INDEX revision_unique ON revision (rev_page, rev_id);
-CREATE INDEX rev_text_id_idx ON revision (rev_text_id);
-CREATE INDEX rev_timestamp_idx ON revision (rev_timestamp);
-CREATE INDEX rev_user_idx ON revision (rev_user);
-CREATE INDEX rev_user_text_idx ON revision (rev_user_text);
+ rev_user_text VARCHAR(255) NOT NULL,
+ rev_timestamp TIMESTAMP(3) NOT NULL,
+ rev_minor_edit SMALLINT NOT NULL DEFAULT 0,
+ rev_deleted SMALLINT NOT NULL DEFAULT 0,
+ rev_len BIGINT,
+ rev_parent_id BIGINT DEFAULT NULL,
+ rev_sha1 VARCHAR(255) NOT NULL DEFAULT ''
+);
+CREATE UNIQUE INDEX revision_unique
+ ON revision (rev_page, rev_id);
+CREATE INDEX rev_text_id_idx
+ ON revision (rev_text_id);
+CREATE INDEX rev_timestamp_idx
+ ON revision (rev_timestamp);
+CREATE INDEX rev_user_idx
+ ON revision (rev_user);
+CREATE INDEX rev_user_text_idx
+ ON revision (rev_user_text);
CREATE TABLE text ( -- replaces reserved word 'text'
- old_id INTEGER PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (START WITH 1),
+ old_id INTEGER
+ PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (START WITH 1),
old_text CLOB(16M) INLINE LENGTH 4096,
old_flags VARCHAR(1024)
);
+
CREATE TABLE page_restrictions (
- --pr_id INTEGER NOT NULL UNIQUE, --DEFAULT nextval('pr_id_val'),
- --pr_id INTEGER PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (START WITH 1),
- pr_id BIGINT PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (START WITH 1),
- pr_page INTEGER NOT NULL DEFAULT 0,
+ pr_id BIGINT
+ PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (START WITH 1),
+ pr_page INTEGER NOT NULL DEFAULT 0,
--(used to be nullable)
-- REFERENCES page (page_id) ON DELETE CASCADE,
- pr_type VARCHAR(60) NOT NULL,
- pr_level VARCHAR(60) NOT NULL,
- pr_cascade SMALLINT NOT NULL,
+ pr_type VARCHAR(60) NOT NULL,
+ pr_level VARCHAR(60) NOT NULL,
+ pr_cascade SMALLINT NOT NULL,
pr_user INTEGER,
pr_expiry TIMESTAMP(3)
--PRIMARY KEY (pr_page, pr_type)
);
---ALTER TABLE page_restrictions ADD CONSTRAINT page_restrictions_pk PRIMARY KEY (pr_page,pr_type);
-CREATE UNIQUE INDEX pr_pagetype ON page_restrictions (pr_page,pr_type);
-CREATE INDEX pr_typelevel ON page_restrictions (pr_type,pr_level);
-CREATE INDEX pr_level ON page_restrictions (pr_level);
-CREATE INDEX pr_cascade ON page_restrictions (pr_cascade);
+--ALTER TABLE page_restrictions ADD CONSTRAINT page_restrictions_pk PRIMARY KEY (pr_page, pr_type);
+CREATE UNIQUE INDEX pr_pagetype
+ ON page_restrictions (pr_page, pr_type);
+CREATE INDEX pr_typelevel
+ ON page_restrictions (pr_type, pr_level);
+CREATE INDEX pr_level
+ ON page_restrictions (pr_level);
+CREATE INDEX pr_cascade
+ ON page_restrictions (pr_cascade);
+
+
CREATE TABLE page_props (
- pp_page INTEGER NOT NULL DEFAULT 0,
+ pp_page INTEGER NOT NULL DEFAULT 0,
-- REFERENCES page (page_id) ON DELETE CASCADE,
- pp_propname VARCHAR(255) NOT NULL,
- pp_value CLOB(64K) INLINE LENGTH 4096 NOT NULL,
- PRIMARY KEY (pp_page,pp_propname)
+ pp_propname VARCHAR(255) NOT NULL,
+ pp_value CLOB(64K) INLINE LENGTH 4096 NOT NULL,
+ PRIMARY KEY (pp_page, pp_propname)
);
---ALTER TABLE page_props ADD CONSTRAINT page_props_pk PRIMARY KEY (pp_page,pp_propname);
-CREATE INDEX page_props_propname ON page_props (pp_propname);
+CREATE INDEX page_props_propname
+ ON page_props (pp_propname);
CREATE TABLE archive (
- ar_namespace SMALLINT NOT NULL,
- ar_title VARCHAR(255) NOT NULL,
+ ar_namespace SMALLINT NOT NULL,
+ ar_title VARCHAR(255) NOT NULL,
ar_text CLOB(16M) INLINE LENGTH 4096,
ar_comment VARCHAR(1024),
ar_user BIGINT NOT NULL,
-- no foreign keys in MySQL
-- REFERENCES user(user_id) ON DELETE SET NULL,
- ar_user_text VARCHAR(255) NOT NULL,
- ar_timestamp TIMESTAMP(3) NOT NULL,
- ar_minor_edit SMALLINT NOT NULL DEFAULT 0,
+ ar_user_text VARCHAR(255) NOT NULL,
+ ar_timestamp TIMESTAMP(3) NOT NULL,
+ ar_minor_edit SMALLINT NOT NULL DEFAULT 0,
ar_flags VARCHAR(1024),
ar_rev_id INTEGER,
ar_text_id INTEGER,
- ar_deleted SMALLINT NOT NULL DEFAULT 0,
+ ar_deleted SMALLINT NOT NULL DEFAULT 0,
ar_len INTEGER,
ar_page_id INTEGER,
- ar_parent_id INTEGER
+ ar_parent_id INTEGER,
+ ar_sha1 VARCHAR(255) NOT NULL DEFAULT ''
);
-CREATE INDEX archive_name_title_timestamp ON archive (ar_namespace,ar_title,ar_timestamp);
-CREATE INDEX archive_user_text ON archive (ar_user_text);
+CREATE INDEX archive_name_title_timestamp
+ ON archive (ar_namespace, ar_title, ar_timestamp);
+CREATE INDEX archive_user_text
+ ON archive (ar_user_text);
CREATE TABLE redirect (
- rd_from BIGINT NOT NULL PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (START WITH 1),
+ rd_from BIGINT NOT NULL
+ PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (START WITH 1),
--REFERENCES page(page_id) ON DELETE CASCADE,
- rd_namespace SMALLINT NOT NULL DEFAULT 0,
- rd_title VARCHAR(255) NOT NULL DEFAULT '',
- rd_interwiki varchar(32),
+ rd_namespace SMALLINT NOT NULL DEFAULT 0,
+ rd_title VARCHAR(255) NOT NULL DEFAULT '',
+ rd_interwiki VARCHAR(32),
rd_fragment VARCHAR(255)
);
-CREATE INDEX redirect_ns_title ON redirect (rd_namespace,rd_title,rd_from);
+CREATE INDEX redirect_ns_title
+ ON redirect (rd_namespace, rd_title, rd_from);
CREATE TABLE pagelinks (
- pl_from BIGINT NOT NULL DEFAULT 0,
+ pl_from BIGINT NOT NULL DEFAULT 0,
-- REFERENCES page(page_id) ON DELETE CASCADE,
- pl_namespace SMALLINT NOT NULL,
- pl_title VARCHAR(255) NOT NULL
+ pl_namespace SMALLINT NOT NULL,
+ pl_title VARCHAR(255) NOT NULL
);
-CREATE UNIQUE INDEX pagelink_unique ON pagelinks (pl_from,pl_namespace,pl_title);
+CREATE UNIQUE INDEX pagelink_unique
+ ON pagelinks (pl_from, pl_namespace, pl_title);
+
+
CREATE TABLE templatelinks (
- tl_from BIGINT NOT NULL DEFAULT 0,
+ tl_from BIGINT NOT NULL DEFAULT 0,
-- REFERENCES page(page_id) ON DELETE CASCADE,
tl_namespace SMALLINT NOT NULL,
- tl_title VARCHAR(255) NOT NULL
+ tl_title VARCHAR(255) NOT NULL
);
-CREATE UNIQUE INDEX templatelinks_unique ON templatelinks (tl_namespace,tl_title,tl_from);
-CREATE UNIQUE INDEX tl_from_idx ON templatelinks (tl_from,tl_namespace,tl_title);
+CREATE UNIQUE INDEX templatelinks_unique
+ ON templatelinks (tl_namespace, tl_title, tl_from);
+CREATE UNIQUE INDEX tl_from_idx
+ ON templatelinks (tl_from, tl_namespace, tl_title);
+
+
CREATE TABLE imagelinks (
- il_from BIGINT NOT NULL DEFAULT 0,
+ il_from BIGINT NOT NULL DEFAULT 0,
-- REFERENCES page(page_id) ON DELETE CASCADE,
- il_to VARCHAR(255) NOT NULL
+ il_to VARCHAR(255) NOT NULL
);
-CREATE UNIQUE INDEX il_from_idx ON imagelinks (il_to,il_from);
-CREATE UNIQUE INDEX il_to_idx ON imagelinks (il_from,il_to);
+CREATE UNIQUE INDEX il_from_idx
+ ON imagelinks (il_to, il_from);
+CREATE UNIQUE INDEX il_to_idx
+ ON imagelinks (il_from, il_to);
+
+
CREATE TABLE categorylinks (
- cl_from BIGINT NOT NULL DEFAULT 0,
+ cl_from BIGINT NOT NULL DEFAULT 0,
-- REFERENCES page(page_id) ON DELETE CASCADE,
- cl_to VARCHAR(255) NOT NULL,
+ cl_to VARCHAR(255) NOT NULL,
-- cl_sortkey has to be at least 86 wide
-- in order to be compatible with the old MySQL schema from MW 1.10
--cl_sortkey VARCHAR(86),
- cl_sortkey VARCHAR(230) FOR BIT DATA NOT NULL ,
- cl_sortkey_prefix VARCHAR(255) FOR BIT DATA NOT NULL ,
- cl_timestamp TIMESTAMP(3) NOT NULL,
- cl_collation VARCHAR(32) FOR BIT DATA NOT NULL ,
- cl_type VARCHAR(6) FOR BIT DATA NOT NULL
+ cl_sortkey VARCHAR(230) FOR BIT DATA NOT NULL,
+ cl_sortkey_prefix VARCHAR(255) FOR BIT DATA NOT NULL,
+ cl_timestamp TIMESTAMP(3) NOT NULL,
+ cl_collation VARCHAR(32) FOR BIT DATA NOT NULL,
+ cl_type VARCHAR(6) FOR BIT DATA NOT NULL
);
-CREATE UNIQUE INDEX cl_from ON categorylinks (cl_from, cl_to);
-CREATE INDEX cl_sortkey ON categorylinks (cl_to, cl_sortkey, cl_from);
+CREATE UNIQUE INDEX cl_from
+ ON categorylinks (cl_from, cl_to);
+CREATE INDEX cl_sortkey
+ ON categorylinks (cl_to, cl_sortkey, cl_from);
CREATE TABLE externallinks (
- el_from BIGINT NOT NULL DEFAULT 0,
+ el_from BIGINT NOT NULL DEFAULT 0,
-- REFERENCES page(page_id) ON DELETE CASCADE,
- el_to VARCHAR(1024) NOT NULL,
- el_index VARCHAR(1024) NOT NULL
+ el_to VARCHAR(1024) NOT NULL,
+ el_index VARCHAR(1024) NOT NULL
);
-CREATE INDEX externallinks_from_to ON externallinks (el_from,el_to);
-CREATE INDEX externallinks_index ON externallinks (el_index);
+CREATE INDEX externallinks_from_to
+ ON externallinks (el_from, el_to);
+CREATE INDEX externallinks_index
+ ON externallinks (el_index);
+
--
@@ -247,494 +303,636 @@ CREATE INDEX externallinks_index ON externallinks (el_index);
--
CREATE TABLE external_user (
-- Foreign key to user_id
- eu_local_id BIGINT NOT NULL PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (START WITH 1),
+ eu_local_id BIGINT NOT NULL
+ PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (START WITH 1),
-- Some opaque identifier provided by the external database
- eu_external_id VARCHAR(255) NOT NULL
+ eu_external_id VARCHAR(255) NOT NULL
);
CREATE UNIQUE INDEX eu_external_id_idx
- ON external_user (eu_external_id)
- INCLUDE (eu_local_id);
+ ON external_user (eu_external_id)
+ INCLUDE (eu_local_id);
CREATE UNIQUE INDEX eu_local_id_idx
- ON external_user (eu_local_id)
- INCLUDE (eu_external_id);
+ ON external_user (eu_local_id)
+ INCLUDE (eu_external_id);
CREATE TABLE langlinks (
- ll_from BIGINT NOT NULL DEFAULT 0,
+ ll_from BIGINT NOT NULL DEFAULT 0,
-- REFERENCES page (page_id) ON DELETE CASCADE,
ll_lang VARCHAR(20),
ll_title VARCHAR(255)
);
-CREATE UNIQUE INDEX langlinks_unique ON langlinks (ll_from,ll_lang);
-CREATE INDEX langlinks_lang_title ON langlinks (ll_lang,ll_title);
+CREATE UNIQUE INDEX langlinks_unique
+ ON langlinks (ll_from, ll_lang);
+CREATE INDEX langlinks_lang_title
+ ON langlinks (ll_lang, ll_title);
+
CREATE TABLE site_stats (
- ss_row_id BIGINT NOT NULL UNIQUE,
- ss_total_views BIGINT DEFAULT 0,
- ss_total_edits BIGINT DEFAULT 0,
- ss_good_articles BIGINT DEFAULT 0,
- ss_total_pages INTEGER DEFAULT -1,
- ss_users INTEGER DEFAULT -1,
- ss_active_users INTEGER DEFAULT -1,
- ss_admins INTEGER DEFAULT -1,
- ss_images INTEGER DEFAULT 0
+ ss_row_id BIGINT NOT NULL UNIQUE,
+ ss_total_views BIGINT DEFAULT 0,
+ ss_total_edits BIGINT DEFAULT 0,
+ ss_good_articles BIGINT DEFAULT 0,
+ ss_total_pages INTEGER DEFAULT -1,
+ ss_users INTEGER DEFAULT -1,
+ ss_active_users INTEGER DEFAULT -1,
+ ss_admins INTEGER DEFAULT -1,
+ ss_images INTEGER DEFAULT 0
);
+
+
CREATE TABLE hitcounter (
- hc_id BIGINT NOT NULL
+ hc_id BIGINT NOT NULL
);
+
+
CREATE TABLE ipblocks (
- ipb_id INTEGER NOT NULL PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (START WITH 1),
- --DEFAULT nextval('ipblocks_ipb_id_val'),
+ ipb_id INTEGER NOT NULL
+ PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (START WITH 1),
ipb_address VARCHAR(1024),
ipb_user BIGINT NOT NULL DEFAULT 0,
-- REFERENCES user(user_id) ON DELETE SET NULL,
- ipb_by BIGINT NOT NULL DEFAULT 0,
+ ipb_by BIGINT NOT NULL DEFAULT 0,
-- REFERENCES user(user_id) ON DELETE CASCADE,
- ipb_by_text VARCHAR(255) NOT NULL DEFAULT '',
- ipb_reason VARCHAR(1024) NOT NULL,
- ipb_timestamp TIMESTAMP(3) NOT NULL,
- ipb_auto SMALLINT NOT NULL DEFAULT 0,
- ipb_anon_only SMALLINT NOT NULL DEFAULT 0,
- ipb_create_account SMALLINT NOT NULL DEFAULT 1,
- ipb_enable_autoblock SMALLINT NOT NULL DEFAULT 1,
- ipb_expiry TIMESTAMP(3) NOT NULL,
+ ipb_by_text VARCHAR(255) NOT NULL DEFAULT '',
+ ipb_reason VARCHAR(1024) NOT NULL,
+ ipb_timestamp TIMESTAMP(3) NOT NULL,
+ ipb_auto SMALLINT NOT NULL DEFAULT 0,
+ ipb_anon_only SMALLINT NOT NULL DEFAULT 0,
+ ipb_create_account SMALLINT NOT NULL DEFAULT 1,
+ ipb_enable_autoblock SMALLINT NOT NULL DEFAULT 1,
+ ipb_expiry TIMESTAMP(3) NOT NULL,
ipb_range_start VARCHAR(1024),
ipb_range_end VARCHAR(1024),
- ipb_deleted SMALLINT NOT NULL DEFAULT 0,
- ipb_block_email SMALLINT NOT NULL DEFAULT 0,
- ipb_allow_usertalk SMALLINT NOT NULL DEFAULT 0
+ ipb_deleted SMALLINT NOT NULL DEFAULT 0,
+ ipb_block_email SMALLINT NOT NULL DEFAULT 0,
+ ipb_allow_usertalk SMALLINT NOT NULL DEFAULT 0
);
-CREATE INDEX ipb_address ON ipblocks (ipb_address);
-CREATE INDEX ipb_user ON ipblocks (ipb_user);
-CREATE INDEX ipb_range ON ipblocks (ipb_range_start,ipb_range_end);
+CREATE INDEX ipb_address
+ ON ipblocks (ipb_address);
+CREATE INDEX ipb_user
+ ON ipblocks (ipb_user);
+CREATE INDEX ipb_range
+ ON ipblocks (ipb_range_start, ipb_range_end);
CREATE TABLE image (
- img_name VARCHAR(255) NOT NULL PRIMARY KEY,
- img_size BIGINT NOT NULL,
- img_width INTEGER NOT NULL,
- img_height INTEGER NOT NULL,
- img_metadata CLOB(16M) INLINE LENGTH 4096 NOT NULL DEFAULT '',
+ img_name VARCHAR(255) NOT NULL
+ PRIMARY KEY,
+ img_size BIGINT NOT NULL,
+ img_width INTEGER NOT NULL,
+ img_height INTEGER NOT NULL,
+ img_metadata CLOB(16M) INLINE LENGTH 4096 NOT NULL DEFAULT '',
img_bits SMALLINT,
img_media_type VARCHAR(255),
- img_major_mime VARCHAR(255) DEFAULT 'unknown',
- img_minor_mime VARCHAR(32) DEFAULT 'unknown',
- img_description VARCHAR(1024) NOT NULL DEFAULT '',
+ img_major_mime VARCHAR(255) DEFAULT 'unknown',
+ img_minor_mime VARCHAR(32) DEFAULT 'unknown',
+ img_description VARCHAR(1024) NOT NULL DEFAULT '',
img_user BIGINT NOT NULL DEFAULT 0,
-- REFERENCES user(user_id) ON DELETE SET NULL,
- img_user_text VARCHAR(255) NOT NULL DEFAULT '',
+ img_user_text VARCHAR(255) NOT NULL DEFAULT '',
img_timestamp TIMESTAMP(3),
- img_sha1 VARCHAR(255) NOT NULL DEFAULT ''
+ img_sha1 VARCHAR(255) NOT NULL DEFAULT ''
);
-CREATE INDEX img_size_idx ON image (img_size);
-CREATE INDEX img_timestamp_idx ON image (img_timestamp);
-CREATE INDEX img_sha1 ON image (img_sha1);
+CREATE INDEX img_size_idx
+ ON image (img_size);
+CREATE INDEX img_timestamp_idx
+ ON image (img_timestamp);
+CREATE INDEX img_sha1
+ ON image (img_sha1);
+
CREATE TABLE oldimage (
- oi_name VARCHAR(255) NOT NULL DEFAULT '',
- oi_archive_name VARCHAR(255) NOT NULL,
- oi_size BIGINT NOT NULL,
- oi_width INTEGER NOT NULL,
- oi_height INTEGER NOT NULL,
- oi_bits SMALLINT NOT NULL,
+ oi_name VARCHAR(255) NOT NULL DEFAULT '',
+ oi_archive_name VARCHAR(255) NOT NULL,
+ oi_size BIGINT NOT NULL,
+ oi_width INTEGER NOT NULL,
+ oi_height INTEGER NOT NULL,
+ oi_bits SMALLINT NOT NULL,
oi_description VARCHAR(1024),
oi_user BIGINT NOT NULL DEFAULT 0,
-- REFERENCES user(user_id) ON DELETE SET NULL,
- oi_user_text VARCHAR(255) NOT NULL,
- oi_timestamp TIMESTAMP(3) NOT NULL,
- oi_metadata CLOB(16M) INLINE LENGTH 4096 NOT NULL DEFAULT '',
- oi_media_type VARCHAR(255) ,
- oi_major_mime VARCHAR(255) NOT NULL DEFAULT 'unknown',
- oi_minor_mime VARCHAR(255) NOT NULL DEFAULT 'unknown',
- oi_deleted SMALLINT NOT NULL DEFAULT 0,
- oi_sha1 VARCHAR(255) NOT NULL DEFAULT ''
+ oi_user_text VARCHAR(255) NOT NULL,
+ oi_timestamp TIMESTAMP(3) NOT NULL,
+ oi_metadata CLOB(16M) INLINE LENGTH 4096 NOT NULL DEFAULT '',
+ oi_media_type VARCHAR(255),
+ oi_major_mime VARCHAR(255) NOT NULL DEFAULT 'unknown',
+ oi_minor_mime VARCHAR(255) NOT NULL DEFAULT 'unknown',
+ oi_deleted SMALLINT NOT NULL DEFAULT 0,
+ oi_sha1 VARCHAR(255) NOT NULL DEFAULT ''
--FOREIGN KEY (oi_name) REFERENCES image(img_name) ON DELETE CASCADE
);
-CREATE INDEX oi_name_timestamp ON oldimage (oi_name,oi_timestamp);
-CREATE INDEX oi_name_archive_name ON oldimage (oi_name,oi_archive_name);
-CREATE INDEX oi_sha1 ON oldimage (oi_sha1);
+CREATE INDEX oi_name_timestamp
+ ON oldimage (oi_name, oi_timestamp);
+CREATE INDEX oi_name_archive_name
+ ON oldimage (oi_name, oi_archive_name);
+CREATE INDEX oi_sha1
+ ON oldimage (oi_sha1);
CREATE TABLE filearchive (
- fa_id INTEGER NOT NULL PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (START WITH 1),
- --PRIMARY KEY DEFAULT nextval('filearchive_fa_id_seq'),
- fa_name VARCHAR(255) NOT NULL,
+ fa_id INTEGER NOT NULL
+ PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (START WITH 1),
+ fa_name VARCHAR(255) NOT NULL,
fa_archive_name VARCHAR(255),
fa_storage_group VARCHAR(255),
- fa_storage_key VARCHAR(64) DEFAULT '',
- fa_deleted_user BIGINT NOT NULL DEFAULT 0,
+ fa_storage_key VARCHAR(64) DEFAULT '',
+ fa_deleted_user BIGINT NOT NULL DEFAULT 0,
-- REFERENCES user(user_id) ON DELETE SET NULL,
- fa_deleted_timestamp TIMESTAMP(3) NOT NULL,
+ fa_deleted_timestamp TIMESTAMP(3) NOT NULL,
fa_deleted_reason VARCHAR(255),
- fa_size BIGINT NOT NULL,
- fa_width INTEGER NOT NULL,
- fa_height INTEGER NOT NULL,
- fa_metadata CLOB(16M) INLINE LENGTH 4096 NOT NULL DEFAULT '',
+ fa_size BIGINT NOT NULL,
+ fa_width INTEGER NOT NULL,
+ fa_height INTEGER NOT NULL,
+ fa_metadata CLOB(16M) INLINE LENGTH 4096 NOT NULL DEFAULT '',
fa_bits SMALLINT,
fa_media_type VARCHAR(255),
- fa_major_mime VARCHAR(255) DEFAULT 'unknown',
- fa_minor_mime VARCHAR(255) DEFAULT 'unknown',
- fa_description VARCHAR(1024) NOT NULL,
+ fa_major_mime VARCHAR(255) DEFAULT 'unknown',
+ fa_minor_mime VARCHAR(255) DEFAULT 'unknown',
+ fa_description VARCHAR(1024) NOT NULL,
fa_user BIGINT NOT NULL DEFAULT 0,
-- REFERENCES user(user_id) ON DELETE SET NULL,
- fa_user_text VARCHAR(255) NOT NULL,
+ fa_user_text VARCHAR(255) NOT NULL,
fa_timestamp TIMESTAMP(3),
- fa_deleted SMALLINT NOT NULL DEFAULT 0
+ fa_deleted SMALLINT NOT NULL DEFAULT 0
);
-CREATE INDEX fa_name_time ON filearchive (fa_name, fa_timestamp);
-CREATE INDEX fa_dupe ON filearchive (fa_storage_group, fa_storage_key);
-CREATE INDEX fa_notime ON filearchive (fa_deleted_timestamp);
-CREATE INDEX fa_nouser ON filearchive (fa_deleted_user);
+CREATE INDEX fa_name_time
+ ON filearchive (fa_name, fa_timestamp);
+CREATE INDEX fa_dupe
+ ON filearchive (fa_storage_group, fa_storage_key);
+CREATE INDEX fa_notime
+ ON filearchive (fa_deleted_timestamp);
+CREATE INDEX fa_nouser
+ ON filearchive (fa_deleted_user);
+
CREATE TABLE recentchanges (
- rc_id INTEGER NOT NULL PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (START WITH 1),
- --PRIMARY KEY DEFAULT nextval('rc_rc_id_seq'),
- rc_timestamp TIMESTAMP(3) NOT NULL,
- rc_cur_time TIMESTAMP(3) NOT NULL,
+ rc_id INTEGER NOT NULL
+ PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (START WITH 1),
+ rc_timestamp TIMESTAMP(3) NOT NULL,
+ rc_cur_time TIMESTAMP(3) NOT NULL,
rc_user BIGINT NOT NULL DEFAULT 0,
-- REFERENCES user(user_id) ON DELETE SET NULL,
- rc_user_text VARCHAR(255) NOT NULL,
- rc_namespace SMALLINT NOT NULL,
- rc_title VARCHAR(255) NOT NULL,
+ rc_user_text VARCHAR(255) NOT NULL,
+ rc_namespace SMALLINT NOT NULL,
+ rc_title VARCHAR(255) NOT NULL,
rc_comment VARCHAR(255),
- rc_minor SMALLINT NOT NULL DEFAULT 0,
- rc_bot SMALLINT NOT NULL DEFAULT 0,
- rc_new SMALLINT NOT NULL DEFAULT 0,
+ rc_minor SMALLINT NOT NULL DEFAULT 0,
+ rc_bot SMALLINT NOT NULL DEFAULT 0,
+ rc_new SMALLINT NOT NULL DEFAULT 0,
rc_cur_id BIGINT NOT NULL DEFAULT 0,
-- REFERENCES page(page_id) ON DELETE SET NULL,
- rc_this_oldid BIGINT NOT NULL,
- rc_last_oldid BIGINT NOT NULL,
- rc_type SMALLINT NOT NULL DEFAULT 0,
+ rc_this_oldid BIGINT NOT NULL,
+ rc_last_oldid BIGINT NOT NULL,
+ rc_type SMALLINT NOT NULL DEFAULT 0,
rc_moved_to_ns SMALLINT,
rc_moved_to_title VARCHAR(255),
- rc_patrolled SMALLINT NOT NULL DEFAULT 0,
- rc_ip VARCHAR(40), -- was CIDR type
+ rc_patrolled SMALLINT NOT NULL DEFAULT 0,
+ rc_ip VARCHAR(40), -- was CIDR type
rc_old_len INTEGER,
rc_new_len INTEGER,
- rc_deleted SMALLINT NOT NULL DEFAULT 0,
- rc_logid BIGINT NOT NULL DEFAULT 0,
+ rc_deleted SMALLINT NOT NULL DEFAULT 0,
+ rc_logid BIGINT NOT NULL DEFAULT 0,
rc_log_type VARCHAR(255),
rc_log_action VARCHAR(255),
rc_params CLOB(64K) INLINE LENGTH 4096
);
-CREATE INDEX rc_timestamp ON recentchanges (rc_timestamp);
-CREATE INDEX rc_namespace_title ON recentchanges (rc_namespace, rc_title);
-CREATE INDEX rc_cur_id ON recentchanges (rc_cur_id);
-CREATE INDEX new_name_timestamp ON recentchanges (rc_new, rc_namespace, rc_timestamp);
-CREATE INDEX rc_ip ON recentchanges (rc_ip);
+CREATE INDEX rc_timestamp
+ ON recentchanges (rc_timestamp);
+CREATE INDEX rc_namespace_title
+ ON recentchanges (rc_namespace, rc_title);
+CREATE INDEX rc_cur_id
+ ON recentchanges (rc_cur_id);
+CREATE INDEX new_name_timestamp
+ ON recentchanges (rc_new, rc_namespace, rc_timestamp);
+CREATE INDEX rc_ip
+ ON recentchanges (rc_ip);
CREATE TABLE watchlist (
- wl_user BIGINT NOT NULL DEFAULT 0,
+ wl_user BIGINT NOT NULL DEFAULT 0,
-- REFERENCES user(user_id) ON DELETE CASCADE,
- wl_namespace SMALLINT NOT NULL DEFAULT 0,
- wl_title VARCHAR(255) NOT NULL,
+ wl_namespace SMALLINT NOT NULL DEFAULT 0,
+ wl_title VARCHAR(255) NOT NULL,
wl_notificationtimestamp TIMESTAMP(3)
);
-CREATE UNIQUE INDEX wl_user_namespace_title ON watchlist (wl_namespace, wl_title, wl_user);
+CREATE UNIQUE INDEX wl_user_namespace_title
+ ON watchlist (wl_namespace, wl_title, wl_user);
+
CREATE TABLE interwiki (
- iw_prefix VARCHAR(32) NOT NULL UNIQUE,
- iw_url CLOB(64K) INLINE LENGTH 4096 NOT NULL,
- iw_api CLOB(64K) INLINE LENGTH 4096 NOT NULL,
- iw_wikiid varchar(64) NOT NULL,
- iw_local SMALLINT NOT NULL,
- iw_trans SMALLINT NOT NULL DEFAULT 0
+ iw_prefix VARCHAR(32) NOT NULL UNIQUE,
+ iw_url CLOB(64K) INLINE LENGTH 4096 NOT NULL,
+ iw_api CLOB(64K) INLINE LENGTH 4096 NOT NULL,
+ iw_wikiid VARCHAR(64) NOT NULL,
+ iw_local SMALLINT NOT NULL,
+ iw_trans SMALLINT NOT NULL DEFAULT 0
);
+
CREATE TABLE querycache (
- qc_type VARCHAR(255) NOT NULL,
- qc_value BIGINT NOT NULL,
- qc_namespace INTEGER NOT NULL,
- qc_title VARCHAR(255) NOT NULL
+ qc_type VARCHAR(255) NOT NULL,
+ qc_value BIGINT NOT NULL,
+ qc_namespace INTEGER NOT NULL,
+ qc_title VARCHAR(255) NOT NULL
);
-CREATE INDEX querycache_type_value ON querycache (qc_type, qc_value);
+CREATE INDEX querycache_type_value
+ ON querycache (qc_type, qc_value);
-CREATE TABLE querycache_info (
- qci_type VARCHAR(255) UNIQUE NOT NULL,
- qci_timestamp TIMESTAMP(3)
+CREATE TABLE querycache_info (
+ qci_type VARCHAR(255) UNIQUE NOT NULL,
+ qci_timestamp TIMESTAMP(3)
);
+
CREATE TABLE querycachetwo (
- qcc_type VARCHAR(255) NOT NULL,
- qcc_value BIGINT NOT NULL DEFAULT 0,
- qcc_namespace INTEGER NOT NULL DEFAULT 0,
- qcc_title VARCHAR(255) NOT NULL DEFAULT '',
- qcc_namespacetwo INTEGER NOT NULL DEFAULT 0,
- qcc_titletwo VARCHAR(255) NOT NULL DEFAULT ''
+ qcc_type VARCHAR(255) NOT NULL,
+ qcc_value BIGINT NOT NULL DEFAULT 0,
+ qcc_namespace INTEGER NOT NULL DEFAULT 0,
+ qcc_title VARCHAR(255) NOT NULL DEFAULT '',
+ qcc_namespacetwo INTEGER NOT NULL DEFAULT 0,
+ qcc_titletwo VARCHAR(255) NOT NULL DEFAULT ''
);
-CREATE INDEX querycachetwo_type_value ON querycachetwo (qcc_type, qcc_value);
-CREATE INDEX querycachetwo_title ON querycachetwo (qcc_type,qcc_namespace,qcc_title);
-CREATE INDEX querycachetwo_titletwo ON querycachetwo (qcc_type,qcc_namespacetwo,qcc_titletwo);
+CREATE INDEX querycachetwo_type_value
+ ON querycachetwo (qcc_type, qcc_value);
+CREATE INDEX querycachetwo_title
+ ON querycachetwo (qcc_type, qcc_namespace, qcc_title);
+CREATE INDEX querycachetwo_titletwo
+ ON querycachetwo (qcc_type, qcc_namespacetwo, qcc_titletwo);
+
+
CREATE TABLE objectcache (
- keyname VARCHAR(255) NOT NULL UNIQUE, -- was nullable
- value CLOB(16M) INLINE LENGTH 4096 NOT NULL DEFAULT '',
- exptime TIMESTAMP(3) NOT NULL
+ keyname VARCHAR(255) NOT NULL UNIQUE, -- was nullable
+ value CLOB(16M) INLINE LENGTH 4096 NOT NULL DEFAULT '',
+ exptime TIMESTAMP(3) NOT NULL
);
-CREATE INDEX objectcacache_exptime ON objectcache (exptime);
+CREATE INDEX objectcacache_exptime
+ ON objectcache (exptime);
CREATE TABLE transcache (
- tc_url VARCHAR(255) NOT NULL UNIQUE,
- tc_contents CLOB(64K) INLINE LENGTH 4096 NOT NULL,
- tc_time TIMESTAMP(3) NOT NULL
+ tc_url VARCHAR(255) NOT NULL UNIQUE,
+ tc_contents CLOB(64K) INLINE LENGTH 4096 NOT NULL,
+ tc_time TIMESTAMP(3) NOT NULL
);
+
CREATE TABLE logging (
- log_id BIGINT NOT NULL PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (START WITH 1),
- --PRIMARY KEY DEFAULT nextval('log_log_id_seq'),
- log_type VARCHAR(32) NOT NULL,
- log_action VARCHAR(32) NOT NULL,
- log_timestamp TIMESTAMP(3) NOT NULL,
- log_user BIGINT NOT NULL DEFAULT 0,
+ log_id BIGINT NOT NULL
+ PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (START WITH 1),
+ log_type VARCHAR(32) NOT NULL,
+ log_action VARCHAR(32) NOT NULL,
+ log_timestamp TIMESTAMP(3) NOT NULL,
+ log_user BIGINT NOT NULL DEFAULT 0,
-- REFERENCES user(user_id) ON DELETE SET NULL,
-- Name of the user who performed this action
- log_user_text VARCHAR(255) NOT NULL default '',
- log_namespace SMALLINT NOT NULL,
- log_title VARCHAR(255) NOT NULL,
- log_page BIGINT,
- log_comment VARCHAR(255),
- log_params CLOB(64K) INLINE LENGTH 4096,
- log_deleted SMALLINT NOT NULL DEFAULT 0
-);
-CREATE INDEX logging_type_name ON logging (log_type, log_timestamp);
-CREATE INDEX logging_user_time ON logging (log_timestamp, log_user);
-CREATE INDEX logging_page_time ON logging (log_namespace, log_title, log_timestamp);
-CREATE INDEX log_user_type_time ON logging (log_user, log_type, log_timestamp);
-CREATE INDEX log_page_id_time ON logging (log_page,log_timestamp);
+ log_user_text VARCHAR(255) NOT NULL DEFAULT '',
+ log_namespace SMALLINT NOT NULL,
+ log_title VARCHAR(255) NOT NULL,
+ log_page BIGINT,
+ log_comment VARCHAR(255),
+ log_params CLOB(64K) INLINE LENGTH 4096,
+ log_deleted SMALLINT NOT NULL DEFAULT 0
+);
+CREATE INDEX logging_type_name
+ ON logging (log_type, log_timestamp);
+CREATE INDEX logging_user_time
+ ON logging (log_timestamp, log_user);
+CREATE INDEX logging_page_time
+ ON logging (log_namespace, log_title, log_timestamp);
+CREATE INDEX log_user_type_time
+ ON logging (log_user, log_type, log_timestamp);
+CREATE INDEX log_page_id_time
+ ON logging (log_page, log_timestamp);
+CREATE UNIQUE INDEX type_action
+ ON logging (log_type, log_action, log_timestamp);
CREATE TABLE trackbacks (
- tb_id INTEGER NOT NULL PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (START WITH 1),
- --PRIMARY KEY DEFAULT nextval('trackbacks_tb_id_seq'),
+ tb_id INTEGER NOT NULL
+ PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (START WITH 1),
-- foreign key also in MySQL
tb_page INTEGER,
-- REFERENCES page(page_id) ON DELETE CASCADE,
- tb_title VARCHAR(255) NOT NULL,
- tb_url CLOB(64K) INLINE LENGTH 4096 NOT NULL,
+ tb_title VARCHAR(255) NOT NULL,
+ tb_url CLOB(64K) INLINE LENGTH 4096 NOT NULL,
tb_ex CLOB(64K) INLINE LENGTH 4096,
tb_name VARCHAR(255)
);
-CREATE INDEX trackback_page ON trackbacks (tb_page);
+CREATE INDEX trackback_page
+ ON trackbacks (tb_page);
CREATE TABLE job (
- job_id BIGINT NOT NULL PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (START WITH 1),
- --PRIMARY KEY DEFAULT nextval('job_job_id_seq'),
- job_cmd VARCHAR(255) NOT NULL,
- job_namespace SMALLINT NOT NULL,
- job_title VARCHAR(255) NOT NULL,
- job_params CLOB(64K) INLINE LENGTH 4096 NOT NULL
+ job_id BIGINT NOT NULL
+ PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (START WITH 1),
+ job_cmd VARCHAR(255) NOT NULL,
+ job_namespace SMALLINT NOT NULL,
+ job_title VARCHAR(255) NOT NULL,
+ job_params CLOB(64K) INLINE LENGTH 4096 NOT NULL
);
-CREATE INDEX job_cmd_namespace_title ON job (job_cmd, job_namespace, job_title);
+CREATE INDEX job_cmd_namespace_title
+ ON job (job_cmd, job_namespace, job_title);
+
--TODO
---CREATE FUNCTION add_interwiki (TEXT,INT,SMALLINT) RETURNS INT LANGUAGE SQL AS
+--CREATE FUNCTION add_interwiki (TEXT, INT, SMALLINT) RETURNS INT LANGUAGE SQL AS
--$mw$
-- INSERT INTO interwiki (iw_prefix, iw_url, iw_local) VALUES ($1,$2,$3);
-- SELECT 1;
--$mw$;
+
+
-- hack implementation
-- should be replaced with OmniFind, Contains(), etc
CREATE TABLE searchindex (
- si_page BIGINT NOT NULL,
- si_title varchar(255) NOT NULL default '',
- si_text clob NOT NULL
+ si_page BIGINT NOT NULL,
+ si_title VARCHAR(255) NOT NULL DEFAULT '',
+ si_text CLOB NOT NULL
);
+
+
-- This table is not used unless profiling is turned on
CREATE TABLE profiling (
- pf_count INTEGER NOT NULL DEFAULT 0,
- pf_time NUMERIC(18,10) NOT NULL DEFAULT 0,
- pf_memory NUMERIC(18,10) NOT NULL DEFAULT 0,
- pf_name VARCHAR(255) NOT NULL,
- pf_server VARCHAR(255)
+ pf_count INTEGER NOT NULL DEFAULT 0,
+ pf_time NUMERIC(18,10) NOT NULL DEFAULT 0,
+ pf_memory NUMERIC(18,10) NOT NULL DEFAULT 0,
+ pf_name VARCHAR(255) NOT NULL,
+ pf_server VARCHAR(255)
);
-CREATE UNIQUE INDEX pf_name_server ON profiling (pf_name, pf_server);
+CREATE UNIQUE INDEX pf_name_server
+ ON profiling (pf_name, pf_server);
+
+
CREATE TABLE protected_titles (
- pt_namespace INTEGER NOT NULL,
- pt_title VARCHAR(255) NOT NULL,
+ pt_namespace INTEGER NOT NULL,
+ pt_title VARCHAR(255) NOT NULL,
pt_user BIGINT NOT NULL DEFAULT 0,
-- REFERENCES user(user_id) ON DELETE SET NULL,
pt_reason VARCHAR(1024),
pt_timestamp TIMESTAMP(3) NOT NULL,
- pt_expiry TIMESTAMP(3) ,
- pt_create_perm VARCHAR(60) NOT NULL DEFAULT ''
+ pt_expiry TIMESTAMP(3),
+ pt_create_perm VARCHAR(60) NOT NULL DEFAULT ''
);
-CREATE UNIQUE INDEX protected_titles_unique ON protected_titles(pt_namespace, pt_title);
+CREATE UNIQUE INDEX protected_titles_unique
+ ON protected_titles (pt_namespace, pt_title);
CREATE TABLE updatelog (
- ul_key VARCHAR(255) NOT NULL PRIMARY KEY
+ ul_key VARCHAR(255) NOT NULL
+ PRIMARY KEY
);
+
CREATE TABLE category (
- cat_id INTEGER NOT NULL PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (START WITH 1),
- --PRIMARY KEY DEFAULT nextval('category_id_seq'),
- cat_title VARCHAR(255) NOT NULL,
- cat_pages INTEGER NOT NULL DEFAULT 0,
- cat_subcats INTEGER NOT NULL DEFAULT 0,
- cat_files INTEGER NOT NULL DEFAULT 0,
- cat_hidden SMALLINT NOT NULL DEFAULT 0
+ cat_id INTEGER NOT NULL
+ PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (START WITH 1),
+ cat_title VARCHAR(255) NOT NULL,
+ cat_pages INTEGER NOT NULL DEFAULT 0,
+ cat_subcats INTEGER NOT NULL DEFAULT 0,
+ cat_files INTEGER NOT NULL DEFAULT 0,
+ cat_hidden SMALLINT NOT NULL DEFAULT 0
);
-CREATE UNIQUE INDEX category_title ON category(cat_title);
-CREATE INDEX category_pages ON category(cat_pages);
+CREATE UNIQUE INDEX category_title
+ ON category (cat_title);
+CREATE INDEX category_pages
+ ON category (cat_pages);
+
--- added for 1.15
-- A table to track tags for revisions, logs and recent changes.
CREATE TABLE change_tag (
- ct_rc_id INTEGER,
- ct_log_id INTEGER,
- ct_rev_id INTEGER,
- ct_tag varchar(255) NOT NULL,
- ct_params CLOB(64K) INLINE LENGTH 4096
-);
-CREATE UNIQUE INDEX change_tag_rc_tag ON change_tag (ct_rc_id,ct_tag);
-CREATE UNIQUE INDEX change_tag_log_tag ON change_tag (ct_log_id,ct_tag);
-CREATE UNIQUE INDEX change_tag_rev_tag ON change_tag (ct_rev_id,ct_tag);
+ ct_rc_id INTEGER,
+ ct_log_id INTEGER,
+ ct_rev_id INTEGER,
+ ct_tag VARCHAR(255) NOT NULL,
+ ct_params CLOB(64K) INLINE LENGTH 4096
+);
+CREATE UNIQUE INDEX change_tag_rc_tag
+ ON change_tag (ct_rc_id, ct_tag);
+CREATE UNIQUE INDEX change_tag_log_tag
+ ON change_tag (ct_log_id, ct_tag);
+CREATE UNIQUE INDEX change_tag_rev_tag
+ ON change_tag (ct_rev_id, ct_tag);
-- Covering index, so we can pull all the info only out of the index.
-CREATE INDEX change_tag_tag_id ON change_tag (ct_tag,ct_rc_id,ct_rev_id,ct_log_id);
+CREATE INDEX change_tag_tag_id
+ ON change_tag (ct_tag, ct_rc_id, ct_rev_id, ct_log_id);
+
-- Rollup table to pull a LIST of tags simply
CREATE TABLE tag_summary (
- ts_rc_id INTEGER,
+ ts_rc_id INTEGER,
ts_log_id INTEGER,
ts_rev_id INTEGER,
- ts_tags CLOB(64K) INLINE LENGTH 4096 NOT NULL
+ ts_tags CLOB(64K) INLINE LENGTH 4096 NOT NULL
);
-CREATE UNIQUE INDEX tag_summary_rc_id ON tag_summary (ts_rc_id);
-CREATE UNIQUE INDEX tag_summary_log_id ON tag_summary (ts_log_id);
-CREATE UNIQUE INDEX tag_summary_rev_id ON tag_summary (ts_rev_id);
+CREATE UNIQUE INDEX tag_summary_rc_id
+ ON tag_summary (ts_rc_id);
+CREATE UNIQUE INDEX tag_summary_log_id
+ ON tag_summary (ts_log_id);
+CREATE UNIQUE INDEX tag_summary_rev_id
+ ON tag_summary (ts_rev_id);
+
CREATE TABLE valid_tag (
- vt_tag varchar(255) NOT NULL PRIMARY KEY
+ vt_tag VARCHAR(255) NOT NULL
+ PRIMARY KEY
);
+
+
--
-- User preferences and perhaps other fun stuff. :)
-- Replaces the old user.user_options blob, with a couple nice properties:
--
--- 1) We only store non-default settings, so changes to the defaults
+-- 1) We only store non-default settings, so changes to the defaults
-- are now reflected for everybody, not just new accounts.
-- 2) We can more easily do bulk lookups, statistics, or modifications of
-- saved options since it's a sane table structure.
--
CREATE TABLE user_properties (
-- Foreign key to user.user_id
- up_user BIGINT NOT NULL,
-
+ up_user BIGINT NOT NULL,
-- Name of the option being saved. This is indexed for bulk lookup.
- up_property VARCHAR(32) FOR BIT DATA NOT NULL,
-
+ up_property VARCHAR(255) FOR BIT DATA NOT NULL,
-- Property value as a string.
- up_value CLOB(64K) INLINE LENGTH 4096
+ up_value CLOB(64K) INLINE LENGTH 4096
);
-CREATE UNIQUE INDEX user_properties_user_property ON user_properties (up_user,up_property);
-CREATE INDEX user_properties_property ON user_properties (up_property);
+CREATE UNIQUE INDEX user_properties_user_property
+ ON user_properties (up_user, up_property);
+CREATE INDEX user_properties_property
+ ON user_properties (up_property);
CREATE TABLE log_search (
-- The type of ID (rev ID, log ID, rev TIMESTAMP(3), username)
- ls_field VARCHAR(32) FOR BIT DATA NOT NULL,
+ ls_field VARCHAR(32) FOR BIT DATA NOT NULL,
-- The value of the ID
- ls_value varchar(255) NOT NULL,
+ ls_value VARCHAR(255) NOT NULL,
-- Key to log_id
- ls_log_id BIGINT NOT NULL default 0
+ ls_log_id BIGINT NOT NULL DEFAULT 0
);
-CREATE UNIQUE INDEX ls_field_val ON log_search (ls_field,ls_value,ls_log_id);
-CREATE INDEX ls_log_id ON log_search (ls_log_id);
+CREATE UNIQUE INDEX ls_field_val
+ ON log_search (ls_field, ls_value, ls_log_id);
+CREATE INDEX ls_log_id
+ ON log_search (ls_log_id);
+
+
-- Table for storing localisation data
CREATE TABLE l10n_cache (
-- Language code
- lc_lang VARCHAR(32) NOT NULL,
+ lc_lang VARCHAR(32) NOT NULL,
-- Cache key
- lc_key VARCHAR(255) NOT NULL,
+ lc_key VARCHAR(255) NOT NULL,
-- Value
- lc_value CLOB(16M) INLINE LENGTH 4096 NOT NULL
+ lc_value CLOB(16M) INLINE LENGTH 4096 NOT NULL
);
-CREATE INDEX lc_lang_key ON l10n_cache (lc_lang, lc_key);
+CREATE INDEX lc_lang_key
+ ON l10n_cache (lc_lang, lc_key);
-CREATE TABLE "MSG_RESOURCE_LINKS"
-(
-"MRL_RESOURCE" VARCHAR(255) FOR BIT DATA NOT NULL ,
-"MRL_MESSAGE" VARCHAR(255) FOR BIT DATA NOT NULL
-)
-;
-CREATE UNIQUE INDEX "UQ61_MSG_RESOURCE_LINKS" ON "MSG_RESOURCE_LINKS"
+CREATE TABLE msg_resource_links
(
-"MRL_MESSAGE",
-"MRL_RESOURCE"
-)
-ALLOW REVERSE SCANS
-;
+ mrl_resource VARCHAR(255) FOR BIT DATA NOT NULL,
+ mrl_message VARCHAR(255) FOR BIT DATA NOT NULL
+);
+CREATE UNIQUE INDEX uq61_msg_resource_links
+ ON msg_resource_links (mrl_message, mrl_resource);
+-- All DB2 indexes default to allowing reverse scans
-CREATE TABLE "MSG_RESOURCE"
-(
-"MR_RESOURCE" VARCHAR(255) FOR BIT DATA NOT NULL ,
-"MR_LANG" VARCHAR(32) FOR BIT DATA NOT NULL ,
-"MR_BLOB" BLOB NOT NULL ,
-"MR_TIMESTAMP" TIMESTAMP(3) NOT NULL
-)
-;
-CREATE UNIQUE INDEX "UQ81_MSG_RESOURCE" ON "MSG_RESOURCE"
+
+CREATE TABLE msg_resource
(
-"MR_RESOURCE"
-,"MR_LANG"
-)
-ALLOW REVERSE SCANS
-;
+ mr_resource VARCHAR(255) FOR BIT DATA NOT NULL,
+ mr_lang VARCHAR(32) FOR BIT DATA NOT NULL,
+ mr_blob CLOB(64K) INLINE LENGTH 4096 NOT NULL,
+ mr_timestamp TIMESTAMP(3) NOT NULL
+);
+CREATE UNIQUE INDEX uq81_msg_resource
+ ON msg_resource (mr_resource, mr_lang);
+-- All DB2 indexes default to allowing reverse scans
+
+
+
+CREATE TABLE module_deps (
+ md_module VARCHAR(255) FOR BIT DATA NOT NULL,
+ md_skin VARCHAR(32) FOR BIT DATA NOT NULL,
+ md_deps CLOB(16M) INLINE LENGTH 4096 NOT NULL
+);
+CREATE UNIQUE INDEX uq96_module_deps
+ ON module_deps (md_module, md_skin);
+-- All DB2 indexes default to allowing reverse scans
-CREATE TABLE "MODULE_DEPS" (
-"MD_MODULE" VARCHAR(255) FOR BIT DATA NOT NULL ,
-"MD_SKIN" VARCHAR(32) FOR BIT DATA NOT NULL ,
-"MD_DEPS" CLOB(16M) INLINE LENGTH 4096 NOT NULL
-)
-;
-CREATE UNIQUE INDEX "UQ96_MODULE_DEPS" ON "MODULE_DEPS"
-(
-"MD_MODULE"
-,"MD_SKIN"
-)
-ALLOW REVERSE SCANS
-;
-CREATE TABLE "IWLINKS"
+CREATE TABLE iwlinks
(
-"IWL_FROM" INT NOT NULL ,
-"IWL_PREFIX" VARCHAR(20) FOR BIT DATA NOT NULL ,
-"IWL_TITLE" VARCHAR(255) FOR BIT DATA NOT NULL
-)
-;
+ iwl_from INTEGER NOT NULL,
+ iwl_prefix VARCHAR(20) FOR BIT DATA NOT NULL,
+ iwl_title VARCHAR(255) FOR BIT DATA NOT NULL
+);
+
+
+
+--
+-- Store information about newly uploaded files before they're
+-- moved into the actual filestore
+--
+CREATE TABLE uploadstash (
+ us_id BIGINT NOT NULL
+ PRIMARY KEY GENERATED BY DEFAULT AS IDENTITY (START WITH 1),
+ -- the user who uploaded the file.
+ us_user BIGINT NOT NULL,
+ -- file key. this is how applications actually search for the file.
+ -- this might go away, or become the primary key.
+ us_key VARCHAR(255) NOT NULL,
+ -- the original path
+ us_orig_path VARCHAR(255) NOT NULL,
+ -- the temporary path at which the file is actually stored
+ us_path VARCHAR(255) NOT NULL,
+ -- which type of upload the file came from (sometimes)
+ us_source_type VARCHAR(50),
+ -- the date/time on which the file was added
+ us_timestamp TIMESTAMP(3) NOT NULL,
+ us_status VARCHAR(50) NOT NULL,
+ -- file properties from File::getPropsFromPath. these may prove unnecessary.
+ --
+ us_size BIGINT NOT NULL,
+ -- this hash comes from File::sha1Base36(), and is 31 characters
+ us_sha1 VARCHAR(31) NOT NULL,
+ us_mime VARCHAR(255),
+ -- Media type as defined by the MEDIATYPE_xxx constants, should duplicate definition in the image table
+ us_media_type VARCHAR(30)
+ CONSTRAINT my_constraint
+ CHECK (
+ us_media_type in (
+ 'UNKNOWN', 'BITMAP', 'DRAWING', 'AUDIO', 'VIDEO', 'MULTIMEDIA',
+ 'OFFICE', 'TEXT', 'EXECUTABLE', 'ARCHIVE'
+ )
+ ) DEFAULT NULL,
+ -- image-specific properties
+ us_image_width BIGINT,
+ us_image_height BIGINT,
+ us_image_bits INTEGER
+);
+-- sometimes there's a delete for all of a user's stuff.
+CREATE INDEX us_user
+ ON uploadstash (us_user);
+-- pick out files by key, enforce key uniqueness
+CREATE UNIQUE INDEX us_key
+ ON uploadstash (us_key);
+-- the abandoned upload cleanup script needs this
+CREATE INDEX us_timestamp
+ ON uploadstash (us_timestamp);
+
+
+
+-- Stores the groups the user has once belonged to.
+-- The user may still belong to these groups. Check user_groups.
+CREATE TABLE user_former_groups (
+ ufg_user BIGINT NOT NULL DEFAULT 0,
+ ufg_group VARCHAR(16) FOR BIT DATA NOT NULL
+);
+CREATE UNIQUE INDEX ufg_user_group
+ ON user_former_groups (ufg_user, ufg_group);
+
+
+
+-- Table for holding configuration changes
+CREATE TABLE config (
+ cf_name VARCHAR(255) NOT NULL
+ PRIMARY KEY,
+ cf_value CLOB(64K) INLINE LENGTH 4096 NOT NULL
+);
+
diff --git a/maintenance/importDump.php b/maintenance/importDump.php
index 099b7895..2ad0872f 100644
--- a/maintenance/importDump.php
+++ b/maintenance/importDump.php
@@ -57,12 +57,13 @@ TEXT;
$this->stderr = fopen( "php://stderr", "wt" );
$this->addOption( 'report',
'Report position and speed after every n pages processed', false, true );
- $this->addOption( 'namespaces',
+ $this->addOption( 'namespaces',
'Import only the pages from namespaces belonging to the list of ' .
'pipe-separated namespace names or namespace indexes', false, true );
$this->addOption( 'dry-run', 'Parse dump without actually importing pages' );
$this->addOption( 'debug', 'Output extra verbose debug information' );
$this->addOption( 'uploads', 'Process file upload data if included (experimental)' );
+		$this->addOption( 'no-updates', 'Disable link table updates. This is faster but leaves the wiki in an inconsistent state' );
$this->addOption( 'image-base-path', 'Import files from a specified path', false, true );
$this->addArg( 'file', 'Dump file to import [else use stdin]', false );
}
@@ -73,6 +74,10 @@ TEXT;
}
$this->reportingInterval = intval( $this->getOption( 'report', 100 ) );
+ if ( !$this->reportingInterval ) {
+ $this->reportingInterval = 100; // avoid division by zero
+ }
+
$this->dryRun = $this->hasOption( 'dry-run' );
$this->uploads = $this->hasOption( 'uploads' ); // experimental!
if ( $this->hasOption( 'image-base-path' ) ) {
@@ -112,6 +117,10 @@ TEXT;
$this->error( "Unknown namespace text / index specified: $namespace", true );
}
+ /**
+ * @param $obj Title|Revision
+ * @return bool
+ */
private function skippedNamespace( $obj ) {
if ( $obj instanceof Title ) {
$ns = $obj->getNamespace();
@@ -130,6 +139,10 @@ TEXT;
$this->pageCount++;
}
+ /**
+ * @param $rev Revision
+ * @return mixed
+ */
function handleRevision( $rev ) {
$title = $rev->getTitle();
if ( !$title ) {
@@ -149,6 +162,10 @@ TEXT;
}
}
+ /**
+ * @param $revision Revision
+ * @return bool
+ */
function handleUpload( $revision ) {
if ( $this->uploads ) {
if ( $this->skippedNamespace( $revision ) ) {
@@ -186,7 +203,7 @@ TEXT;
}
function showReport() {
- if ( $this->mQuiet ) {
+ if ( !$this->mQuiet ) {
$delta = wfTime() - $this->startTime;
if ( $delta ) {
$rate = sprintf( "%.2f", $this->pageCount / $delta );
@@ -204,7 +221,7 @@ TEXT;
}
wfWaitForSlaves();
// XXX: Don't let deferred jobs array get absurdly large (bug 24375)
- wfDoUpdates( 'commit' );
+ DeferredUpdates::doUpdates( 'commit' );
}
function progress( $string ) {
@@ -214,11 +231,9 @@ TEXT;
function importFromFile( $filename ) {
if ( preg_match( '/\.gz$/', $filename ) ) {
$filename = 'compress.zlib://' . $filename;
- }
- elseif ( preg_match( '/\.bz2$/', $filename ) ) {
+ } elseif ( preg_match( '/\.bz2$/', $filename ) ) {
$filename = 'compress.bzip2://' . $filename;
- }
- elseif ( preg_match( '/\.7z$/', $filename ) ) {
+ } elseif ( preg_match( '/\.7z$/', $filename ) ) {
$filename = 'mediawiki.compress.7z://' . $filename;
}
@@ -243,6 +258,9 @@ TEXT;
if( $this->hasOption( 'debug' ) ) {
$importer->setDebug( true );
}
+ if ( $this->hasOption( 'no-updates' ) ) {
+ $importer->setNoUpdates( true );
+ }
$importer->setPageCallback( array( &$this, 'reportPage' ) );
$this->importCallback = $importer->setRevisionCallback(
array( &$this, 'handleRevision' ) );
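With these additions, --no-updates disables link table updates during the import (the tables can be rebuilt afterwards with refreshLinks.php), and a report interval that evaluates to zero now falls back to 100 to avoid a division by zero. A hedged usage example (the dump file name is illustrative):

    php maintenance/importDump.php --no-updates --report 100 dump.xml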
diff --git a/maintenance/importImages.php b/maintenance/importImages.php
index f3b10ea9..bd077ff9 100644
--- a/maintenance/importImages.php
+++ b/maintenance/importImages.php
@@ -40,255 +40,265 @@ $processed = $added = $ignored = $skipped = $overwritten = $failed = 0;
echo( "Import Images\n\n" );
# Need a path
-if ( count( $args ) > 0 ) {
-
- $dir = $args[0];
-
- # Check Protection
- if ( isset( $options['protect'] ) && isset( $options['unprotect'] ) )
- die( "Cannot specify both protect and unprotect. Only 1 is allowed.\n" );
-
- if ( isset( $options['protect'] ) && $options['protect'] == 1 )
- die( "You must specify a protection option.\n" );
-
- # Prepare the list of allowed extensions
- global $wgFileExtensions;
- $extensions = isset( $options['extensions'] )
- ? explode( ',', strtolower( $options['extensions'] ) )
- : $wgFileExtensions;
-
- # Search the path provided for candidates for import
- $files = findFiles( $dir, $extensions );
-
- # Initialise the user for this operation
- $user = isset( $options['user'] )
- ? User::newFromName( $options['user'] )
- : User::newFromName( 'Maintenance script' );
- if ( !$user instanceof User )
- $user = User::newFromName( 'Maintenance script' );
- $wgUser = $user;
-
- # Get block check. If a value is given, this specified how often the check is performed
- if ( isset( $options['check-userblock'] ) ) {
- if ( !$options['check-userblock'] ) $checkUserBlock = 1;
- else $checkUserBlock = (int)$options['check-userblock'];
+if ( count( $args ) == 0 ) {
+ showUsage();
+}
+
+$dir = $args[0];
+
+# Check Protection
+if ( isset( $options['protect'] ) && isset( $options['unprotect'] ) ) {
+ die( "Cannot specify both protect and unprotect. Only 1 is allowed.\n" );
+}
+
+if ( isset( $options['protect'] ) && $options['protect'] == 1 ) {
+ die( "You must specify a protection option.\n" );
+}
+
+# Prepare the list of allowed extensions
+global $wgFileExtensions;
+$extensions = isset( $options['extensions'] )
+ ? explode( ',', strtolower( $options['extensions'] ) )
+ : $wgFileExtensions;
+
+# Search the path provided for candidates for import
+$files = findFiles( $dir, $extensions );
+
+# Initialise the user for this operation
+$user = isset( $options['user'] )
+ ? User::newFromName( $options['user'] )
+ : User::newFromName( 'Maintenance script' );
+if ( !$user instanceof User ) {
+ $user = User::newFromName( 'Maintenance script' );
+}
+$wgUser = $user;
+
+# Get block check. If a value is given, this specified how often the check is performed
+if ( isset( $options['check-userblock'] ) ) {
+ if ( !$options['check-userblock'] ) {
+ $checkUserBlock = 1;
} else {
- $checkUserBlock = false;
+ $checkUserBlock = (int)$options['check-userblock'];
}
+} else {
+ $checkUserBlock = false;
+}
- # Get --from
- $from = @$options['from'];
+# Get --from
+$from = @$options['from'];
- # Get sleep time.
- $sleep = @$options['sleep'];
- if ( $sleep ) $sleep = (int)$sleep;
+# Get sleep time.
+$sleep = @$options['sleep'];
+if ( $sleep ) {
+ $sleep = (int)$sleep;
+}
- # Get limit number
- $limit = @$options['limit'];
- if ( $limit ) $limit = (int)$limit;
+# Get limit number
+$limit = @$options['limit'];
+if ( $limit ) {
+ $limit = (int)$limit;
+}
- # Get the upload comment. Provide a default one in case there's no comment given.
- $comment = 'Importing image file';
+# Get the upload comment. Provide a default one in case there's no comment given.
+$comment = 'Importing image file';
- if ( isset( $options['comment-file'] ) ) {
- $comment = file_get_contents( $options['comment-file'] );
- if ( $comment === false || $comment === NULL ) {
- die( "failed to read comment file: {$options['comment-file']}\n" );
- }
- }
- elseif ( isset( $options['comment'] ) ) {
- $comment = $options['comment'];
+if ( isset( $options['comment-file'] ) ) {
+ $comment = file_get_contents( $options['comment-file'] );
+ if ( $comment === false || $comment === null ) {
+ die( "failed to read comment file: {$options['comment-file']}\n" );
}
+} elseif ( isset( $options['comment'] ) ) {
+ $comment = $options['comment'];
+}
+
+$commentExt = isset( $options['comment-ext'] ) ? $options['comment-ext'] : false;
- $commentExt = isset( $options['comment-ext'] ) ? $options['comment-ext'] : false;
+# Get the license specifier
+$license = isset( $options['license'] ) ? $options['license'] : '';
- # Get the license specifier
- $license = isset( $options['license'] ) ? $options['license'] : '';
+# Batch "upload" operation
+$count = count( $files );
+if ( $count > 0 ) {
- # Batch "upload" operation
- if ( ( $count = count( $files ) ) > 0 ) {
+ foreach ( $files as $file ) {
+ $base = wfBaseName( $file );
- foreach ( $files as $file ) {
- $base = wfBaseName( $file );
+ # Validate a title
+ $title = Title::makeTitleSafe( NS_FILE, $base );
+ if ( !is_object( $title ) ) {
+ echo( "{$base} could not be imported; a valid title cannot be produced\n" );
+ continue;
+ }
- # Validate a title
- $title = Title::makeTitleSafe( NS_FILE, $base );
- if ( !is_object( $title ) ) {
- echo( "{$base} could not be imported; a valid title cannot be produced\n" );
+ if ( $from ) {
+ if ( $from == $title->getDBkey() ) {
+ $from = null;
+ } else {
+ $ignored++;
continue;
}
+ }
- if ( $from ) {
- if ( $from == $title->getDBkey() ) {
- $from = NULL;
- } else {
- $ignored++;
- continue;
- }
+ if ( $checkUserBlock && ( ( $processed % $checkUserBlock ) == 0 ) ) {
+ $user->clearInstanceCache( 'name' ); // reload from DB!
+ if ( $user->isBlocked() ) {
+ echo( $user->getName() . " was blocked! Aborting.\n" );
+ break;
}
+ }
- if ( $checkUserBlock && ( ( $processed % $checkUserBlock ) == 0 ) ) {
- $user->clearInstanceCache( 'name' ); // reload from DB!
- if ( $user->isBlocked() ) {
- echo( $user->getName() . " was blocked! Aborting.\n" );
- break;
- }
+ # Check existence
+ $image = wfLocalFile( $title );
+ if ( $image->exists() ) {
+ if ( isset( $options['overwrite'] ) ) {
+ echo( "{$base} exists, overwriting..." );
+ $svar = 'overwritten';
+ } else {
+ echo( "{$base} exists, skipping\n" );
+ $skipped++;
+ continue;
}
+ } else {
+ if ( isset( $options['skip-dupes'] ) ) {
+ $repo = $image->getRepo();
+ $sha1 = File::sha1Base36( $file ); # XXX: we end up calculating this again when actually uploading. that sucks.
- # Check existence
- $image = wfLocalFile( $title );
- if ( $image->exists() ) {
- if ( isset( $options['overwrite'] ) ) {
- echo( "{$base} exists, overwriting..." );
- $svar = 'overwritten';
- } else {
- echo( "{$base} exists, skipping\n" );
+ $dupes = $repo->findBySha1( $sha1 );
+
+ if ( $dupes ) {
+ echo( "{$base} already exists as " . $dupes[0]->getName() . ", skipping\n" );
$skipped++;
continue;
}
- } else {
- if ( isset( $options['skip-dupes'] ) ) {
- $repo = $image->getRepo();
- $sha1 = File::sha1Base36( $file ); # XXX: we end up calculating this again when actually uploading. that sucks.
+ }
- $dupes = $repo->findBySha1( $sha1 );
+ echo( "Importing {$base}..." );
+ $svar = 'added';
+ }
- if ( $dupes ) {
- echo( "{$base} already exists as " . $dupes[0]->getName() . ", skipping\n" );
- $skipped++;
- continue;
- }
+ if ( isset( $options['source-wiki-url'] ) ) {
+ /* find comment text directly from source wiki, through MW's API */
+ $real_comment = getFileCommentFromSourceWiki( $options['source-wiki-url'], $base );
+ if ( $real_comment === false )
+ $commentText = $comment;
+ else
+ $commentText = $real_comment;
+
+ /* find user directly from source wiki, through MW's API */
+ $real_user = getFileUserFromSourceWiki( $options['source-wiki-url'], $base );
+ if ( $real_user === false ) {
+ $wgUser = $user;
+ } else {
+ $wgUser = User::newFromName( $real_user );
+ if ( $wgUser === false ) {
+ # user does not exist in target wiki
+ echo ( "failed: user '$real_user' does not exist in target wiki." );
+ continue;
}
-
- echo( "Importing {$base}..." );
- $svar = 'added';
}
-
- if ( isset( $options['source-wiki-url'] ) ) {
- /* find comment text directly from source wiki, through MW's API */
- $real_comment = getFileCommentFromSourceWiki( $options['source-wiki-url'], $base );
- if ( $real_comment === false )
- $commentText = $comment;
- else
- $commentText = $real_comment;
-
- /* find user directly from source wiki, through MW's API */
- $real_user = getFileUserFromSourceWiki( $options['source-wiki-url'], $base );
- if ( $real_user === false ) {
- $wgUser = $user;
+ } else {
+ # Find comment text
+ $commentText = false;
+
+ if ( $commentExt ) {
+ $f = findAuxFile( $file, $commentExt );
+ if ( !$f ) {
+ echo( " No comment file with extension {$commentExt} found for {$file}, using default comment. " );
} else {
- $wgUser = User::newFromName( $real_user );
- if ( $wgUser === false ) {
- # user does not exist in target wiki
- echo ( "failed: user '$real_user' does not exist in target wiki." );
- continue;
+ $commentText = file_get_contents( $f );
+ if ( !$commentText ) {
+ echo( " Failed to load comment file {$f}, using default comment. " );
}
}
- } else {
- # Find comment text
- $commentText = false;
-
- if ( $commentExt ) {
- $f = findAuxFile( $file, $commentExt );
- if ( !$f ) {
- echo( " No comment file with extension {$commentExt} found for {$file}, using default comment. " );
- } else {
- $commentText = file_get_contents( $f );
- if ( !$f ) {
- echo( " Failed to load comment file {$f}, using default comment. " );
- }
- }
- }
-
- if ( !$commentText ) {
- $commentText = $comment;
- }
}
+ if ( !$commentText ) {
+ $commentText = $comment;
+ }
+ }
- # Import the file
- if ( isset( $options['dry'] ) ) {
- echo( " publishing {$file} by '" . $wgUser->getName() . "', comment '$commentText'... " );
- } else {
- $archive = $image->publish( $file );
- if ( !$archive->isGood() ) {
- echo( "failed. (" .
- $archive->getWikiText() .
- ")\n" );
- $failed++;
- continue;
- }
+ # Import the file
+ if ( isset( $options['dry'] ) ) {
+ echo( " publishing {$file} by '" . $wgUser->getName() . "', comment '$commentText'... " );
+ } else {
+ $archive = $image->publish( $file );
+ if ( !$archive->isGood() ) {
+ echo( "failed. (" .
+ $archive->getWikiText() .
+ ")\n" );
+ $failed++;
+ continue;
}
+ }
+
+ if ( isset( $options['dry'] ) ) {
+ echo( "done.\n" );
+ } elseif ( $image->recordUpload( $archive->value, $commentText, $license ) ) {
+ # We're done!
+ echo( "done.\n" );
$doProtect = false;
- $restrictions = array();
global $wgRestrictionLevels;
$protectLevel = isset( $options['protect'] ) ? $options['protect'] : null;
if ( $protectLevel && in_array( $protectLevel, $wgRestrictionLevels ) ) {
- $restrictions['move'] = $protectLevel;
- $restrictions['edit'] = $protectLevel;
- $doProtect = true;
+ $doProtect = true;
}
if ( isset( $options['unprotect'] ) ) {
- $restrictions['move'] = '';
- $restrictions['edit'] = '';
- $doProtect = true;
+ $protectLevel = '';
+ $doProtect = true;
}
+ if ( $doProtect ) {
+ # Protect the file
+ echo "\nWaiting for slaves...\n";
+ // Wait for slaves.
+ sleep( 2.0 ); # Why this sleep?
+ wfWaitForSlaves();
- if ( isset( $options['dry'] ) ) {
- echo( "done.\n" );
- } elseif ( $image->recordUpload( $archive->value, $commentText, $license ) ) {
- # We're done!
- echo( "done.\n" );
- if ( $doProtect ) {
- # Protect the file
- $article = new Article( $title );
- echo "\nWaiting for slaves...\n";
- // Wait for slaves.
- sleep( 2.0 ); # Why this sleep?
- wfWaitForSlaves();
-
- echo( "\nSetting image restrictions ... " );
- if ( $article->updateRestrictions( $restrictions ) )
- echo( "done.\n" );
- else
- echo( "failed.\n" );
- }
+ echo( "\nSetting image restrictions ... " );
- } else {
- echo( "failed. (at recordUpload stage)\n" );
- $svar = 'failed';
+ $cascade = false;
+ $restrictions = array();
+ foreach( $title->getRestrictionTypes() as $type ) {
+ $restrictions[$type] = $protectLevel;
+ }
+
+ $page = WikiPage::factory( $title );
+ $status = $page->doUpdateRestrictions( $restrictions, array(), $cascade, '', $user );
+ echo( ( $status->isOK() ? 'done' : 'failed' ) . "\n" );
}
- $$svar++;
- $processed++;
+ } else {
+ echo( "failed. (at recordUpload stage)\n" );
+ $svar = 'failed';
+ }
- if ( $limit && $processed >= $limit )
- break;
+ $$svar++;
+ $processed++;
- if ( $sleep )
- sleep( $sleep );
+ if ( $limit && $processed >= $limit ) {
+ break;
}
- # Print out some statistics
- echo( "\n" );
- foreach ( array( 'count' => 'Found', 'limit' => 'Limit', 'ignored' => 'Ignored',
- 'added' => 'Added', 'skipped' => 'Skipped', 'overwritten' => 'Overwritten',
- 'failed' => 'Failed' ) as $var => $desc ) {
- if ( $$var > 0 )
- echo( "{$desc}: {$$var}\n" );
+ if ( $sleep ) {
+ sleep( $sleep );
}
+ }
- } else {
- echo( "No suitable files could be found for import.\n" );
+ # Print out some statistics
+ echo( "\n" );
+ foreach ( array( 'count' => 'Found', 'limit' => 'Limit', 'ignored' => 'Ignored',
+ 'added' => 'Added', 'skipped' => 'Skipped', 'overwritten' => 'Overwritten',
+ 'failed' => 'Failed' ) as $var => $desc ) {
+ if ( $$var > 0 )
+ echo( "{$desc}: {$$var}\n" );
}
} else {
- showUsage();
+ echo( "No suitable files could be found for import.\n" );
}
exit( 0 );
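
The protection handling rewritten in the hunk above no longer hard-codes 'move' and 'edit' restrictions on an Article; it asks the Title which restriction types apply and routes the change through WikiPage. Condensed, and with the argument roles spelled out as an interpretation rather than quoted from the diff, the new path is roughly:

	// Sketch of the new protection path; variable names as in the script.
	$cascade = false;
	$restrictions = array();
	foreach ( $title->getRestrictionTypes() as $type ) {
		$restrictions[$type] = $protectLevel;  // '' (set by --unprotect) clears protection
	}
	$page = WikiPage::factory( $title );
	// restrictions, per-type expiries (none here), cascade flag, log reason, acting user
	$status = $page->doUpdateRestrictions( $restrictions, array(), $cascade, '', $user );
	echo( ( $status->isOK() ? 'done' : 'failed' ) . "\n" );

The per-outcome counters ($added, $skipped, $overwritten, $failed and so on) are still bumped through a variable variable, $$svar++, which is what lets the statistics loop at the end print only the counters that are non-zero.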
diff --git a/maintenance/importSiteScripts.php b/maintenance/importSiteScripts.php
index 849c7b1b..0dc200ec 100644
--- a/maintenance/importSiteScripts.php
+++ b/maintenance/importSiteScripts.php
@@ -1,8 +1,11 @@
<?php
/**
- * Maintenance script to import all scripts in the MediaWiki namespace from a
+ * Maintenance script to import all scripts in the MediaWiki namespace from a
* local site.
+ * @file
+ * @ingroup Maintenance
*/
+
require_once( dirname( __FILE__ ) . '/Maintenance.php' );
class ImportSiteScripts extends Maintenance {
@@ -16,22 +19,29 @@ class ImportSiteScripts extends Maintenance {
public function execute() {
global $wgUser;
- $wgUser = User::newFromName( $this->getOption( 'username', 'ScriptImporter' ) );
+
+ $user = User::newFromName( $this->getOption( 'username', 'ScriptImporter' ) );
+ $wgUser = $user;
$baseUrl = $this->getArg( 1 );
$pageList = $this->fetchScriptList();
$this->output( 'Importing ' . count( $pageList ) . " pages\n" );
foreach ( $pageList as $page ) {
+ $title = Title::makeTitleSafe( NS_MEDIAWIKI, $page );
+ if ( !$title ) {
+ $this->error( "$page is an invalid title; it will not be imported\n" );
+ continue;
+ }
+
$this->output( "Importing $page\n" );
$url = wfAppendQuery( $baseUrl, array(
'action' => 'raw',
'title' => "MediaWiki:{$page}" ) );
$text = Http::get( $url );
-
- $title = Title::makeTitleSafe( NS_MEDIAWIKI, $page );
- $article = new Article( $title );
- $article->doEdit( $text, "Importing from $url", 0 );
+
+ $wikiPage = WikiPage::factory( $title );
+ $wikiPage->doEdit( $text, "Importing from $url", 0, false, $user );
}
}
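
Both this script and importTextFile.php below move page writes off the Article class: the title is validated up front, and the edit goes through WikiPage::factory() with the acting user passed explicitly. In isolation the pattern looks like this (variable names as in the scripts; reading the false argument as the base revision id is an interpretation, not something the diff states):

	// Old (pre-1.19) pattern, as removed in both diffs:
	//   $article = new Article( $title );
	//   $article->doEdit( $text, $summary, $flags );
	// New pattern:
	$title = Title::makeTitleSafe( NS_MEDIAWIKI, $page );
	if ( $title ) {
		$wikiPage = WikiPage::factory( $title );
		$wikiPage->doEdit( $text, $summary, $flags, false, $user );
	}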
diff --git a/maintenance/importTextFile.php b/maintenance/importTextFile.php
index b78ae039..ec9ff001 100644
--- a/maintenance/importTextFile.php
+++ b/maintenance/importTextFile.php
@@ -57,8 +57,8 @@ if ( count( $args ) < 1 || isset( $options['help'] ) ) {
$flags = 0 | ( isset( $options['norc'] ) ? EDIT_SUPPRESS_RC : 0 );
echo( "\nPerforming edit..." );
- $article = new Article( $title );
- $article->doEdit( $text, $comment, $flags );
+ $page = WikiPage::factory( $title );
+ $page->doEdit( $text, $comment, $flags, false, $user );
echo( "done.\n" );
} else {
diff --git a/maintenance/importUseModWiki.php b/maintenance/importUseModWiki.php
deleted file mode 100644
index a28d57a5..00000000
--- a/maintenance/importUseModWiki.php
+++ /dev/null
@@ -1,375 +0,0 @@
-<?php
-/**
- * Import data from a UseModWiki into a MediaWiki wiki
- * 2003-02-09 Brion VIBBER <brion@pobox.com>
- * Based loosely on Magnus's code from 2001-2002
- *
- * Updated limited version to get something working temporarily
- * 2003-10-09
- * Be sure to run the link & index rebuilding scripts!
- *
- * Some more munging for charsets etc
- * 2003-11-28
- *
- * Partial fix for pages starting with lowercase letters (??)
- * and CamelCase and /Subpage link conversion
- * 2004-11-17
- *
- * Rewrite output to create Special:Export format for import
- * instead of raw SQL. Should be 'future-proof' against future
- * schema changes.
- * 2005-03-14
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; either version 2 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License along
- * with this program; if not, write to the Free Software Foundation, Inc.,
- * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- * http://www.gnu.org/copyleft/gpl.html
- *
- * @todo document
- * @file
- * @ingroup Maintenance
- */
-
-require_once( "Maintenance.php" );
-
-class ImportUseModWiki extends Maintenance {
-
- private $encoding, $rootDirectory = '';
-
- /**
- * Field separators
- * @var String
- */
- private $FS1, $FS2, $FS3 = '';
-
- /**
- * @var Array
- */
- private $usercache, $nowiki = array();
-
- public function __construct() {
- parent::__construct();
- $this->mDescription = "Import pages from UseMod wikis";
- $this->addOption( 'encoding', 'Encoding of the imported text, default CP1252', false, true );
- /**
- * If UseModWiki's New File System is used:
- * $NewFS = 1; # 1 = new multibyte $FS, 0 = old $FS
- * Use "\xb3"; for the Old File System
- * Changed with UTF-8 UseModWiki
- * http://www.usemod.com/cgi-bin/wiki.pl?SupportForUtf8
- * http://www.usemod.com/cgi-bin/wiki.pl?WikiBugs/NewFieldSeparatorWronglyTreated
- * http://www.meatballwiki.org/wiki/WikiEngine#Q_amp_A
- */
- $this->addOption( 'separator', 'Field separator to use, default \x1E\xFF\xFE\x1E', false, true );
- $this->addArg( 'path', 'Path to your UseMod wiki' );
- }
-
- public function execute() {
- $this->rootDirectory = $this->getArg();
- $this->encoding = $this->getOption( 'encoding', 'CP1252' );
- $sep = $this->getOption( 'separator', "\x1E\xFF\xFE\x1E" );
- $this->FS1 = "{$sep}1";
- $this->FS2 = "{$sep}2";
- $this->FS3 = "{$sep}3";
-
- echo <<<XML
-<?xml version="1.0" encoding="UTF-8" ?>
-<mediawiki xmlns="http://www.mediawiki.org/xml/export-0.1/"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://www.mediawiki.org/xml/export-0.1/
- http://www.mediawiki.org/xml/export-0.1.xsd"
- version="0.1"
- xml:lang="en">
-<!-- generated by importUseModWiki.php -->
-
-XML;
- $letters = array(
- 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I',
- 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R',
- 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'other' );
- foreach ( $letters as $letter ) {
- $dir = "{$this->rootDirectory}/page/$letter";
- if ( is_dir( $dir ) )
- $this->importPageDirectory( $dir );
- }
- echo <<<XML
-</mediawiki>
-
-XML;
- }
-
- private function importPageDirectory( $dir, $prefix = "" ) {
- echo "\n<!-- Checking page directory " . $this->xmlCommentSafe( $dir ) . " -->\n";
- $mydir = opendir( $dir );
- while ( $entry = readdir( $mydir ) ) {
- $m = array();
- if ( preg_match( '/^(.+)\.db$/', $entry, $m ) ) {
- echo $this->importPage( $prefix . $m[1] );
- } else {
- if ( is_dir( "$dir/$entry" ) ) {
- if ( $entry != '.' && $entry != '..' ) {
- $this->importPageDirectory( "$dir/$entry", "$entry/" );
- }
- } else {
- echo "<!-- File '" . $this->xmlCommentSafe( $entry ) . "' doesn't seem to contain an article. Skipping. -->\n";
- }
- }
- }
- }
-
- private function useModFilename( $title ) {
- $c = substr( $title, 0, 1 );
- if ( preg_match( '/[A-Z]/i', $c ) ) {
- return strtoupper( $c ) . "/$title";
- }
- return "other/$title";
- }
-
- private function fetchPage( $title ) {
- $fname = $this->rootDirectory . "/page/" . $this->useModFilename( $title ) . ".db";
- if ( !file_exists( $fname ) ) {
- echo "Couldn't open file '$fname' for page '$title'.\n";
- die( -1 );
- }
-
- $page = $this->splitHash( $this->FS1, file_get_contents( $fname ) );
- $section = $this->splitHash( $this->FS2, $page["text_default"] );
- $text = $this->splitHash( $this->FS3, $section["data"] );
-
- return $this->array2object( array( "text" => $text["text"] , "summary" => $text["summary"] ,
- "minor" => $text["minor"] , "ts" => $section["ts"] ,
- "username" => $section["username"] , "host" => $section["host"] ) );
- }
-
- private function fetchKeptPages( $title ) {
- $fname = $this->rootDirectory . "/keep/" . $this->useModFilename( $title ) . ".kp";
- if ( !file_exists( $fname ) ) return array();
-
- $keptlist = explode( $this->FS1, file_get_contents( $fname ) );
- array_shift( $keptlist ); # Drop the junk at beginning of file
-
- $revisions = array();
- foreach ( $keptlist as $rev ) {
- $section = $this->splitHash( $this->FS2, $rev );
- $text = $this->splitHash( $this->FS3, $section["data"] );
- if ( $text["text"] && $text["minor"] != "" && ( $section["ts"] * 1 > 0 ) ) {
- array_push( $revisions, $this->array2object( array ( "text" => $text["text"] , "summary" => $text["summary"] ,
- "minor" => $text["minor"] , "ts" => $section["ts"] ,
- "username" => $section["username"] , "host" => $section["host"] ) ) );
- } else {
- echo "<!-- skipped a bad old revision -->\n";
- }
- }
- return $revisions;
- }
-
- private function splitHash( $sep , $str ) {
- $temp = explode ( $sep , $str ) ;
- $ret = array () ;
- for ( $i = 0; $i + 1 < count ( $temp ) ; $i++ ) {
- $ret[$temp[$i]] = $temp[++$i] ;
- }
- return $ret ;
- }
-
- private function checkUserCache( $name, $host ) {
- if ( $name ) {
- if ( in_array( $name, $this->usercache ) ) {
- $userid = $this->usercache[$name];
- } else {
- # If we haven't imported user accounts
- $userid = 0;
- }
- $username = str_replace( '_', ' ', $name );
- } else {
- $userid = 0;
- $username = $host;
- }
- return array( $userid, $username );
- }
-
- private function importPage( $title ) {
- echo "\n<!-- Importing page " . $this->xmlCommentSafe( $title ) . " -->\n";
- $page = $this->fetchPage( $title );
-
- $newtitle = $this->xmlsafe( str_replace( '_', ' ', $this->recodeText( $title ) ) );
-
- $munged = $this->mungeFormat( $page->text );
- if ( $munged != $page->text ) {
- /**
- * Save a *new* revision with the conversion, and put the
- * previous last version into the history.
- */
- $next = $this->array2object( array(
- 'text' => $munged,
- 'minor' => 1,
- 'username' => 'Conversion script',
- 'host' => '127.0.0.1',
- 'ts' => time(),
- 'summary' => 'link fix',
- ) );
- $revisions = array( $page, $next );
- } else {
- /**
- * Current revision:
- */
- $revisions = array( $page );
- }
- $xml = <<<XML
- <page>
- <title>$newtitle</title>
-
-XML;
-
- # History
- $revisions = array_merge( $revisions, $this->fetchKeptPages( $title ) );
- if ( count( $revisions ) == 0 ) {
- return NULL; // Was "$sql", which does not appear to be defined.
- }
-
- foreach ( $revisions as $rev ) {
- $text = $this->xmlsafe( $this->recodeText( $rev->text ) );
- $minor = ( $rev->minor ? '<minor/>' : '' );
- list( /* $userid */ , $username ) = $this->checkUserCache( $rev->username, $rev->host );
- $username = $this->xmlsafe( $this->recodeText( $username ) );
- $timestamp = $this->xmlsafe( $this->timestamp2ISO8601( $rev->ts ) );
- $comment = $this->xmlsafe( $this->recodeText( $rev->summary ) );
-
- $xml .= <<<XML
- <revision>
- <timestamp>$timestamp</timestamp>
- <contributor><username>$username</username></contributor>
- $minor
- <comment>$comment</comment>
- <text>$text</text>
- </revision>
-
-XML;
- }
- $xml .= "</page>\n\n";
- return $xml;
- }
-
- private function recodeText( $string ) {
- # For currently latin-1 wikis
- $string = str_replace( "\r\n", "\n", $string );
- $string = @iconv( $this->encoding, "UTF-8", $string );
- $string = $this->mungeToUtf8( $string ); # Any old &#1234; stuff
- return $string;
- }
-
- /**
- * @todo FIXME: Don't use /e
- */
- private function mungeToUtf8( $string ) {
- $string = preg_replace ( '/&#([0-9]+);/e', 'wfUtf8Sequence($1)', $string );
- $string = preg_replace ( '/&#x([0-9a-f]+);/ie', 'wfUtf8Sequence(0x$1)', $string );
- # Should also do named entities here
- return $string;
- }
-
- private function timestamp2ISO8601( $ts ) {
- # 2003-08-05T18:30:02Z
- return gmdate( 'Y-m-d', $ts ) . 'T' . gmdate( 'H:i:s', $ts ) . 'Z';
- }
-
- /**
- * The page may contain old data which has not been properly normalized.
- * Invalid UTF-8 sequences or forbidden control characters will make our
- * XML output invalid, so be sure to strip them out.
- * @param String $string Text to clean up
- * @return String
- */
- private function xmlsafe( $string ) {
- $string = UtfNormal::cleanUp( $string );
- $string = htmlspecialchars( $string );
- return $string;
- }
-
- private function xmlCommentSafe( $text ) {
- return str_replace( '--', '\\-\\-', $this->xmlsafe( $this->recodeText( $text ) ) );
- }
-
- private function array2object( $arr ) {
- $o = (object)0;
- foreach ( $arr as $x => $y ) {
- $o->$x = $y;
- }
- return $o;
- }
-
- /**
- * Make CamelCase and /Talk links work
- */
- private function mungeFormat( $text ) {
- $this->nowiki = array();
- $staged = preg_replace_callback(
- '/(<nowiki>.*?<\\/nowiki>|(?:http|https|ftp):\\S+|\[\[[^]\\n]+]])/s',
- array( $this, 'nowikiPlaceholder' ), $text );
-
- # This is probably not 100% correct, I'm just
- # glancing at the UseModWiki code.
- $upper = "[A-Z]";
- $lower = "[a-z_0-9]";
- $any = "[A-Za-z_0-9]";
- $camel = "(?:$upper+$lower+$upper+$any*)";
- $subpage = "(?:\\/$any+)";
- $substart = "(?:\\/$upper$any*)";
-
- $munged = preg_replace( "/(?!\\[\\[)($camel$subpage*|$substart$subpage*)\\b(?!\\]\\]|>)/",
- '[[$1]]', $staged );
-
- $final = preg_replace( '/' . preg_quote( $this->placeholder() ) . '/s',
- array( $this, 'nowikiShift' ), $munged );
- return $final;
- }
-
- private function placeholder( $x = null ) {
- return '\xffplaceholder\xff';
- }
-
- public function nowikiPlaceholder( $matches ) {
- $this->nowiki[] = $matches[1];
- return $this->placeholder();
- }
-
- public function nowikiShift() {
- return array_shift( $this->nowiki );
- }
-}
-
-function wfUtf8Sequence( $codepoint ) {
- if ( $codepoint < 0x80 ) {
- return chr( $codepoint );
- }
- if ( $codepoint < 0x800 ) {
- return chr( $codepoint >> 6 & 0x3f | 0xc0 ) .
- chr( $codepoint & 0x3f | 0x80 );
- }
- if ( $codepoint < 0x10000 ) {
- return chr( $codepoint >> 12 & 0x0f | 0xe0 ) .
- chr( $codepoint >> 6 & 0x3f | 0x80 ) .
- chr( $codepoint & 0x3f | 0x80 );
- }
- if ( $codepoint < 0x100000 ) {
- return chr( $codepoint >> 18 & 0x07 | 0xf0 ) . # Double-check this
- chr( $codepoint >> 12 & 0x3f | 0x80 ) .
- chr( $codepoint >> 6 & 0x3f | 0x80 ) .
- chr( $codepoint & 0x3f | 0x80 );
- }
- # Doesn't yet handle outside the BMP
- return "&#$codepoint;";
-}
-
-$maintClass = 'ImportUseModWiki';
-require_once( RUN_MAINTENANCE_IF_MAIN );
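
The removed importUseModWiki.php carried its own single-code-point UTF-8 encoder, wfUtf8Sequence(). As a worked example of its branch logic (the numbers are checked by hand, not taken from the file): U+20AC, the euro sign, is below 0x10000 and therefore takes the three-byte branch:

	// wfUtf8Sequence( 0x20AC ):
	//   chr( (0x20AC >> 12 & 0x0f) | 0xe0 )  =>  0xE2
	//   chr( (0x20AC >>  6 & 0x3f) | 0x80 )  =>  0x82
	//   chr( (0x20AC        & 0x3f) | 0x80 )  =>  0xAC
	// i.e. "\xE2\x82\xAC", the UTF-8 encoding of the euro sign.

The mungeToUtf8() helper just above it still relied on the /e modifier (flagged with a @todo FIXME); deleting the script also retires that deprecated usage.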
diff --git a/maintenance/importUseModWikipedia.php b/maintenance/importUseModWikipedia.php
deleted file mode 100644
index c4b8112f..00000000
--- a/maintenance/importUseModWikipedia.php
+++ /dev/null
@@ -1,892 +0,0 @@
-<?php
-
-/**
- * A script to read a dump of the English Wikipedia from the UseModWiki period, and to
- * generate an XML dump in MediaWiki format.
- *
- * Some relevant code was ported from UseModWiki 0.92.
- *
- */
-
-require_once( dirname( __FILE__ ) . '/Maintenance.php' );
-require_once( dirname( __FILE__ ) .'/../includes/normal/UtfNormalUtil.php' );
-
-
-class ImportUseModWikipedia extends Maintenance {
- var