summaryrefslogtreecommitdiff
path: root/includes/MediaWiki.php
diff options
context:
space:
mode:
Diffstat (limited to 'includes/MediaWiki.php')
-rw-r--r--includes/MediaWiki.php156
1 files changed, 123 insertions, 33 deletions
diff --git a/includes/MediaWiki.php b/includes/MediaWiki.php
index 39551441..2da2f6ce 100644
--- a/includes/MediaWiki.php
+++ b/includes/MediaWiki.php
@@ -56,6 +56,7 @@ class MediaWiki {
/**
* Parse the request to get the Title object
*
+ * @throws MalformedTitleException If a title has been provided by the user, but is invalid.
* @return Title Title object to be $wgTitle
*/
private function parseTitle() {
@@ -115,7 +116,10 @@ class MediaWiki {
}
if ( $ret === null || ( $ret->getDBkey() == '' && !$ret->isExternal() ) ) {
- $ret = SpecialPage::getTitleFor( 'Badtitle' );
+ // If we get here, we definitely don't have a valid title; throw an exception.
+ // Try to get detailed invalid title exception first, fall back to MalformedTitleException.
+ Title::newFromTextThrow( $title );
+ throw new MalformedTitleException( 'badtitletext', $title );
}
return $ret;
@@ -127,7 +131,11 @@ class MediaWiki {
*/
public function getTitle() {
if ( !$this->context->hasTitle() ) {
- $this->context->setTitle( $this->parseTitle() );
+ try {
+ $this->context->setTitle( $this->parseTitle() );
+ } catch ( MalformedTitleException $ex ) {
+ $this->context->setTitle( SpecialPage::getTitleFor( 'Badtitle' ) );
+ }
}
return $this->context->getTitle();
}
@@ -177,6 +185,11 @@ class MediaWiki {
|| $title->isSpecial( 'Badtitle' )
) {
$this->context->setTitle( SpecialPage::getTitleFor( 'Badtitle' ) );
+ try {
+ $this->parseTitle();
+ } catch ( MalformedTitleException $ex ) {
+ throw new BadTitleError( $ex );
+ }
throw new BadTitleError();
}
@@ -222,6 +235,11 @@ class MediaWiki {
$output->redirect( $url, 301 );
} else {
$this->context->setTitle( SpecialPage::getTitleFor( 'Badtitle' ) );
+ try {
+ $this->parseTitle();
+ } catch ( MalformedTitleException $ex ) {
+ throw new BadTitleError( $ex );
+ }
throw new BadTitleError();
}
// Handle any other redirects.
@@ -293,6 +311,8 @@ class MediaWiki {
* - Normalise empty title:
* /wiki/ -> /wiki/Main
* /w/index.php?title= -> /wiki/Main
+ * - Normalise non-standard title urls:
+ * /w/index.php?title=Foo_Bar -> /wiki/Foo_Bar
* - Don't redirect anything with query parameters other than 'title' or 'action=view'.
*
* @param Title $title
@@ -305,8 +325,6 @@ class MediaWiki {
if ( $request->getVal( 'action', 'view' ) != 'view'
|| $request->wasPosted()
- || ( $request->getVal( 'title' ) !== null
- && $title->getPrefixedDBkey() == $request->getVal( 'title' ) )
|| count( $request->getValueNames( array( 'action', 'title' ) ) )
|| !Hooks::run( 'TestCanonicalRedirect', array( $request, $title, $output ) )
) {
@@ -321,7 +339,19 @@ class MediaWiki {
}
// Redirect to canonical url, make it a 301 to allow caching
$targetUrl = wfExpandUrl( $title->getFullURL(), PROTO_CURRENT );
- if ( $targetUrl == $request->getFullRequestURL() ) {
+
+ if ( $targetUrl != $request->getFullRequestURL() ) {
+ $output->setSquidMaxage( 1200 );
+ $output->redirect( $targetUrl, '301' );
+ return true;
+ }
+
+ // If there is no title, or the title is in a non-standard encoding, we demand
+ // a redirect. If cgi somehow changed the 'title' query to be non-standard while
+ // the url is standard, the server is misconfigured.
+ if ( $request->getVal( 'title' ) === null
+ || $title->getPrefixedDBkey() != $request->getVal( 'title' )
+ ) {
$message = "Redirect loop detected!\n\n" .
"This means the wiki got confused about what page was " .
"requested; this sometimes happens when moving a wiki " .
@@ -343,9 +373,7 @@ class MediaWiki {
}
throw new HttpError( 500, $message );
}
- $output->setSquidMaxage( 1200 );
- $output->redirect( $targetUrl, '301' );
- return true;
+ return false;
}
/**
@@ -369,9 +397,8 @@ class MediaWiki {
$this->context->setWikiPage( $article->getPage() );
}
- // NS_MEDIAWIKI has no redirects.
- // It is also used for CSS/JS, so performance matters here...
- if ( $title->getNamespace() == NS_MEDIAWIKI ) {
+ // Skip some unnecessary code if the content model doesn't support redirects
+ if ( !ContentHandler::getForTitle( $title )->supportsRedirects() ) {
return $article;
}
@@ -472,8 +499,7 @@ class MediaWiki {
}
/**
- * Run the current MediaWiki instance
- * index.php just calls this
+ * Run the current MediaWiki instance; index.php just calls this
*/
public function run() {
try {
@@ -484,16 +510,71 @@ class MediaWiki {
// Bug 62091: while exceptions are convenient to bubble up GUI errors,
// they are not internal application faults. As with normal requests, this
// should commit, print the output, do deferred updates, jobs, and profiling.
- wfGetLBFactory()->commitMasterChanges();
+ $this->doPreOutputCommit();
$e->report(); // display the GUI error
}
+ } catch ( Exception $e ) {
+ MWExceptionHandler::handleException( $e );
+ }
+
+ $this->doPostOutputShutdown( 'normal' );
+ }
+
+ /**
+ * This function commits all DB changes as needed before
+ * the user can receive a response (in case commit fails)
+ *
+ * @since 1.26
+ */
+ public function doPreOutputCommit() {
+ // Either all DBs should commit or none
+ ignore_user_abort( true );
+
+ // Commit all changes and record ChronologyProtector positions
+ $factory = wfGetLBFactory();
+ $factory->commitMasterChanges();
+ $factory->shutdown();
+
+ wfDebug( __METHOD__ . ' completed; all transactions committed' );
+ }
+
+ /**
+ * This function does work that can be done *after* the
+ * user gets the HTTP response so they don't block on it
+ *
+ * This manages deferred updates, job insertion,
+ * final commit, and the logging of profiling data
+ *
+ * @param string $mode Use 'fast' to always skip job running
+ * @since 1.26
+ */
+ public function doPostOutputShutdown( $mode = 'normal' ) {
+ // Show visible profiling data if enabled (which cannot be post-send)
+ Profiler::instance()->logDataPageOutputOnly();
+
+ $that = $this;
+ $callback = function () use ( $that, $mode ) {
+ try {
+ $that->restInPeace( $mode );
+ } catch ( Exception $e ) {
+ MWExceptionHandler::handleException( $e );
+ }
+ };
+
+ // Defer everything else...
+ if ( function_exists( 'register_postsend_function' ) ) {
+ // https://github.com/facebook/hhvm/issues/1230
+ register_postsend_function( $callback );
+ } else {
if ( function_exists( 'fastcgi_finish_request' ) ) {
fastcgi_finish_request();
+ } else {
+ // Either all DB and deferred updates should happen or none.
+ // The latter should not be cancelled due to client disconnect.
+ ignore_user_abort( true );
}
- $this->triggerJobs();
- $this->restInPeace();
- } catch ( Exception $e ) {
- MWExceptionHandler::handleException( $e );
+
+ $callback();
}
}
@@ -508,7 +589,7 @@ class MediaWiki {
list( $host, $lag ) = wfGetLB()->getMaxLag();
if ( $lag > $maxLag ) {
$resp = $this->context->getRequest()->response();
- $resp->header( 'HTTP/1.1 503 Service Unavailable' );
+ $resp->statusHeader( 503 );
$resp->header( 'Retry-After: ' . max( intval( $maxLag ), 5 ) );
$resp->header( 'X-Database-Lag: ' . intval( $lag ) );
$resp->header( 'Content-Type: text/plain' );
@@ -525,7 +606,7 @@ class MediaWiki {
}
private function main() {
- global $wgTitle;
+ global $wgTitle, $wgTrxProfilerLimits;
$request = $this->context->getRequest();
@@ -557,10 +638,9 @@ class MediaWiki {
if ( !$request->wasPosted()
&& in_array( $action, array( 'view', 'edit', 'history' ) )
) {
- $trxProfiler->setExpectation( 'masterConns', 0, __METHOD__ );
- $trxProfiler->setExpectation( 'writes', 0, __METHOD__ );
+ $trxProfiler->setExpectations( $wgTrxProfilerLimits['GET'], __METHOD__ );
} else {
- $trxProfiler->setExpectation( 'maxAffected', 500, __METHOD__ );
+ $trxProfiler->setExpectations( $wgTrxProfilerLimits['POST'], __METHOD__ );
}
// If the user has forceHTTPS set to true, or if the user
@@ -633,22 +713,23 @@ class MediaWiki {
// Actually do the work of the request and build up any output
$this->performRequest();
- // Either all DB and deferred updates should happen or none.
- // The later should not be cancelled due to client disconnect.
- ignore_user_abort( true );
// Now commit any transactions, so that unreported errors after
- // output() don't roll back the whole DB transaction
- wfGetLBFactory()->commitMasterChanges();
+ // output() don't roll back the whole DB transaction and so that
+ // we avoid having both success and error text in the response
+ $this->doPreOutputCommit();
// Output everything!
$this->context->getOutput()->output();
-
}
/**
* Ends this task peacefully
+ * @param string $mode Use 'fast' to always skip job running
*/
- public function restInPeace() {
+ public function restInPeace( $mode = 'fast' ) {
+ // Assure deferred updates are not in the main transaction
+ wfGetLBFactory()->commitMasterChanges();
+
// Ignore things like master queries/connections on GET requests
// as long as they are in deferred updates (which catch errors).
Profiler::instance()->getTransactionProfiler()->resetExpectations();
@@ -656,6 +737,15 @@ class MediaWiki {
// Do any deferred jobs
DeferredUpdates::doUpdates( 'commit' );
+ // Make sure any lazy jobs are pushed
+ JobQueueGroup::pushLazyJobs();
+
+ // Now that everything specific to this request is done,
+ // try to occasionally run jobs (if enabled) from the queues
+ if ( $mode === 'normal' ) {
+ $this->triggerJobs();
+ }
+
// Log profiling data, e.g. in the database or UDP
wfLogProfilingData();
@@ -672,7 +762,7 @@ class MediaWiki {
* to run a specified number of jobs. This registers a callback to cleanup
* the socket once it's done.
*/
- protected function triggerJobs() {
+ public function triggerJobs() {
$jobRunRate = $this->config->get( 'JobRunRate' );
if ( $jobRunRate <= 0 || wfReadOnly() ) {
return;
@@ -715,7 +805,7 @@ class MediaWiki {
$errno = $errstr = null;
$info = wfParseUrl( $this->config->get( 'Server' ) );
- wfSuppressWarnings();
+ MediaWiki\suppressWarnings();
$sock = fsockopen(
$info['host'],
isset( $info['port'] ) ? $info['port'] : 80,
@@ -725,7 +815,7 @@ class MediaWiki {
// is a problem elsewhere.
0.1
);
- wfRestoreWarnings();
+ MediaWiki\restoreWarnings();
if ( !$sock ) {
$runJobsLogger->error( "Failed to start cron API (socket error $errno): $errstr" );
// Fall back to running the job here while the user waits