*/
use Wikimedia\Rdbms\IDatabase;
+use MediaWiki\Logger\LoggerFactory;
use MediaWiki\MediaWikiServices;
use Wikimedia\ScopedCallback;
// Make sure all links update threads see each other's changes.
// This handles the case when updates have to be batched into several COMMITs.
$scopedLock = self::acquirePageLock( $this->getDB(), $this->mId );
+ if ( !$scopedLock ) {
+ throw new RuntimeException( "Could not acquire lock for page ID '{$this->mId}'." );
+ }
}
// Avoid PHP 7.1 warning from passing $this by reference
* @param IDatabase $dbw
* @param int $pageId
* @param string $why One of (job, atomicity)
- * @return ScopedCallback
- * @throws RuntimeException
+ * @return ScopedCallback|null
* @since 1.27
*/
public static function acquirePageLock( IDatabase $dbw, $pageId, $why = 'atomicity' ) {
$key = "LinksUpdate:$why:pageid:$pageId";
$scopedLock = $dbw->getScopedLockAndFlush( $key, __METHOD__, 15 );
if ( !$scopedLock ) {
- throw new RuntimeException( "Could not acquire lock '$key'." );
+ $logger = LoggerFactory::getInstance( 'SecondaryDataUpdate' );
+ $logger->info( "Could not acquire lock '{key}' for page ID '{page_id}'.", [
+ 'key' => $key,
+ 'page_id' => $pageId,
+ ] );
+ return null;
}
return $scopedLock;
/**
* @param array $images
*/
- private function invalidateImageDescriptions( $images ) {
+ private function invalidateImageDescriptions( array $images ) {
PurgeJobUtils::invalidatePages( $this->getDB(), NS_FILE, array_keys( $images ) );
}
$arr = [];
$diffs = array_diff_key( $this->mExternals, $existing );
foreach ( $diffs as $url => $dummy ) {
- foreach ( wfMakeUrlIndexes( $url ) as $index ) {
+ foreach ( LinkFilter::makeIndexes( $url ) as $index ) {
$arr[] = [
'el_from' => $this->mId,
'el_to' => $url,