Файловый менеджер - Редактировать - /var/www/html/mediawiki-1.43.1/includes/specials/SpecialListDuplicatedFiles.php
Назад
<?php
/**
 * Copyright © 2013 Brian Wolff
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 * http://www.gnu.org/copyleft/gpl.html
 *
 * @file
 */

namespace MediaWiki\Specials;

use MediaWiki\Cache\LinkBatchFactory;
use MediaWiki\SpecialPage\QueryPage;
use MediaWiki\SpecialPage\SpecialPage;
use MediaWiki\Title\Title;
use Skin;
use stdClass;
use Wikimedia\Rdbms\IConnectionProvider;
use Wikimedia\Rdbms\IDatabase;
use Wikimedia\Rdbms\IResultWrapper;

/**
 * List all files whose current version duplicates the current version of
 * at least one other file.
 *
 * @ingroup SpecialPage
 * @author Brian Wolff
 */
class SpecialListDuplicatedFiles extends QueryPage {

	/**
	 * @param IConnectionProvider $dbProvider
	 * @param LinkBatchFactory $linkBatchFactory
	 */
	public function __construct(
		IConnectionProvider $dbProvider,
		LinkBatchFactory $linkBatchFactory
	) {
		parent::__construct( 'ListDuplicatedFiles' );
		$this->setDatabaseProvider( $dbProvider );
		$this->setLinkBatchFactory( $linkBatchFactory );
	}

	/** This page requires an expensive aggregate query. */
	public function isExpensive() {
		return true;
	}

	/** No feed is offered for this report. */
	public function isSyndicated() {
		return false;
	}

	/**
	 * Find all duplicates by grouping the image table on sha1.
	 *
	 * A cheaper (but less useful) variant would ignore how many duplicates
	 * each file has and self-join the image table instead. This version
	 * should be no more expensive than Special:MostLinked, which is handled
	 * fine by the cached special-page machinery.
	 *
	 * @return array
	 */
	public function getQueryInfo() {
		$info = [
			'tables' => [ 'image' ],
			'fields' => [
				'namespace' => NS_FILE,
				'title' => 'MIN(img_name)',
				'value' => 'count(*)',
			],
			'options' => [
				'GROUP BY' => 'img_sha1',
				'HAVING' => 'count(*) > 1',
			],
		];
		return $info;
	}

	/**
	 * Pre-fill the link cache from the result set.
	 *
	 * @param IDatabase $db
	 * @param IResultWrapper $res
	 */
	public function preprocessResults( $db, $res ) {
		$this->executeLBFromResultWrapper( $res );
	}

	/**
	 * Render one result row as a parsed message linking the file and a
	 * Special:FileDuplicateSearch lookup for it.
	 *
	 * @param Skin $skin
	 * @param stdClass $result Result row
	 * @return string
	 */
	public function formatResult( $skin, $result ) {
		// A future version might list the first 5 duplicates,
		// perhaps separated by an "↔".
		$fileTitle = Title::makeTitle( $result->namespace, $result->title );
		$searchPage = SpecialPage::getTitleFor(
			'FileDuplicateSearch',
			$fileTitle->getDBkey()
		);

		return $this->msg( 'listduplicatedfiles-entry' )
			->params( $fileTitle->getText() )
			->numParams( $result->value - 1 )
			->params( $searchPage->getPrefixedDBkey() )
			->parse();
	}

	public function execute( $par ) {
		$this->addHelpLink( 'Help:Managing_files' );
		parent::execute( $par );
	}

	protected function getGroupName() {
		return 'media';
	}
}

/** @deprecated class alias since 1.41 */
class_alias( SpecialListDuplicatedFiles::class, 'SpecialListDuplicatedFiles' );
| ver. 1.1 | |
.
| PHP 8.4.18 | Генерация страницы: 0 |
proxy
|
phpinfo
|
Настройка