[PATCH 01/18] Follow perlcritic's recommendations - level 5 and 4

Fix the warnings reported by perlcritic at severity levels 5 and 4. They
correspond to the following cases, illustrated below:
- always end a subroutine with an explicit return statement
- don't use the constant pragma; use the Readonly module instead
- use a block as the first argument of map, and other syntax details.
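
For instance, the pattern of changes looks like this (an illustrative
sketch, not code taken verbatim from the patch):

    use strict;
    use warnings;
    use Readonly;

    # Before: "use constant EXAMPLE => 'value';" was flagged by
    # perlcritic; Readonly gives a lexically scoped, sigil-carrying
    # constant instead.
    Readonly my $EXAMPLE => 'value';

    sub list_titles {
            # Before: "map $_->{title}, @_" -- perlcritic wants a
            # block, not an expression, as the first argument of map.
            my @titles = map { $_->{title} } @_;
            print "@titles\n";
            # Before: the subroutine ended without a return statement.
            return;
    }

    list_titles({ title => 'Main Page' });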

Signed-off-by: Célestin Matte <celestin.matte@xxxxxxxxxx>
Signed-off-by: Matthieu Moy <matthieu.moy@xxxxxxxxxxxxxxx>
---
 contrib/mw-to-git/git-remote-mediawiki.perl |   81 +++++++++++++++++----------
 1 file changed, 51 insertions(+), 30 deletions(-)

diff --git a/contrib/mw-to-git/git-remote-mediawiki.perl b/contrib/mw-to-git/git-remote-mediawiki.perl
index 410eae9..83cf292 100755
--- a/contrib/mw-to-git/git-remote-mediawiki.perl
+++ b/contrib/mw-to-git/git-remote-mediawiki.perl
@@ -15,32 +15,32 @@ use strict;
 use MediaWiki::API;
 use Git;
 use DateTime::Format::ISO8601;
+use warnings;
 
 # By default, use UTF-8 to communicate with Git and the user
-binmode STDERR, ":utf8";
-binmode STDOUT, ":utf8";
+binmode STDERR, ":encoding(UTF-8)";
+binmode STDOUT, ":encoding(UTF-8)";
 
 use URI::Escape;
 use IPC::Open2;
-
-use warnings;
+use Readonly;
 
 # Mediawiki filenames can contain forward slashes. This variable decides by which pattern they should be replaced
-use constant SLASH_REPLACEMENT => "%2F";
+Readonly my $SLASH_REPLACEMENT => "%2F";
 
 # It's not always possible to delete pages (may require some
 # privileges). Deleted pages are replaced with this content.
-use constant DELETED_CONTENT => "[[Category:Deleted]]\n";
+Readonly my $DELETED_CONTENT => "[[Category:Deleted]]\n";
 
 # It's not possible to create empty pages. New empty files in Git are
 # sent with this content instead.
-use constant EMPTY_CONTENT => "<!-- empty page -->\n";
+Readonly my $EMPTY_CONTENT => "<!-- empty page -->\n";
 
 # used to reflect file creation or deletion in diff.
-use constant NULL_SHA1 => "0000000000000000000000000000000000000000";
+Readonly my $NULL_SHA1 => "0000000000000000000000000000000000000000";
 
 # Used on Git's side to reflect empty edit messages on the wiki
-use constant EMPTY_MESSAGE => '*Empty MediaWiki Message*';
+Readonly my $EMPTY_MESSAGE => '*Empty MediaWiki Message*';
 
 if (@ARGV != 2) {
 	exit_error_usage();
@@ -96,6 +96,9 @@ unless ($fetch_strategy) {
 	$fetch_strategy = "by_page";
 }
 
+# Remember the timestamp corresponding to a revision id.
+my %basetimestamps;
+
 # Dumb push: don't update notes and mediawiki ref to reflect the last push.
 #
 # Configurable with mediawiki.dumbPush, or per-remote with
@@ -198,12 +201,14 @@ sub mw_connect_maybe {
 			exit 1;
 		}
 	}
+	return;
 }
 
 ## Functions for listing pages on the remote wiki
 sub get_mw_tracked_pages {
 	my $pages = shift;
 	get_mw_page_list(\@tracked_pages, $pages);
+	return;
 }
 
 sub get_mw_page_list {
@@ -219,6 +224,7 @@ sub get_mw_page_list {
 		get_mw_first_pages(\@slice, $pages);
 		@some_pages = @some_pages[51..$#some_pages];
 	}
+	return;
 }
 
 sub get_mw_tracked_categories {
@@ -241,6 +247,7 @@ sub get_mw_tracked_categories {
 			$pages->{$page->{title}} = $page;
 		}
 	}
+	return;
 }
 
 sub get_mw_all_pages {
@@ -260,6 +267,7 @@ sub get_mw_all_pages {
 	foreach my $page (@{$mw_pages}) {
 		$pages->{$page->{title}} = $page;
 	}
+	return;
 }
 
 # queries the wiki for a set of pages. Meant to be used within a loop
@@ -290,6 +298,7 @@ sub get_mw_first_pages {
 			$pages->{$page->{title}} = $page;
 		}
 	}
+	return;
 }
 
 # Get the list of pages to be fetched according to configuration.
@@ -358,11 +367,12 @@ sub get_all_mediafiles {
 	foreach my $page (@{$mw_pages}) {
 		$pages->{$page->{title}} = $page;
 	}
+	return;
 }
 
 sub get_linked_mediafiles {
 	my $pages = shift;
-	my @titles = map $_->{title}, values(%{$pages});
+	my @titles = map { $_->{title} } values(%{$pages});
 
 	# The query is split in small batches because of the MW API limit of
 	# the number of links to be returned (500 links max).
@@ -390,11 +400,13 @@ sub get_linked_mediafiles {
 		while (my ($id, $page) = each(%{$result->{query}->{pages}})) {
 			my @media_titles;
 			if (defined($page->{links})) {
-				my @link_titles = map $_->{title}, @{$page->{links}};
+				my @link_titles
+				    = map { $_->{title} } @{$page->{links}};
 				push(@media_titles, @link_titles);
 			}
 			if (defined($page->{images})) {
-				my @image_titles = map $_->{title}, @{$page->{images}};
+				my @image_titles
+				    = map { $_->{title} } @{$page->{images}};
 				push(@media_titles, @image_titles);
 			}
 			if (@media_titles) {
@@ -404,6 +416,7 @@ sub get_linked_mediafiles {
 
 		@titles = @titles[($batch+1)..$#titles];
 	}
+	return;
 }
 
 sub get_mw_mediafile_for_page_revision {
@@ -473,9 +486,6 @@ sub get_last_local_revision {
 	return $lastrevision_number;
 }
 
-# Remember the timestamp corresponding to a revision id.
-my %basetimestamps;
-
 # Get the last remote revision without taking in account which pages are
 # tracked or not. This function makes a single request to the wiki thus
 # avoid a loop onto all tracked pages. This is useful for the fetch-by-rev
@@ -538,7 +548,7 @@ sub mediawiki_clean {
 	$string =~ s/\s+$//;
 	if ($string eq "" && $page_created) {
 		# Creating empty pages is forbidden.
-		$string = EMPTY_CONTENT;
+		$string = $EMPTY_CONTENT;
 	}
 	return $string."\n";
 }
@@ -546,7 +556,7 @@ sub mediawiki_clean {
 # Filter applied on MediaWiki data before adding them to Git
 sub mediawiki_smudge {
 	my $string = shift;
-	if ($string eq EMPTY_CONTENT) {
+	if ($string eq $EMPTY_CONTENT) {
 		$string = "";
 	}
 	# This \n is important. This is due to mediawiki's way to handle end of files.
@@ -555,7 +565,7 @@ sub mediawiki_smudge {
 
 sub mediawiki_clean_filename {
 	my $filename = shift;
-	$filename =~ s/@{[SLASH_REPLACEMENT]}/\//g;
+	$filename =~ s{$SLASH_REPLACEMENT}{/}g;
 	# [, ], |, {, and } are forbidden by MediaWiki, even URL-encoded.
 	# Do a variant of URL-encoding, i.e. looks like URL-encoding,
 	# but with _ added to prevent MediaWiki from thinking this is
@@ -569,7 +579,7 @@ sub mediawiki_clean_filename {
 
 sub mediawiki_smudge_filename {
 	my $filename = shift;
-	$filename =~ s/\//@{[SLASH_REPLACEMENT]}/g;
+	$filename =~ s{/}{$SLASH_REPLACEMENT}g;
 	$filename =~ s/ /_/g;
 	# Decode forbidden characters encoded in mediawiki_clean_filename
 	$filename =~ s/_%_([0-9a-fA-F][0-9a-fA-F])/sprintf("%c", hex($1))/ge;
@@ -579,6 +589,7 @@ sub mediawiki_smudge_filename {
 sub literal_data {
 	my ($content) = @_;
 	print STDOUT "data ", bytes::length($content), "\n", $content;
+	return;
 }
 
 sub literal_data_raw {
@@ -588,7 +599,8 @@ sub literal_data_raw {
 	utf8::downgrade($content);
 	binmode STDOUT, ":raw";
 	print STDOUT "data ", bytes::length($content), "\n", $content;
-	binmode STDOUT, ":utf8";
+	binmode STDOUT, ":encoding(UTF-8)";
+	return;
 }
 
 sub mw_capabilities {
@@ -600,6 +612,7 @@ sub mw_capabilities {
 	print STDOUT "list\n";
 	print STDOUT "push\n";
 	print STDOUT "\n";
+	return;
 }
 
 sub mw_list {
@@ -608,11 +621,13 @@ sub mw_list {
 	print STDOUT "? refs/heads/master\n";
 	print STDOUT "\@refs/heads/master HEAD\n";
 	print STDOUT "\n";
+	return;
 }
 
 sub mw_option {
 	print STDERR "remote-helper command 'option $_[0]' not yet implemented\n";
 	print STDOUT "unsupported\n";
+	return;
 }
 
 sub fetch_mw_revisions_for_page {
@@ -707,7 +722,7 @@ sub import_file_revision {
 	if (!$full_import && $n == 1) {
 		print STDOUT "from refs/mediawiki/$remotename/master^0\n";
 	}
-	if ($content ne DELETED_CONTENT) {
+	if ($content ne $DELETED_CONTENT) {
 		print STDOUT "M 644 inline " .
 		    fe_escape_path($title . ".mw") . "\n";
 		literal_data($content);
@@ -734,6 +749,7 @@ sub import_file_revision {
 	print STDOUT "N inline :$n\n";
 	literal_data("mediawiki_revision: " . $commit{mw_revision});
 	print STDOUT "\n\n";
+	return;
 }
 
 # parse a sequence of
@@ -754,6 +770,7 @@ sub get_more_refs {
 			die("Invalid command in a '$cmd' batch: ". $_);
 		}
 	}
+	return;
 }
 
 sub mw_import {
@@ -763,6 +780,7 @@ sub mw_import {
 		mw_import_ref($ref);
 	}
 	print STDOUT "done\n";
+	return;
 }
 
 sub mw_import_ref {
@@ -806,6 +824,7 @@ sub mw_import_ref {
 		# thrown saying that HEAD is referring to unknown object 0000000000000000000
 		# and the clone fails.
 	}
+	return;
 }
 
 sub mw_import_ref_by_pages {
@@ -817,7 +836,7 @@ sub mw_import_ref_by_pages {
 	my ($n, @revisions) = fetch_mw_revisions(\@pages, $fetch_from);
 
 	@revisions = sort {$a->{revid} <=> $b->{revid}} @revisions;
-	my @revision_ids = map $_->{revid}, @revisions;
+	my @revision_ids = map { $_->{revid} } @revisions;
 
 	return mw_import_revids($fetch_from, \@revision_ids, \%pages_hash);
 }
@@ -888,7 +907,7 @@ sub mw_import_revids {
 
 		my %commit;
 		$commit{author} = $rev->{user} || 'Anonymous';
-		$commit{comment} = $rev->{comment} || EMPTY_MESSAGE;
+		$commit{comment} = $rev->{comment} || $EMPTY_MESSAGE;
 		$commit{title} = mediawiki_smudge_filename($page_title);
 		$commit{mw_revision} = $rev->{revid};
 		$commit{content} = mediawiki_smudge($rev->{'*'});
@@ -1006,14 +1025,14 @@ sub mw_push_file {
 	my $oldrevid = shift;
 	my $newrevid;
 
-	if ($summary eq EMPTY_MESSAGE) {
+	if ($summary eq $EMPTY_MESSAGE) {
 		$summary = '';
 	}
 
 	my $new_sha1 = $diff_info_split[3];
 	my $old_sha1 = $diff_info_split[2];
-	my $page_created = ($old_sha1 eq NULL_SHA1);
-	my $page_deleted = ($new_sha1 eq NULL_SHA1);
+	my $page_created = ($old_sha1 eq $NULL_SHA1);
+	my $page_deleted = ($new_sha1 eq $NULL_SHA1);
 	$complete_file_name = mediawiki_clean_filename($complete_file_name);
 
 	my ($title, $extension) = $complete_file_name =~ /^(.*)\.([^\.]*)$/;
@@ -1032,7 +1051,7 @@ sub mw_push_file {
 			# special privileges. A common
 			# convention is to replace the page
 			# with this content instead:
-			$file_content = DELETED_CONTENT;
+			$file_content = $DELETED_CONTENT;
 		} else {
 			$file_content = run_git("cat-file blob $new_sha1");
 		}
@@ -1112,6 +1131,7 @@ sub mw_push {
 		print STDERR "  git pull --rebase\n";
 		print STDERR "\n";
 	}
+	return;
 }
 
 sub mw_push_revision {
@@ -1229,8 +1249,8 @@ sub get_allowed_file_extensions {
 		siprop => 'fileextensions'
 		};
 	my $result = $mediawiki->api($query);
-	my @file_extensions= map $_->{ext},@{$result->{query}->{fileextensions}};
-	my %hashFile = map {$_ => 1}@file_extensions;
+	my @file_extensions = map { $_->{ext}} @{$result->{query}->{fileextensions}};
+	my %hashFile = map { $_ => 1 } @file_extensions;
 
 	return %hashFile;
 }
@@ -1314,7 +1334,8 @@ sub get_mw_namespace_id {
 }
 
 sub get_mw_namespace_id_for_page {
-	if (my ($namespace) = $_[0] =~ /^([^:]*):/) {
+	my $namespace = shift;
+	if ($namespace =~ /^([^:]*):/) {
 		return get_mw_namespace_id($namespace);
 	} else {
 		return;
-- 
1.7.9.5
