A lot more efficient: linearized an O(N^2) loop in
check_safety. This could be optimized further by using a temporary hash. Anyway, on a directory with 5327 files, switching to the two-pass scan over file names had this effect:

    old: time mvwrap.old -n  ->  183.40s user, 0.11s system, 91% cpu, 3:20.83 total
    new: time mvwrap -n      ->   29.03s user, 0.15s system, 78% cpu, 36.946 total

Which still takes too long, but is a lot better. ;)
This commit is contained in:
parent
ad85d0295d
commit
26c1f324d4
1 changed files with 10 additions and 4 deletions
|
|
@ -170,11 +170,16 @@ sub check_unique (@target_list){
|
||||||
# Returns an array of unsafe line numbers.
#
# A line index $i is "unsafe" when its (chomped) source name $from->[$i]
# differs from its destination $to->[$i], yet that same source name also
# appears as a destination name at some OTHER index $j — i.e. performing
# the renames naively could clobber or depend on another entry in the batch.
#
# Arguments:
#   $from - array ref of original names, one per line (may carry "\n")
#   $to   - array ref of target names, parallel to @$from
#
# Returns: list of unique unsafe indices, in ascending order.
#
# NOTE(review): assumes @$from and @$to are the same length — TODO confirm
# at the call site; a shorter @$to would make $to->[$i] undef here.
sub check_safety {
    my ($from, $to) = @_;

    # Index every destination name once: name => [ indices where it occurs ].
    # This replaces the former O(N^2) (then O(changed * N)) rescan of @$to
    # with a single O(N) pass plus constant-time lookups — the "temporary
    # hash" optimization the earlier pass left as a TODO.
    my %to_index;
    for my $j (0 .. $#{$to}) {
        my $name = $to->[$j];
        chomp $name;
        push @{ $to_index{$name} }, $j;
    }

    my (@danger, %seen);
    for my $i (0 .. $#{$from}) {
        my $src = $from->[$i];
        chomp $src;
        my $dst = $to->[$i];
        chomp $dst;

        # Unchanged lines can never clash with anything.
        next if $src eq $dst;

        # Unsafe only if the source name is some *other* line's destination.
        my $hits = $to_index{$src} or next;
        if ( grep { $_ != $i } @{$hits} ) {
            # Each index is reported at most once, preserving the old
            # %seen/grep-uniq semantics of the original.
            push @danger, $i unless $seen{$i}++;
        }
    }

    return @danger;
}
||||||
Loading…
Add table
Add a link
Reference in a new issue