r8878 - in /scripts/qa/DebianQA: BTS.pm Svn.pm Watch.pm
tincho-guest at users.alioth.debian.org
Mon Nov 5 19:11:20 UTC 2007
Author: tincho-guest
Date: Mon Nov 5 19:11:20 2007
New Revision: 8878
URL: http://svn.debian.org/wsvn/?sc=1&rev=8878
Log:
Watch seems to be working now. Plus a couple of fixes in Svn and BTS
Modified:
scripts/qa/DebianQA/BTS.pm
scripts/qa/DebianQA/Svn.pm
scripts/qa/DebianQA/Watch.pm
Modified: scripts/qa/DebianQA/BTS.pm
URL: http://svn.debian.org/wsvn/scripts/qa/DebianQA/BTS.pm?rev=8878&op=diff
==============================================================================
--- scripts/qa/DebianQA/BTS.pm (original)
+++ scripts/qa/DebianQA/BTS.pm Mon Nov 5 19:11:20 2007
@@ -82,7 +82,7 @@
info("Re-generating consolidated hash");
@pkglist = get_pkglist();
- @pkglist = keys(%bugs) unless(@pkglist);
+ @pkglist = keys(%$cdata) unless(@pkglist);
# TODO: Interesting fields:
# keywords/tags, severity, subject, forwarded, date
Modified: scripts/qa/DebianQA/Svn.pm
URL: http://svn.debian.org/wsvn/scripts/qa/DebianQA/Svn.pm?rev=8878&op=diff
==============================================================================
--- scripts/qa/DebianQA/Svn.pm (original)
+++ scripts/qa/DebianQA/Svn.pm Mon Nov 5 19:11:20 2007
@@ -147,8 +147,9 @@
# Retain lock
my $cdata = update_cache("svn", \%svn, "", $complete, 1);
- my @pkglist = grep({ $svn{$_}{pkgname} } keys(%$cdata));
- my %pkglist = map({ $svn{$_}{pkgname} => 1 } @pkglist);
+ my @pkglist = grep({ ref $cdata->{$_} and $cdata->{$_}{pkgname} }
+ keys(%$cdata));
+ my %pkglist = map({ $cdata->{$_}{pkgname} => 1 } @pkglist);
update_cache("consolidated", \%pkglist, "pkglist", 1, 1);
my %svn2;
@@ -166,11 +167,11 @@
return $cdata;
}
# Returns the hash of svn info. Doesn't download anything.
-sub svn_get() {
+sub svn_get {
return read_cache("svn", shift, 0);
}
# Returns the consolidated hash of svn info. Doesn't download anything.
-sub svn_get_consolidated() {
+sub svn_get_consolidated {
my $path = shift || "";
return read_cache("consolidated", "svn/$path", 0);
}
@@ -253,7 +254,7 @@
push @wspecs, {
line => $_,
unmangled_ver => $unmangled,
- md5 => md5_hex($_),
+ md5 => md5_hex($opts.$_),
opts => \%opts
};
}
Modified: scripts/qa/DebianQA/Watch.pm
URL: http://svn.debian.org/wsvn/scripts/qa/DebianQA/Watch.pm?rev=8878&op=diff
==============================================================================
--- scripts/qa/DebianQA/Watch.pm (original)
+++ scripts/qa/DebianQA/Watch.pm Mon Nov 5 19:11:20 2007
@@ -15,35 +15,85 @@
our @ISA = "Exporter";
our @EXPORT = qw(watch_download watch_get watch_get_consolidated);
-use Digest::MD5 "md5_hex";
use DebianQA::Cache;
use DebianQA::Common;
use DebianQA::Svn;
+use DebianQA::DebVersions;
+use IO::Scalar;
+use LWP::Simple;
+
+my $ttl = 6;
sub watch_download {
- my($force, @list) = @_;
+ my($force, @pkglist) = @_;
$force ||= 0;
- debug("watch_download($force, (@list))");
+ debug("watch_download($force, (@pkglist))");
- my $complete = ! @list;
- if($complete) {
- @list = get_pkglist();
+ my $complete;
+ if(not @pkglist) {
+ $complete = 1;
+ @pkglist = get_pkglist();
}
my $cdata = watch_get() unless($force);
my(%watch, %watch2);
- foreach my $pkg (@list) {
+ foreach my $pkg (@pkglist) {
debug("Retrieving svn info for $pkg");
- my $data = svn_get($pkg);
- if($data->{watch_error}) {
- $watch2{$pkg} = { error => $data->{watch_error} };
+ my $svndata = svn_get($pkg);
+ if($svndata->{watch_error}) {
+ $watch2{$pkg} = { error => $svndata->{watch_error} };
next;
}
- unless($data->{watch}) {
+ unless($svndata->{watch} and ref $svndata->{watch}
+ and ref $svndata->{watch} eq "ARRAY") {
$watch2{$pkg} = { error => "Missing" };
next;
}
-
+ my @wlines = @{$svndata->{watch}};
+ unless(@wlines) {
+ $watch2{$pkg} = { error => "Empty" };
+ next;
+ }
+ my @wresult;
+ foreach my $wline (@wlines) {
+ my $md5 = $wline->{md5};
+ next unless($md5);
+ if(not $force and $cdata->{$md5}
+ and $ttl * 60 > time - find_stamp($cdata, $md5)) {
+ $watch{$md5} = $cdata->{$md5};
+ } else {
+ my ($watcherr, $ups_ver, $ups_mangled) = uscan($wline);
+ $watch{$md5} = {
+ watch_error => $watcherr,
+ upstream_version => $ups_ver,
+ upstream_mangled => $ups_mangled
+ };
+ }
+ my $diff = deb_compare($wline->{unmangled_ver},
+ $watch{$md5}{upstream_mangled}) if(
+ $watch{$md5}{upstream_mangled});
+ push @wresult, [ $diff, @{$watch{$md5}}{
+ qw/watch_error upstream_version upstream_mangled/ } ];
+ }
+ my @noerror = sort({ deb_compare($a->[3], $b->[3]) }
+ grep({ not $_->[1] } @wresult));
+ unless(@noerror) {
+ $watch2{$pkg} = { error => $wresult[0][1] };
+ next;
+ }
+ my @result;
+ if(@result = grep({ $_->[0] < 0 } @noerror)) {
+ $watch2{$pkg} = {
+ error => "NeedsUpgrade", upstream_ver => $result[-1][2] };
+ } elsif(@result = grep( { not $_->[0] } @noerror)) {
+ $watch2{$pkg} = { upstream_ver => $result[0][2] }; # OK
+ } else {
+ $watch2{$pkg} = { error => "MissingUpstream",
+ upstream_ver => $noerror[0][2] };
+ }
}
+ update_cache("watch", \%watch, "", $complete, 1);
+ update_cache("consolidated", \%watch2, "watch", $complete, 0);
+ unlock_cache("watch");
}
# Returns the hash of bugs. Doesn't download anything.
sub watch_get {
@@ -54,4 +104,67 @@
my $path = shift || "";
return read_cache("consolidated", "watch/$path", 0);
}
+sub uscan($) {
+ my %watch = %{$_[0]};
+ my $wline = $watch{line};
+ info("Processing watch line $wline");
+ unless($wline =~ m{^((?:http|ftp)://\S+)/}) {
+ warn("Invalid watch line: $wline");
+ return("Invalid");
+ }
+ my $url = $1;
+ $url =~ s{^http://sf.net/}{http://sf.net.projects/};
+
+ $wline =~ s{^http://sf\.net/(\S+)}{http://qa.debian.org/watch/sf.php/$1};
+ my @items = split(/\s+/, $wline);
+
+ my($dir, $filter);
+ # Either we have single URL/pattern
+ # or URL/pattern + extra
+ if($items[0] =~ /\(/) {
+ # Since '+' is greedy, the second capture has no slashes
+ ($dir, $filter) = $items[0] =~ m{^(.+)/(.+)$};
+ } elsif(@items >= 2 and $items[1] =~ /\(/) {
+ # or, we have a homepage plus pattern
+ # (plus optional other non-interesting stuff)
+ ($dir, $filter) = @items[0,1];
+ }
+ unless($dir and $filter) {
+ return("Invalid");
+ }
+ debug("uscan $dir $filter");
+ $url ||= $dir; # NO-OP
+ my $page = LWP::Simple::get($dir);
+ unless($page) {
+ error("Unable to get $dir");
+ return ("Network");
+ }
+ my $page_io = IO::Scalar->new(\$page);
+ my @vers;
+ while(<$page_io>) {
+ if( $dir =~ /^http/ ) {
+ while( s/<a [^>]*href="([^"]+)"[^>]*>//i ) {
+ my $href = $1;
+ push @vers, [$1, $1] if $href =~ $filter;
+ }
+ } else {
+ while( s/(?:^|\s+)$filter(?:\s+|$)// ) {
+ push @vers, [$1, $1];
+ }
+ }
+ }
+ foreach my $uver (@vers) {
+ next unless($watch{opts}{uversionmangle});
+ foreach(split(/;/, @{$watch{opts}{uversionmangle}})) {
+ eval "\$uver[1] =~ $_";
+ if($@) {
+ error("Invalid watchfile: $@");
+ return "Invalid";
+ }
+ }
+ }
+ @vers = sort({ deb_compare($a->[1], $b->[1]) } @vers);
+ return(undef, @{$vers[-1]});
+}
+1;
More information about the Pkg-perl-cvs-commits
mailing list