read 'from' headers too; preparing for filtering on posters

Ward Wouts 2006-09-12 20:06:54 +00:00
parent 04c478bb15
commit 7720fca967
2 changed files with 31 additions and 17 deletions
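
The diff below only threads the 'from' header through the Message struct, the XHDR fetch in get_articles, and the on-disk cache; the poster filter itself is not added here. A minimal sketch of how it might later hook in, mirroring the existing subject-based preselect() (the name preselect_from and the @fromselectpattern instance variable are assumptions, not code from this repository):

    # hypothetical follow-up, mirroring preselect(subject)
    def preselect_from(from)
      # no poster pattern configured: let everything through
      return true if @fromselectpattern.nil?
      ( from =~ @fromselectpattern ) ? true : false
    end

    # get_articles could then select on both headers:
    #   if preselect(art[id]["subject"]) and preselect_from(art[id]["from"])
    #     add(id.to_i, art[id]["messid"], art[id]["from"], art[id]["subject"], server)
    #   end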

@@ -99,7 +99,7 @@ class Article
  Debuglevel = 1
- Message = Struct.new(:messid, :id, :server, :subject)
+ Message = Struct.new(:messid, :id, :from, :server, :subject)
  def initialize(nntpservers, groupname, newsrc="~/.newsrc")
  @messageinfo = []
@@ -134,8 +134,8 @@ def initialize(nntpservers, groupname, newsrc="~/.newsrc")
  @serverpasses[server]['pass'] = nil
  end
  }
- p @serverlist
- p @serverpasses
+ #p @serverlist
+ #p @serverpasses
  #@serverlist = nntpservers.split('|')
  @connections = {}
@@ -147,11 +147,11 @@ p @serverpasses
  # p Time.now
  begin
  timeout(60) do
- p "connecting"
+ #p "connecting"
  @connections[server]["nntp"] = Net::KANNTP.new(server, 119, @serverpasses[server]['user'], @serverpasses[server]['pass'])
  end
  resp = @connections[server]["nntp"].mode_reader
- p resp
+ #p resp
  rescue TimeoutError, Errno::ECONNRESET
  sleep 3
  retry
@@ -227,8 +227,8 @@ def preselect(subject)
  # return ( subject =~ @preselectpattern )
  end
- def add(id, messid, subject, server)
- @messageinfo.push(Message.new(messid, id, server, subject))
+ def add(id, messid, from, subject, server)
+ @messageinfo.push(Message.new(messid, id, from, server, subject))
  @grouped = false
  end
@@ -303,6 +303,7 @@ def get_articles(cachedir=false)
  # resp, xover_lines = get_xover(server, i)
  resp, subj_lines = get_xhdr(server, i, "subject")
  resp, messid_lines = get_xhdr(server, i, "message-id")
+ resp, from_lines = get_xhdr(server, i, "from")
  rescue TempError
  printerr(server)
  next
@@ -326,17 +327,22 @@ def get_articles(cachedir=false)
  art[x[0]]["messid"] = x[1]
  print "art id: #{x[0]} messid: #{x[1]}\n" if Debuglevel > 2
  }
+ from_lines.collect{|x|
+ art[x[0]] = {} unless art.has_key?(x[0])
+ art[x[0]]["from"] = x[1]
+ print "art id: #{x[0]} from: #{x[1]}\n" if Debuglevel > 2
+ }
  for id in art.keys
- if art[id].has_key?("subject") and art[id].has_key?("messid")
- print "adding: #{art[id]["messid"]}, #{id}, #{server}, #{art[id]["subject"]}\n" if Debuglevel > 2
+ if art[id].has_key?("subject") and art[id].has_key?("messid") and art[id].has_key?("from")
+ print "adding: #{art[id]["messid"]}, #{id}, #{server}, #{art[id]["from"]}, #{art[id]["subject"]}\n" if Debuglevel > 2
  # @newids[server][id.to_i] = true
  # perhaps only do this if the preselector picks it,
  # and otherwise just add a line to the cache,
  # but don't keep it in memory
  if preselect(art[id]["subject"])
- add(id.to_i, art[id]["messid"], art[id]["subject"], server)
+ add(id.to_i, art[id]["messid"], art[id]["from"], art[id]["subject"], server)
  end
- cache_add(cachedir, id, art[id]["messid"], art[id]["subject"], server)
+ cache_add(cachedir, id, art[id]["messid"], art[id]["from"], art[id]["subject"], server)
  end
  end
  end
@@ -541,6 +547,7 @@ def get_group_body(subj)
  print "full subject: #{@groups[subj]["messageinfo"][i][:subject]}\n" if Debuglevel > 0
  print "message id: #{@groups[subj]["messageinfo"][i][:messid]}\n" if Debuglevel > 1
  print "id: #{@groups[subj]["messageinfo"][i][:id]}\n" if Debuglevel > 1
+ print "from: #{@groups[subj]["messageinfo"][i][:from]}\n" if Debuglevel > 1
  print "server: #{@groups[subj]["messageinfo"][i][:server]}\n" if Debuglevel > 0
  resp = false
  while resp == false
@@ -585,6 +592,7 @@ def get_group_body_first(subj)
  print "full subject: #{@groups[subj]['messageinfo'][0][:subject]}\n" if Debuglevel > 0
  print "message id: #{@groups[subj]['messageinfo'][i][:messid]}\n" if Debuglevel > 1
  print "id: #{@groups[subj]['messageinfo'][i][:id]}\n" if Debuglevel > 1
+ print "from: #{@groups[subj]['messageinfo'][i][:from]}\n" if Debuglevel > 1
  print "server: #{@groups[subj]['messageinfo'][0][:server]}\n" if Debuglevel > 0
  resp = false
  while resp == false
@@ -762,11 +770,11 @@ def save_newsrc()
  end
  end
- def cache_add(cachedir, id, messid, subject, server)
+ def cache_add(cachedir, id, messid, from, subject, server)
  if @cache_buf.has_key?(server)
- @cache_buf[server].push("#{id}|#{messid}|#{subject}\n")
+ @cache_buf[server].push("#{id}|#{messid}|#{from}|#{subject}\n")
  else
- @cache_buf[server] = [ "#{id}|#{messid}|#{subject}\n" ]
+ @cache_buf[server] = [ "#{id}|#{messid}|#{from}|#{subject}\n" ]
  end
  if @cache_buf[server].length > 100
  cache_save(cachedir, server)
@@ -783,7 +791,7 @@ def cache_read(cachedir)
  puts "#{Time.now} Reading & scrubbing caches"
  filename = "#{cachedir}/#{@group}.ripnewscache"
  excludes = {}
- regexp = Regexp.new('^([^\|]*)\|([^\|]*)\|(.*)')
+ regexp = Regexp.new('^([^\|]*)\|([^\|]*)\|([^\|]*)\|(.*)')
  for server in @connections.keys
  first = @connections[server]["first"]
  last = @connections[server]["last"]
@@ -797,12 +805,13 @@ puts " #{Time.now} Reading cache for #{server}"
  line =~ regexp
  id_i = $1.to_i
  messid = $2
- subject = $3
+ from = $3
+ subject = $4
  if first <= id_i and id_i <= last
  unless excludes[server].has_key?(id_i)
  outfile.puts(line)
  if preselect(subject)
- add(id_i, messid, subject, server)
+ add(id_i, messid, from, subject, server)
  end
  # XXX all of cache_read's slowness is in this line:
  @connections[server]["skip_ids"].insert(id_i)

@@ -0,0 +1,5 @@
+ #!/bin/sh
+ for i in *; do
+ sed -e 's/^\([^|]*|[^|]*|\)/\1|/' < "$i" > "$i.new"
+ mv "$i.new" "$i"
+ done
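
With this commit a cache line written by cache_add() gains a field: what used to be id|messid|subject is now id|messid|from|subject, and cache_read() parses four pipe-separated fields. The shell script above appears intended to rewrite existing cache files in place, inserting an empty from field after the message-id so old entries still match the new regexp. With illustrative values only:

    1234|<abc@example.invalid>|some subject

becomes

    1234|<abc@example.invalid>||some subject

Entries migrated this way simply carry an empty poster field.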