- removed xover again, the old way was faster

- turned the code that avoids trying to fill every gap back on
Ward Wouts 2005-02-07 08:53:43 +00:00
parent e73d35d463
commit 78a364aae4
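
For context: the diff below swaps a single XOVER call (get_xover) back to two XHDR calls (get_xhdr), one for the subject header and one for the message-id header. Below is a minimal sketch of the raw NNTP exchange behind such wrappers, assuming a plain TCP connection; the host, group and article range are made up, and the real get_xover/get_xhdr implementations are not part of this commit.

require 'socket'

# Illustrative values only: host, group and range are made up.
host, group, range = 'news.example.com', 'alt.binaries.example', '1000-1999'

def read_line(sock)
  sock.gets("\r\n").chomp
end

# Read a dot-terminated multiline NNTP response body.
def read_body(sock)
  lines = []
  while (line = read_line(sock)) != '.'
    lines << line.sub(/\A\.\./, '.')   # undo dot-stuffing
  end
  lines
end

def command(sock, cmd)
  sock.write(cmd + "\r\n")
  read_line(sock)                      # status line, e.g. "224 Overview information follows"
end

sock = TCPSocket.new(host, 119)
read_line(sock)                        # server greeting
command(sock, "GROUP #{group}")

# Reverted approach: one XOVER round trip, but every returned line carries the
# full overview (subject, from, date, message-id, references, bytes, lines).
command(sock, "XOVER #{range}")
overview = read_body(sock)

# Restored approach: two XHDR round trips, each returned line is just "<id> <value>".
command(sock, "XHDR Subject #{range}")
subjects = read_body(sock)
command(sock, "XHDR Message-ID #{range}")
messids = read_body(sock)

sock.close

Which of the two is faster depends on the server and on how much of the overview data is actually used; here only the subject and message-id are needed, which is what the commit message refers to.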


@@ -1,4 +1,4 @@
# $Dwarf: article.rb,v 1.107 2005/02/05 12:35:50 ward Exp $
# $Dwarf: article.rb,v 1.108 2005/02/06 13:42:03 ward Exp $
# $Source$
#
@@ -180,20 +180,20 @@ def get_articles(cachedir=false)
# stuff that is really too old is never going to get filled, so don't even try to fetch it
# do want to fetch some of it, since logging shows that older articles do get downloaded
# for server in @connections.keys
#   if @connections[server]["skip_ids"].max && @connections[server]["skip_ids"].max
#     articles = @connections[server]["last"] - @connections[server]["first"]
#     if articles > 10000
#       fillerend = (@connections[server]["skip_ids"].max - (articles/10)).to_i
#     else
#       fillerend = @connections[server]["skip_ids"].max - 1000
#     end
#     if @connections[server]["skip_ids"].min && fillerend > @connections[server]["skip_ids"].min
#       @connections[server]["skip_ids"] = @connections[server]["skip_ids"].union("#{@connections[server]["skip_ids"].min}-#{fillerend}")
#       # p "filling #{@connections[server]["skip_ids"].min}-#{fillerend}"
#     end
#   end
# end
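# illustrative numbers (not from this commit): first=1, last=250000 gives articles=249999 (>10000),
# so with skip_ids.max=200000 fillerend = 200000 - 24999 = 175001; every id from skip_ids.min
# up to 175001 is then added to skip_ids and no longer fetched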
for server in @connections.keys
  if @connections[server]["skip_ids"].max && @connections[server]["skip_ids"].max
    articles = @connections[server]["last"] - @connections[server]["first"]
    if articles > 10000
      fillerend = (@connections[server]["skip_ids"].max - (articles/10)).to_i
    else
      fillerend = @connections[server]["skip_ids"].max - 1000
    end
    if @connections[server]["skip_ids"].min && fillerend > @connections[server]["skip_ids"].min
      @connections[server]["skip_ids"] = @connections[server]["skip_ids"].union("#{@connections[server]["skip_ids"].min}-#{fillerend}")
      # p "filling #{@connections[server]["skip_ids"].min}-#{fillerend}"
    end
  end
end
for server in @connections.keys
  print " reading articles from server: #{server}\n"
@@ -210,32 +210,32 @@ def get_articles(cachedir=false)
  for i in rangelist.split(',')
    print "i: #{i}\n" if Debuglevel > 2
    begin
      resp, xover_lines = get_xover(server, i)
      # resp, subj_lines = get_xhdr(server, i, "subject")
      # resp, messid_lines = get_xhdr(server, i, "message-id")
      # resp, xover_lines = get_xover(server, i)
      resp, subj_lines = get_xhdr(server, i, "subject")
      resp, messid_lines = get_xhdr(server, i, "message-id")
    rescue TempError
      printerr(server)
      next
    end
    art = {}
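    # xover lines are pre-split per article: x[0] = article number, x[1] = subject, x[4] = message-id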
    xover_lines.collect{|x|
      art[x[0]] = {} unless art.has_key?(x[0])
      art[x[0]]["subject"] = x[1]
      art[x[0]]["messid"] = x[4]
      print "art id: #{x[0]} subj: #{x[1]}\n" if Debuglevel > 2
      print "art id: #{x[0]} messid: #{x[4]}\n" if Debuglevel > 2
    }
    # subj_lines.collect{|x|
    # xover_lines.collect{|x|
    #   art[x[0]] = {} unless art.has_key?(x[0])
    #   art[x[0]]["subject"] = x[1]
    #   art[x[0]]["messid"] = x[4]
    #   print "art id: #{x[0]} subj: #{x[1]}\n" if Debuglevel > 2
    #   print "art id: #{x[0]} messid: #{x[4]}\n" if Debuglevel > 2
    # }
    # messid_lines.collect{|x|
    #   art[x[0]] = {} unless art.has_key?(x[0])
    #   art[x[0]]["messid"] = x[1]
    #   print "art id: #{x[0]} messid: #{x[1]}\n" if Debuglevel > 2
    # }
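    # merge the two XHDR result sets by article number; an article is only cached
    # further down once both its subject and its message-id are known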
    subj_lines.collect{|x|
      art[x[0]] = {} unless art.has_key?(x[0])
      art[x[0]]["subject"] = x[1]
      print "art id: #{x[0]} subj: #{x[1]}\n" if Debuglevel > 2
    }
    messid_lines.collect{|x|
      art[x[0]] = {} unless art.has_key?(x[0])
      art[x[0]]["messid"] = x[1]
      print "art id: #{x[0]} messid: #{x[1]}\n" if Debuglevel > 2
    }
    for id in art.keys
      if art[id].has_key?("subject") and art[id].has_key?("messid")
        print "adding: #{art[id]["messid"]}, #{id}, #{server}, #{art[id]["subject"]}\n" if Debuglevel > 2
@@ -249,7 +249,8 @@ def get_articles(cachedir=false)
        cache_add(cachedir, id, art[id]["messid"], art[id]["subject"], server)
      end
    end
    headerlines += xover_lines.length
    # headerlines += xover_lines.length
    headerlines += subj_lines.length
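    # write the in-memory header cache out after every 500 or so fetched header lines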
    if headerlines >= 500
      cache_save(cachedir, server)
      headerlines = 0