change caching to a file per server model

This commit is contained in:
Ward Wouts 2003-07-14 22:12:18 +00:00
parent cf1c476d54
commit dfd4648b01

View file

@ -1,6 +1,6 @@
#################################
#
# $Dwarf: article.rb,v 1.72 2003/07/13 10:38:53 ward Exp $
# $Dwarf: article.rb,v 1.73 2003/07/13 11:28:32 ward Exp $
# $Source$
#
# article.rb
@ -113,10 +113,10 @@ def get_articles(cachedir=false)
end
end
read_cache(cachedir)
for server in @connections.keys
print "############################################################\n"
print "skip_ids #{server}: #{@connections[server]["skip_ids"].run_list}\n"
end
# for server in @connections.keys
# print "############################################################\n"
# print "skip_ids #{server}: #{@connections[server]["skip_ids"].run_list}\n"
# end
for server in @connections.keys
print " reading articles from server: #{server}\n"
range = Set::IntSpan.new("#{@connections[server]["first"]}-#{@connections[server]["last"]}")
@ -155,8 +155,8 @@ def get_articles(cachedir=false)
end
end
headerlines += subj_lines.length
if headerlines >= 10000
save_cache(cachedir)
if headerlines >= 10000 # hmmm, dit lijkt niet te werken...
save_cache(cachedir, server)
headerlines = 0
end
end
@ -166,7 +166,7 @@ def get_articles(cachedir=false)
del_server(server)
next
end
save_cache(cachedir)
save_cache(cachedir, server)
end
end
@ -528,48 +528,53 @@ def read_cache(cachedir)
for server in @connections.keys
excludes[server] = {}
@connections[server]["skip_ids"].elements.collect!{|x| excludes[server][x]=true}
end
if FileTest.directory?( cachedir) and FileTest.file?( filename ) and FileTest.readable?( filename )
file = File.new( filename )
lines = file.readlines
lines.collect{|line|
if line =~ /^(\d+)\|(.*?)\|(.*?)\|(.*)$/
if @connections.has_key?($3)
unless excludes.has_key?($3) and excludes[$3].has_key?($1.to_i) or
$1.to_i < @connections[$3]["first"].to_i or
$1.to_i > @connections[$3]["last"].to_i
add($2, $1, $3, $4)
@connections[$3]["skip_ids"].insert($1.to_i)
if FileTest.directory?( cachedir) and FileTest.file?( "#{filename}.#{server}" ) and FileTest.readable?( "#{filename}.#{server}" )
file = File.new( "#{filename}.#{server}" )
lines = file.readlines
lines.collect{|line|
# id | messageid | subject
if line =~ /^(\d+)\|(.*?)\|(.*)$/
unless excludes.has_key?(server) and excludes[server].has_key?($1.to_i) or
$1.to_i < @connections[server]["first"].to_i or
$1.to_i > @connections[server]["last"].to_i
add($2, $1, server, $3)
@connections[server]["skip_ids"].insert($1.to_i)
end
end
end
}
file.close
}
file.close
end
end
end
def save_cache(cachedir)
def save_cache(cachedir, server)
filename = "#{cachedir}/#{@group}.ripnewscache"
if FileTest.directory?( cachedir )
file = File.new( "#{filename}.new", "w" ) or print "couldn't open cachefile for writing\n"
cache = []
file = File.new( "#{filename}.#{server}.new", "w" ) or print "couldn't open cachefile for writing\n"
print "Updating cache...\n"
# cache = []
# for i in (0...@subjects.length)
# cache.push("#{@ids[i]}|#{@messids[i]}|#{@servers[i]}|#{@subjects[i]}\n")
# end
# cache.sort!
# file.print cache
for i in (0...@subjects.length)
cache.push("#{@ids[i]}|#{@messids[i]}|#{@servers[i]}|#{@subjects[i]}\n")
if @servers[i] == server
file.print "#{@ids[i]}|#{@messids[i]}|#{@subjects[i]}\n"
end
end
cache.sort!
file.print cache
file.close
if ( File.move("#{filename}.new", filename) )
print "Cache updated\n"
if ( File.move("#{filename}.#{server}.new", "#{filename}.#{server}") )
print "Cache updated for #{server}\n"
else
print "Couldn't update cache\n"
print "Couldn't update #{server} cache\n"
end
end
end
###############################################################
# a bas64 decoder...
# a base64 decoder...
def decode64(str)
string = ''
for line in str.split("\n")