#!/usr/bin/env ruby
# $Id$
# $URL$
#
# Copyright (c) 2007, 2008 Ward Wouts <ward@wouts.nl>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#

require 'net/https'
require 'uri'
require 'rexml/document'
require 'date'
require 'getoptlong'

class DSParse
    # Info about .DS_Store files can be found at https://wiki.mozilla.org/DS_Store_File_Format
    # Someone also once wrote a 'dsdump.c' program that contains some useful info.
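    #
    # Rough sketch of the layout this parser relies on (pieced together from the
    # code below and the wiki page above, not from a formal spec):
    #   bytes 0..7    magic: 00 00 00 01 'B' 'u' 'd' '1'
    #   offset 0x14   32-bit offset of the block holding the records
    #                 (the low four bits are flags and get masked off)
    #   offset + 8    32-bit record count
    #   offset + 12   records: 32-bit name length, UTF-16BE name, 4-byte
    #                 structure id, 4-byte type code ('bool', 'shor', 'long',
    #                 'type', 'ustr' or 'blob'), followed by the data itself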

    def initialize
        @store = Array.new
    end

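    # Interpret a four-byte array as a big-endian 32-bit integer,
    # e.g. arr2long([0x00, 0x00, 0x30, 0x39]) #=> 12345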
    def arr2long(arr)
        return (arr[0]<<24) + (arr[1]<<16) + (arr[2]<<8) + arr[3]
    end

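    # Decode an array of UTF-16BE bytes into a string two bytes at a time,
    # e.g. arr2string([0x00, 0x41, 0x00, 0x42]) #=> "AB"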
    def arr2string(arr)
        string = ""
        (0...arr.length).step(2){|i|
            string += ((arr[i]<<8) + (arr[i+1])).chr
        }
        return string
    end

    def readfile(filename)
        @store = Array.new
        File.open(filename).each_byte{|byte|
            @store.push byte
        }
    end

    def readstring(string)
        @store = Array.new
        string.each_byte{|byte|
            @store.push byte
        }
    end

    def isds?
        # magic header: 00 00 00 01 42 75 64 31 ("Bud1")
        # could be done more neatly, don't care
        @store[0] == 0x00 && @store[1] == 0x00 && @store[2] == 0x00 && @store[3] == 0x01 &&
            @store[4] == 0x42 && @store[5] == 0x75 && @store[6] == 0x64 && @store[7] == 0x31
    end

    def getfilenames
        filenames = Array.new

        offset = arr2long(@store[0x14, 4])
        offset &= ~15 # tim says this sets the last few bits to 0

        structcount = arr2long(@store[offset+8, 4])

        pointer = offset+12

        (0...structcount).each{|structcounter|
            objnamelength = arr2long(@store[pointer, 4])
            pointer += 4

            objnamebuf = @store[pointer, objnamelength*2]
            pointer += objnamelength*2
            filenames.push arr2string(objnamebuf)

            objtag = @store[pointer, 4].map{|x| x.chr}.join('') # structure id, not used further
            pointer += 4

            objtype = @store[pointer, 4].map{|x| x.chr}.join('')
            pointer += 4

            datasize = 0
            case objtype
            when 'bool'
                datasize = 1
            when 'shor', 'long', 'type'
                datasize = 4
            when 'ustr'
                datasize = arr2long(@store[pointer, 4])
                pointer += 4
                datasize *= 2
            when 'blob'
                datasize = arr2long(@store[pointer, 4])
                pointer += 4
            else
                puts "Unrecognized data type #{objtype}"
            end

            # skip over the data; only the filenames are of interest here
            if datasize > 0
                data = @store[pointer, datasize]
                pointer += datasize
            end
        }

        filenames.uniq!
        return filenames
    end
end # class DSParse
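
# Minimal standalone use of the class above (hypothetical local path, just to
# illustrate the API; the script below normally feeds it data via readstring):
#   ds = DSParse.new
#   ds.readfile('/tmp/.DS_Store')
#   puts ds.getfilenames if ds.isds?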

def usage
    puts <<EOT
Usage: #{$0.sub(/.*\//, "")} [options] -u <baseurl>

  -h, --help       show this message
  -m <svn|cvs|ds>  check for one of Subversion, CVS or .DS_Store (default: all)
  -u <baseurl>     set the base URL
  -s               use SSL
EOT
    exit
end
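
# Typical invocations (scriptname and example.com are placeholders):
#   ./scriptname.rb -u www.example.com                     # try svn, cvs and .DS_Store
#   ./scriptname.rb -m svn -u http://www.example.com/app
#   ./scriptname.rb -s -u https://www.example.com/         # the same, over SSL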

def cmdline
    options = Hash.new
    begin
        opts = GetoptLong.new(
            [ "-h", "--help", GetoptLong::NO_ARGUMENT ],
            [ "-m", GetoptLong::REQUIRED_ARGUMENT ],
            [ "-u", GetoptLong::REQUIRED_ARGUMENT ],
            [ "-s", GetoptLong::NO_ARGUMENT ]
        )
        opts.quiet = true

        opts.each do |opt, arg|
            options[opt] = arg
        end
    rescue
        print "#{$!}\n"
        usage
    end
    if options["-h"]
        usage
    end
    return options
end

def fetch(uri_str, limit = 10)
    # A more specific exception class would be nicer here.
    raise ArgumentError, 'HTTP redirect too deep' if limit == 0
    if URI.parse(uri_str).scheme.to_s == "file"
        response = ""
        if File.exist?(URI.parse(uri_str).path)
            File.open(URI.parse(uri_str).path).each_line{|line|
                response += line
            }
        else
            raise "Not found"
        end
        response
    else
        host = URI.parse(uri_str).host
        port = URI.parse(uri_str).port
        path = URI.parse(uri_str).path
        query = URI.parse(uri_str).query

        http = Net::HTTP.new(host, port)
        http.use_ssl = @use_ssl
        if query
            req = Net::HTTP::Get.new("#{URI.escape(path)}?#{URI.escape(query)}")
        else
            req = Net::HTTP::Get.new("#{URI.escape(path)}")
        end
        # @user and @pass are not set anywhere in this script; only send the
        # Authorization header when credentials have actually been provided.
        req.basic_auth(@user, @pass) if @user
        response = http.request(req)
        case response
        when Net::HTTPSuccess then response.body
        when Net::HTTPRedirection then fetch(response['location'], limit - 1)
        when Net::HTTPUnauthorized then puts "401 Authorization Required #{uri_str}"
        when Net::HTTPForbidden then puts "403 Forbidden #{uri_str}"
        # when Net::HTTPNotFound then puts "404 Not Found #{uri_str}"
        else
            response.error!
        end
    end
end
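
# fetch notes (URLs are illustrative):
#   fetch("http://www.example.com/.svn/entries")  # => response body as a String
#   fetch("file:///tmp/entries")                  # => contents of the local file
# Redirects are followed until the limit argument (default 10) runs out; on
# 401/403 a message is printed and nil is returned (puts returns nil), which
# the callers below treat as "nothing found".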

def svnparse(url)
    puts "\n#{url}"
    begin
        body = fetch("#{url}/.svn/entries")
    rescue
    end
    if body.nil?
        return
    end
    dirs = Array.new
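
    # .svn/entries is an XML document in older Subversion working copies and a
    # plain-text format in newer ones, where the first line holds the format
    # number ('8' is roughly the Subversion 1.4 era). Anything else is reported
    # as unknown below.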
    case body[0].chr
    when '<'
        xmldoc = REXML::Document.new(body)
        xmldoc.elements.each("wc-entries/entry") {|item|
            case item.attribute("kind").to_s
            when "dir" then
                if item.attribute("name").to_s == ""
                    next
                end
                puts "#{item.attribute("name")}/"
                dirs.push(item.attribute("name").to_s)
            when "file" then
                puts "#{item.attribute("name")} #{item.attribute("last-author")} #{item.attribute("committed-date")}"
            else
                puts " Strange kind #{item.attribute("kind")}"
            end
        }
    when '8'
        lastline = ""
        commitdate = ""
        author = ""
        bodyarr = Array.new
        body.each_line{|line|
            bodyarr.push line
        }
        (0...bodyarr.length).each{|count|
            line = bodyarr[count]
            line.chomp!
            if line.match(/\d\d\d\d-\d\d-\d\dT/)
                commitdate = line
                author = bodyarr[count+2].to_s.chomp
            end
            case line
            when 'dir'
                if lastline == ""
                    next
                end
                puts "#{lastline}/"
                dirs.push lastline
            when 'file'
                puts "#{lastline} #{author} #{commitdate}"
            else
                lastline = line
            end
        }
    else
        $stderr.puts "Unknown SVN entries format found."
    end
    dirs.each{|dir|
        #p "#{url}/#{dir}"
        svnparse("#{url}/#{dir}")
    }
end

def cvsparse(url)
    puts "\n#{url}"
    body = nil
    begin
        body = fetch("#{url}/CVS/Entries")
    rescue
    end
    if body.nil?
        return
    end
    dirs = Array.new
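
    # CVS/Entries lines look roughly like this (illustrative values):
    #   D/images////
    #   /index.php/1.4/Tue Jul 24 20:35:02 2007//
    # i.e. directories start with "D/", files with "/name/revision/timestamp/...".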
    body.each_line{|line|
        case line
        when /^D\/?(.*?)\/.*/ then
            puts "#{$1}/"
            dirs.push($1)
        when /^\/(.*?)\/(.*?)\/(.*?)\/.*/ then
            puts "#{$1} #{$2} #{$3}"
        end
    }
    dirs.each{|dir|
        cvsparse("#{url}/#{dir}")
    }
end

def dsparse(url)
    puts "\n#{url}"
    ds = DSParse.new
    dirs = Array.new
    begin
        body = fetch("#{url}/.DS_Store")
    rescue
    end
    if body.nil?
        return
    end
    ds.readstring(body)
    if ! ds.isds?
        $stderr.puts "Not a .DS_Store format file"
        return
    end
    entries = ds.getfilenames
    (0...entries.length).each{|x|
        begin
            if entries[x] == '.' or entries[x] == '..'
                next
            end
            # if the entry has a .DS_Store of its own it is a directory worth recursing into
            body = fetch("#{url}/#{entries[x]}/.DS_Store")
            dscheck = DSParse.new
            dscheck.readstring(body)
            if dscheck.isds?
                dirs.push entries[x]
                entries[x] = "#{entries[x]}/"
            end
        rescue
        end
    }
    puts entries
    dirs.each{|dir|
        dsparse("#{url}/#{dir}")
    }
end

# Each check* probe prints a banner and fetches the telltale file; on most
# failures fetch raises (e.g. a 404 becomes an exception via response.error!)
# and the bare rescue silently skips that check.
def checksvn(url)
    begin
        puts "==================================================="
        body = fetch("#{url}/.svn/entries")
        puts "Subversion info found:"
        svnparse(url)
    rescue
    end
end

def checkcvs(url)
    begin
        puts "==================================================="
        body = fetch("#{url}/CVS/Entries")
        puts "CVS info found:"
        cvsparse(url)
    rescue
    end
end

def checkds(url)
    begin
        puts "==================================================="
        body = fetch("#{url}/.DS_Store")
        puts ".DS_Store file found:"
        dsparse(url)
    rescue
    end
end

options = cmdline
if options["-u"].nil?
    usage
else
    if ! options["-u"].match(/^(http|file)/)
        options["-u"] = "http://#{options["-u"]}"
    end
end

if options["-s"]
    @use_ssl = true
else
    @use_ssl = false
end

if options["-m"].nil?
    checksvn(options["-u"])
    checkcvs(options["-u"])
    checkds(options["-u"])
else
    case options["-m"]
    when "svn"
        checksvn(options["-u"])
    when "cvs"
        checkcvs(options["-u"])
    when "ds"
        checkds(options["-u"])
    end
end