|
| 1 | +require "net/http" |
| 2 | +require "uri" |
| 3 | +require "digest/sha1" |
| 4 | + |
# Build a path rooted at the local vendor/ directory.
# Any number of path segments may be given.
def vendor(*args)
  File.join("vendor", *args)
end
| 8 | + |
# Rake directory task: creates the vendor/ output directory on demand.
# NOTE(review): the ["vendor"] prerequisite appears to reference the
# "vendor" task defined at the bottom of this file — confirm that this
# ordering is intentional and does not create a dependency cycle.
directory "vendor/" => ["vendor"] do |task, args|
  mkdir task.name
end
| 12 | + |
# Download +url+ to +output+ and verify the result against the expected
# +sha1+ hex digest. Aborts (via +fail+) when the checksum does not match.
def fetch(url, sha1, output)
  puts "Downloading #{url}"
  actual_sha1 = download(url, output)
  return if actual_sha1 == sha1

  fail "SHA1 does not match (expected '#{sha1}' but got '#{actual_sha1}')"
end # def fetch
| 22 | + |
# Ensure the file at +url+ is present under vendor/ with the expected
# +sha1+ checksum. Defines (and immediately invokes) a rake file task
# that downloads the file when it is missing or its checksum differs.
#
# Returns the vendor-relative output path of the fetched file.
def file_fetch(url, sha1)
  filename = File.basename(URI(url).path)
  # BUG FIX: the output path used a broken interpolation
  # ("vendor/#(unknown)"); interpolate the URL's basename instead.
  output = "vendor/#{filename}"
  task output => ["vendor/"] do
    begin
      actual_sha1 = file_sha1(output)
      if actual_sha1 != sha1
        # Present but stale/corrupt: re-download.
        fetch(url, sha1, output)
      end
    rescue Errno::ENOENT
      # Not downloaded yet.
      fetch(url, sha1, output)
    end
  end.invoke

  return output
end
| 39 | + |
# Compute the SHA1 hex digest of the file at +path+.
#
# Raises Errno::ENOENT when the file does not exist (callers rely on
# this to trigger a download).
def file_sha1(path)
  digest = Digest::SHA1.new
  # Binary mode ("rb") so the digest is byte-accurate for binary files
  # on all platforms; the original text-mode open could corrupt the
  # digest on Windows. Block form guarantees the fd is closed.
  File.open(path, "rb") do |fd|
    # IO#read returns nil at EOF, ending the loop (no EOFError dance).
    while (chunk = fd.read(16_384))
      digest << chunk
    end
  end
  digest.hexdigest
end
| 54 | + |
# Download +url+ into +output+, streaming through a ".tmp" file and
# renaming into place only on success. Shows percentage progress when
# attached to a TTY and a content-length is known.
#
# Returns the SHA1 hex digest of the downloaded bytes.
# Raises on HTTP failure; logs and re-raises SocketError.
def download(url, output)
  uri = URI(url)
  digest = Digest::SHA1.new
  tmp = "#{output}.tmp"
  Net::HTTP.start(uri.host, uri.port, :use_ssl => (uri.scheme == "https")) do |http|
    request = Net::HTTP::Get.new(uri.path)
    http.request(request) do |response|
      # BUG FIX: the original failed *when* the status was in [200, 301]
      # (inverted), and compared the String response.code to Integers,
      # so the check could never fire. Fail on anything other than
      # success or a permanent redirect.
      unless ["200", "301"].include?(response.code)
        fail "HTTP fetch failed for #{url}. #{response}"
      end
      # nil.to_i is 0, so a missing content-length simply disables the
      # progress display (the old "|| -1" was dead code).
      size = response["content-length"].to_i.to_f
      count = 0
      # Binary mode ("wb") so archives are byte-accurate on all platforms.
      File.open(tmp, "wb") do |fd|
        response.read_body do |chunk|
          fd.write(chunk)
          digest << chunk
          if size > 0 && $stdout.tty?
            count += chunk.bytesize
            $stdout.write(sprintf("\r%0.2f%%", count / size * 100))
          end
        end
      end
      # Clear the progress line.
      $stdout.write("\r \r") if $stdout.tty?
    end
  end

  # Atomically move the completed download into place.
  File.rename(tmp, output)

  return digest.hexdigest
rescue SocketError => e
  puts "Failure while downloading #{url}: #{e}"
  raise
ensure
  # Remove the partial file if anything went wrong before the rename.
  File.unlink(tmp) if File.exist?(tmp)
end # def download
| 88 | + |
# Extract a gzipped tarball. For each entry the block is called with the
# tar entry; it must return the destination path for that entry, or nil
# to skip it. Already-extracted files with matching size and mode are
# skipped. The tarball is deleted after extraction.
def untar(tarball, &block)
  require "archive/tar/minitar"
  # Binary mode ("rb") so the gzip stream is read byte-accurately on all
  # platforms.
  tgz = Zlib::GzipReader.new(File.open(tarball, "rb"))
  # Pull out typesdb
  tar = Archive::Tar::Minitar::Input.open(tgz)
  tar.each do |entry|
    path = block.call(entry)
    next if path.nil?
    parent = File.dirname(path)

    mkdir_p parent unless File.directory?(parent)

    # Skip this file if the output file is the same size
    if entry.directory?
      mkdir path unless File.directory?(path)
    else
      entry_mode = entry.instance_eval { @mode } & 0777
      # BUG FIX: File.exists? was removed in Ruby 3.2; use File.exist?.
      if File.exist?(path)
        stat = File.stat(path)
        # TODO(sissel): Submit a patch to archive-tar-minitar upstream to
        # expose headers in the entry.
        entry_size = entry.instance_eval { @size }
        # If file sizes are same, skip writing.
        next if stat.size == entry_size && (stat.mode & 0777) == entry_mode
      end
      puts "Extracting #{entry.full_name} from #{tarball} #{entry_mode.to_s(8)}"
      # Binary mode ("wb") so extracted files are byte-accurate.
      File.open(path, "wb") do |fd|
        # eof? check lets us skip empty files. Necessary because the API provided by
        # Archive::Tar::Minitar::Reader::EntryStream only mostly acts like an
        # IO object. Something about empty files in this EntryStream causes
        # IO.copy_stream to throw "can't convert nil into String" on JRuby
        # TODO(sissel): File a bug about this.
        while !entry.eof?
          chunk = entry.read(16384)
          fd.write(chunk)
        end
        #IO.copy_stream(entry, fd)
      end
      File.chmod(entry_mode, path)
    end
  end
  tar.close
  File.unlink(tarball) if File.file?(tarball)
end # def untar
| 133 | + |
# Decompress the gzip file at +file+ next to itself (the ".gz" suffix is
# stripped from the output name) and delete the compressed original on
# success. A partial output file is removed when decompression fails.
def ungz(file)
  outpath = file.gsub('.gz', '')
  # Binary mode ("rb") so the gzip stream is read byte-accurately.
  tgz = Zlib::GzipReader.new(File.open(file, "rb"))
  begin
    # Binary mode ("wb") so the decompressed bytes are exact.
    File.open(outpath, "wb") do |out|
      IO::copy_stream(tgz, out)
    end
    File.unlink(file)
  rescue
    # Don't leave a truncated output file behind.
    File.unlink(outpath) if File.file?(outpath)
    raise
  ensure
    # BUG FIX: the reader was only closed on the success path, leaking
    # the underlying file descriptor on error; always close it.
    tgz.close
  end
end
| 149 | + |
desc "Process any vendor files required for this plugin"
# Fetches every entry in @files (each a hash with 'url', 'sha1' and an
# optional 'files' whitelist), then unpacks .tar.gz / .gz downloads into
# vendor/.
task "vendor" do |task, args|

  @files.each do |file|
    download = file_fetch(file['url'], file['sha1'])
    # BUG FIX: the original patterns /.tar.gz/ and /.gz/ had unescaped
    # dots and no anchor, so e.g. "footargz" would match; anchor and
    # escape them.
    if download =~ /\.tar\.gz$/
      prefix = download.gsub('.tar.gz', '').gsub('vendor/', '')
      untar(download) do |entry|
        # BUG FIX: +out+ was only assigned inside the whitelist branch,
        # so a missing 'files' key made File.join('vendor', nil) raise;
        # always derive the output name from the entry.
        out = entry.full_name.split("/").last
        unless file['files'].nil?
          # Skip entries not listed in the whitelist.
          next unless file['files'].include?(entry.full_name.gsub(prefix, ''))
        end
        File.join('vendor', out)
      end
    elsif download =~ /\.gz$/
      ungz(download)
    end
  end

end
0 commit comments