forked from a4agarwal/dropzone-user-scripts
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathDownload.dropzone
executable file
·179 lines (150 loc) · 6.07 KB
/
Download.dropzone
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
#!/usr/bin/ruby
# Dropzone Destination Info
# Name: Download
# Description: Download files
# Handles: NSStringPboardType
# Events: Dragged
# Creator: Marc E.
# URL: http://www.jardinpresente.com.ar/svn/utiles/trunk/dropzone/Download.dropzone
# IconURL: http://aptonic.com/destinations/icons/network.png
# OptionsNIB: ChooseFolder
require 'net/http'
# We put net/http/stats here instead of require
# Source: http://blog.obloh.com/posts/show/5
# Add methods to have stats and rules when downloading.
# Adds per-response download statistics and rule-based download limits to
# Net::HTTP. Adapted from: http://blog.obloh.com/posts/show/5
class Net::HTTP
  # Behaves like Net::HTTP.get_response, but the block receives 2 arguments:
  # the response (extended with Net::HTTP::Stats) and the chunk of body bytes
  # just read. Do not read the body through the response yourself — the bytes
  # have already been consumed here so the stats can be computed.
  #
  #   content = ''
  #   uri = URI.parse('http://ftp.ruby-lang.org/pub/ruby/1.8/ruby-1.8.6.tar.gz')
  #   response = Net::HTTP.get_response_with_stats(uri) do |resp, bytes|
  #     content << bytes
  #     puts "#{resp.bytes_percent}% at #{(resp.bytes_rate / 1024).to_i} Ko/s, remaining #{resp.left_time.to_i}s"
  #   end
  #
  # See Net::HTTP::Stats for the available attributes.
  def self.get_response_with_stats(url, &block)
    Net::HTTP.get_response(url) do |resp|
      resp.extend(Net::HTTP::Stats)
      resp.initialize_stats
      resp.read_body do |bytes|
        # Goes through Stats#bytes_read=, which refreshes rate / percent /
        # time estimates as a side effect of the assignment.
        resp.bytes_read += bytes.size
        block.call(resp, bytes) if block
      end
    end
  end

  # Same as Net::HTTP.get_response_with_stats, but takes a 2nd Hash argument
  # describing rules, checked after every chunk:
  #
  #   rules = {
  #     :max_time      => 5 * 60,            # abort if spent time exceeds (sec)
  #     :max_left_time => 5 * 60,            # abort if estimated time left exceeds (sec)
  #     :max_size      => 50 * 1024 * 1024,  # abort if body exceeds (bytes)
  #     :min_time      => 15                 # grace period before time checks (sec)
  #   }
  #
  # Raises Net::HTTP::Stats::MaxSizeExceed, Net::HTTP::Stats::MaxSpentTimeExceed
  # or Net::HTTP::Stats::MaxLeftTimeExceed when a rule is violated.
  def self.get_response_with_rules(url, rules, &block)
    Net::HTTP.get_response_with_stats(url) do |resp, bytes|
      if resp.bytes_count > rules[:max_size]
        raise Stats::MaxSizeExceed.new("Response size (#{resp.bytes_count}) exceeds maximum content length allowed (#{rules[:max_size]}).", resp)
      elsif resp.spent_time > rules[:max_time]
        raise Stats::MaxSpentTimeExceed.new("Maximum time allowed (#{rules[:max_time]}) has been exceeded", resp)
      elsif resp.spent_time > rules[:min_time] && resp.left_time > rules[:max_left_time]
        raise Stats::MaxLeftTimeExceed.new("Estimated time left (#{resp.left_time.to_i}) exceeds maximum left time allowed (#{rules[:max_left_time]})", resp)
      end
      block.call(resp, bytes) if block
    end
  end

  # Mixed into a response by get_response_with_stats; tracks download progress.
  module Stats
    # Body size in bytes (from the Content-Length header; 0 if absent).
    attr_accessor :bytes_count
    # Bytes already downloaded.
    attr_accessor :bytes_read
    # Download rate (bytes/sec).
    attr_accessor :bytes_rate
    # Percent of bytes downloaded (integer 0..100).
    attr_accessor :bytes_percent
    # Time spent downloading so far (seconds).
    attr_accessor :spent_time
    # Estimated remaining time (seconds).
    attr_accessor :left_time
    # When the download started (Time).
    attr_accessor :started_at

    # Called by Net::HTTP.get_response_with_stats. #:nodoc:
    def initialize_stats
      @bytes_count   = self['content-length'].to_i
      @bytes_read    = 0
      @bytes_rate    = 0
      @bytes_percent = 0
      # BUG FIX: was "@spend_time" (typo), which left spent_time nil until
      # the first chunk arrived and broke numeric comparisons against it.
      @spent_time    = 0
      @left_time     = 0
      @started_at    = Time.now
    end

    # Updates all derived stats whenever bytes_read is assigned. #:nodoc:
    def bytes_read=(bytes_read)
      @bytes_read = bytes_read
      @spent_time = Time.now - @started_at
      # Guard each division against a zero denominator.
      @bytes_rate = @bytes_read / @spent_time if @spent_time != 0
      @left_time = (@bytes_count - @bytes_read) / @bytes_rate if @bytes_rate != 0
      @bytes_percent = @bytes_read * 100 / @bytes_count if @bytes_count != 0
      @bytes_read
    end

    # Generic error class. Should not be instantiated directly.
    # Net::HTTPError#new takes (message, response).
    class Error < Net::HTTPError
    end
    # Raised if the downloaded file is too big.
    class MaxSizeExceed < Net::HTTP::Stats::Error
    end
    # Raised if the spent time is too long.
    class MaxSpentTimeExceed < Net::HTTP::Stats::Error
    end
    # Raised if the estimated remaining time is too long.
    class MaxLeftTimeExceed < Net::HTTP::Stats::Error
    end
  end
end
# Dropzone "Dragged" event handler. $items[0] holds newline-separated URLs
# from the pasteboard; each is downloaded to /tmp with a progress bar, then
# copied (via Dropzone's Rsync helper) to the folder chosen in the options
# NIB (EXTRA_PATH).
def dragged
  urls = $items[0].split("\n")
  $dz.determinate(false)
  urls.each_with_index do |url, index|
    $dz.begin("Downloading file #{index+1} of #{urls.length}...")
    # Escape square brackets explicitly, then escape the rest of the URL.
    # NOTE(review): URI.escape was removed in Ruby 3.0 — if this script must
    # run on modern Ruby, switch to URI::DEFAULT_PARSER.escape.
    item = url.gsub(/\[/, '%5B').gsub(/\]/, '%5D').chomp
    uri = URI.parse(URI.escape(item))
    # Last path component of the URL is used as the local file name.
    filename = url.split(File::SEPARATOR)[-1].chomp
    $dz.determinate(true)
    tmp_file = File.join('/tmp/', filename)
    # BUG FIX: open in binary mode ('wb' instead of 'w') so non-text
    # downloads are not corrupted by newline translation or encoding
    # conversion when writing raw body chunks.
    File.open(tmp_file, 'wb') do |file|
      Net::HTTP.get_response_with_stats(uri) do |resp, bytes|
        file.write(bytes)
        $dz.percent(resp.bytes_percent)
      end
    end
    Rsync.do_copy(tmp_file, ENV['EXTRA_PATH'], true)
  end
  $dz.finish("Download complete")
  $dz.url(false)
end