-
Notifications
You must be signed in to change notification settings - Fork 16
/
google_finder.rb
executable file
·259 lines (228 loc) · 6.74 KB
/
google_finder.rb
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
#!/usr/bin/env ruby
# == Bucket Finder - Trawl Google Cloud buckets for interesting files
#
# Each group of files on Google has to be contained in a bucket, and each bucket has to have a unique
# name across the system. This means that it is possible to bruteforce names; this script does this and more.
#
# == Version
#
# 1.0 - Released
#
# == Usage
#
# google_finder.rb <wordlist>
#
# -l, --log-file <file name>:
# filename to log output to
# -d, --download:
# download any public files found
# -h, --help:
# show help
#
# <wordlist>: the names to brute force
#
# Author:: Robin Wood (robin@digi.ninja)
# Copyright:: Copyright (c) Robin Wood 2018
# Licence:: Creative Commons Attribution-Share Alike Licence
#
require 'rexml/document'
require 'net/http'
require 'uri'
require 'getoptlong'
require 'fileutils'
# The stock URI parser rejects square brackets (and raw spaces), which
# appear in real bucket keys. Patch URI.parse to percent-encode those
# characters first, keeping the original parser reachable via an alias.
module URI
  class << self
    # Encode the characters the strict parser chokes on, then hand the
    # sanitised string to the unpatched parser.
    def parse_with_safety(uri)
      sanitised = uri.gsub('[', '%5B')
                     .gsub(']', '%5D')
                     .gsub(' ', '%20')
      parse_without_safety(sanitised)
    end

    alias parse_without_safety parse
    alias parse parse_with_safety
  end
end
# Print the command-line help banner and terminate the process.
def usage
  puts <<~HELP
    google_finder 1.0 Robin Wood (robin@digi.ninja) (https://digi.ninja)
    Usage: google_finder [OPTION] ... wordlist
    --help, -h: show help
    --download, -d: download the files
    --log-file, -l: filename to log output to
    -v: verbose
    wordlist: the wordlist to use
  HELP
  exit
end
# Fetch "/<page>" from the given base URL and return the response body.
#
# host:: base URL, e.g. "https://storage.googleapis.com"
# page:: path component appended after the leading "/"
#
# Returns the body string, or '' on timeout, on any request error, or
# when the response carries no body — callers gate on `data != ''`
# before handing the result to REXML, so nil must never escape.
def get_page host, page
  uri = URI.parse(host)
  begin
    res = Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.scheme == 'https') {|http|
      http.get("/" + page)
    }
  rescue Timeout::Error
    puts "Timeout"
    @logging.puts "Timeout" unless @logging.nil?
    return ''
  rescue => e
    puts "Error requesting page: " + e.to_s
    @logging.puts "Error requesting page: " + e.to_s unless @logging.nil?
    return ''
  end
  # Net::HTTPResponse#body is nil for bodyless responses (e.g. 204);
  # normalise to '' so the caller's `data != ''` check stays safe.
  return res.body || ''
end
# Interpret the XML returned for a bucket request: either a file listing
# (ListBucketResult) or an Error document, printing (and optionally
# downloading) what is found.
#
# doc::         REXML::Document built from the HTTP response body
# bucket_name:: candidate bucket name being probed
# host::        the host (or full URL) the response came from
# download::    when true, save every publicly readable file to disk
# depth::       redirect recursion depth; 0 for the initial request.
#               After a PermanentRedirect the URL path no longer includes
#               the bucket name, so local download paths differ.
def parse_results doc, bucket_name, host, download, depth = 0
  # One tab of console indentation per redirect level.
  tabs = "\t" * depth
  if !doc.elements['ListBucketResult'].nil?
    puts tabs + "Bucket Found: " + bucket_name + " ( " + host + "/" + bucket_name + " )"
    @logging.puts tabs + "Bucket Found: " + bucket_name + " ( " + host + "/" + bucket_name + " )" unless @logging.nil?
    doc.elements.each('ListBucketResult/Contents') do |ele|
      protocol = ''
      dir = bucket_name + '/'
      # After a redirect the host arrives without a scheme and already
      # addresses the bucket, so neither a scheme prefix nor the bucket
      # directory is added.
      if host !~ /^http/
        protocol = 'https://'
        dir = ''
      end
      filename = ele.elements['Key'].text
      url = protocol + host + '/' + dir + filename
      response = nil
      parsed_uri = URI.parse(url)
      downloaded = false
      readable = false
      # The listing includes directory placeholders as well as files; a
      # key ending in '/' is a directory name and is never downloaded.
      if download and filename != '' and filename[-1].chr != '/'
        fs_dir = File.dirname(URI.parse(url).path)[1..-1]
        # At depth 0 the bucket name is already the leading path element;
        # after a redirect the path starts at / so it is prepended here.
        if depth > 0
          fs_dir = bucket_name + '/' + fs_dir
        end
        # File.exists? was removed in Ruby 3.2; File.exist? is the
        # supported spelling.
        if !File.exist? fs_dir
          FileUtils.mkdir_p fs_dir
        end
        Net::HTTP.start(parsed_uri.host, parsed_uri.port, :use_ssl => parsed_uri.scheme == 'https') {|http|
          response = http.get(parsed_uri.path)
          if response.code.to_i == 200
            # File.open rather than Kernel#open: Kernel#open runs a
            # subprocess when the name starts with '|', and the name here
            # comes from remote, untrusted listing data.
            File.open(fs_dir + '/' + File.basename(filename), 'wb') { |file|
              file.write(response.body)
            }
            downloaded = true
            readable = true
          else
            readable = false
            downloaded = false
          end
        }
      else
        # Not downloading: a HEAD request is enough to test readability.
        Net::HTTP.start(parsed_uri.host, parsed_uri.port, :use_ssl => parsed_uri.scheme == 'https') {|http|
          response = http.head(parsed_uri.path)
        }
        readable = (response.code.to_i == 200)
        downloaded = false
      end
      if (readable)
        if downloaded
          puts tabs + "\t" + "<Downloaded> " + url
          @logging.puts tabs + "\t" + "<Downloaded> " + url unless @logging.nil?
        else
          puts tabs + "\t" + "<Public> " + url
          @logging.puts tabs + "\t" + "<Public> " + url unless @logging.nil?
        end
      else
        puts tabs + "\t" + "<Private> " + url
        @logging.puts tabs + "\t" + "<Private> " + url unless @logging.nil?
      end
    end
  elsif doc.elements['Error']
    err = doc.elements['Error']
    if !err.elements['Code'].nil?
      case err.elements['Code'].text
      when "NoSuchKey"
        puts tabs + "The specified key does not exist: " + bucket_name
        @logging.puts tabs + "The specified key does not exist: " + bucket_name unless @logging.nil?
      when "AccessDenied"
        puts tabs + "Bucket found but access denied: " + bucket_name
        @logging.puts tabs + "Bucket found but access denied: " + bucket_name unless @logging.nil?
      when "NoSuchBucket"
        puts tabs + "Bucket does not exist: " + bucket_name
        @logging.puts tabs + "Bucket does not exist: " + bucket_name unless @logging.nil?
      when "PermanentRedirect"
        if !err.elements['Endpoint'].nil?
          puts tabs + "Bucket " + bucket_name + " redirects to: " + err.elements['Endpoint'].text
          @logging.puts tabs + "Bucket " + bucket_name + " redirects to: " + err.elements['Endpoint'].text unless @logging.nil?
          # Follow the redirect and re-parse one level deeper.
          data = get_page 'http://' + err.elements['Endpoint'].text, ''
          if data != ''
            doc = REXML::Document.new(data)
            parse_results doc, bucket_name, err.elements['Endpoint'].text, download, depth + 1
          end
        else
          puts tabs + "Redirect found but can't find where to: " + bucket_name
          @logging.puts tabs + "Redirect found but can't find where to: " + bucket_name unless @logging.nil?
        end
      end
    else
      # An <Error> without a <Code> child: nothing useful to report.
    end
  else
    puts tabs + ' No data returned'
    @logging.puts tabs + ' No data returned' unless @logging.nil?
  end
end
# Command-line switches accepted by the script.
opts = GetoptLong.new(
  ['--help',     '-h', GetoptLong::NO_ARGUMENT],
  ['--log-file', '-l', GetoptLong::REQUIRED_ARGUMENT],
  ['--download', '-d', GetoptLong::NO_ARGUMENT],
  ['-v',               GetoptLong::NO_ARGUMENT]
)

# Defaults: no downloading, quiet output, no log file.
download = false
verbose = false
@logging = nil
# Parse the command line; any unknown or malformed option falls through
# to the usage banner via the rescue below.
begin
  opts.each do |opt, arg|
    case opt
    when '--help'
      usage
    when '--download'
      download = true
    when '-v'
      # Fix: -v is declared in the GetoptLong table but was silently
      # ignored, so the verbose flag could never be enabled.
      verbose = true
    when "--log-file"
      begin
        @logging = File.open(arg, "w")
      rescue
        puts "Could not open the logging file\n"
        exit
      end
    end
  end
rescue
  usage
end
# Exactly one positional argument -- the wordlist -- is required.
if ARGV.length != 1
  puts "Missing wordlist (try --help)"
  exit 0
end

filename = ARGV.shift

# File.exists? was removed in Ruby 3.2; File.exist? is the supported name.
if !File.exist? filename
  puts "Wordlist file doesn't exist"
  puts
  usage
  exit
end
# Probe each candidate bucket name against the Google Cloud Storage
# endpoint. File.foreach closes the file when iteration ends, unlike the
# original File.open(filename, 'r').each, which leaked the handle.
File.foreach(filename) { |name|
  name.strip!
  # Skip blank lines in the wordlist.
  next if name == ""

  host = 'https://storage.googleapis.com'
  data = get_page host, name
  if data != ''
    doc = REXML::Document.new(data)
    parse_results doc, name, host, download, 0
  end
}

@logging.close unless @logging.nil?