forked from vpistis/s3-inspector
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy paths3inspector.py
executable file
·336 lines (287 loc) · 11.1 KB
/
s3inspector.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
import os
import re
import sys
import warnings
from datetime import datetime, timedelta
from os.path import expanduser
from collections import defaultdict
# ENTER VALID SNS RESOURCE ARN IF YOU WANT TO USE CODE AS LAMBDA.
SNS_RESOURCE_ARN = "******************************************************"
# Horizontal rule printed between per-bucket report sections.
SEP = "-" * 40
# Maps S3 ACL permission codes to human-readable phrasing for the report.
EXPLAINED = {
    "READ": "readable",
    "WRITE": "writable",
    "READ_ACP": "permissions readable",
    "WRITE_ACP": "permissions writeable",
    "FULL_CONTROL": "Full Control"
}
# Grantee group URIs that make a bucket effectively public when granted
# any permission; values are the display names used in the report.
GROUPS_TO_CHECK = {
    "http://acs.amazonaws.com/groups/global/AllUsers": "Everyone",
    "http://acs.amazonaws.com/groups/global/AuthenticatedUsers": "Authenticated AWS users"
}
def get_s3_obj(is_lambda=False):
    """
    Gets and returns s3 resource and client.

    When not running as a lambda, prompts interactively for either an AWS
    profile name (when ~/.aws config files exist) or an access key pair.

    :param is_lambda: If True - defines that code has been launched as lambda.
    :return: s3 resource and client instances.
    """
    # BUG FIX: raw_input exists only on Python 2 and calling it on Python 3
    # raised NameError (the alias created in main() was a local variable and
    # never reached this function).  Resolve the prompt function here so the
    # code works on both interpreter versions.
    try:
        prompt = raw_input
    except NameError:
        prompt = input
    if is_lambda:
        # Lambda execution role supplies credentials implicitly.
        s3 = boto3.resource("s3")
        s3_client = boto3.client("s3")
    else:
        if os.path.exists(os.path.join(expanduser("~"), ".aws", "credentials")) or os.path.exists(
                os.path.join(expanduser("~"), ".aws", "config")):
            # Local AWS config found: let the user pick a named profile.
            profile_name = prompt("Enter your AWS profile name [default]: ") or "default"
            session = boto3.Session(profile_name=profile_name)
            s3 = session.resource("s3")
            s3_client = session.client("s3")
        else:
            # No config on disk: fall back to explicit credentials.
            access_key = prompt("Enter your AWS access key ID: ")
            secret_key = prompt("Enter your AWS secret key: ")
            s3 = boto3.resource("s3", aws_access_key_id=access_key,
                                aws_secret_access_key=secret_key)
            s3_client = boto3.client("s3", aws_access_key_id=access_key,
                                     aws_secret_access_key=secret_key)
    return s3, s3_client
def tidy(path):
    """
    Removes file described by path.

    Best effort: a missing (or otherwise unremovable) file is not an error.
    :param path: Path to file needs to be removed.
    """
    try:
        os.remove(path)
    except OSError:
        # Nothing to clean up, or removal not permitted -- either way, done.
        pass
def check_acl(acl):
    """
    Checks if the Access Control List is public.

    :param acl: Acl instance that describes bucket's.
    :return: Bucket's public indicator and dangerous grants parsed from acl instance.
    """
    dangerous_grants = defaultdict(list)
    for entry in acl.grants:
        who = entry["Grantee"]
        if who["Type"] != "Group":
            continue
        uri = who["URI"]
        if uri in GROUPS_TO_CHECK:
            # Grant to a world-readable group -- record it as dangerous.
            dangerous_grants[uri].append(entry["Permission"])
    # Any dangerous grant at all marks the bucket as public.
    return bool(dangerous_grants), dangerous_grants
def get_location(bucket_name, s3_client):
    """
    Returns the bucket location.

    :param bucket_name: Name of the bucket.
    :param s3_client: s3_client instance.
    :return: String with bucket's region.
    """
    response = s3_client.get_bucket_location(Bucket=bucket_name)
    region = response["LocationConstraint"]
    # AWS reports us-east-1 buckets with a null LocationConstraint.
    return "None(probably Northern Virginia)" if region is None else region
def install_and_import(pkg):
    """
    Installs latest versions of required packages.

    Imports *pkg*, installing it via pip first when it is missing, and
    publishes the imported module into this module's globals so later code
    can use it without an explicit import statement.

    :param pkg: Package name (assumed importable under the same name).
    """
    import importlib
    try:
        importlib.import_module(pkg)
    except ImportError:
        # BUG FIX: pip.main() was removed in pip 10.  Invoking pip via the
        # current interpreter with "-m pip" is the supported way to install
        # packages programmatically.
        import subprocess
        subprocess.check_call([sys.executable, "-m", "pip", "install", pkg])
    finally:
        # Re-import (now guaranteed present) and expose it module-wide.
        globals()[pkg] = importlib.import_module(pkg)
def scan_bucket_urls(bucket_name):
    """
    Scans standard bucket urls.
    Returns only publicly accessible urls.

    :param bucket_name: Name of the bucket.
    :return: List that contains publicly accessible urls.
    """
    domain = "s3.amazonaws.com"
    # Both virtual-hosted-style and path-style URLs, over http and https.
    candidates = [
        "https://{}.{}".format(bucket_name, domain),
        "http://{}.{}".format(bucket_name, domain),
        "https://{}/{}".format(domain, bucket_name),
        "http://{}/{}".format(domain, bucket_name)
    ]
    warnings.filterwarnings("ignore")
    reachable = []
    for candidate in candidates:
        try:
            body = requests.get(candidate).text
        except requests.exceptions.SSLError:
            # TLS handshake rejected for this URL form; skip it.
            continue
        if re.search("Access Denied", body) is None:
            reachable.append(candidate)
    return reachable
def add_to_output(msg, path):
    """
    Displays msg or writes it to file.

    :param msg: Message to handle.
    :param path: Path to lambda report file; None means print to terminal.
    """
    if path is None:
        # Interactive run: print (termcolor preserves any embedded colors).
        termcolor.cprint(msg)
        return
    # Lambda run: append plain text to the report file.
    with open(path, "a") as report:
        report.write(msg + '\n')
def analyze_buckets(s3, s3_client, report_path=None):
    """
    Analyses buckets permissions. Sends results to defined output.

    For each bucket: resolves its region, checks its ACL for grants to
    world-readable groups, and (for public buckets) probes its public URLs.

    :param s3: s3 resource instance.
    :param s3_client: s3 client instance.
    :param report_path: Path to lambda report file; None means colored
        terminal output.
    """
    buckets = s3.buckets.all()
    try:
        bucketcount = 0
        for bucket in buckets:
            location = get_location(bucket.name, s3_client)
            add_to_output(SEP, report_path)
            bucket_acl = bucket.Acl()
            public, grants = check_acl(bucket_acl)
            if public:
                # Report files get plain text; terminals get colors.
                if report_path:
                    msg = "Bucket {}: {}".format(bucket.name, "PUBLIC!")
                else:
                    bucket_line = termcolor.colored(
                        bucket.name, "blue", attrs=["bold"])
                    public_ind = termcolor.colored(
                        "PUBLIC!", "red", attrs=["bold"])
                    msg = "Bucket {}: {}".format(
                        bucket_line, public_ind)
                add_to_output(msg, report_path)
                add_to_output("Location: {}".format(location), report_path)
                if grants:
                    # Spell out every dangerous permission per grantee group.
                    for grant in grants:
                        permissions = grants[grant]
                        perm_to_print = [EXPLAINED[perm]
                                         for perm in permissions]
                        if report_path:
                            msg = "Permission: {} by {}".format(" & ".join(perm_to_print),
                                                                (GROUPS_TO_CHECK[grant]))
                        else:
                            msg = "Permission: {} by {}".format(
                                termcolor.colored(
                                    " & ".join(perm_to_print), "red"),
                                termcolor.colored(GROUPS_TO_CHECK[grant], "red"))
                        add_to_output(msg, report_path)
                # Second signal: which of the bucket's standard URLs actually
                # respond without "Access Denied".
                urls = scan_bucket_urls(bucket.name)
                add_to_output("URLs:", report_path)
                if urls:
                    add_to_output("\n".join(urls), report_path)
                else:
                    add_to_output("Nothing found", report_path)
            else:
                if report_path:
                    msg = "Bucket {}: {}".format(bucket.name, "Not public")
                else:
                    bucket_line = termcolor.colored(
                        bucket.name, "blue", attrs=["bold"])
                    public_ind = termcolor.colored(
                        "Not public", "green", attrs=["bold"])
                    msg = "Bucket {}: {}".format(
                        bucket_line, public_ind)
                add_to_output(msg, report_path)
                add_to_output("Location: {}".format(location), report_path)
            bucketcount += 1
        if not bucketcount:
            # Account has no buckets at all -- nothing can be public.
            add_to_output("No buckets found", report_path)
            if report_path:
                msg = "You are safe"
            else:
                msg = termcolor.colored("You are safe", "green")
            add_to_output(msg, report_path)
    except botocore.exceptions.ClientError as e:
        # Translate AWS client errors into actionable guidance.
        resolve_exception(e, report_path)
def lambda_handler(event, context):
    """
    Checks buckets permissions and sends report.
    Will be invoked only by AWS Lambda service.

    :param event: Not used.
    :param context: Not used.
    """
    import boto3
    import botocore
    # Publish the imports as module globals so helper functions
    # (get_s3_obj, analyze_buckets, scan_bucket_urls) can reference them.
    globals()['boto3'] = boto3
    globals()['botocore'] = botocore
    # NOTE(review): botocore.vendored.requests was removed in botocore
    # >= 1.13, so on current Lambda runtimes this line raises
    # AttributeError.  Bundling the real "requests" package with the
    # deployment is the usual fix -- confirm against the target runtime.
    globals()['requests'] = botocore.vendored.requests
    report_path = "/tmp/report.txt"
    # Drop any stale report left over from a previous warm invocation.
    tidy(report_path)
    s3, s3_client = get_s3_obj(is_lambda=True)
    analyze_buckets(s3, s3_client, report_path)
    send_report(report_path)
    tidy(report_path)
def send_report(path):
    """
    Sends report generated by script via sns service.

    :param path: Path to report file.
    """
    endpoint = boto3.resource("sns").PlatformEndpoint(SNS_RESOURCE_ARN)
    # NOTE(review): "today" is actually tomorrow's date (now + 1 day) --
    # preserved as-is from the original.
    today = datetime.now() + timedelta(days=1)
    with open(path, "r") as report_file:
        body = report_file.read()
    endpoint.publish(
        Subject="S3 Monitor Report: " + str(today),
        Message=body,
        MessageStructure="string"
    )
def resolve_exception(exception, report_path):
    """
    Handles exceptions that appears during bucket check run.

    Translates common AWS client-error messages into human-readable
    guidance, tailored to lambda (report file) vs. interactive runs.

    :param exception: Exception instance.
    :param report_path: Path to report path.
    """
    msg = str(exception)
    if report_path:
        # Lambda run: guidance references the Lambda execution role.
        if "AccessDenied" in msg:
            add_to_output("""Access Denied
I need permission to access S3
Check if the Lambda Execution Policy at least has AmazonS3ReadOnlyAccess, SNS Publish & Lambda Execution policies attached
To find the list of policies attached to your user, perform these steps:
1. Go to IAM (https://console.aws.amazon.com/iam/home)
2. Click "Roles" on the left hand side menu
3. Click the role lambda is running with
4. Here it is
""", report_path)
        else:
            add_to_output("""{}
Something has gone very wrong, please check the Cloudwatch Logs Stream for further details""".format(msg),
                          report_path)
    else:
        # Interactive run: guidance references the IAM user's credentials.
        if "InvalidAccessKeyId" in msg and "does not exist" in msg:
            add_to_output("The Access Key ID you provided does not exist", report_path)
            add_to_output("Please, make sure you give me the right credentials", report_path)
        elif "SignatureDoesNotMatch" in msg:
            add_to_output("The Secret Access Key you provided is incorrect", report_path)
            add_to_output("Please, make sure you give me the right credentials", report_path)
        elif "AccessDenied" in msg:
            add_to_output("""Access Denied
I need permission to access S3
Check if the IAM user at least has AmazonS3ReadOnlyAccess policy attached
To find the list of policies attached to your user, perform these steps:
1. Go to IAM (https://console.aws.amazon.com/iam/home)
2. Click "Users" on the left hand side menu
3. Click the user, whose credentials you give me
4. Here it is
""", report_path)
        else:
            add_to_output("""{}
Check your credentials in ~/.aws/credentials file
The user also has to have programmatic access enabled
If you didn't enable it(when you created the account), then:
1. Click the user
2. Go to "Security Credentials" tab
3. Click "Create Access key"
4. Use these credentials""".format(msg), report_path)
def main():
    """
    Entry point for interactive use: installs required packages, builds
    the s3 resource/client pair, and analyses all bucket permissions.
    """
    # BUG FIX: the original did "raw_input = input", which only created a
    # local variable inside main() and never reached get_s3_obj(), so the
    # script crashed with NameError on Python 3.  Patch the module-level
    # name instead.  (sys.version_info replaces the fragile string check
    # on sys.version[0].)
    if sys.version_info[0] >= 3:
        globals()["raw_input"] = input
    packages = ["boto3", "botocore", "termcolor", "requests"]
    for package in packages:
        install_and_import(package)
    s3, s3_client = get_s3_obj()
    analyze_buckets(s3, s3_client)
# Run the interactive scanner when executed as a script (not on import).
if __name__ == "__main__":
    main()