fetch-data.py
#!/usr/bin/env python3
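# Download images for a small, fixed set of classes. Each class has a
# plain-text list of image URLs under data/; every URL that returns a
# JPEG is saved under data/raw/<class>/, and the rest are recorded in a
# per-class file of invalid URLs.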
import os
import requests
import asyncio
from concurrent import futures

# Fetch the content of a URL and swallow any exception the request
# raises. If an exception is raised, the function returns None. This
# makes sure that waiting on all the requests in a list never raises;
# failed fetches simply show up as None in the response list.
def get_url_or_none(url):
    try:
        return requests.get(url)
    except Exception:
        return None

# Fetch the content of a set of URLs asynchronously. The
# return value is a list of Response objects and Nones, in
# the same order as the list of URLs passed as the argument.
async def fetch_images_from_urls(urls):
    # requests is blocking, so each GET is handed off to a thread pool;
    # the pool size caps the number of concurrent downloads at 16.
    with futures.ThreadPoolExecutor(max_workers=16) as executor:
        event_loop = asyncio.get_running_loop()
        responses = [event_loop.run_in_executor(executor, get_url_or_none, url)
                     for url in urls]
        return await asyncio.gather(*responses)

# Four hardcoded image classes, each with a URL list shipped in a file
classes = ['kitty', 'puppy', 'creepies', 'ungulate']
urllists = ['data/{}-urls.txt'.format(s) for s in classes]

async def fetch():
    for cls, urllist in zip(classes, urllists):
        # The downloaded images will be saved under data/raw
        os.makedirs('data/raw/{}'.format(cls), exist_ok=True)
        print(cls)
        with open(urllist, 'r') as f:
            urls = f.read().splitlines()
        invalid_urls = []
        responses = await fetch_images_from_urls(urls)
        counter = 0
        for url, response in zip(urls, responses):
            # Each image gets a unique name with a running number
            filename = 'data/raw/{0}/{0}{1:04}.jpg'.format(cls, counter)
            # An image is saved only if the server returns a
            # successful response with a JPEG content type. All
            # other URLs are added to the list of invalid URLs.
            if (response is not None
                    and response.status_code == 200
                    and response.headers.get('content-type') == 'image/jpeg'):
                with open(filename, 'wb') as f:
                    f.write(response.content)
                counter += 1
            else:
                invalid_urls.append(url)
        # All URLs that didn't yield an image are written to the
        # per-class invalid URLs file afterwards.
        if invalid_urls:
            with open('data/{}-invalid-urls.txt'.format(cls), 'w') as f:
                for url in invalid_urls:
                    f.write('{}\n'.format(url))
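
# Usage: run the script from the directory that contains data/ (typically
# the repository root), e.g. ./fetch-data.py. It expects a
# data/<class>-urls.txt file for each class listed above and creates the
# data/raw/<class>/ directories as needed.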
if __name__ == '__main__':
    asyncio.run(fetch())