|
import json |
|
import os |
|
import time |
|
import urllib.request as ureq |
|
|
|
def download_image_with_retries(url, output_file, max_retries=3, backoff_factor=1):
    """Attempt to download an image, retrying with exponential backoff on failure.

    Args:
        url (str): The URL of the image to download.
        output_file (str): The local file path to save the downloaded image.
        max_retries (int): The maximum number of download attempts.
        backoff_factor (float): Base delay in seconds for the exponential
            backoff between retries (delay = backoff_factor * 2 ** attempt).

    Returns:
        bool: True if the download succeeded, False if all attempts failed.
    """
    for attempt in range(max_retries):
        try:
            # NOTE: urlretrieve is a legacy interface kept for the file's
            # existing style; it raises on any transport error.
            ureq.urlretrieve(url, output_file)
            print(f"Successfully downloaded: {output_file}")
            return True
        except Exception as e:
            print(f"Attempt {attempt + 1} failed for {url}: {e}")
            # Bug fix: only back off when another attempt will follow.
            # The original slept after the *final* failure too, stalling
            # the False return for backoff_factor * 2**(max_retries-1) s.
            if attempt + 1 < max_retries:
                time.sleep(backoff_factor * (2 ** attempt))
    print(f"Failed to download {url} after {max_retries} attempts.")
    return False
|
|
|
def verify_and_download_images(data):
    """Verify if images are downloaded; if not, download them.

    Missing or failed images are reported with a warning and skipped, so a
    single bad record cannot abort the whole run.

    Args:
        data (dict): Mapping of image keys to records; each record is
            expected to contain an 'imageURL' entry with the image's URL.
    """
    images_directory = './images'
    os.makedirs(images_directory, exist_ok=True)

    for key, value in data.items():
        image_url = value.get('imageURL')
        if not image_url:
            # Robustness fix: a record without 'imageURL' used to raise
            # KeyError and abort the loop; warn and continue instead,
            # matching the best-effort handling of download failures below.
            print(f"Warning: no 'imageURL' for entry {key}; skipping.")
            continue

        # Keep the URL's extension so the saved file type matches the source.
        ext = os.path.splitext(image_url)[1]
        # os.path.join instead of manual '/' concatenation for portability.
        output_file = os.path.join(images_directory, f'{key}{ext}')

        if not os.path.exists(output_file):
            print(f"Image {key}{ext} not found, attempting to download...")
            if not download_image_with_retries(image_url, output_file):
                print(f"Warning: Could not download image {image_url}")
|
|
|
|
|
|
|
# Script entry point: load the dataset and ensure every referenced image
# exists locally. Guarded so that importing this module does not trigger
# filesystem writes and network downloads as an import side effect.
if __name__ == "__main__":
    # Explicit encoding: JSON files are UTF-8 by spec; don't rely on the
    # platform default encoding.
    with open('dataset.json', 'r', encoding='utf-8') as fp:
        data = json.load(fp)

    verify_and_download_images(data)
|
|