#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# @linux-aarhus - root.nix.dk
# SPDX-FileCopyrightText: 2024 Frede Hundewadt <fh at manjaro dot org>
# SPDX-License-Identifier: MIT
import argparse
import os
import shutil
import subprocess
import sys
import requests.exceptions
import tempfile

try:
    import requests
except ImportError:
    print("Please install requests package")
    exit(1)

PROG_VERSION = "0.17.0"
PROG_NAME = os.path.basename(__file__)
ISO_RELEASE_URL = \
    "https://gitlab.manjaro.org/webpage/iso-info/-/raw/master/file-info.json"
LICENSE_URL = "https://opensource.org/license/mit"
REL_REVIEW_URL = \
    "https://api.github.com/repos/manjaro/release-review/releases/latest"
DEV_PREVIEW_URL = \
    "https://api.github.com/repos/manjaro-edition/download/releases/latest"

REVIEW_EDITIONS = ["gnome", "plasma", "xfce"]
HOME = os.path.expanduser("~")
SYSTMP = tempfile.mkdtemp()
USRTMP = f"{HOME}/.cache/get-iso"
RED = "\033[1;31m"
GREEN = "\033[1;32m"
YELLOW = "\033[1;33m"
BLUE = "\033[1;34m"
WHITE = "\033[1;37m"
RESET = "\033[1;m"


def msg(message: str, end: str = "\n") -> None:
    """Print a top-level status *message* with a green arrow prefix."""
    line = f"{GREEN}==> {RESET}{message}"
    print(line, end=end)


def msg2(message: str, end: str = "\n") -> None:
    """Print a second-level status *message* with a blue arrow prefix."""
    line = f"{BLUE} --> {RESET}{message}"
    print(line, end=end)


def info(message: str, end: str = "\n") -> None:
    """Print an informational *message* with a yellow arrow prefix."""
    line = f"{YELLOW}   -> {RESET}{message}"
    print(line, end=end)


def warn(message: str, end: str = "\n") -> None:
    """Print a warning *message* with a yellow attention prefix."""
    line = f"{YELLOW}<!!!> {RESET}{message}"
    print(line, end=end)


def error(message: str, end: str = "\n") -> None:
    """Print an error *message* with a red attention prefix."""
    line = f"{RED}<###> {RESET}{message}"
    print(line, end=end)


def get_definitions(url: str) -> dict:
    """
    Fetch and decode the JSON document at *url*.

    Issues a GET request with a 10 second timeout and returns the
    decoded JSON payload. Any failure — timeout, non-success HTTP
    status, connection problem or invalid JSON — prints an error and
    terminates the program.

    :param url: address of the JSON document to download
    :type url: string
    :return: the decoded JSON payload
    :rtype: dictionary
    :raises SystemExit: on any request or decoding failure
    """
    try:
        response = requests.get(url=url, timeout=10)
        response.raise_for_status()
        return response.json()
    except Exception as exc:
        error(f"Network Error: {exc}")
        msg2("Terminated!")
        sys.exit(1)


def get_download_size(url: str) -> int:
    """
    Return the size in bytes of the resource at *url*.

    Sends a HEAD request (following redirects) and reads the value of
    the "Content-Length" header. When the header is absent or the
    request fails, 0 is returned so callers can treat 0 as
    "size unknown or unavailable".

    :param url: address of the resource to measure
    :type url: string
    :return: resource size in bytes, or 0 on failure
    :rtype: integer
    """
    try:
        resp = requests.head(url=url, timeout=10, allow_redirects=True)
        resp.raise_for_status()
        return int(resp.headers.get("content-length", 0))
    except Exception as e:
        # The size is advisory: report the problem and fall back to 0
        # instead of aborting the whole run.
        error(f"Network Error: {e}")
        return 0

def init_iso_list(url: str, review: bool = False, preview: bool = False) -> list:
    """
    Build the ISO file list for the requested mode of operation.

    Dispatches to one of three initialisers: developer-preview data
    when *preview* is set, release-review data when *review* is set,
    and the regular release data otherwise. *preview* takes precedence
    over *review* when both flags are given.

    :param url: source URL for the chosen kind of ISO data
    :type url: string
    :param review: fetch the release-review ISO list
    :type review: bool
    :param preview: fetch the developer-preview ISO list
    :type preview: bool
    :return: list of per-edition ISO description dictionaries
    :rtype: list
    """
    if preview:
        return init_preview_iso_list(url)
    if review:
        return init_review_iso_list(url)
    return init_release_iso_list(url)


def init_release_iso_list(url: str) -> list:
    """
    Build the list of release ISO descriptions for all editions.

    Fetches the release definitions from *url* and extracts the
    metadata (image, signature, checksum, download size) for every
    official edition and for every community edition that is not
    flagged as a custom build.

    :param url: URL of the release definition JSON
    :type url: string
    :return: list of per-edition ISO description dictionaries, each
        holding "full" and "minimal" variants
    :rtype: list
    """
    result_releases = []
    data = get_definitions(url)
    official_editions: dict = data.get("official")
    community_editions: dict = data.get("community")

    # parse official editions
    for o_name, o_data in official_editions.items():
        result_releases.append(extract_iso_info(o_name, [o_data], review=False))

    # parse community editions
    for c_name, c_data in community_editions.items():
        # skip editions flagged as custom builds; a missing "custom"
        # key means the edition is a regular one (dict.get replaces
        # the previous try/except-KeyError dance)
        if c_data.get("custom"):
            continue
        result_releases.append(extract_iso_info(c_name, [c_data], review=False))

    return result_releases


def init_review_iso_list(url: str) -> list:
    """
    Build the review ISO description list from a GitHub release.

    Fetches the latest release metadata from *url*, collects the
    browser download URL and size of every attached asset, and groups
    the assets per review edition. Unlike ``init_preview_iso_list``,
    one single release URL covers all review editions.

    :param url: GitHub API URL of the latest release-review release
    :type url: string
    :return: list of per-edition ISO description dictionaries with
        split-archive parts and SHA256 checksum URLs
    :rtype: list
    """
    data = get_definitions(url)
    # Reduce each release asset to the two fields we need:
    # the browser_download_url and the asset size.
    available_assets = [
        {"url": asset["browser_download_url"], "size": asset["size"]}
        for asset in data.get("assets")
    ]
    # Group the collected assets for every review edition.
    return [
        extract_iso_info(edition, available_assets, review=True)
        for edition in REVIEW_EDITIONS
    ]


def init_preview_iso_list(url: str) -> list:
    """
    Build the developer-preview ISO description list.

    For each review edition, substitutes the edition name into the
    generic *url* (which contains the literal placeholder "edition"),
    fetches that edition's latest release metadata, collects the
    download URL and size of every asset, and extracts the grouped ISO
    information. Unlike ``init_review_iso_list``, every edition has
    its own release URL.

    :param url: generic GitHub API URL containing an "edition"
        placeholder to be replaced with each edition key
    :return: list of per-edition ISO description dictionaries with
        split-archive parts and SHA256 checksum URLs
    """
    result_preview_iso = []

    for edition in REVIEW_EDITIONS:
        # substitute the edition name into the generic url
        edition_url = url.replace("edition", edition)
        data = get_definitions(edition_url)

        # reduce each asset to its download url and size
        assets = [
            {"url": item["browser_download_url"], "size": item["size"]}
            for item in data.get("assets")
        ]

        result_preview_iso.append(extract_iso_info(edition, assets, review=True))

    return result_preview_iso


def extract_iso_info(edition: str, iso_data: list, review: bool) -> dict:
    """
    Extract the ISO description for one edition.

    Selects the parser appropriate for the data source: GitHub release
    assets when *review* is set, Manjaro release definitions otherwise.

    :param edition: edition name to process
    :type edition: string
    :param iso_data: raw ISO data to parse
    :type iso_data: list
    :param review: parse GitHub review/preview data instead of
        Manjaro release data
    :type review: bool
    :return: the processed ISO description
    :rtype: dictionary
    """
    handler = extract_iso_info_github if review else extract_iso_info_manjaro
    return handler(edition, iso_data)


def extract_iso_info_manjaro(edition: str, iso_data: list) -> dict:
    """
    Build the ISO description for one edition from Manjaro release data.

    Reads the first entry of *iso_data* and assembles the image,
    signature and checksum URLs for both the full and the minimal
    variant, querying the remote server for each image's download size.

    :param edition: edition name, stored as the "name" key of the result
    :param iso_data: single-element list holding the edition's raw data
        (with "image", "signature", "checksum" and a nested "minimal"
        dictionary with the same keys)
    :return: dictionary with "name", "full" and "minimal" keys; each
        variant dictionary holds "image", "signature", "checksum" and
        "size" entries
    :raises SystemExit: when an expected key or entry is missing
    """
    data = iso_data[0]
    try:
        return {
            "name": edition,
            "full": {
                "image": data["image"],
                "signature": data["signature"],
                "checksum": data["checksum"],
                "size": get_download_size(data["image"]),
            },
            "minimal": {
                "image": data["minimal"]["image"],
                "signature": data["minimal"]["signature"],
                "checksum": data["minimal"]["checksum"],
                "size": get_download_size(data["minimal"]["image"]),
            }
        }
    except (IndexError, KeyError) as e:
        # This function parses the release (not review) data; the old
        # message wrongly said "review iso list".
        error(f"Error parsing release iso list: {e}")
        sys.exit(1)


def extract_iso_info_github(edition: str, iso_data: list) -> dict:
    """
    Build the ISO description for one edition from GitHub release assets.

    Filters *iso_data* down to the assets belonging to *edition* (the
    "plasma" edition is matched by "kde" in the URL), splits them into
    full and minimal variants, and collects the split-archive part
    URLs, the SHA-256 checksum URL and the summed download size for
    each variant. GitHub releases carry no signature files, so the
    "signature" entries are empty strings.

    :param edition: edition to extract (e.g. "plasma", "xfce")
    :type edition: string
    :param iso_data: dictionaries with "url" and "size" keys, one per asset
    :type iso_data: list
    :return: dictionary with "name", "full" and "minimal" keys; each
        variant dictionary holds "parts", "checksum", "size" and
        "signature" entries
    :rtype: dictionary
    :raises SystemExit: when an expected asset is missing
    """
    minimal = "minimal"
    sha256sum = ".iso.sha256"
    part = ".iso.z"
    # "plasma" assets are published under the historical "kde" name
    if edition == "plasma":
        edition_data = [x for x in iso_data if "kde" in x["url"]]
    else:
        edition_data = [x for x in iso_data if edition in x["url"]]

    full_iso = [x for x in edition_data if minimal not in x["url"]]
    minimal_iso = [x for x in edition_data if minimal in x["url"]]

    f_parts = [x for x in full_iso if part in x["url"]]
    f_256sum = [x for x in full_iso if sha256sum in x["url"]]

    m_parts = [x for x in minimal_iso if part in x["url"]]
    m_256sum = [x for x in minimal_iso if sha256sum in x["url"]]

    try:
        return {
            "name": edition,
            "full": {
                "parts": f_parts,
                "checksum": f_256sum[0]["url"],
                # distinct loop variable: the original shadowed the
                # f_part list with the generator variable of same name
                "size": sum(item["size"] for item in f_parts),
                "signature": "",
            },
            "minimal": {
                "parts": m_parts,
                "checksum": m_256sum[0]["url"],
                "size": sum(item["size"] for item in m_parts),
                "signature": ""
            }
        }
    except IndexError:
        error("Error parsing preview iso list.")
        sys.exit(1)


def download(url: str, out_dir: str, binary=True) -> bool:
    """
    Announce and download the file at *url* into *out_dir*.

    Prints the name of the file being fetched, then delegates the
    actual transfer to ``download_file``.

    :param url: address of the file to download
    :type url: string
    :param out_dir: directory where the file is saved
    :type out_dir: string
    :param binary: download in binary (chunked) mode; default True
    :type binary: bool
    :return: True when the download succeeded, False otherwise
    :rtype: bool
    """
    filename = url.split("/")[-1]
    msg2(f"Download: {filename}")
    return download_file(url, f"{out_dir}", binary=binary)


def download_file(url: str, folder_name: str, binary=True) -> bool:
    """
    Download *url* into *folder_name*, optionally in binary mode.

    Binary downloads are streamed in chunks with a simple progress
    indicator; text downloads are fetched in one request and written
    as UTF-8. The target filename is the last path component of the
    URL.

    :param url: address of the file to download
    :type url: string
    :param folder_name: directory where the downloaded file is saved
    :type folder_name: string
    :param binary: stream in binary mode when True (default);
        otherwise fetch and write as text
    :type binary: bool
    :return: True when the download succeeded, False otherwise
    :rtype: bool
    """
    filename: str = url.split("/")[-1]
    # join the two path components properly (the old code pre-formatted
    # the path and passed a single string to os.path.join)
    path = os.path.join(folder_name, filename)
    try:
        if binary:
            response = requests.get(url, stream=True, timeout=10)
            response.raise_for_status()
            total_size_in_bytes = int(response.headers.get("content-length", 0))
            block_size = 1024 * 1024
            # shrink the chunk for tiny files, but never to 0: a zero
            # or missing content-length must not yield a 0-byte chunk
            if 0 < total_size_in_bytes < block_size:
                block_size = total_size_in_bytes

            with open(path, "wb") as f:
                progress = 0
                for data in response.iter_content(block_size):
                    f.write(data)
                    # len(data) is correct for full and partial chunks
                    progress += len(data)
                    info(f"Downloading {round(progress / 1024 / 1024)}MiB of "
                         f"{round(total_size_in_bytes / 1024 / 1024)}MiB", end="\r")
        else:
            response = requests.get(url, timeout=10)
            response.raise_for_status()
            with open(path, "w", encoding="utf-8") as f:
                f.write(response.text)
    except Exception as e:
        error(f"Network Error: {e}")
        return False
    return True


def check_directory_writable(path: str) -> bool:
    """
    Report whether *path* is an existing, writable directory.

    Write access is probed by creating and immediately removing a
    ".keep" file inside the directory, rather than trusting permission
    bits alone.

    :param path: directory to probe for writability
    :type path: string
    :return: True when the directory exists and the probe file could
        be created, False otherwise
    :rtype: bool
    """
    if not os.path.isdir(path):
        return False
    probe = os.path.join(path, ".keep")
    try:
        # create, then immediately remove, the probe file
        with open(probe, "w") as f:
            f.write("")
        os.remove(probe)
    except IOError:
        return False
    return True


def check_7zip():
    """
    Abort unless the 7-Zip executable is available.

    Looks up the "7z" binary on the PATH; when it is absent, a warning
    asking the user to install it is printed and the program exits.

    :raises SystemExit: exits with status 1 when "7z" is not on PATH
    """
    if shutil.which("7z") is not None:
        return
    warn("7z is missing. Please install using your package manager.")
    exit(1)


def clean_work_dir(path: str):
    """
    Remove a temporary working directory located under /tmp.

    Directories outside /tmp are treated as user supplied and left
    untouched (a warning is printed instead), so a misconfigured path
    can never delete user data.

    :param path: temporary directory to remove; paths not starting
        with /tmp are skipped with a warning
    :type path: string
    :return: None
    """
    if not path.startswith("/tmp"):
        warn("User defined temp folder. Leaving temp files in place.")
    else:
        # stdlib shutil.rmtree instead of shelling out to "rm -r";
        # ignore_errors matches the old non-raising behaviour
        shutil.rmtree(path, ignore_errors=True)


def check_work_dir(path: str, min_req: int) -> str:
    """
    Validate *path* as the working directory, falling back if unusable.

    A path under /tmp only needs sufficient free space; any other path
    must also be writable. When *path* fails these checks the function
    tries to create it for the user, and then falls back to the cache
    folder in the user's home directory (created on demand).

    :param path: candidate working directory
    :type path: string
    :param min_req: minimum free space required, in bytes
    :type min_req: integer
    :return: a usable working directory path
    :rtype: string
    """
    # the system tmp folder is assumed writable; only space matters
    if path.startswith("/tmp") and check_space_required(path, min_req):
        return path
    # any other location must be writable and have enough room
    if check_directory_writable(path) and check_space_required(path, min_req):
        return path
    # rarely reached: try to create the requested folder for the user
    if not os.path.isdir(path):
        try:
            os.makedirs(path, exist_ok=True)
        except PermissionError:
            warn(f"Working dir '{path}' is not writable.")
    # fall back to the cache folder in the user's home
    os.makedirs(USRTMP, exist_ok=True)
    msg2(f"Fall back to '{USRTMP}'")
    return f"{USRTMP}"


def check_space_required(path: str, size: int) -> bool:
    """
    Tell whether the filesystem holding *path* has more than *size*
    bytes of free space.

    :param path: filesystem location to inspect
    :type path: string
    :param size: minimum required free space in bytes
    :type size: integer
    :return: True when the free space exceeds *size*, False otherwise
    :rtype: bool
    """
    free_bytes = shutil.disk_usage(path).free
    return free_bytes > size


def main():
    """
    Download and verify a Manjaro ISO edition.

    Parses the command line, selects the data source (regular release,
    release review or developer preview), checks that the storage and
    working directories are usable and have enough free space,
    downloads the ISO (or its split-archive parts) together with
    checksum and signature files, verifies them, and moves the results
    into the storage directory.

    The flow is:
    - build the edition choice list from the release definitions
    - parse and validate command-line arguments
    - re-initialize the ISO list for review/preview modes
    - download, verify (7z test + sha256 for review/preview; gpg
      signature + sha256/sha512 for releases) and move the files
    - clean up the working directory

    :raises SystemExit: on invalid arguments, unusable directories,
        download failures, or failed verification; exits with 0 on
        success.
    """
    # initialize a list of release ISO editions (also used for the
    # argparse choices)
    iso_files = init_iso_list(ISO_RELEASE_URL, review=False)
    choices = []
    for c in iso_files:
        choices.append(c["name"])
    parser = argparse.ArgumentParser(
        prog=f"{PROG_NAME}",
        description="This tool will download the latest ISO for a named Manjaro Edition (default: minimal)",
        epilog=f"{PROG_NAME} v. {PROG_VERSION} - MIT License <{LICENSE_URL}>")
    parser.add_argument("edition",
                        type=str,
                        help=f"Edition name (only {', '.join(REVIEW_EDITIONS)} is valid for (p)review)",
                        choices=choices)
    parser.add_argument("-f", "--full",
                        required=False,
                        action="store_true",
                        help="Download full ISO")
    parser.add_argument("-o", "--out-dir",
                        type=str,
                        default=os.getcwd(),
                        help="Folder to store downloaded ISO files (default: current directory)")
    parser.add_argument("-t", "--temp-dir",
                        type=str,
                        default=SYSTMP,
                        help=f"Location to store temporary files (random tmp folder: {SYSTMP})")
    previews = parser.add_argument_group("(P)review")
    preview = previews.add_mutually_exclusive_group()
    preview.add_argument("-p", "--preview",
                         required=False,
                         action="store_true",
                         help="[Github] Developer Preview")
    preview.add_argument("-r", "--review",
                         required=False,
                         action="store_true",
                         help="[Github] Release Review ISO")

    args = parser.parse_args()

    msg(f"Storage dir: {args.out_dir}")
    msg(f"Working dir: {args.temp_dir}")

    storage_dir = args.out_dir
    working_dir = args.temp_dir

    # the working dir is deleted after the run; it must never be the
    # same folder the results are stored in
    if storage_dir == working_dir:
        error("Storage dir must be different from working dir.")
        exit(1)

    if not check_directory_writable(storage_dir):
        warn(f"Storage dir '{storage_dir}' is not writable.")
        msg2(f"Changing to '{HOME}'")
        storage_dir = HOME

    # review/preview modes replace the release ISO list
    if args.review:
        if args.edition in REVIEW_EDITIONS:
            iso_files = init_iso_list(REL_REVIEW_URL, review=True)
        else:
            warn(f"Invalid review edition. Valid editions: {', '.join(REVIEW_EDITIONS)}")
            sys.exit(1)

    if args.preview:
        if args.edition in REVIEW_EDITIONS:
            iso_files = init_iso_list(DEV_PREVIEW_URL, preview=True)
        else:
            warn(f"Invalid preview edition. Valid editions: {', '.join(REVIEW_EDITIONS)}")
            sys.exit(1)

    if len(iso_files) == 0:
        error("Could not get iso file list.")
        sys.exit(1)

    iso_data = {}
    try:
        result = [x for x in iso_files if args.edition == x["name"]]
        if args.full:
            iso_data = result[0]["full"]
        else:
            iso_data = result[0]["minimal"]
        # a size of 0 means the remote file could not be reached
        if iso_data["size"] == 0:
            variant = "full" if args.full else "minimal"
            error(f"The {variant} {args.edition} ISO could not be found.")
            sys.exit(1)
    except (IndexError, TypeError):
        error("Could not extract edition from data")
        sys.exit(1)

    info(f"Processing {args.edition} ISO")
    if args.review or args.preview:
        check_7zip()  # the check will abort execution with an error message if 7z is not found
        zip_ok = False

        checksum_ok = download(iso_data["checksum"], out_dir=working_dir, binary=False)
        isozip = [x for x in iso_data["parts"] if ".iso.zip" in x["url"]]
        zipfile = isozip[0]["url"].split("/")[-1]
        isofile = zipfile[:-4]  # drop the trailing ".zip"
        # temp space: the downloaded archive plus the extracted ISO
        temp_space = round(iso_data['size'] / 1024 / 1024 * 2) + 1
        msg2(f"Required space on tmpfs: {temp_space} MiB")
        # storage space: the extracted ISO alone
        # (the old divisor "1040" was a typo for 1024)
        storage_space = round(iso_data['size'] / 1024 / 1024) + 1
        msg2(f"Required space on storage: {storage_space} MiB")
        # verify temp dir has the required space
        working_dir = check_work_dir(working_dir, temp_space)
        # verify storage dir has the required space
        if not check_space_required(storage_dir, storage_space):
            error("Not enough space on storage device.")
            exit(1)

        for part in iso_data["parts"]:
            zip_ok = download(part["url"], out_dir=working_dir)
            if not zip_ok:
                break

        if zip_ok and checksum_ok:
            shafile = iso_data["checksum"].split("/")[-1]

            # test archive
            info("Testing archive integrity...")
            result = subprocess.run(["7z", "-bso0", "-y", "t", f"{zipfile}"], cwd=f"{working_dir}")
            if result.returncode != 0:
                error("Archive integrity check failed... exiting.")
                clean_work_dir(working_dir)
                sys.exit(1)

            # extract archive
            info(f"Unpacking ISO to {working_dir}...")
            result = subprocess.run(["7z", "-bso0", "-y", "x", f"-o{working_dir}", f"{zipfile}"], cwd=f"{working_dir}")
            if result.returncode != 0:
                error("Archive unpacking failed... cleanup and exit!")
                clean_work_dir(working_dir)
                sys.exit(1)

            # move the checksum file to storage_dir
            info(f"Moving '{shafile}' to '{storage_dir}' ...", end="\r")
            subprocess.run(["mv", f"{shafile}", f"{storage_dir}"], cwd=f"{working_dir}")
            msg2(f"Moved '{shafile}' to '{storage_dir}'      ", end="\n")

            # move iso file to storage_dir
            info(f"Moving '{isofile}' to '{storage_dir}' ...", end="\r")
            subprocess.run(["mv", f"{isofile}", f"{storage_dir}"], cwd=f"{working_dir}")
            msg2(f"Moved '{isofile}' to '{storage_dir}'      ", end="\n")

            # verify iso checksum on storage_dir; reuse the shafile
            # variable — the old nested same-quote f-string was a
            # SyntaxError on Python < 3.12
            info("Wait for checksum to complete...")
            result = subprocess.run(["sha256sum", "-c", f"{shafile}"],
                                    cwd=f"{storage_dir}", capture_output=True)

            if result.returncode != 0:
                error("Checksum verification failed... cleanup and exit!")
                clean_work_dir(storage_dir)
                sys.exit(1)
            msg2(f"Checksum verified. {result.stdout.decode('utf-8')}", end="\r")

            info("Cleaning up...")
            clean_work_dir(working_dir)
        else:
            # error out and remove temp dir
            error("Download failed... cleanup up and exit!")
            clean_work_dir(working_dir)
            sys.exit(1)

    else:
        # storage space requirement
        storage_space = round(iso_data['size'] / 1024 / 1024) + 1
        msg2(f"Required space on tmpfs: {storage_space} MiB")
        msg2(f"Required space on storage: {storage_space} MiB")
        # check temp dir for adequate storage
        working_dir = check_work_dir(working_dir, storage_space)
        # check storage dir is matching requirements
        if not check_space_required(storage_dir, storage_space):
            error("Not enough space on storage device.")
            exit(1)

        # the download is identical for both checksum flavours; only
        # the verification tool below differs (sha256sum vs sha512sum)
        sha256 = not iso_data["checksum"].endswith(".sha512")
        checksum_ok = download(iso_data["checksum"], out_dir=working_dir, binary=False)

        image_ok = download(iso_data["image"], out_dir=working_dir)
        signature_ok = download(iso_data["signature"], out_dir=working_dir)
        isofile = iso_data["image"].split("/")[-1]

        if image_ok and checksum_ok and signature_ok:
            shafile = iso_data["checksum"].split("/")[-1]

            # verify signature
            info("Wait for signature verification to complete...")
            sigfile = iso_data["signature"].split("/")[-1]
            result = subprocess.run(["gpg", "--verify", f'{sigfile}'], cwd=f"{working_dir}", capture_output=True)
            if result.returncode != 0:
                error("Signature verification failed... cleanup and exit!")
                clean_work_dir(working_dir)
                sys.exit(1)
            msg2(f"Signature verified. {result.stdout.decode('utf-8')}", end="\n")

            # move the checksum file to storage_dir
            info(f"Moving '{shafile}' to '{storage_dir}' ...", end="\r")
            subprocess.run(["mv", f"{shafile}", f"{storage_dir}"], cwd=f"{working_dir}")
            msg2(f"Moved '{shafile}' to '{storage_dir}'      ", end="\n")

            # move the signature file to storage_dir
            info(f"Moving '{sigfile}' to '{storage_dir}' ...", end="\r")
            subprocess.run(["mv", f"{sigfile}", f"{storage_dir}"], cwd=f"{working_dir}")
            msg2(f"Moved '{sigfile}' to '{storage_dir}'      ", end="\n")

            # move iso file to storage_dir
            info(f"Moving '{isofile}' to '{storage_dir}' ...", end="\r")
            subprocess.run(["mv", f"{isofile}", f"{storage_dir}"], cwd=f"{working_dir}")
            msg2(f"Moved '{isofile}' to '{storage_dir}'      ", end="\n")

            # verify iso checksum on storage_dir
            info("Wait for checksum to complete...")
            checker = "sha256sum" if sha256 else "sha512sum"
            result = subprocess.run([checker, "-c", f"{shafile}"],
                                    cwd=f"{storage_dir}", capture_output=True)

            if result.returncode != 0:
                error("Checksum verification failed... cleanup and exit!")
                clean_work_dir(storage_dir)
                sys.exit(1)
            msg2(f"Checksum verified. {result.stdout.decode('utf-8')}", end="\r")

            # remove temp dir
            info("Cleaning up...")
            clean_work_dir(working_dir)
        else:
            # error out and remove temp dir
            error("Download failed... cleanup up and exit!")
            clean_work_dir(working_dir)
            sys.exit(1)

    msg(f"ISO file: {isofile}")
    msg(f"Storage : {storage_dir}")
    sys.exit(0)


if __name__ == '__main__':
    # Entry point: run main() and turn a Ctrl-C interruption into a
    # clean message plus a non-zero exit code instead of a traceback.
    try:
        main()
    except KeyboardInterrupt:
        print("\n" + "Exit: interrupted by the user.")
        sys.exit(1)
