import os
import sys
from argparse import ArgumentParser

from orionclient.session import APISession
from orionclient.types import Dataset, WorkFloeJob, WorkFloeSpec
from floe.constants import PROMOTED, SINK_CUBE, SOURCE_CUBE, PARAMETER_TYPE
from orionplatform.constants import (  # DATASET_IN only exists in orion-platform>=1.0.1
    DATASET_IN,
    DATASET_OUT,
)


def run_workfloe(workfloe, output_name, input_param, output_params, datasets):
    """Upload local dataset files to Orion and start a single workfloe job.

    :param workfloe: WorkFloeSpec resource to run
    :param str output_name: base name used when naming the promoted output dataset(s)
    :param dict input_param: promoted dataset-input parameter from the spec
    :param list output_params: list of (cube, param) promoted dataset-output parameters
    :param list datasets: local file paths to upload as input datasets
    :return: the started WorkFloeJob
    """
    # Upload every local file as an Orion dataset, named after its basename.
    uploaded = [
        Dataset.upload(APISession, os.path.basename(path), path) for path in datasets
    ]
    # Wire the uploaded dataset ids into the promoted input parameter.
    promoted = {
        input_param["promoted_name"]: {
            "datasets": [{"id": ds.id} for ds in uploaded]
        }
    }
    # Each promoted output gets a name derived from output_name and its cube.
    for cube, param in output_params:
        promoted[param["promoted_name"]] = "{} - {}".format(
            output_name, cube["name"]
        )
    return WorkFloeJob.start(
        APISession, workfloe, "{}".format(workfloe.name), {"promoted": promoted}
    )


def parse_workfloe_specification(specification):
    """Parses out the input dataset parameter and the output dataset parameters.

    Scans the source and sink cubes of a workfloe specification for promoted
    dataset parameters. Exactly one promoted dataset input and at least one
    promoted dataset output are required.

    :param dict specification: Workfloe Specification
    :return: (dict, [(cube, dict)]) — the promoted dataset input parameter and
        a list of (cube, parameter) pairs for the promoted dataset outputs
    :raises RuntimeError: if zero or multiple promoted dataset inputs are
        found, or if no promoted dataset output is found
    """
    # Only source/sink cubes can carry dataset parameters; filter before
    # iterating their parameters so other cubes are skipped entirely.
    params = [
        (cube, parm)
        for cube in specification["cubes"]
        if cube["type"] in (SOURCE_CUBE, SINK_CUBE)
        for parm in cube["parameters"]
    ]
    dataset_input = None
    dataset_output = []
    for cube, param in params:
        if param[PARAMETER_TYPE] == DATASET_IN and param[PROMOTED]:
            if dataset_input is not None:
                raise RuntimeError(
                    "Found more than one dataset input parameter, unable to proceed."
                )
            dataset_input = param
        elif param[PARAMETER_TYPE] == DATASET_OUT and param[PROMOTED]:
            dataset_output.append((cube, param))
    if dataset_input is None:
        raise RuntimeError(
            "Unable to find a promoted dataset input parameter, exiting.."
        )
    # Idiomatic emptiness check (was: `if not len(dataset_output)`).
    if not dataset_output:
        raise RuntimeError(
            "Unable to find a promoted dataset output parameter, exiting.."
        )
    return dataset_input, dataset_output


def main():
    """Command-line entry point: upload local molecule files and start job(s).

    Parses arguments, fetches the workfloe specification, and either starts
    one job per input file (--split) or a single job consuming all files.
    Exits with status 1 if the specification lacks the required promoted
    dataset input/output parameters.
    """
    parser = ArgumentParser(description="Start a floe from local datasets")
    parser.add_argument("workfloe", type=int, help="ID of the workfloe to run")
    parser.add_argument("output_name", type=str, help="Name of the dataset to output")
    parser.add_argument(
        "molecule_files",
        nargs="+",
        help="List of molecular files to use as input to the floe",
    )
    parser.add_argument(
        "--split",
        action="store_true",
        help="Indicates to run one floe per molecular file, rather than one floe with multiple molecular files as input",
    )
    args = parser.parse_args()
    workfloe = APISession.get_resource(WorkFloeSpec, args.workfloe)
    try:
        # output_params is a list of (cube, param) pairs.
        input_param, output_params = parse_workfloe_specification(
            workfloe.specification
        )
    except RuntimeError as e:
        print(str(e))
        sys.exit(1)
    # BUG FIX: the positional argument is stored as args.molecule_files, not
    # args.datasets — the old attribute access raised AttributeError.
    if args.split:
        # One job per molecular file.
        for molecule_file in args.molecule_files:
            job = run_workfloe(
                workfloe, args.output_name, input_param, output_params, [molecule_file]
            )
            print("Started job {}".format(job.id))
    else:
        # A single job with all molecular files as input.
        job = run_workfloe(
            workfloe, args.output_name, input_param, output_params, args.molecule_files
        )
        print("Started job {}".format(job.id))


# Run the CLI only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()
