hpcflow.sdk.submission package#

Subpackages#

Submodules#

hpcflow.sdk.submission.jobscript module#

class hpcflow.sdk.submission.jobscript.Jobscript(task_insert_IDs, task_actions, task_elements, EARs, EAR_idx, resources, task_loop_idx, dependencies, submit_time=None, scheduler_job_ID=None, version_info=None)#

Bases: JSONLike

Parameters:
  • task_insert_IDs (List[int]) –

  • task_actions (List[Tuple]) –

  • task_elements (Dict[int, List[int]]) –

  • EARs (Dict[Tuple[int], Tuple[int]]) –

  • EAR_idx (NDArray) –

  • resources (ElementResources) –

  • task_loop_idx (List[Dict]) –

  • dependencies (Dict[int, Dict]) –

  • submit_time (Optional[datetime]) –

  • scheduler_job_ID (Optional[str]) –

  • version_info (Optional[Tuple[str]]) –

property EAR_idx#
property EARs#
compose_jobscript()#

Prepare the jobscript file string.

Return type:

str

property dependencies#
property element_run_dir_file_name#
property element_run_dir_file_path#
classmethod from_json_like(json_like, shared_data=None)#
Parameters:
  • json_like

  • shared_data (dict of (str: ObjectList)) –

get_EAR_ID_array()#
get_EAR_run_idx_array()#
get_commands_file_name(js_action_idx)#
get_task_element_idx_array()#
get_task_insert_IDs_array()#
get_task_loop_idx_array()#
property index#
property is_array#
property jobscript_name#
property jobscript_path#
make_artifact_dirs(task_artifacts_path)#
property need_EAR_file_name#
property need_EAR_file_path#
property num_actions#
property num_elements#
property os_name#
property resources#
property scheduler#
property scheduler_job_ID#
property scheduler_name: str | None#
property scheduler_version_info#
property shell#
property shell_name: str#
property submission#
submit(task_artifacts_path, scheduler_refs, print_stdout=False)#
Parameters:
  • task_artifacts_path (Path) –

  • scheduler_refs (Dict[int, str]) –

  • print_stdout (bool | None) –

Return type:

str

property submit_time#
property task_actions#
property task_elements#
property task_insert_IDs#
property task_loop_idx#
to_dict()#
property workflow#
property workflow_app_alias#
write_element_run_dir_file(run_dirs)#

Write a text file with num_elements lines and num_actions delimited tokens per line, representing the working directory for each EAR.

We assume a given task element’s actions all run in the same directory, but in general a jobscript “element” may cross task boundaries, so we need to provide the directory for each jobscript-element/jobscript-action combination.

Parameters:

run_dirs (List[List[Path]]) –

write_jobscript()#
write_need_EARs_file()#

Write a text file with num_elements lines and num_actions delimited tokens per line, representing whether a given EAR must be executed.

hpcflow.sdk.submission.jobscript.generate_EAR_resource_map(task, loop_idx)#

Generate an integer array whose rows represent actions and columns represent task elements and whose values index unique resources.

Parameters:
  • task –

  • loop_idx –

Return type:

Tuple[List[ElementResources], List[int], NDArray, NDArray]

hpcflow.sdk.submission.jobscript.group_resource_map_into_jobscripts(resource_map, none_val=-1)#
Parameters:
  • resource_map (Union[List, NDArray]) –

  • none_val (Any) –

hpcflow.sdk.submission.jobscript.jobscripts_to_list(jobscripts)#

Convert the jobscripts dict to a list, normalising jobscript indices so they refer to list indices; also remove resource_hash.

Parameters:

jobscripts (Dict[int, Dict]) –

Return type:

List[Dict]

hpcflow.sdk.submission.jobscript.merge_jobscripts_across_tasks(jobscripts)#

Try to merge jobscripts between tasks.

This is possible if two jobscripts share the same resources and have an array dependency (i.e. one-to-one element dependency mapping).

Parameters:

jobscripts (Dict) –

Return type:

Dict

hpcflow.sdk.submission.jobscript.resolve_jobscript_dependencies(jobscripts, element_deps)#

hpcflow.sdk.submission.submission module#

class hpcflow.sdk.submission.submission.Submission(index, jobscripts, workflow, submission_attempts=None, JS_parallelism=None)#

Bases: JSONLike

Parameters:
  • index (int) –

  • jobscripts (List[Jobscript]) –

  • workflow (Workflow) –

  • submission_attempts (Optional[List]) –

  • JS_parallelism (Optional[bool]) –

property JS_parallelism#
get_EAR_run_dirs()#
Return type:

Dict[Tuple[int, int, int], Path]

get_unique_schedulers()#

Get unique schedulers and the jobscripts they correspond to.

Return type:

Dict[Tuple[int], Scheduler]

get_unique_shells()#

Get unique shells and which jobscripts they correspond to.

Return type:

Dict[Tuple[int], Shell]

property index: int#
property jobscript_indices: Tuple[int]#

All associated jobscript indices.

property jobscripts: List#
property needs_submit#
property outstanding_jobscripts: Tuple[int]#

Jobscript indices that have not yet been successfully submitted.

property path#
prepare_EAR_submission_idx_update()#

For all EARs in this submission (across all jobscripts), return a tuple of indices that can be passed to Workflow.set_EAR_submission_index.

Return type:

List[Tuple[int, int, int, int]]

property status#
property submission_attempts: List#
submit(task_artifacts_path, ignore_errors=False, print_stdout=False)#

Generate and submit the jobscripts of this submission.

Return type:

List[int]

property submitted_jobscripts: Tuple[int]#

Jobscript indices that have been successfully submitted.

to_dict()#
property workflow: List#
class hpcflow.sdk.submission.submission.SubmissionStatus(value)#

Bases: Enum

An enumeration.

PARTIALLY_SUBMITTED = 2#
PENDING = 0#
SUBMITTED = 1#
hpcflow.sdk.submission.submission.timedelta_format(td)#
Parameters:

td (timedelta) –

Return type:

str

hpcflow.sdk.submission.submission.timedelta_parse(td_str)#
Parameters:

td_str (str) –

Return type:

timedelta

Module contents#