Source code for schrodinger.application.desmond.stage.fep_mapper

import copy
import glob
import os
import shutil
from typing import TYPE_CHECKING

from schrodinger import structure
from schrodinger.application.desmond import cmj
from schrodinger.application.desmond import constants
from schrodinger.application.desmond import license as desmond_license
from schrodinger.application.desmond import measurement
from schrodinger.application.desmond import struc
from schrodinger.application.desmond import util
from schrodinger.application.desmond.picklejar import PickleState
from schrodinger.application.desmond.stage import launcher
from schrodinger.utils import sea
from schrodinger.utils import subprocess

if TYPE_CHECKING:
    from schrodinger.application.scisol.packages.fep import graph

# Public stage classes exported by this module.
__all__ = [
    'CovalentFepMapper', 'ProteinFepMapper', 'FepMapper', 'FepMapperCleanup',
    'FepMapperReport', 'VacuumReport', 'CalcDdg'
]

# Tags used to register/look up files in job output registries:
# GRAPH_TAG marks a serialized FEP graph (*.fmp); PRIME_TAG marks the
# Prime-generated structure file consumed by ProteinFepMapper.
GRAPH_TAG = 'GRAPH'
PRIME_TAG = 'PRIME'


# TODO function should not set private member on object
def _copy_to_base_dir_and_register_transfering(stage: "cmj.StageBase",
                                               src: str, target: str,
                                               base_dir: str) -> None:
    """
    Copy `src` into `base_dir` under the name `target` and register the
    resulting file on `stage` so it is transferred back to the launch host.

    :param stage: Stage on which to register the copied file (appends to
        the stage's private `_files4copy` list).
    :param src: Path of the file to copy.
    :param target: Basename for the copy inside `base_dir`.
    :param base_dir: Destination directory (typically `cmj.ENGINE.base_dir`).
    """
    fn = os.path.join(base_dir, target)
    shutil.copyfile(src, fn)
    stage._files4copy.append(fn)


class CovalentFepMapper(cmj.StageBase):
    """
    Stage that expands a covalent-FEP graph file into FEP launcher jobs.

    For each pre-job, the graph given by the `graph_file` parameter is
    deserialized, the FEP structure (.mae), graph (.fmp) and edge (.edge)
    files are written, and one launcher job is created per generated
    structure file.
    """

    NAME = "covalent_fep_mapper"
    PARAM = cmj._create_param_when_needed([
        """
    DATA = {
       graph_file = ""
    }
    VALIDATE = {
       graph_file = {type = str1}
    }
    """,
    ])

    def __init__(self, *arg, **kwarg):
        """
        Initialize the stage and cache the main job name.
        """
        cmj.StageBase.__init__(self, *arg, **kwarg)
        self.main_jobname = sea.Atom("$MAINJOBNAME").val

    # __init__

    def crunch(self):
        """
        Create FEP launcher jobs for every edge of the input graph.
        """
        from schrodinger.application.scisol.packages.fep import fepmae
        from schrodinger.application.scisol.packages.fep import graph
        from schrodinger.application.scisol.packages.fep import utils

        self._print("debug", "In CovalentFepMapper.crunch")
        for pj in self.get_prejobs():
            jobname, dir = self._get_jobname_and_dir(pj)
            if not os.path.isdir(dir):
                os.makedirs(dir)
            util.chdir(dir)
            g = graph.Graph.deserialize(
                os.path.join(cmj.ENGINE.base_dir, self.param.graph_file.val))
            fepmae.write_fepmae(jobname, g)
            fmp_fname = jobname + ".fmp"
            edge_fname = jobname + ".edge"
            g.write(fmp_fname)
            utils.write_edge_file(g, edge_fname)
            new_jobs = launcher.create_fep_launcher_jobs(
                glob.glob(jobname + '*.mae'), self, pj, dir, fmp_fname)
            self.add_jobs(new_jobs)
            _copy_to_base_dir_and_register_transfering(
                self, edge_fname, self.main_jobname + '.edge',
                cmj.ENGINE.base_dir)
        self._print("debug", "Out CovalentFepMapper.crunch")
class ProteinFepMapper(cmj.StageBase):
    """
    Stage that builds (or updates) a protein-mutation FEP graph from
    Prime-generated mutant structures and creates launcher jobs for the
    resulting structure files.
    """

    NAME = "protein_fep_mapper"
    PARAM = cmj._create_param_when_needed([
        """
    DATA = {
       rule = ""
       asl = ""
       neighbor = 0
       sidechain = False
       fep_type = "%s"
       graph_file = ""
    }
    VALIDATE = {
       rule = {type = str}
       asl = {type = str}
       neighbor = {type = int range = [0 7]}
       sidechain = {type = bool}
       fep_type = {type = enum range = [%s]}
       graph_file = {type = str}
    }
    """ % (constants.FEP_TYPES.PROTEIN_SELECTIVITY,
           " ".join(constants.FEP_TYPES)),
    ])

    def __init__(self, *arg, **kwarg):
        """
        Initialize the stage and cache the main job name.
        """
        cmj.StageBase.__init__(self, *arg, **kwarg)
        self.main_jobname = sea.Atom("$MAINJOBNAME").val

    # __init__

    def crunch(self):
        """
        Generate a protein FEP graph for each pre-job and launch the
        per-edge FEP jobs. If a previous graph file exists it is updated
        with any new mutations; if there are no new mutations the previous
        graph is reused as-is.
        """
        from schrodinger.application.scisol.packages.fep import fepmae
        from schrodinger.application.scisol.packages.fep import graph
        from schrodinger.application.scisol.packages.fep import \
            graph_generator as ggen
        from schrodinger.application.scisol.packages.fep import utils

        self._print("debug", "In ProteinFepMapper.crunch")
        for pj in self.get_prejobs():
            graph_file = ''
            if os.path.isfile(self.param.graph_file.val):
                graph_file = os.path.join(cmj.ENGINE.base_dir,
                                          self.param.graph_file.val)
            jobname, dir = self._get_jobname_and_dir(pj)
            if not os.path.isdir(dir):
                os.makedirs(dir)
            util.chdir(dir)
            prime_fname = pj.output.get(PRIME_TAG)
            prev_graph = graph.Graph.deserialize(
                graph_file) if graph_file else None
            if not prime_fname and graph_file:
                self._print(
                    "quiet",
                    f"ProteinFepMapper: No new mutations, using previous graph {graph_file}."
                )
                g = prev_graph
            else:
                raw_fname = pj.parent.output.struct_file()
                # Ignore the first CT, which is the WT protein.
                environment_strucs = list(
                    structure.StructureReader(raw_fname))[1:]
                optimization_options = {}
                if self.param.fep_type.val == \
                        constants.FEP_TYPES.PROTEIN_STABILITY:
                    # in case of PRM stability w/ membrane jobs, the first item
                    # should set to none for the receptor_struc
                    _, solvent, membrane, _ = \
                        fepmae.filter_receptors_and_ligands(environment_strucs)
                    environment_strucs = [None, membrane, solvent]
                    optimization_options["topology_type"] = \
                        graph.TOPOLOGY_WINDMILL
                if self.param.rule.val:
                    optimization_options["rules"] = self.param.rule.val
                g = ggen.gen_graph_from_protein_mutants(
                    structure.StructureReader(prime_fname),
                    self.param.fep_type.val,
                    mutation_chain_asl=self.param.asl.val,
                    neighbor=self.param.neighbor.val,
                    sidechain=self.param.sidechain.val,
                    environment_strucs=environment_strucs,
                    optimization_options=optimization_options)
                if prev_graph is not None:
                    # Merge the freshly generated graph into the previous one
                    # and keep working with the updated previous graph.
                    ggen.update_protein_graph(
                        prev_graph,
                        g,
                        sidechain=self.param.sidechain.val,
                        optimization_options=optimization_options)
                    g = prev_graph

            fepmae.write_fepmae(jobname, g)
            fmp_fname = jobname + ".fmp"
            edge_fname = jobname + ".edge"
            g.write(fmp_fname)
            utils.write_edge_file(g, edge_fname)
            new_jobs = launcher.create_fep_launcher_jobs(
                glob.glob(jobname + '*.mae'), self, pj, dir, fmp_fname)
            self.add_jobs(new_jobs)
            _copy_to_base_dir_and_register_transfering(
                self, edge_fname, self.main_jobname + '.edge',
                cmj.ENGINE.base_dir)
        self._print("debug", "Out ProteinFepMapper.crunch")
class FepMapper(cmj.StageBase):
    """
    Stage that runs the `fep_mapper.py` backend to construct an FEP graph
    (from the pre-job structures or an existing graph file) and creates
    launcher jobs for the structure files it writes.
    """

    NAME = "fep_mapper"
    PARAM = cmj._create_param_when_needed([
        """
    DATA = {
       receptor = 1
       graph_file = ""
       rule = ""
       bias = ["bias"]
       atom_mapping = ""
       align_core_only = False
       ats = False
       debug = False
       mp = []
       ignore_ddg = False
    }
    VALIDATE = {
       receptor = {type = int0}
       graph_file = {type = str}
       rule = {type = str}
       bias = {type = list size = 0 elem = {type = str range = [0 10000000000]}}
       atom_mapping = {type = str}
       align_core_only = {type = bool}
       ats = [{type = bool}
              {min_barrier_height = {type = float+}
               max_bond_dist = {type = int1}
               max_core_reduction = {type = int1}
              }
             ]
       debug = {type = bool}
       mp = {type = list elem = {type = str}}
       ignore_ddg = {type = bool}
    }
    """,
    ])

    def __init__(self, *arg, **kwarg):
        """
        Initialize the stage and cache the main job name.
        """
        cmj.StageBase.__init__(self, *arg, **kwarg)
        self.main_jobname = sea.Atom("$MAINJOBNAME").val

    # __init__

    def crunch(self):
        """
        For each pre-job, assemble and run the `fep_mapper.py` command
        line, echo its log, and create launcher jobs from the generated
        .mae files. Jobs whose backend run fails or whose .fmp output is
        missing are skipped with an error message.
        """
        self._print("debug", "In FepMapper.crunch")
        for pj in self.get_prejobs():
            pre_mae = pj.output.struct_file()
            jobname, dir = self._get_jobname_and_dir(pj)
            if not os.path.isdir(dir):
                os.makedirs(dir)
            util.chdir(dir)
            cmd = [
                'run', '-FROM', 'scisol', 'fep_mapper.py', '-o', jobname,
                '-s', jobname, '-environment', str(self.param.receptor.val)
            ]  # FIXME: self.param.environment.val?
            if self.param.atom_mapping.val:
                cmd += ['-atom-mapping', self.param.atom_mapping.val]
            if self.param.align_core_only.val:
                cmd.append('-align-core-only')
            if self.param.ats.val:
                cmd.append('-ats')  # Automated torsional scaling
                ats = self.param.ats
                if isinstance(ats, sea.Map):
                    if hasattr(ats, 'min_barrier_height'):
                        cmd += [
                            '-ats_min_barrier_height',
                            str(ats.min_barrier_height.val)
                        ]
                    if hasattr(ats, 'max_bond_dist'):
                        cmd += ['-ats_max_bond_dist', str(ats.max_bond_dist.val)]
                    if hasattr(ats, 'max_core_reduction'):
                        cmd += [
                            '-ats_max_core_reduction',
                            str(ats.max_core_reduction.val)
                        ]
            if self.param.graph_file.val:
                fmp_fname = os.path.join(cmj.ENGINE.base_dir,
                                         self.param.graph_file.val)
                cmd.append(fmp_fname)
            else:
                cmd.append(pre_mae)
                fmp_fname = jobname + ".fmp"
            if self.param.rule.val:
                cmd += ['-rule', self.param.rule.val]
            for e in self.param.bias.val:
                cmd += ['-bias', str(e)]
            for e in self.param.mp.val:
                cmd += ['-mp', str(e)]
            cmd.append("-ignore-ddg")  # always write mae files for ALL edges.
            self._print("debug", " ".join(cmd))
            # Context managers guarantee the log handles are closed even if
            # the subprocess call (or the echo below) raises.
            with open(jobname + ".log", 'w') as log_fh:
                ret_code = subprocess.call(cmd, stdout=log_fh, stderr=log_fh)
            self._print("quiet", "FEP Mapper backend log:")
            with open(jobname + ".log") as log_fh:
                self._print("quiet", log_fh.read())
            if ret_code:
                self._print(
                    "quiet",
                    'ERROR: fep_mapper.py subprocess failed for input file: %s.'
                    % pre_mae)
                continue
            if not os.path.isfile(fmp_fname):
                self._print("quiet",
                            'ERROR: %s output file not found' % fmp_fname)
                continue
            edge_fname = jobname + '.edge'
            try:
                f = open(edge_fname, 'r')
            except IOError:
                self._print("quiet",
                            'ERROR: %s.edge output file not found' % jobname)
            else:
                self._log("Edges in graph:")
                with f:
                    self._log(f.read())
            new_jobs = launcher.create_fep_launcher_jobs(
                glob.glob(jobname + '*.mae'), self, pj, dir, fmp_fname)
            self.add_jobs(new_jobs)
            _copy_to_base_dir_and_register_transfering(
                self, edge_fname, self.main_jobname + '.edge',
                cmj.ENGINE.base_dir)
        # NOTE: fixed typo in the original debug message ("FepMaper").
        self._print("debug", "Out FepMapper.crunch")
class FepMapperCleanup(cmj.StageBase):
    """
    Stage that runs the `fep_mapper_cleanup.py` backend against the
    multisim checkpoint to produce the final output graph, then counts and
    checks out licenses for newly completed compounds.
    """

    NAME = "fep_mapper_cleanup"
    PARAM = cmj._create_param_when_needed([
        """
    DATA = {
       input_graph_file = ""
    }
    VALIDATE = {
       input_graph_file = {type = str _check = multisim_file_exists}
    }
    """,
    ])

    def __init__(self, *arg, **kwarg):
        """
        Initialize the stage; results of this stage are not packed.
        """
        cmj.StageBase.__init__(self, should_pack=False, *arg, **kwarg)

    def crunch(self):
        """
        Locate the input graph, build the cleanup command, and add a
        single job whose execution function runs the cleanup backend.
        """
        from schrodinger.application.scisol.packages.fep.graph import Graph
        fmp_fname = None
        init_graph = out_graph = None
        fmp_out_fname = ''
        fmp_opt = []
        self._print("debug", "In FepMapperCleanup.crunch")
        base_dir = cmj.ENGINE.base_dir
        jobname = cmj.ENGINE.jobname
        checkpoint = f'{jobname}-multisim_checkpoint'
        pj0 = self.get_prejobs()[0]
        _, dir = self._get_jobname_and_dir(pj0)
        if self.param.input_graph_file.val:
            fmp_fname = self.param.input_graph_file.val
        else:
            # Search graph file in previous jobs first, if not found, continue
            # to search for parent of the 1st job (arbitrarily chosen)
            for pj in self.get_prejobs():
                fmp_fname = pj.output.get(GRAPH_TAG)
                if fmp_fname is not None:
                    break
            if fmp_fname is None:
                parent = pj0.parent
                while parent is not None and fmp_fname is None:
                    fmp_fname = parent.output.get(GRAPH_TAG)
                    parent = parent.parent
        self._log(f"Loading initial graph {fmp_fname}...")
        init_graph = Graph.deserialize(fmp_fname)
        fmp_out_fname = f"{jobname}_out.fmp"
        fmp_opt = ['-fmp', fmp_fname, '-out-fmp', fmp_out_fname]
        CLEANUP_SCRIPTS = "fep_mapper_cleanup.py"
        cmd = ['run', '-FROM', 'scisol', CLEANUP_SCRIPTS, checkpoint,
               '-force'] + fmp_opt
        self._log(" ".join(cmd))

        def run(job):
            # FIXME: Why don't we run this directly rather creating
            # a new cmj.Job?
            os.chdir(base_dir)
            ret_code = subprocess.call(cmd)
            if ret_code == 0:
                job.status.set(cmj.JobStatus.SUCCESS)
                # FIXME: report here or in `poststage`?
                self._log(f"Loading output graph {fmp_out_fname}...")
                out_graph = Graph.deserialize(fmp_out_fname)
                ncompounds = FepMapperReport._count_finished_compounds(
                    init_graph, out_graph)
                self._log(
                    f'Number of new compounds completed in the current job: {ncompounds}'
                )
                if ncompounds:
                    desmond_license.checkout_model2_compounds(
                        ncompounds, product=constants.PRODUCT.FEP)

        job = cmj.Job(jobname, pj0, self, run, dir)
        self.add_job(job)
        self._print("debug", "Out FepMapperCleanup.crunch")

    def poststage(self):
        """
        Register all graph files in the current directory for transfer.
        """
        for fn in glob.glob('*.fmpdb') + glob.glob('*.fmp'):
            self._files4copy.append(os.path.abspath(fn))
class FepMapperReport(cmj.StageBase):
    """
    Stage that collects per-edge ddG results from the pre-jobs back into
    the FEP graph and, in `poststage`, writes the final graph and a CSV
    report of all edges.
    """

    NAME = "fep_mapper_report"
    PARAM = cmj._create_param_when_needed([
        """
    DATA = {
       dir = "$MAINJOBNAME_$STAGENO"
       graph = "$MAINJOBNAME_out.fmp"
       input_graph_file = ""
       report = "$MAINJOBNAME.csv"
    }
    VALIDATE = {
       report = {type = str1}
       input_graph_file = {type = str _check = multisim_file_exists}
    }
    """,
    ])

    def __init__(self, *arg, **kwarg):
        """
        Initialize the stage. `_graph` caches the deserialized FEP graph
        and is populated lazily in `crunch`.
        """
        self._graph = None
        cmj.StageBase.__init__(self, *arg, **kwarg)

    # __init__

    def __getstate__(self, state=None):
        """
        Include the cached graph in the pickled state (if present).
        """
        state = state if state else PickleState()
        try:
            state.__dict__["_graph"] = self._graph
        except AttributeError:
            pass
        return cmj.StageBase.__getstate__(self, state)

    def prestage(self):
        # Drop any stale cached graph before (re)running the stage.
        self._graph = None
        cmj.StageBase.prestage(self)

    def compatible_graph(self, g):
        """
        Return True if `g` has exactly the same node ids as the cached graph.
        """
        return sorted(node.id for node in self._graph.nodes_iter()) == \
            sorted(node.id for node in g.nodes_iter())

    def crunch(self):
        """
        For each pre-job, read the ddG/dG properties from the output
        structure file and store them on the corresponding graph edge.
        Finally count and check out licenses for newly finished compounds.
        """
        from schrodinger.application.scisol.packages.fep import graph
        self._print("debug", "In FepMapperReport.crunch")
        initial_graph = None
        for pj in self.get_prejobs():
            if self._graph is None:
                fmp_fname = self.param.input_graph_file.val or \
                    pj.parent.parent.output.get(GRAPH_TAG)
                if not os.path.exists(fmp_fname):
                    # We have to guess the pathname of the *.fmp file if it's
                    # missing at the specified location.
                    util.chdir(pj.parent.parent.dir)
                    fmp_fname = glob.glob(os.path.basename(fmp_fname))
                    if not fmp_fname:
                        fmp_fname = glob.glob("*.fmp")
                    # glob.glob() returns a list -- take the first match.
                    # (The original applied os.path.abspath() to the list
                    # itself when the first glob matched, raising TypeError.)
                    fmp_fname = os.path.abspath(fmp_fname[0])
                self._graph = graph.Graph.deserialize(fmp_fname)
            initial_graph = initial_graph or copy.deepcopy(self._graph)
            ddg_mae = pj.output.struct_file()
            jobname, dir = self._get_jobname_and_dir(pj)
            if not os.path.isdir(dir):
                os.makedirs(dir)
            util.chdir(dir)
            ddg = None
            for ct in struc.read_all_ct(ddg_mae):
                try:
                    n2n = ct.property[constants.MOLTYPE]
                    ddg = measurement.string2measurement(
                        ct.property['s_des_ddG'])
                    break
                except KeyError:
                    pass
            i = n2n.find(':')
            edge = self._graph.id2edge((n2n[:i], n2n[i + 1:]))
            self._log('ddG = %s for edge %s:%s' %
                      (ddg, edge.short_id[0], edge.short_id[1]))
            edge.complex_dg = measurement.string2measurement(
                ct.property['s_des_dG1'])
            edge.solvent_dg = measurement.string2measurement(
                ct.property['s_des_dG2'])
            edge.set_data('file0', pj.input.get("file0"))
            edge.set_data('file1', pj.input.get("file1"))
            self.add_job(pj)
        ncompounds = self._count_finished_compounds(initial_graph, self._graph)
        self._log(
            f'Number of new compounds completed in the current job: {ncompounds}'
        )
        if ncompounds:
            desmond_license.checkout_model2_compounds(
                ncompounds, product=constants.PRODUCT.FEP)
        self._print("debug", "Out FepMapperReport.crunch")

    def poststage(self):
        """
        Write the final graph (with cycle closure) and the CSV report,
        register files for packing/transfer, and hand a dummy SUCCESS job
        carrying the graph file to the next stage.
        """
        if self._graph:
            sea.set_macro_dict(self._get_macro_dict())
            if self.param.prefix.val != "":
                sea.update_macro_dict({"$PREFIX": self.param.prefix.val})
            dir = os.path.join(cmj.ENGINE.base_dir, self.param.dir.val)
            if not os.path.isdir(dir):
                os.makedirs(dir)
            util.chdir(dir)
            fmp_fname = self.param.graph.val
            self._graph.calc_cycle_closure()
            self._graph.write(fmp_fname)
            self._files4pack.append(os.path.abspath(fmp_fname))
            fmp_fname_copy = os.path.join(cmj.ENGINE.base_dir, fmp_fname)
            shutil.copyfile(os.path.abspath(fmp_fname), fmp_fname_copy)
            self._files4copy.append(fmp_fname_copy)
            # Only add the solvation columns when at least one edge has a
            # solvation ddG.
            has_solvation_ddg = False
            for e in self._graph.edges_iter():
                if e.bennett_solvation_ddg is not None:
                    has_solvation_ddg = True
                    break
            with open(self.param.report.val, "w") as fh:
                cols = ["ligand1", "ligand2", "ddG", "error"]
                if has_solvation_ddg:
                    cols += ["solvation_ddG", "solvation_error"]
                cols += ["complex_job", "solvent_job", "edge"]
                fh.write(", ".join(cols) + "\n")
                for e in self._graph.edges_iter():
                    n0, n1 = e.direction
                    try:
                        ddg_val = e.bennett_ddg.val
                        ddg_err = e.bennett_ddg.unc
                    except AttributeError:
                        ddg_val = "N/A"
                        ddg_err = "N/A"
                    try:
                        solv_ddg_val = e.bennett_solvation_ddg.val
                        solv_ddg_err = e.bennett_solvation_ddg.unc
                    except AttributeError:
                        solv_ddg_val = "N/A"
                        solv_ddg_err = "N/A"
                    try:
                        file0 = os.path.basename(e.get_data('file0'))
                        file1 = os.path.basename(e.get_data('file1'))
                        # FIXME: really need to set files here?
                        e.del_data('file0')
                        e.del_data('file1')
                    except TypeError:
                        # if file0/file1 are not present
                        file0 = "N/A"
                        file1 = "N/A"
                    vals = [
                        n0.struc.title, n1.struc.title, f"{ddg_val}",
                        f"{ddg_err}"
                    ]
                    if has_solvation_ddg:
                        vals += [f"{solv_ddg_val}", f"{solv_ddg_err}"]
                    vals += [file0, file1, f"{n0.short_id}:{n1.short_id}"]
                    fh.write(", ".join(vals) + "\n")
            self._files4pack.append(os.path.abspath(self.param.report.val))
            for e in glob.glob("*.csv") + glob.glob("*.fmp") + \
                    glob.glob("*.txt"):
                self._files4pack.append(os.path.abspath(e))
            job = cmj.Job("dummyjobname", None, self, None, dir)
            job.status.set(cmj.JobStatus.SUCCESS)
            job.output.add(fmp_fname, tag=GRAPH_TAG)
            if "concluder" == self._NEXT_STAGE.NAME:
                self._NEXT_STAGE._pre_job = [
                    job,
                ]
                self._NEXT_STAGE._pre_jobre = [
                    job,
                ]
            else:
                self._NEXT_STAGE._pre_job.append(job)
                self._NEXT_STAGE._pre_jobre.append(job)

    @classmethod
    def _count_finished_compounds(cls, init_graph: "graph.Graph",
                                  final_graph: "graph.Graph") -> int:
        """
        Return the number of compounds finished in `final_graph` that were
        not already finished in `init_graph`. A compound counts as
        finished when it participates in an edge with a bennett ddG; for
        protein FEP types all WT variants count as one compound ('WT').
        """
        nodes_with_ddg = set()
        init_count = final_count = 0
        protein_fep_types = (constants.FEP_TYPES.LIGAND_SELECTIVITY,
                             constants.FEP_TYPES.PROTEIN_STABILITY,
                             constants.FEP_TYPES.PROTEIN_SELECTIVITY)

        def update_nodes_with_ddg(g):
            """Update the enclosing `nodes_with_ddg` set for finished jobs.
            """
            for edge in g.edges_iter():
                if edge.bennett_ddg is not None:
                    for node in edge.nodes:
                        if (g.fep_type in protein_fep_types and
                                node.struc.title.startswith('WT')):
                            nodes_with_ddg.add('WT')
                        else:
                            nodes_with_ddg.add(node.id)

        update_nodes_with_ddg(init_graph)
        init_count = len(nodes_with_ddg)
        update_nodes_with_ddg(final_graph)
        final_count = len(nodes_with_ddg)
        num_finished_comp = final_count - init_count
        assert num_finished_comp >= 0, \
            "Number of finished compound(s) must be >= 0"
        return num_finished_comp
class VacuumReport(FepMapperReport):
    """
    Variant of `FepMapperReport` that collects vacuum dG values from the
    pre-job output structures onto the graph edges.
    """

    NAME = "vacuum_report"
    PARAM = cmj._create_param_when_needed([
        """
    DATA = {
       dir = "$MAINJOBNAME_$STAGENO"
       graph = "$MAINJOBNAME_out.fmp"
       report = "$MAINJOBNAME.csv"
    }
    VALIDATE = {
       report = {type = str1}
    }
    """,
    ])

    def crunch(self):
        """
        For each pre-job, read the vacuum dG property from every CT in
        the output structure file and store it on the matching edge.
        """
        from schrodinger.application.scisol.packages.fep import graph
        self._print("debug", "In VacuumReport.crunch")
        for pj in self.get_prejobs():
            if self._graph is None:
                fmp_fname = pj.parent.output.get(GRAPH_TAG)
                if not os.path.exists(fmp_fname):
                    # We have to guess the pathname of the *.fmp file
                    # if it's missing at the specified location.
                    util.chdir(pj.parent.dir)
                    fmp_fname = glob.glob(os.path.basename(fmp_fname))
                    if not fmp_fname:
                        fmp_fname = glob.glob("*.fmp")
                    fmp_fname = os.path.abspath(fmp_fname[0])
                self._graph = graph.Graph.deserialize(fmp_fname)
            ddg_mae = pj.output.struct_file()
            jobname, dir = self._get_jobname_and_dir(pj)
            if not os.path.isdir(dir):
                os.makedirs(dir)
            util.chdir(dir)
            for ct in struc.read_all_ct(ddg_mae):
                # NOTE(review): if the first CT lacks the MOLTYPE property,
                # `n2n` is unbound below -- presumably every CT carries it;
                # confirm against the producer of `ddg_mae`.
                try:
                    n2n = ct.property[constants.MOLTYPE]
                    dg = measurement.string2measurement(
                        ct.property['s_des_dG'])
                except KeyError:
                    dg = None
                self._print('debug', n2n)
                i = n2n.find(':')
                n0 = n2n[:i]
                n1 = n2n[i + 1:]
                edge = self._graph.id2edge((n0, n1))
                edge.vacuum_dg = dg
            self.add_job(pj)
        self._print("debug", "Out VacuumReport.crunch")
# currently this is only used by fep_scholar. It should be obsolesced by # fep_mapper_cleanup, but that uses scisol. So we must keep this for # academic users for now
class CalcDdg(cmj.StageBase):
    """
    Stage that pairs up conjugate multisim subjobs and runs the desmond
    `calculate_ddg.py` backend on each pair.

    Currently this is only used by fep_scholar. It should be obsolesced by
    fep_mapper_cleanup, but that uses scisol, so we must keep this for
    academic users for now.
    """

    NAME = "calc_ddg"
    CMD = [
        "run",
        "-FROM",
        "desmond",
        "calculate_ddg.py",
    ]
    PARAM = cmj._create_param_when_needed([
        """
    DATA = {
    }
    VALIDATE = {
    }
    """,
    ])

    def crunch(self):
        """
        Filter, sort, and deduplicate multisim subjobs; pair conjugate
        subjobs by input file name; and run `calculate_ddg.py` on each
        pair, adding a SUCCESS job per successful run.
        """
        self._print("debug", "In CalcDdg.crunch")
        # Filters off all non-multisim subjobs.
        multisim_subjob = []
        for pj in self.get_prejobs():
            if 0 <= pj.what.find("multisim subjob:"):
                if isinstance(pj.jctrl, str):
                    start_time, junk = cmj.Job._get_time_helper(
                        "2000-10-02-13:26:04")
                else:
                    start_time, junk = cmj.Job._get_time_helper(
                        pj.jctrl.StartTime)
                num = pj.what[-2:]
                fname = pj.what[16:-3]
                fname = os.path.basename(fname)
                pj.conj_fname = fname
                pj.conj_num = num
                pj.start_time = start_time
                multisim_subjob.append(pj)
            else:
                self._print("debug", "pj.what = %s" % pj.what)
                self._print(
                    "quiet",
                    "WARNING: Ignoring non-multisim subjob: %s" % str(pj))
        if multisim_subjob:
            # Sorts all multisim subjobs.
            multisim_subjob = sorted(multisim_subjob,
                                     key=lambda job: job.start_time)
            multisim_subjob = sorted(multisim_subjob,
                                     key=lambda job: job.conj_num)
            multisim_subjob = sorted(multisim_subjob,
                                     key=lambda job: job.conj_fname)
            # Keep only the latest subjob for each (fname, num) pair.
            prev = multisim_subjob[0]
            unique = [
                prev,
            ]
            for subjob in multisim_subjob[1:]:
                if prev.conj_fname != subjob.conj_fname:
                    unique.append(subjob)
                    prev = subjob
                elif prev.conj_num != subjob.conj_num:
                    unique.append(subjob)
                    prev = subjob
                else:
                    unique[-1] = subjob
                    prev = subjob
            multisim_subjob = unique
        self._log("%d successful multisim subjobs in total" %
                  len(multisim_subjob))
        self._print("debug", str([str(e) for e in multisim_subjob]))
        # Checks if all subjobs are paired.
        subjob_pairs = []
        prev_subjob = None
        for e in multisim_subjob:
            if prev_subjob is None:
                prev_subjob = e
            else:
                struct_infname0 = prev_subjob.conj_fname
                struct_infname1 = e.conj_fname
                if struct_infname0 == struct_infname1:
                    subjob_pairs.append((
                        prev_subjob,
                        e,
                        os.path.basename(struct_infname0),
                    ))
                    prev_subjob = None
                else:
                    self._print(
                        "quiet",
                        "WARNING: Conjugate subjob not found for subjob: %s." %
                        str(prev_subjob))
                    prev_subjob = e
        if prev_subjob:
            self._print(
                "quiet",
                "WARNING: Conjugate subjob not found for subjob: %s." %
                str(prev_subjob))
        self._log("We found %d subjob pairs to calculate ddG.\n" %
                  len(subjob_pairs))
        for pj0, pj1, out_fname in subjob_pairs:
            in_fname0 = pj0.output.struct_file()
            in_fname1 = pj1.output.struct_file()
            out_fname = out_fname[:-5] + "_ddg.mae"
            if in_fname0 is None:
                self._print(
                    "quiet",
                    "WARNING: Structure output file not found for job: %s" %
                    str(pj0))
            if in_fname1 is None:
                self._print(
                    "quiet",
                    "WARNING: Structure output file not found for job: %s" %
                    str(pj1))
            if in_fname0 and in_fname1:
                jobname, dir = self._get_jobname_and_dir(pj0)
                cmd = CalcDdg.CMD + [
                    in_fname0,
                    in_fname1,
                    out_fname,
                ]
                if not os.path.isdir(dir):
                    os.makedirs(dir)
                util.chdir(dir)
                retcode = subprocess.call(cmd)
                if retcode:
                    self._print(
                        "quiet",
                        f"WARNING: {self.NAME} failed for job: {str(pj0)} {str(pj1)}"
                    )
                    continue
                self._print("quiet", "")
                new_job = cmj.Job(jobname, pj0, self, None, dir)
                new_job.need_host = False
                new_job.output.set_struct_file(os.path.join(dir, out_fname))
                new_job.input.add(in_fname0, tag="file0")
                new_job.input.add(in_fname1, tag="file1")
                new_job.status.set(cmj.JobStatus.SUCCESS)
                self.add_job(new_job)
        self._print("debug", "Out CalcDdg.crunch")