imports:
- $import os
- $import glob
- $import torch
- $import scripts
- $import scripts.inference

# Run on GPU when available, otherwise fall back to CPU.
device: '$torch.device("cuda" if torch.cuda.is_available() else "cpu")'

# Downstream EXAONEPath v1 model: 256-px patches at a 256-px step, with Macenko stain normalization.
model_config:
  _target_: scripts.exaonepath.EXAONEPathV1Downstream
  step_size: 256
  patch_size: 256
  macenko: true
  device: '$@device'

model: '$@model_config'

# Whole-slide images (*.svs) to run inference on.
input_dir: 'samples'
input_files: "$sorted(glob.glob(@input_dir + '/*.svs'))"
root_dir: '$os.path.dirname(os.path.dirname(scripts.__file__))'

# Load the task checkpoint, then run inference over every input slide.
inference:
- $@model.load_state_dict(torch.load(os.path.join(@root_dir, "models/exaonepath_v1.0.0_msi.pt")))
- $scripts.inference.infer(@model, @input_files)
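
# ---------------------------------------------------------------------------
# Usage sketch (assumptions: this is a MONAI bundle-style config saved as
# configs/inference.yaml inside the repository; adjust the path to your
# layout). The config can be parsed and executed from Python, for example:
#
#   from monai.bundle import ConfigParser
#
#   parser = ConfigParser()
#   parser.read_config("configs/inference.yaml")
#   model = parser.get_parsed_content("model")        # instantiates EXAONEPathV1Downstream
#   results = parser.get_parsed_content("inference")  # loads the checkpoint, then runs inference
#
# or through the bundle CLI:
#
#   python -m monai.bundle run inference --config_file configs/inference.yaml
# ---------------------------------------------------------------------------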