from .smal_warapper import SMAL

from ..configs import CACHE_DIR_HAMER
from .amr import AMR

DEFAULT_CHECKPOINT = f'{CACHE_DIR_HAMER}/train/runs/AniMer/checkpoints/checkpoint.ckpt'

def load_amr(checkpoint_path=DEFAULT_CHECKPOINT):
    """Load an AniMer (AMR) model and its config from a Lightning checkpoint.

    The model config is read from the `.hydra/config.yaml` saved in the
    checkpoint's run directory (two levels above the checkpoint file).
    """
    from pathlib import Path
    from ..configs import get_config

    model_cfg = str(Path(checkpoint_path).parent.parent / '.hydra/config.yaml')
    model_cfg = get_config(model_cfg, update_cachedir=True)

    # Override some config values to crop the bbox correctly
    if (model_cfg.MODEL.BACKBONE.TYPE == 'vit') and ('BBOX_SHAPE' not in model_cfg.MODEL):
        model_cfg.defrost()
        assert model_cfg.MODEL.IMAGE_SIZE == 256, \
            f"MODEL.IMAGE_SIZE ({model_cfg.MODEL.IMAGE_SIZE}) should be 256 for ViT backbone"
        model_cfg.MODEL.BBOX_SHAPE = [192, 256]
        model_cfg.freeze()

    # Update config to be compatible with demo: drop the pretrained-backbone
    # weights entry so the backbone is initialized from the checkpoint only
    if 'PRETRAINED_WEIGHTS' in model_cfg.MODEL.BACKBONE:
        model_cfg.defrost()
        model_cfg.MODEL.BACKBONE.pop('PRETRAINED_WEIGHTS')
        model_cfg.freeze()

    model = AMR.load_from_checkpoint(checkpoint_path, strict=False, cfg=model_cfg)
    return model, model_cfg
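
# Minimal usage sketch (runs only when this file is executed directly, not on
# import): loads the AniMer checkpoint via load_amr and prepares the model for
# inference. The CUDA/CPU device selection is an assumption about the runtime
# environment, not something this module mandates; the checkpoint path must
# already exist under CACHE_DIR_HAMER.
if __name__ == '__main__':
    import torch

    model, model_cfg = load_amr(DEFAULT_CHECKPOINT)
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    model = model.to(device)
    model.eval()
    print(f"Loaded AniMer model on {device}; MODEL.IMAGE_SIZE = {model_cfg.MODEL.IMAGE_SIZE}")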