コンフィグの読み込み方 (How to load a Detectron2 config)
# Example: load a Detectron2 YACS config, override the weights path, and build a model.
from detectron2.config import get_cfg
from detectron2.modeling import build_model  # fixed: build_model was used below without being imported

cfg = get_cfg()  # start from Detectron2's default config
# NOTE(review): merge_from_file expects a YAML file; a ".cfg" extension is unusual — confirm the path.
cfg.merge_from_file("PATH/config.cfg")
# Override individual keys as ["KEY", value] pairs.
cfg.merge_from_list(["MODEL.WEIGHTS", "PATH/model.pth"])
model = build_model(cfg)  # builds the architecture only; weights are NOT loaded here
Yacs Configs — detectron2 0.6 documentation
import logging
from detectron2.data.datasets import register_coco_instances
from detectron2.data import build_detection_test_loader
from detectron2.modeling import build_model
from detectron2.evaluation import COCOEvaluator, inference_on_dataset
from detectron2.config import get_cfg
import torch
from detectron2.checkpoint import DetectionCheckpointer
# Evaluate a trained Detectron2 model on COCO-stuff validation splits bucketed by object size.
logging.basicConfig(level=logging.INFO)

# Config and final checkpoint of the trained model being evaluated.
PATH_CFG = "./byol-frozen-coco/config.yaml"
PATH_CHECKPOINT = "./byol-frozen-coco/model_final.pth"

# Register COCO-format validation splits (small / medium / large objects).
# NOTE(review): annotation JSONs use relative paths while the image root is absolute — confirm both resolve.
register_coco_instances("coco_2017_val_s", {}, "./datasets/stuff_val2017_size_s.json", "/datasets/COCO/2017/coco/val2017")
register_coco_instances("coco_2017_val_m", {}, "./datasets/stuff_val2017_size_m.json", "/datasets/COCO/2017/coco/val2017")
register_coco_instances("coco_2017_val_l", {}, "./datasets/stuff_val2017_size_l.json", "/datasets/COCO/2017/coco/val2017")

# Which registered split this run evaluates.
TARGET = "coco_2017_val_s"

# Build the model from its (frozen) config, then load the checkpoint weights into it.
cfg = get_cfg()
cfg.merge_from_file(PATH_CFG)
cfg.freeze()
model = build_model(cfg)
DetectionCheckpointer(model).resume_or_load(PATH_CHECKPOINT)
model.eval()  # inference mode: disables dropout/batch-norm updates

# Run COCO-style evaluation over the chosen split and print the metrics dict.
evaluator = COCOEvaluator(TARGET, output_dir="./output/")
val_loader = build_detection_test_loader(cfg, TARGET, batch_size=10, num_workers=10)
with torch.no_grad():
    # fixed: removed stray "ra" appended to this line (SyntaxError) and
    # indented the call so it actually runs inside the no_grad context.
    print(inference_on_dataset(model, val_loader, evaluator))