from fastai.vision import all as fst
from fastai.learner import Learner
from fastai.vision.learner import vision_learner
from fastai.tabular.core import df_shrink
# Code to get the datasets...
# Create dataloaders
# NOTE(review): `data_block`, `tmp_dir`, `config` and `epochs` are defined
# elsewhere in the file — verify they are in scope here.
data_loaders = data_block.dataloaders(tmp_dir, bs=config["batch_size"]) # Config is a dictionary containing useful info
# Create model
# Look up the architecture *function* (e.g. resnet34) by its name string.
base_model = getattr(fst, config["model"])
# vision_learner calls the architecture function to build the network and
# wraps it in a Learner; extra kwargs (e.g. lr) are forwarded to Learner.
model = vision_learner(data_loaders, base_model, pretrained=True, lr=0.001,
metrics=fst.error_rate)
# Launch training
model.fine_tune(epochs)
And this works fine; I'm able to save and use the model with no issues. But when I try to do Knowledge Distillation, I get the error shown below:
from fasterai.distill.all import *
from fastai.vision.all import *
# FasterAI tutorial replica
# BUG FIX: `Learner` expects an instantiated nn.Module, but
# `getattr(fst, config["model"])` returns the *architecture function*
# (e.g. `resnet34`) — hence the traceback below:
#   AttributeError: 'function' object has no attribute 'parameters'
# Build the student the same way the teacher was built, with vision_learner,
# which calls the architecture function to construct the network.
student = vision_learner(data_loaders, getattr(fst, config["model"]),
                         pretrained=False,  # student starts untrained, as in the fasterai tutorial — confirm
                         metrics=[accuracy])
# The callback needs the teacher *network*, not the Learner wrapper:
# pass `model.model` (the underlying nn.Module), per the fasterai tutorial.
kd_cb = KnowledgeDistillationCallback(model.model, SoftTarget)
student.fit_one_cycle(10, 1e-4, cbs=kd_cb)
File "/home/deeplearning/workspace/refactor/.venv/lib/python3.9/site-packages/fastai/torch_core.py", line 649, in trainable_params
return [p for p in m.parameters() if p.requires_grad]
AttributeError: 'function' object has no attribute 'parameters'