import torch
import torchvision
from torch import nn


def create_model():
    """Creates a frozen ViT-B/16 feature extractor with a single-output head and its transforms."""
    # Set seeds for reproducibility
    torch.manual_seed(42)
    torch.cuda.manual_seed(42)

    # Load pretrained ViT-B/16 weights and the matching image transforms
    weights = torchvision.models.ViT_B_16_Weights.DEFAULT
    transform = weights.transforms()
    model = torchvision.models.vit_b_16(weights=weights)

    # Freeze all layers in the base model
    for param in model.parameters():
        param.requires_grad = False

    # Replace the classifier head with a single-output linear layer
    model.heads = nn.Sequential(
        nn.Linear(in_features=768, out_features=1)
    )

    return model, transform
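

# Minimal usage sketch (not part of the original file): runs a random tensor
# through the model to confirm the head produces a single logit per image.
if __name__ == "__main__":
    model, transform = create_model()
    model.eval()

    # Hypothetical dummy input matching ViT-B/16's expected 224x224 RGB size
    dummy_image = torch.randn(1, 3, 224, 224)

    with torch.inference_mode():
        logits = model(dummy_image)

    print(logits.shape)  # torch.Size([1, 1])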