Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions dinov3/configs/ssl_default_config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -169,6 +169,7 @@ crops:
- 0.229
- 0.224
- 0.225
teacher_to_student_resolution_scale: 1.0
evaluation:
eval_period_iterations: 12500
low_freq_every: 5
Expand Down
43 changes: 43 additions & 0 deletions dinov3/configs/train/distillation_convnext/convnext_base_p16.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
# Distillation student config: ConvNeXt-Base, patch size 16.
ibot:
  loss_weight: 1.0
  mask_sample_probability: 0.5
  mask_ratio_min_max:
    - 0.1
    - 0.5
  mask_random_circular_shift: false
  force_masking_even_with_zero_weight: false
  separate_head: true
  head_norm_last_layer: false
  head_nlayers: 3
  head_hidden_dim: 2048
student:
  arch: convnext_base
  patch_size: 16
  drop_path_rate: 0.0
  block_chunks: 4
optim:
  epochs: 500
  clip_grad: 3.0
  layerwise_decay: 1.0
schedules:
  lr:
    # Decimal point included (1.0e-6, not 1e-6) so strict YAML 1.1 loaders
    # (e.g. plain PyYAML) resolve these as floats rather than strings.
    start: 1.0e-6
    peak: 1.0e-4
    end: 1.0e-6
    warmup_epochs: 80
    freeze_last_layer_epochs: 1
  weight_decay:
    # NOTE(review): start is 0.02 here but 0.04 in the tiny/small/large
    # sibling configs — confirm the asymmetry is intentional.
    start: 0.02
    peak: 0.2
    end: 0.2
    warmup_epochs: 500
  teacher_temp:
    start: 0.04
    peak: 0.07
    end: 0.07
    warmup_epochs: 120
  momentum:
    start: 0.994
    peak: 1.0
    end: 1.0
    warmup_epochs: 500
43 changes: 43 additions & 0 deletions dinov3/configs/train/distillation_convnext/convnext_large_p16.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
# Distillation student config: ConvNeXt-Large, patch size 16.
ibot:
  loss_weight: 1.0
  mask_sample_probability: 0.5
  mask_ratio_min_max:
    - 0.1
    - 0.5
  mask_random_circular_shift: false
  force_masking_even_with_zero_weight: false
  separate_head: true
  head_norm_last_layer: false
  head_nlayers: 3
  head_hidden_dim: 2048
student:
  arch: convnext_large
  patch_size: 16
  drop_path_rate: 0.0
  block_chunks: 4
optim:
  epochs: 500
  clip_grad: 3.0
  layerwise_decay: 1.0
schedules:
  lr:
    # Decimal point included (1.0e-6, not 1e-6) so strict YAML 1.1 loaders
    # (e.g. plain PyYAML) resolve these as floats rather than strings.
    start: 1.0e-6
    peak: 1.0e-4
    end: 1.0e-6
    warmup_epochs: 80
    freeze_last_layer_epochs: 1
  weight_decay:
    start: 0.04
    peak: 0.2
    end: 0.2
    warmup_epochs: 500
  teacher_temp:
    start: 0.04
    peak: 0.07
    end: 0.07
    warmup_epochs: 120
  momentum:
    start: 0.994
    peak: 1.0
    end: 1.0
    warmup_epochs: 500
43 changes: 43 additions & 0 deletions dinov3/configs/train/distillation_convnext/convnext_small_p16.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
# Distillation student config: ConvNeXt-Small, patch size 16.
ibot:
  loss_weight: 1.0
  mask_sample_probability: 0.5
  mask_ratio_min_max:
    - 0.1
    - 0.5
  mask_random_circular_shift: false
  force_masking_even_with_zero_weight: false
  separate_head: true
  head_norm_last_layer: false
  head_nlayers: 3
  head_hidden_dim: 2048
student:
  arch: convnext_small
  patch_size: 16
  drop_path_rate: 0.0
  block_chunks: 4
optim:
  epochs: 500
  clip_grad: 3.0
  layerwise_decay: 1.0
schedules:
  lr:
    # Decimal point included (1.0e-6, not 1e-6) so strict YAML 1.1 loaders
    # (e.g. plain PyYAML) resolve these as floats rather than strings.
    start: 1.0e-6
    peak: 2.0e-4
    end: 1.0e-6
    warmup_epochs: 80
    freeze_last_layer_epochs: 1
  weight_decay:
    start: 0.04
    peak: 0.2
    end: 0.2
    warmup_epochs: 500
  teacher_temp:
    start: 0.04
    peak: 0.07
    end: 0.07
    warmup_epochs: 120
  momentum:
    start: 0.994
    peak: 1.0
    end: 1.0
    warmup_epochs: 500
43 changes: 43 additions & 0 deletions dinov3/configs/train/distillation_convnext/convnext_tiny_p16.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
# Distillation student config: ConvNeXt-Tiny, patch size 16.
ibot:
  loss_weight: 1.0
  mask_sample_probability: 0.5
  mask_ratio_min_max:
    - 0.1
    - 0.5
  mask_random_circular_shift: false
  force_masking_even_with_zero_weight: false
  separate_head: true
  head_norm_last_layer: false
  head_nlayers: 3
  head_hidden_dim: 2048
student:
  arch: convnext_tiny
  patch_size: 16
  drop_path_rate: 0.0
  block_chunks: 4
optim:
  epochs: 500
  clip_grad: 3.0
  layerwise_decay: 1.0
schedules:
  lr:
    # Decimal point included (1.0e-6, not 1e-6) so strict YAML 1.1 loaders
    # (e.g. plain PyYAML) resolve these as floats rather than strings.
    start: 1.0e-6
    peak: 2.0e-4
    end: 1.0e-6
    warmup_epochs: 80
    freeze_last_layer_epochs: 1
  weight_decay:
    start: 0.04
    peak: 0.2
    end: 0.2
    warmup_epochs: 500
  teacher_temp:
    start: 0.04
    peak: 0.07
    end: 0.07
    warmup_epochs: 120
  momentum:
    start: 0.994
    peak: 1.0
    end: 1.0
    warmup_epochs: 500
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
# Multi-student distillation: four ConvNeXt students distilled from a
# frozen ViT-L teacher, each student pinned to its own rank slice.
MODEL:
  META_ARCHITECTURE: MultiDistillationMetaArch
multidistillation:
  enabled: true
  global_batch_size: 32  # 4096 for 16 nodes
  students:
    # ranks_range is a pair of rank indices per student — presumably a
    # half-open [start, end) slice of the global ranks; TODO confirm
    # against the MultiDistillationMetaArch consumer.
    - name: convnext_tiny
      config_path: dinov3/configs/train/distillation_convnext/convnext_tiny_p16.yaml
      ranks_range:
        - 0
        - 2
    - name: convnext_small
      config_path: dinov3/configs/train/distillation_convnext/convnext_small_p16.yaml
      ranks_range:
        - 2
        - 4
    - name: convnext_base
      config_path: dinov3/configs/train/distillation_convnext/convnext_base_p16.yaml
      ranks_range:
        - 4
        - 6
    - name: convnext_large
      config_path: dinov3/configs/train/distillation_convnext/convnext_large_p16.yaml
      ranks_range:
        - 6
        - 8
distillation:  # teacher
  enabled: true
  full_cfg_path: dinov3/configs/train/vitl_im1k_lin834.yaml
  # NOTE(review): "ignore" looks like a sentinel meaning "no checkpoint" —
  # confirm the loader special-cases this string.
  checkpoint_path: ignore
crops:
  global_crops_size: 512
  local_crops_size: 224
  teacher_to_student_resolution_scale: 2.0
train:
  dataset_path: ImageNet:split=TRAIN
  cache_dataset: false
  centering: "sinkhorn_knopp"
  compile: true
ibot:
  separate_head: true
Loading
Loading