# NOTE(review): removed GitHub web-page scrape residue (navigation text,
# file header, and the rendered line-number gutter 1-51) that preceded this
# metafile and was not part of metafile.yml.
# Model-index metafile for the MixMIM pre-training method (mmpretrain).
# Layout follows the model-index schema: one Collection holding the shared
# paper/code metadata, and the Models that belong to it.
# NOTE(review): leading indentation was stripped by the page extraction;
# nesting below is reconstructed per the model-index metafile schema.
Collections:
  - Name: MixMIM
    Metadata:
      # Architectural building blocks of the backbone, as catalogued by the
      # model-index taxonomy.
      Architecture:
        - Attention Dropout
        - Convolution
        - Dense Connections
        - Dropout
        - GELU
        - Layer Normalization
        - Multi-Head Attention
        - Scaled Dot-Product Attention
        - Tanh Activation
    Paper:
      Title: 'MixMIM: Mixed and Masked Image Modeling for Efficient Visual
        Representation Learning'
      URL: https://arxiv.org/abs/2205.13137
    README: configs/mixmim/README.md
    Code:
      URL: https://github.com/open-mmlab/mmpretrain/blob/main/mmpretrain/models/backbones/mixmim.py
      Version: v1.0.0rc4

Models:
  # Self-supervised pre-training run; no benchmark results are recorded for
  # it (Results: null), only the downstream fine-tuned model below.
  - Name: mixmim_mixmim-base_16xb128-coslr-300e_in1k
    Metadata:
      Epochs: 300
      Batch Size: 2048  # 16 GPUs x 128 per GPU, per the run name "16xb128"
      FLOPs: 16351906816
      Parameters: 114665784
      Training Data: ImageNet-1k
    In Collection: MixMIM
    Results: null
    Weights: https://download.openmmlab.com/mmselfsup/1.x/mixmim/mixmim-base-p16_16xb128-coslr-300e_in1k/mixmim-base-p16_16xb128-coslr-300e_in1k_20221208-44fe8d2c.pth
    Config: configs/mixmim/mixmim_mixmim-base_16xb128-coslr-300e_in1k.py
    # Models derived from this pre-trained checkpoint.
    Downstream:
      - mixmim-base_mixmim-pre_8xb128-coslr-100e_in1k
  # Fine-tuning of the pre-trained backbone on ImageNet-1k classification.
  - Name: mixmim-base_mixmim-pre_8xb128-coslr-100e_in1k
    Metadata:
      Epochs: 100
      Batch Size: 1024  # 8 GPUs x 128 per GPU, per the run name "8xb128"
      FLOPs: 16351906816
      Parameters: 88344352
      Training Data: ImageNet-1k
    In Collection: MixMIM
    Results:
      - Task: Image Classification
        Dataset: ImageNet-1k
        Metrics:
          Top 1 Accuracy: 84.63
    Weights: https://download.openmmlab.com/mmselfsup/1.x/mixmim/mixmim-base-p16_16xb128-coslr-300e_in1k/mixmim-base-p16_ft-8xb128-coslr-100e_in1k/mixmim-base-p16_ft-8xb128-coslr-100e_in1k_20221208-41ecada9.pth
    Config: configs/mixmim/benchmarks/mixmim-base_8xb128-coslr-100e_in1k.py