Attempt to fix xlm roberta test w/ pretrained hf weight difference
rwightman committed Oct 12, 2023
1 parent 2c396d2 commit e14f34b
Showing 1 changed file with 2 additions and 2 deletions.
src/open_clip/factory.py: 4 changes (2 additions & 2 deletions)

@@ -240,9 +240,9 @@ def create_model(
     # cast_dtype set for fp16 and bf16 (manual mixed-precision), not set for 'amp' or 'pure' modes
     cast_dtype = get_cast_dtype(precision)
     is_hf_model = 'hf_model_name' in model_cfg.get('text_cfg', {})
-    if is_hf_model and not pretrained and pretrained_hf:
+    if is_hf_model:
         # load pretrained weights for HF text model IFF no CLIP weights being loaded
-        model_cfg['text_cfg']['hf_model_pretrained'] = True
+        model_cfg['text_cfg']['hf_model_pretrained'] = pretrained_hf and not pretrained
     custom_text = model_cfg.pop('custom_text', False) or force_custom_text or is_hf_model

     if custom_text:
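The change is subtle: the old code only wrote `hf_model_pretrained` when the full condition held, so whenever a CLIP checkpoint was requested (`pretrained` set) the branch was skipped and any `hf_model_pretrained` default already present in the model's config could leak through, which appears to be how the xlm-roberta test picked up a pretrained HF weight difference. The new code assigns the flag unconditionally for every HF text model. The sketch below illustrates that truth table; it is not open_clip code, and `cfg_default`, `old_flag`, `new_flag`, and the checkpoint name are hypothetical names for illustration.

# A minimal sketch (not open_clip's API) contrasting the old and new logic
# for deciding whether the HF text tower loads its own pretrained weights.

def old_flag(cfg_default: bool, pretrained: str, pretrained_hf: bool) -> bool:
    # Old behavior: the flag was only assigned when the condition held, so in
    # every other case the model config's default survived untouched.
    flag = cfg_default
    if not pretrained and pretrained_hf:
        flag = True
    return flag

def new_flag(cfg_default: bool, pretrained: str, pretrained_hf: bool) -> bool:
    # New behavior: the flag is always assigned for HF models, so HF weights
    # are requested IFF pretrained_hf is set and no CLIP checkpoint is loaded.
    return pretrained_hf and not pretrained

# cfg_default=True mimics a model config that ships with hf_model_pretrained
# already set to True (the suspected source of the test's weight difference).
for pretrained in ('', 'some_clip_checkpoint'):  # hypothetical checkpoint tag
    for pretrained_hf in (False, True):
        o = old_flag(True, pretrained, pretrained_hf)
        n = new_flag(True, pretrained, pretrained_hf)
        print(f'pretrained={pretrained!r:22} pretrained_hf={pretrained_hf!s:5} old={o!s:5} new={n}')

Under the new logic the flag is False whenever a CLIP checkpoint is named, regardless of the config default, which matches the intent stated in the in-line comment.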
