Releases: chufangao/TTM-RE

v0.2 (Pre-release, 06 Oct 01:41)

Place the released checkpoint files in out/train_revised_roberta_ttmre_docred_S-PU_isrank_1_m_1.0_e_1.0_seed_74 (a download-and-place sketch follows).
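
A minimal sketch of fetching and placing the release asset. The asset name checkpoint.zip is hypothetical; substitute the file actually attached to this release.

# hypothetical asset name; check the files attached to the v0.2 release
mkdir -p out/train_revised_roberta_ttmre_docred_S-PU_isrank_1_m_1.0_e_1.0_seed_74
wget https://github.com/chufangao/TTM-RE/releases/download/v0.2/checkpoint.zip
unzip checkpoint.zip -d out/train_revised_roberta_ttmre_docred_S-PU_isrank_1_m_1.0_e_1.0_seed_74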

v0.1 (Pre-release, 17 Jun 07:09, commit 1e640c8)

#!/bin/sh

# Fine-tuning on the revised DocRED splits (train/dev/test_revised.json);
# the hyperparameters match the checkpoint directory name in the v0.2 release.
python train2.py --data_dir ./dataset/docred \
    --transformer_type roberta \
    --model_name_or_path roberta-large \
    --train_file train_revised.json \
    --dev_file dev_revised.json \
    --test_file test_revised.json \
    --train_batch_size 4 \
    --test_batch_size 4 \
    --gradient_accumulation_steps 1 \
    --learning_rate 3e-5 \
    --max_grad_norm 1.0 \
    --warmup_ratio 0.06 \
    --num_train_epochs 30.0 \
    --seed 74 \
    --num_class 97 \
    --isrank 1 \
    --m_tag S-PU \
    --model_type mse_dist3 \
    --m 1.0 \
    --e 1.0 \
    --pretrain_distant 2 \
    --num_layers 4

# Fine-tuning results: {'test_F1': 83.6041966673524, 'test_F1_ign': 82.67186522190119, 're_p': 85.79528318957718, 're_r': 81.52223750573133}
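
Usage sketch: the script filename run_docred.sh is hypothetical, and the output location is an assumption inferred from the directory name given in the v0.2 release note.

# save the command above as run_docred.sh, then:
sh run_docred.sh
# outputs are expected under out/, in a directory encoding m_tag, isrank, m, e, and seed
ls out/train_revised_roberta_ttmre_docred_S-PU_isrank_1_m_1.0_e_1.0_seed_74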