Add a test that running contrastive loss doesn't kill the training
AngledLuffa committed Dec 17, 2024
commit 7c40b0f (1 parent: 276f15c)
Showing 1 changed file with 14 additions and 0 deletions.
stanza/tests/constituency/test_trainer.py (+14, −0)
@@ -277,6 +277,20 @@ def test_early_dropout(self, wordvec_pretrain_file):
         if all(module.p == 0.0 for _, module in dropouts):
             raise AssertionError("All dropouts were 0 after training even though early_dropout was set to -1")
 
+    def test_contrastive(self, wordvec_pretrain_file):
+        """
+        Test that things don't blow up when a contrastive loss is used for a few iterations
+        """
+        with tempfile.TemporaryDirectory(dir=TEST_WORKING_DIR) as tmpdirname:
+            args = ['--contrastive_learning_rate', '0.1']
+            self.run_train_test(wordvec_pretrain_file, tmpdirname, num_epochs=6, extra_args=args)
+
+        with tempfile.TemporaryDirectory(dir=TEST_WORKING_DIR) as tmpdirname:
+            # TODO: get some kind of loss record back from the training process
+            # so that we can check it is being properly applied?
+            args = ['--contrastive_learning_rate', '0.1', '--contrastive_initial_epoch', '3']
+            self.run_train_test(wordvec_pretrain_file, tmpdirname, num_epochs=6, extra_args=args)
+
     def test_train_silver(self, wordvec_pretrain_file):
         """
         Test the whole thing for a few iterations on the fake data
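The TODO in the second block points at a natural follow-up: with 6 epochs and --contrastive_initial_epoch set to 3, the contrastive term should be inactive for the first epochs and kick in afterwards. A minimal sketch of what that assertion could look like, assuming (hypothetically — no such return value exists in Stanza today) that run_train_test were extended to return per-epoch loss records:

    # Hypothetical sketch only: assumes run_train_test returns a list of per-epoch
    # dicts such as {'epoch': 0, 'tree_loss': ..., 'contrastive_loss': ...}.
    # Stanza does not currently expose these records; the keys are invented for illustration.
    losses = self.run_train_test(wordvec_pretrain_file, tmpdirname,
                                 num_epochs=6, extra_args=args)
    for record in losses:
        if record['epoch'] < 3:
            # before --contrastive_initial_epoch, the contrastive term should not contribute
            assert record['contrastive_loss'] == 0.0
        else:
            # once active, the contrastive loss should be non-zero if it is actually applied
            assert record['contrastive_loss'] > 0.0

Until then, the test only verifies that training with these flags completes without an exception; it can be run in isolation with, e.g., pytest stanza/tests/constituency/test_trainer.py -k test_contrastive.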
