Skip to content

Commit

Permalink
[Refactor] Use a small BLOOM model in the model-partitioning tests
Browse files Browse the repository at this point in the history
  • Loading branch information
xrsrke committed Nov 28, 2023
1 parent 369263a commit 1dbcf4e
Showing 1 changed file with 5 additions and 9 deletions.
14 changes: 5 additions & 9 deletions tests/nn/pipeline_parallel/test_partitioner.py
Original file line number Diff line number Diff line change
@@ -1,24 +1,21 @@
import pytest
import torch
from transformers import (
AutoModelForCausalLM,
AutoTokenizer,
BloomConfig,
BloomForCausalLM,
GPT2Config,
GPT2LMHeadModel,
)

from pipegoose.nn.pipeline_parallel.partitioner import UniformPartitioner
from pipegoose.testing.utils import init_parallel_context, spawn


def get_gpt2_and_tokenizer():
    """Return a small randomly-initialized GPT-2 model and its tokenizer.

    Uses a 6-layer ``GPT2Config`` instead of downloading the full pretrained
    "gpt2" checkpoint so the partitioner tests stay fast and lightweight
    (this is the post-commit form shown in the diff; the old body downloaded
    the full pretrained model).
    """
    # NOTE(review): only the config is synthetic; the tokenizer is still
    # fetched from the Hugging Face hub, which requires network access.
    model = GPT2LMHeadModel(config=GPT2Config(n_layer=6))
    tokenizer = AutoTokenizer.from_pretrained("gpt2")
    return model, tokenizer


def get_bloom_560m_and_tokenizer():
    """Return the pretrained bigscience/bloom-560m model and its tokenizer.

    NOTE(review): the surrounding diff shows this helper being deleted by
    the commit (it was replaced by a small-config BLOOM fixture); it is kept
    here so any remaining caller still resolves. Downloads ~560M-parameter
    weights from the Hugging Face hub — avoid in fast test runs.
    """
    return AutoModelForCausalLM.from_pretrained("bigscience/bloom-560m"), AutoTokenizer.from_pretrained(
        "bigscience/bloom-560m"
    )


def get_bloom_and_tokenizer_with_6_layers():
Expand Down Expand Up @@ -87,7 +84,6 @@ def run_model_partitioner(
[
get_gpt2_and_tokenizer,
get_bloom_and_tokenizer_with_6_layers,
get_bloom_560m_and_tokenizer,
],
)
def test_naive_partitioning(pipeline_parallel_size, model_retrieval_func):
Expand Down

0 comments on commit 1dbcf4e

Please sign in to comment.