{
"meta-llama/Llama-3.1-8B": [
{
"batch_size": 1,
"sequence_length": 4096,
"tensor_parallel_size": 4,
"instance_type": "trn2"
},
{
"batch_size": 8,
"sequence_length": 4096,
"tensor_parallel_size": 4,
"instance_type": "trn2"
},
{
"batch_size": 32,
"sequence_length": 4096,
"tensor_parallel_size": 4,
"instance_type": "trn2"
},
{
"batch_size": 64,
"sequence_length": 4096,
"tensor_parallel_size": 4,
"instance_type": "trn2"
}
],
"meta-llama/Llama-3.2-1B": [
{
"batch_size": 1,
"sequence_length": 4096,
"tensor_parallel_size": 4,
"instance_type": "trn2"
},
{
"batch_size": 4,
"sequence_length": 4096,
"tensor_parallel_size": 4,
"instance_type": "trn2"
},
{
"batch_size": 64,
"sequence_length": 4096,
"tensor_parallel_size": 4,
"instance_type": "trn2"
}
],
"meta-llama/Llama-3.2-3B": [
{
"batch_size": 1,
"sequence_length": 4096,
"tensor_parallel_size": 4,
"instance_type": "trn2"
},
{
"batch_size": 64,
"sequence_length": 4096,
"tensor_parallel_size": 4,
"instance_type": "trn2"
}
],
"TinyLlama/TinyLlama-1.1B-Chat-v1.0": [
{
"batch_size": 1,
"sequence_length": 2048,
"tensor_parallel_size": 4,
"instance_type": "trn2"
}
],
"meta-llama/Llama-3.3-70B-Instruct": [
{
"batch_size": 32,
"sequence_length": 4096,
"tensor_parallel_size": 32,
"instance_type": "trn2"
},
{
"batch_size": 1,
"sequence_length": 4096,
"tensor_parallel_size": 64,
"instance_type": "trn2"
},
{
"batch_size": 32,
"sequence_length": 4096,
"tensor_parallel_size": 64,
"instance_type": "trn2"
},
{
"batch_size": 64,
"sequence_length": 4096,
"tensor_parallel_size": 64,
"instance_type": "trn2"
}
]
}