AutoGPTQ/auto_gptq/modeling/_const.py
2023-08-10 15:27:11 +08:00

23 lines
353 B
Python

from torch import device

# Canonical torch.device handles used throughout the package.
CPU = device("cpu")        # host device
CUDA_0 = device("cuda:0")  # first CUDA device

# Model architecture identifiers (HuggingFace ``config.model_type`` values)
# that the GPTQ quantization wrappers know how to handle. Kept as a mutable
# list so additional architectures can be registered at import time.
SUPPORTED_MODELS = (
    "bloom gptj gpt2 gpt_neox opt moss gpt_bigcode codegen "
    "RefinedWebModel RefinedWeb baichuan internlm llama qwen"
).split()

__all__ = ["CPU", "CUDA_0", "SUPPORTED_MODELS"]