Merge pull request #326 from TheBloke/TB_Latest_Falcon
Add support for Falcon as part of Transformers 4.33.0, including the new Falcon 180B
Commit 06e071e68e
2 changed files with 3 additions and 0 deletions
@@ -24,6 +24,8 @@ SUPPORTED_MODELS = [
 ]
 if compare_transformers_version("v4.28.0", op="ge"):
     SUPPORTED_MODELS.append("llama")
+if compare_transformers_version("v4.33.0", op="ge"):
+    SUPPORTED_MODELS.append("falcon")
 
 EXLLAMA_DEFAULT_MAX_INPUT_LENGTH = 2048
 
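The new lines gate Falcon on the installed Transformers version, so "falcon" only joins SUPPORTED_MODELS when 4.33.0 or newer is present. The compare_transformers_version helper is defined elsewhere in the repository; the snippet below is only an illustrative sketch of how such a version gate can be implemented with the packaging library and the standard operator module.

import operator

import transformers
from packaging import version


def compare_transformers_version(ref: str, op: str = "eq") -> bool:
    # Map the op keyword ("eq", "ge", ...) to the corresponding comparison.
    ops = {
        "eq": operator.eq,
        "ge": operator.ge,
        "gt": operator.gt,
        "le": operator.le,
        "lt": operator.lt,
    }
    installed = version.parse(transformers.__version__)
    reference = version.parse(ref.lstrip("v"))
    return ops[op](installed, reference)


# With transformers >= 4.33.0 installed, the gate evaluates to True
# and "falcon" would be appended to SUPPORTED_MODELS.
if compare_transformers_version("v4.33.0", op="ge"):
    print("falcon architecture available")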
@@ -29,6 +29,7 @@ GPTQ_CAUSAL_LM_MODEL_MAP = {
     "codegen": CodeGenGPTQForCausalLM,
     "RefinedWebModel": RWGPTQForCausalLM,
     "RefinedWeb": RWGPTQForCausalLM,
+    "falcon": RWGPTQForCausalLM,
     "baichuan": BaiChuanGPTQForCausalLM,
     "internlm": InternLMGPTQForCausalLM,
     "qwen": QwenGPTQForCausalLM,
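With "falcon" mapped to RWGPTQForCausalLM (the same class already used for the older "RefinedWeb"/"RefinedWebModel" architecture names), the auto class can dispatch checkpoints whose config reports model_type "falcon" under Transformers 4.33.0. A minimal usage sketch, assuming AutoGPTQ built from this branch and a pre-quantized Falcon GPTQ checkpoint; the repo id below is a placeholder, not a specific model from this PR.

from transformers import AutoTokenizer
from auto_gptq import AutoGPTQForCausalLM

model_id = "your-org/falcon-gptq"  # placeholder: any GPTQ-quantized Falcon checkpoint

tokenizer = AutoTokenizer.from_pretrained(model_id)
# The new "falcon" entry in GPTQ_CAUSAL_LM_MODEL_MAP routes this
# architecture to RWGPTQForCausalLM when the model is loaded.
model = AutoGPTQForCausalLM.from_quantized(model_id, device="cuda:0", use_safetensors=True)

prompt = "Falcon 180B is"
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
output = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(output[0], skip_special_tokens=True))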