oobabooga 2025-04-26 19:29:08 -07:00
parent 363b632a0d
commit 70952553c7
2 changed files with 10 additions and 10 deletions

View file

@@ -3,7 +3,6 @@ import traceback
from pathlib import Path
import torch
from exllamav2 import (
    ExLlamaV2,
    ExLlamaV2Cache,
@@ -16,6 +15,7 @@ from exllamav2 import (
    ExLlamaV2Tokenizer
)
from exllamav2.generator import ExLlamaV2Sampler, ExLlamaV2StreamingGenerator
from modules import shared
from modules.logging_colors import logger
from modules.text_generation import get_max_prompt_length

View file

@@ -4,15 +4,6 @@ from pathlib import Path
from typing import Any, Dict, Optional, Union
import torch
-from torch.nn import CrossEntropyLoss
-from transformers import (
-    GenerationConfig,
-    GenerationMixin,
-    PretrainedConfig,
-    PreTrainedModel
-)
-from transformers.modeling_outputs import CausalLMOutputWithPast
from exllamav2 import (
    ExLlamaV2,
    ExLlamaV2Cache,
@@ -23,6 +14,15 @@ from exllamav2 import (
    ExLlamaV2Cache_TP,
    ExLlamaV2Config
)
+from torch.nn import CrossEntropyLoss
+from transformers import (
+    GenerationConfig,
+    GenerationMixin,
+    PretrainedConfig,
+    PreTrainedModel
+)
+from transformers.modeling_outputs import CausalLMOutputWithPast
from modules import shared
from modules.logging_colors import logger
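
Net effect of the two hunks in the second file: the torch.nn and transformers imports are moved below the exllamav2 import block. A rough sketch of the resulting import section, assembled only from the lines visible in the hunks above; the exllamav2 names elided between the hunks and the exact blank-line placement are assumptions, not shown in this extract:

from pathlib import Path
from typing import Any, Dict, Optional, Union

import torch
from exllamav2 import (
    ExLlamaV2,
    ExLlamaV2Cache,
    # ... additional exllamav2 names not shown in this extract ...
    ExLlamaV2Cache_TP,
    ExLlamaV2Config
)
from torch.nn import CrossEntropyLoss
from transformers import (
    GenerationConfig,
    GenerationMixin,
    PretrainedConfig,
    PreTrainedModel
)
from transformers.modeling_outputs import CausalLMOutputWithPast

from modules import shared
from modules.logging_colors import logger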