Skip to content

Commit c5c90a8

Browse files
committed
Clean up imports
1 parent 27bccf0 commit c5c90a8

File tree

10 files changed

+3
-15
lines changed

10 files changed

+3
-15
lines changed

conversion/adaptivegptq.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,8 +2,6 @@
22
from torch import nn
33
import torch.nn.functional as F
44
import math
5-
import exllamav2.ext
6-
from exllamav2 import ext
75
from exllamav2.ext import exllamav2_ext as ext_c, none_tensor
86

97

conversion/quantize.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
from conversion.adaptivegptq import AdaptiveGPTQ
66
import torch
77
from torch import nn
8-
import os, sys, time, math, json
8+
import os, time, math, json
99
import torch.nn.functional as F
1010
import gc
1111

examples/streaming.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
import sys, os
33
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
44

5-
from exllamav2 import(
5+
from exllamav2 import (
66
ExLlamaV2,
77
ExLlamaV2Config,
88
ExLlamaV2Cache,

exllamav2/attn.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,8 +8,6 @@
88
import math
99
from exllamav2 import ext
1010
from exllamav2.ext import exllamav2_ext as ext_c, none_tensor
11-
import gc
12-
import sys
1311
# from flash_attn import flash_attn_func
1412
# import xformers.ops as xops
1513
# from exllamav2.util import list_live_tensors, set_snapshot, diff_snapshot, print_vram_usage_peak

exllamav2/config.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import torch
22
from safetensors import safe_open
3-
import sys, os, glob, json
3+
import os, glob, json
44

55
class ExLlamaV2Config:
66

exllamav2/linear.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
import torch
22
from exllamav2.module import ExLlamaV2Module
33
from torch import nn
4-
import math
54
from exllamav2 import ext
65
from exllamav2.ext import exllamav2_ext as ext_c, none_tensor
76
from safetensors import safe_open

exllamav2/mlp.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,8 @@
11
import torch
2-
from torch import nn
32
import torch.nn.functional as F
43
from exllamav2.module import ExLlamaV2Module
54
from exllamav2.rmsnorm import ExLlamaV2RMSNorm
65
from exllamav2.linear import ExLlamaV2Linear
7-
from exllamav2 import ext
86
from exllamav2.ext import exllamav2_ext as ext_c, none_tensor
97

108
class ExLlamaV2MLP(ExLlamaV2Module):

exllamav2/model.py

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -6,9 +6,6 @@
66
print("")
77

88
import torch
9-
from torch import nn
10-
import torch.nn.functional as F
11-
from safetensors import safe_open
129
import math
1310
from exllamav2.config import ExLlamaV2Config
1411
from exllamav2.cache import ExLlamaV2Cache

exllamav2/model_init.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,6 @@
44
from exllamav2 import(
55
ExLlamaV2,
66
ExLlamaV2Config,
7-
ExLlamaV2Cache,
87
ExLlamaV2Tokenizer
98
)
109

exllamav2/rmsnorm.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
import torch
22
from torch import nn
33
from exllamav2.module import ExLlamaV2Module
4-
from exllamav2 import ext
54
from exllamav2.ext import exllamav2_ext as ext_c, none_tensor
65

76
class ExLlamaV2RMSNorm(ExLlamaV2Module):

0 commit comments

Comments (0)