Skip to content

Commit e438f94

Browse files
authored
disable rwkv5 fp16 (#10699)
1 parent 6a32216 commit e438f94

File tree

1 file changed

+3
-0
lines changed
  • python/llm/src/ipex_llm/transformers/models

1 file changed

+3
-0
lines changed

python/llm/src/ipex_llm/transformers/models/rwkv5.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -36,6 +36,7 @@
 import torch.nn.functional as F

 from typing import List, Optional
+from ipex_llm.utils.common.log4Error import invalidInputError


 def extract_key_value(self, hidden, state=None):
@@ -265,6 +266,8 @@ def rwkv_model_forward(
     output_hidden_states: Optional[bool] = None,
     return_dict: Optional[bool] = None,
 ):
+    invalidInputError(self.embeddings.weight.dtype == torch.float,
+                      "Only fp32 is supported for now, fp16 and bf16 are not supported")
     use_cache = use_cache if use_cache is not None else self.config.use_cache
     # change `state` layout and put `num_hidden_layers` to the highest dim
     if input_ids.device.type == "xpu" and use_cache and state is None:

0 commit comments

Comments
 (0)