|
## 情况描述

环境:

- linux
- transformers 4.39.0
- tokenizers 0.15.2
- torch 2.1.2+cu121
- flash-attn 2.3.3

在使用 vllm 运行 xverse/XVERSE-13B-256K 时(代码如下):

```python
qwen_model = AutoModelForSequenceClassification.from_pretrained(
    args.pre_train,
    trust_remote_code=True,
    attn_implementation="flash_attention_2",
    torch_dtype=torch.bfloat16,
    device_map="auto",  # balanced_low_0
    num_labels=5,
)
```

报错如下:

```
Traceback (most recent call last):
  File "/usr/local/app/.local/lib/python3.10/site-packages/transformers/utils/import_utils.py", line 1364, in _get_module
    return importlib.import_module("." + module_name, self.__name__)
  File "/data/miniconda3/envs/xxx/lib/python3.10/importlib/__init__.py", line 126, in import_module
    return _bootstrap._gcd_import(name[level:], package, level)
  File "<frozen importlib._bootstrap>", line 1050, in _gcd_import
  File "<frozen importlib._bootstrap>", line 1027, in _find_and_load
  File "<frozen importlib._bootstrap>", line 1006, in _find_and_load_unlocked
  File "<frozen importlib._bootstrap>", line 688, in _load_unlocked
  File "<frozen importlib._bootstrap_external>", line 883, in exec_module
  File "<frozen importlib._bootstrap>", line 241, in _call_with_frames_removed
  File "/usr/local/app/.local/lib/python3.10/site-packages/transformers/models/qwen2/modeling_qwen2.py", line 49, in <module>
    from flash_attn import flash_attn_func, flash_attn_varlen_func
  File "/usr/local/app/.local/lib/python3.10/site-packages/flash_attn/__init__.py", line 3, in <module>
    from flash_attn.flash_attn_interface import (
  File "/usr/local/app/.local/lib/python3.10/site-packages/flash_attn/flash_attn_interface.py", line 10, in <module>
    import flash_attn_2_cuda as flash_attn_cuda
ImportError: /usr/local/app/.local/lib/python3.10/site-packages/flash_attn_2_cuda.cpython-310-x86_64-linux-gnu.so: undefined symbol: _ZN3c104cuda9SetDeviceEi

The above exception was the direct cause of the following exception:

Traceback (most recent call last):
  File "/cfs/xxx/xxx/long-context/xxx/train.py", line 434, in <module>
    qwen_model = AutoModelForCausalLM.from_pretrained(
  File "/usr/local/app/.local/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py", line 565, in from_pretrained
    model_class = _get_model_class(config, cls._model_mapping)
  File "/usr/local/app/.local/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py", line 387, in _get_model_class
    supported_models = model_mapping[type(config)]
  File "/usr/local/app/.local/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py", line 740, in __getitem__
    return self._load_attr_from_module(model_type, model_name)
  File "/usr/local/app/.local/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py", line 754, in _load_attr_from_module
    return getattribute_from_module(self._modules[module_name], attr)
  File "/usr/local/app/.local/lib/python3.10/site-packages/transformers/models/auto/auto_factory.py", line 698, in getattribute_from_module
    if hasattr(module, attr):
  File "/usr/local/app/.local/lib/python3.10/site-packages/transformers/utils/import_utils.py", line 1354, in __getattr__
    module = self._get_module(self._class_to_module[name])
  File "/usr/local/app/.local/lib/python3.10/site-packages/transformers/utils/import_utils.py", line 1366, in _get_module
    raise RuntimeError(
RuntimeError: Failed to import transformers.models.qwen2.modeling_qwen2 because of the following error (look up to see its traceback):
/usr/local/app/.local/lib/python3.10/site-packages/flash_attn_2_cuda.cpython-310-x86_64-linux-gnu.so: undefined symbol: _ZN3c104cuda9SetDeviceEi
```

## 解决

`undefined symbol` 说明已安装的 flash-attn 预编译包与当前 torch 2.1.2+cu121 的 ABI 不匹配,升级为与之兼容的版本即可:

```
pip install flash-attn==2.5.9.post1
```
|
|