add
This commit is contained in:
13
models/LLaVA/build/lib/llava/train/train_mem.py
Normal file
13
models/LLaVA/build/lib/llava/train/train_mem.py
Normal file
@@ -0,0 +1,13 @@
|
||||
# Adopted from https://github.com/lm-sys/FastChat. Below is the original copyright:
# Adopted from tatsu-lab@stanford_alpaca. Below is the original copyright:
# Make it more memory efficient by monkey patching the LLaMA model with FlashAttn.

# Need to call this before importing transformers.
from llava.train.llama_flash_attn_monkey_patch import replace_llama_attn_with_flash_attn

# Apply the FlashAttention monkey patch NOW, before `llava.train.train` is
# imported below — per the comment above, the patch must land before
# transformers is (transitively) imported so the patched LLaMA attention is
# the implementation picked up at import time. Statement order in this file
# is load-bearing; do not let a formatter/linter hoist the import below.
replace_llama_attn_with_flash_attn()

from llava.train.train import train  # noqa: E402 -- deliberately after the patch call

if __name__ == "__main__":
    # Entry point: delegates all argument parsing and the training loop to
    # llava.train.train.train().
    train()
|
Reference in New Issue
Block a user