# Download helper: fetches the CodeLlama-7b tokenizer and model weights from
# the Hugging Face Hub and saves local copies for offline use.
from transformers import AutoTokenizer, AutoModelForCausalLM

# Hub repo id and local output directories, kept in one place so the
# checkpoint can be swapped without touching the download logic.
MODEL_ID = "codellama/CodeLlama-7b-hf"
TOKENIZER_DIR = "CodeLlama-7b-tokenizer"
MODEL_DIR = "CodeLlama-7b-model"


def main() -> None:
    """Download the CodeLlama-7b tokenizer and model, then save them locally.

    Side effects: network access to the Hugging Face Hub (downloads are
    cached by `transformers`), and writes to TOKENIZER_DIR and MODEL_DIR.
    The model weights are large (several GB), so this can take a while.
    """
    # Tokenizer first: it is small and fails fast if the repo id is wrong.
    tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
    tokenizer.save_pretrained(TOKENIZER_DIR)

    # Model weights: this is the multi-GB download.
    model = AutoModelForCausalLM.from_pretrained(MODEL_ID)
    model.save_pretrained(MODEL_DIR)


# Guard so that importing this module does not trigger the downloads.
if __name__ == "__main__":
    main()