sage/codellama/load_model.py
from transformers import AutoTokenizer, AutoModelForCausalLM

# Download the CodeLlama-7B tokenizer and model from the Hugging Face Hub
# and save local copies so later runs do not need to fetch them again.
tokenizer = AutoTokenizer.from_pretrained("codellama/CodeLlama-7b-hf")
tokenizer.save_pretrained("CodeLlama-7b-tokenizer")

model = AutoModelForCausalLM.from_pretrained("codellama/CodeLlama-7b-hf")
model.save_pretrained("CodeLlama-7b-model")
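
The saved directories can then be reloaded without contacting the Hub. Below is a minimal sketch; the prompt and generation settings are illustrative and not part of the original script.

from transformers import AutoTokenizer, AutoModelForCausalLM

# Reload from the local copies written above; local_files_only=True
# prevents any re-download from the Hub.
tokenizer = AutoTokenizer.from_pretrained("CodeLlama-7b-tokenizer", local_files_only=True)
model = AutoModelForCausalLM.from_pretrained("CodeLlama-7b-model", local_files_only=True)

# Illustrative prompt: CodeLlama-7b-hf is a base model, so plain completion is used.
prompt = "def fibonacci(n):"
inputs = tokenizer(prompt, return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))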