Based on the discussion at https://huggingface.co/codellama/CodeLlama-7b-Instruct-hf/discussions/10
- pip install git+https://github.com/huggingface/transformers.git@main
- pip install tokenizers transformers
# Use a pipeline as a high-level helper
from transformers import pipeline
from transformers import AutoModelForCausalLM, AutoTokenizer, AutoConfig