# Load model directly
!pip install transformers==4.30.0

from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("sohamslc5/llama_2_7B_sohamslc5")
model = AutoModelForCausalLM.from_pretrained("sohamslc5/llama_2_7B_sohamslc5")
print(model)
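
# Once the tokenizer and model are loaded, they can be used for text
# generation. The sketch below is illustrative only: the prompt and the
# generation settings are assumptions, not part of the original snippet.
import torch

prompt = "Hello, how are you?"
inputs = tokenizer(prompt, return_tensors="pt")

# Generate up to 50 new tokens without tracking gradients.
with torch.no_grad():
    output_ids = model.generate(**inputs, max_new_tokens=50)

print(tokenizer.decode(output_ids[0], skip_special_tokens=True))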