"""Load a fine-tuned GPT-2 checkpoint (model weights + tokenizer).

The OSError pasted below this snippet shows the checkpoint directory
contains model weights but NOT the tokenizer vocabulary files
('vocab.json', 'merges.txt') — older fine-tuning scripts saved only the
model. Since fine-tuning does not change GPT-2's vocabulary, the base
'gpt2' tokenizer is the correct fallback when those files are absent.
"""
import os

from transformers import AutoTokenizer, AutoModelForCausalLM

path = './gpt2/m_checkpoint-3364613'

if os.path.isfile(os.path.join(path, 'vocab.json')):
    # Checkpoint dir ships its own tokenizer files — use them.
    tokenizer = AutoTokenizer.from_pretrained(path)
else:
    # Vocabulary files missing at `path` (this is exactly what the
    # OSError below complains about); fall back to the base tokenizer,
    # which matches the fine-tuned model's vocabulary.
    tokenizer = AutoTokenizer.from_pretrained('gpt2')

model = AutoModelForCausalLM.from_pretrained(path)

OSError: Model name './gpt2/m_checkpoint-3364613' was not found in tokenizers model name list (gpt2, gpt2-medium, gpt2-large, gpt2-xl, distilgpt2). We assumed './gpt2/m_checkpoint-3364613' was a path, a model identifier, or url to a directory containing vocabulary files named ['vocab.json', 'merges.txt'] but couldn't find such vocabulary files at this path or url.