From 8590e4bd651ee573cc52e80aca9400b4a46eab29 Mon Sep 17 00:00:00 2001
From: Nils Blach
Date: Wed, 18 Oct 2023 10:34:27 +0900
Subject: [PATCH] Fix importing transformers library before setting of env
 variables

---
 graph_of_thoughts/controller/llamachat_hf.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/graph_of_thoughts/controller/llamachat_hf.py b/graph_of_thoughts/controller/llamachat_hf.py
index 0065cf8..d423a50 100644
--- a/graph_of_thoughts/controller/llamachat_hf.py
+++ b/graph_of_thoughts/controller/llamachat_hf.py
@@ -8,7 +8,6 @@
 
 import os
 import torch
-import transformers
 from typing import List, Dict, Union
 
 from .abstract_language_model import AbstractLanguageModel
@@ -48,6 +47,7 @@ class Llama2HF(AbstractLanguageModel):
 
         # Important: must be done before importing transformers
         os.environ["TRANSFORMERS_CACHE"] = self.config["cache_dir"]
+        import transformers
 
         hf_model_id = f"meta-llama/{self.model_id}"
         model_config = transformers.AutoConfig.from_pretrained(hf_model_id)