Romain Lembo committed
Commit 9a6bf7a · 1 Parent(s): 1de2e9a

Refactor HuggingFace model initialization to directly retrieve API token from environment variables

Files changed (1)
  1. agent.py +1 -3
agent.py CHANGED
@@ -61,8 +61,6 @@ def build_graph(provider: str = "google"):
         # Groq https://console.groq.com/docs/models
         llm = ChatGroq(model="qwen-qwq-32b", temperature=0) # optional : qwen-qwq-32b gemma2-9b-it
     elif provider == "huggingface":
-        # Get HuggingFace API token from environment variables
-        hf_token = os.environ.get("HF_TOKEN")
         llm = ChatHuggingFace(
             llm=HuggingFaceEndpoint(
                 repo_id="mistralai/Mistral-7B-Instruct-v0.2",
@@ -72,7 +70,7 @@ def build_graph(provider: str = "google"):
                 # "google/gemma-7b-it"
                 # "mosaicml/mpt-7b-instruct"
                 # "tiiuae/falcon-7b-instruct"
-                token=hf_token,
+                token=os.environ.get("HF_TOKEN"),
             ),
         )
     else:
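For reference, here is a minimal self-contained sketch of what the "huggingface" branch builds after this commit. The make_hf_llm helper and the langchain_huggingface import path are assumptions added for illustration; only repo_id and the token argument are taken from the hunks above, and any other endpoint arguments hidden by the diff context are left out.

import os

from langchain_huggingface import ChatHuggingFace, HuggingFaceEndpoint  # assumed import path


def make_hf_llm() -> ChatHuggingFace:
    # Hypothetical helper mirroring the post-commit code: the API token is
    # read from the environment at construction time instead of being kept
    # in an intermediate hf_token variable.
    return ChatHuggingFace(
        llm=HuggingFaceEndpoint(
            repo_id="mistralai/Mistral-7B-Instruct-v0.2",
            token=os.environ.get("HF_TOKEN"),  # token kwarg taken verbatim from the commit
        ),
    )

HF_TOKEN must be present in the environment (for example, as a Space secret) before the helper runs; os.environ.get simply returns None when the variable is missing.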