| """ | |
| This module loads the LLM from the local file system | |
| Modify this file if you need to download some other model from Hugging Face or OpenAI/ChatGPT | |
| """ | |
# Alternative backends kept for reference:
# from langchain.llms import CTransformers
# from langchain_openai import OpenAI
from langchain_groq import ChatGroq  # used only by the commented-out Groq branch in load_llm()
from langchain_google_genai import ChatGoogleGenerativeAI
from dotenv import load_dotenv
import os

# Default Groq model name (only used if the ChatGroq branch is re-enabled).
model_name = 'gemma2-9b-it'

def load_llm(model_name=model_name):
    """Return the chat model client used throughout the app."""
    # Groq alternative (requires GROQ_API_KEY in the environment):
    # llm = ChatGroq(
    #     temperature=0,
    #     model=model_name,
    # )

    # Gemini via the Google Generative AI API (requires GOOGLE_API_KEY):
    llm = ChatGoogleGenerativeAI(
        model="gemini-1.5-flash",
        temperature=0,       # deterministic answers
        max_tokens=None,     # no explicit cap on output length
        timeout=None,
        max_retries=2,
    )
    return llm
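
# A minimal sketch of an alternative loader for the OpenAI/ChatGPT route
# mentioned in the module docstring. It is not part of the app's current flow:
# it assumes the langchain-openai package is installed and OPENAI_API_KEY is
# set in the environment, and "gpt-4o-mini" is only an example model name.
def load_openai_llm(model_name="gpt-4o-mini"):
    # Imported locally so the module still loads when langchain-openai is absent.
    from langchain_openai import ChatOpenAI
    return ChatOpenAI(
        model=model_name,
        temperature=0,
    )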

if __name__ == '__main__':
    load_dotenv()  # read the provider API key (e.g. GOOGLE_API_KEY) from a local .env file
    llm = load_llm()
    result = llm.invoke("Provide a short answer: What is machine learning?")
    print(result.content)
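
# Example of reusing this loader from another part of the project (a sketch;
# the import path assumes this file is saved as load_llm.py, which may differ
# in your repository):
#
#     from load_llm import load_llm
#
#     llm = load_llm()
#     print(llm.invoke("Summarize LangChain in one sentence.").content)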