This is an AI chat script I've been working on that uses LLM Studio running locally on your computer, for free. It connects to the LLM Studio API and works with whatever model/dataset you have loaded — just make sure the LLM Studio API server is turned on and listening on port 5000.
import requests
# Base URL of the local LLM Studio HTTP API endpoint. Adjust the host,
# port, and path to match your own LLM Studio server configuration
# (this example assumes the server listens on localhost:5000).
LLM_STUDIO_API_URL = "http://localhost:5000/chat"  # Example URL, modify as needed
def chat_with_llm(prompt, timeout=30):
    """Send *prompt* to the local LLM Studio endpoint and return the reply.

    Parameters:
        prompt: the user's message text, sent as the "input" JSON field.
        timeout: seconds to wait for the HTTP response (default 30).

    Returns:
        The model's "output" field (whitespace-stripped), or an error
        string on any connection/HTTP failure — callers always get a str.
    """
    try:
        # A timeout stops the client from hanging forever when the server
        # is up but unresponsive; a missing/stopped server raises
        # RequestException instead of crashing the chat loop.
        response = requests.post(
            LLM_STUDIO_API_URL, json={"input": prompt}, timeout=timeout
        )
    except requests.exceptions.RequestException:
        return "Error: Failed to connect to LLM Studio."
    if response.status_code == 200:
        return response.json().get("output", "").strip()
    # Reached the server but got a non-200 status code.
    return "Error: Failed to connect to LLM Studio."
if __name__ == "__main__":
    # Simple REPL: read a line, send it to the model, print the reply.
    # Type "quit", "exit" or "bye" (any case) to stop. Ctrl+C or EOF
    # (Ctrl+D / end of piped input) also exits cleanly instead of
    # dumping a traceback.
    try:
        while True:
            human_input = input("Human: ")
            if human_input.lower() in ["quit", "exit", "bye"]:
                break
            response = chat_with_llm(human_input)
            print("Chatbot:", response)
    except (KeyboardInterrupt, EOFError):
        pass
# NOTE(review): the entire script (the requests import, the endpoint
# constant, chat_with_llm, and the __main__ chat loop) was pasted here a
# second time, byte-for-byte. The duplicate import and redefinitions were
# harmless, but the duplicated __main__ block started a SECOND chat loop
# after the user quit the first one. Removed as duplicated dead code.