#!/usr/bin/env python
"""Example LangChain server that exposes a retriever and a Langflow chat flow."""
import os

from dotenv import load_dotenv
from fastapi import FastAPI
from langchain_community.embeddings import OllamaEmbeddings
from langchain_community.vectorstores import FAISS
from langflow.load import load_flow_from_json
from langserve import add_routes

# Load environment variables (expects HF_API_KEY in a local .env file).
load_dotenv()
HF_API_KEY = os.getenv("HF_API_KEY")

# Load the chatbot flow exported from Langflow as JSON.
chain_flow = load_flow_from_json("Memory Chatbot.json")

# Build a small in-memory FAISS index backed by a remote embedding endpoint.
vectorstore = FAISS.from_texts(
    ["cats like fish", "dogs like sticks"],
    embedding=OllamaEmbeddings(
        model="nomic-embed-text",
        base_url="https://lintasmediadanawa-hf-llm-api.hf.space",
        headers={"Authorization": "Bearer " + HF_API_KEY},
    ),
)
retriever = vectorstore.as_retriever()

app = FastAPI(
    title="LangChain Server",
    version="1.0",
    description="Spin up a simple API server using LangChain's Runnable interfaces",
)

# Adds routes to the app for using the retriever under:
# /retriever/invoke
# /retriever/batch
# /retriever/stream
add_routes(app, retriever, path="/retriever")
add_routes(app, chain_flow, path="/chat")

if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="localhost", port=8000)
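
Once the server is running, the exposed routes can be called over HTTP. A minimal client sketch using LangServe's RemoteRunnable is shown below; it assumes the default localhost:8000 address configured above and should be run in a separate process from the server.

    from langserve import RemoteRunnable

    # Points at the /retriever route added above.
    remote_retriever = RemoteRunnable("http://localhost:8000/retriever")

    # Runs a similarity search against the FAISS index on the server and
    # returns the matching documents (e.g. "cats like fish").
    docs = remote_retriever.invoke("what do cats like?")
    print(docs)

The same routes are also reachable as plain REST endpoints (for example, POSTing {"input": "what do cats like?"} to /retriever/invoke), so the server can be consumed from non-Python clients as well.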