I am facing a "Resource not found" error when calling Azure OpenAI through LangChain.
This is what I have tried:
- Checked the API version, azure_openai_api_key, and model name; everything is correct.
- The model was deployed yesterday, so more than 5 minutes have passed.
- I already have an embeddings model in Azure OpenAI, and the embeddings are created and stored locally.
I have cross-checked the logs in Azure, and the console shows that the requests are reaching the service.
I am not sure why I am getting this error even though everything seems correct and in line with the LangChain and Azure OpenAI documentation.
import os
import dill # Import dill instead of pickle
import streamlit as st
from dotenv import load_dotenv
from langchain_community.document_loaders import PyPDFLoader
from langchain_community.vectorstores import FAISS
from azure.storage.blob import BlobServiceClient
from langchain_openai import AzureOpenAIEmbeddings
from langchain_openai import AzureOpenAI
from langchain.chains import RetrievalQA
from langchain.prompts import PromptTemplate
load_dotenv()
# Example entries in .env:
# AZURE_OPENAI_ENDPOINT="https://jkazureopenaiservice.openai.azure.com/"
# AZURE_OPENAI_API_KEY="key"
# DEPLOYMENT_NAME="gpt-4o"  (the name of your model deployment in Azure)
# AZURE_OPENAI_API_VERSION="2024-05-13"
# AZURE_OPENAI_DEPLOYMENT="gpt-4o"
# Azure OpenAI configuration
AZURE_OPENAI_API_BASE = os.getenv("AZURE_OPENAI_API_BASE")
AZURE_OPENAI_API_KEY = os.getenv("AZURE_OPENAI_API_KEY")
AZURE_OPENAI_API_VERSION = "2024-05-13"
DEPLOYMENT_NAME = os.getenv("DEPLOYMENT_NAME")
# Path to the folder containing your PDFs
pdf_folder_path = './localKnowledgeHub'
# Initialize a list to hold all documents
all_documents = []
# Load each PDF file
for filename in os.listdir(pdf_folder_path):
    if filename.endswith('.pdf'):
        file_path = os.path.join(pdf_folder_path, filename)
        loader = PyPDFLoader(file_path)
        documents = loader.load()
        all_documents.extend(documents)
# Initialize the embeddings model
embeddings = AzureOpenAIEmbeddings(
    model="text-embedding-3-large"
)
# Create the FAISS vector store
#vector_store = FAISS.from_documents(all_documents, embeddings)
#vector_store.save_local("vectorstore")
# Load the vector store locally
vector_store = FAISS.load_local("vectorstore", embeddings, allow_dangerous_deserialization=True)
# Initialize the Azure OpenAI language model
llm = AzureOpenAI(
    api_key=AZURE_OPENAI_API_KEY,
    api_version=AZURE_OPENAI_API_VERSION,
    deployment_name=DEPLOYMENT_NAME,
    temperature=0.7  # temperature must be between 0 and 2
)
# Define the prompt template
PROMPT_TEMPLATE = """You are an AI Assistant. Given the following context:
{context}
Answer the following question:
{question}
Assistant:"""
PROMPT = PromptTemplate(
    template=PROMPT_TEMPLATE, input_variables=["context", "question"]
)
# Create a retrieval QA chain using LangChain with the custom prompt
qa_chain = RetrievalQA.from_chain_type(
    llm=llm,
    chain_type="stuff",  # 'stuff' passes the retrieved documents directly into the prompt
    retriever=vector_store.as_retriever(),
    chain_type_kwargs={"prompt": PROMPT}
)
# Set up the Streamlit UI
st.title("Chatbot")
st.write("Ask me anything a.")
# Input box for user questions
user_question = st.text_input("Question:")
# When the user submits a question
if user_question:
    # Get the answer from the QA chain using the invoke method
    try:
        result = qa_chain.invoke({"query": user_question})
        answer = result['result']  # RetrievalQA returns the answer under the 'result' key
    except Exception as e:
        answer = f"An error occurred: {str(e)}"
    # Display the answer
    st.write("Answer:", answer)
2 Answers
According to this document, the latest preview and GA API versions are 2024-07-01-preview and 2024-06-01. The API version you have given is not supported, which is why you are getting the "resource not found" error. Try one of the above API versions.
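For example, here is a minimal sketch of the changed initialization, keeping the rest of your configuration as-is; the only edit is the api_version value:

```python
import os
from langchain_openai import AzureOpenAI

# "2024-05-13" is a gpt-4o *model* version, not a service API version;
# use a supported API version such as "2024-06-01".
llm = AzureOpenAI(
    azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
    api_key=os.getenv("AZURE_OPENAI_API_KEY"),
    api_version="2024-06-01",
    deployment_name=os.getenv("DEPLOYMENT_NAME"),
)
```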
I tried your code with a sample PDF file, using one of the above API versions, and it produced output.
You can also find sample code in your AI Studio chat playground, which shows both the supported API version and the code to use.
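That playground sample typically uses the openai SDK and looks roughly like the sketch below; the exact code and API version are shown in your own playground:

```python
import os
from openai import AzureOpenAI

client = AzureOpenAI(
    azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
    api_key=os.getenv("AZURE_OPENAI_API_KEY"),
    api_version="2024-06-01",  # use the version your playground shows
)

response = client.chat.completions.create(
    model="gpt-4o",  # for Azure, this is your deployment name
    messages=[{"role": "user", "content": "Hello!"}],
)
print(response.choices[0].message.content)
```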
It is also recommended to use azure_ad_token_provider instead of an API key. And instead of AzureOpenAI, try using AzureChatOpenAI. Details here: https://python.langchain.com/v0.2/docs/integrations/chat/azure_chat_openai/
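A minimal sketch of that combination (assuming the azure-identity package is installed and your identity has an appropriate role, such as Cognitive Services OpenAI User, on the resource):

```python
import os
from azure.identity import DefaultAzureCredential, get_bearer_token_provider
from langchain_openai import AzureChatOpenAI

# Exchange your Azure AD identity for tokens instead of passing an API key.
token_provider = get_bearer_token_provider(
    DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
)

llm = AzureChatOpenAI(
    azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
    azure_deployment="gpt-4o",  # your chat model deployment name
    api_version="2024-06-01",   # a supported API version
    azure_ad_token_provider=token_provider,
)
```

This llm can then be passed to RetrievalQA.from_chain_type in place of the AzureOpenAI instance above.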