Commit c2e8954
Parent(s): 8e003ad

Update Gradio. Fix multiple file upload

Files changed:
- .gitignore +2 -2
- README.md +1 -1
- app.py +18 -18
.gitignore CHANGED

@@ -1,3 +1,3 @@
-chroma_db/*
 __pycache__/*
-.venv
+.venv
+.env
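The newly ignored `.env` file is where the app's secrets live at runtime: app.py calls `load_dotenv()` (visible in its context lines later in this diff), which loads key=value pairs from a local `.env` file into the environment. A minimal sketch of that pattern, assuming the secret is named `MISTRAL_API_KEY` (the variable name is not shown anywhere in this commit):

```python
# Sketch only: illustrates why .env must stay out of version control.
# MISTRAL_API_KEY is an assumed name, not taken from the diff.
import os

from dotenv import load_dotenv

load_dotenv()  # reads key=value pairs from a local .env file into os.environ
api_key = os.getenv("MISTRAL_API_KEY")  # hypothetical secret consumed by the Mistral client
```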
README.md CHANGED

@@ -4,7 +4,7 @@ emoji: 👁
 colorFrom: red
 colorTo: purple
 sdk: gradio
-sdk_version:
+sdk_version: 5.13.1
 app_file: app.py
 pinned: false
 license: mit
app.py CHANGED

@@ -20,7 +20,7 @@ from llama_index.vector_stores.chroma import ChromaVectorStore
 
 load_dotenv()
 
-title = "Gaia Mistral
+title = "Gaia Mistral 8x22b Chat RAG PDF Demo"
 description = "Example of an assistant with Gradio, RAG from PDF documents and Mistral AI via its API"
 placeholder = (
     "Vous pouvez me posez une question sur ce contexte, appuyer sur Entrée pour valider"
@@ -76,22 +76,24 @@ def empty_db():
     return get_documents_in_db()
 
 
-def load_file(
+def load_file(files):
+    for file in files:
+        documents = loader.load_data(file=file)
 
+        for doc in documents:
+            index.insert(doc)
 
     return (
+        gr.Textbox(visible=False),
+        gr.Textbox(value=f"Document encoded ! You can ask questions", visible=True),
+        get_documents_in_db(),
+    )
 
 
-def load_document(
+def load_document(input_files):
+    for input_file in input_files:
+        file_name = input_file.name.split("/")[-1]
+    return gr.Textbox(value=f"Document loaded: {file_name}", visible=True)
 
 
 with gr.Blocks() as demo:
@@ -113,7 +115,7 @@ with gr.Blocks() as demo:
         input_file = gr.File(
             label="Load a pdf",
             file_types=[".pdf"],
-            file_count="
+            file_count="multiple",
             type="filepath",
             interactive=True,
         )
@@ -123,9 +125,7 @@ with gr.Blocks() as demo:
 
         input_file.upload(
             fn=load_document,
-            inputs=[
-                input_file,
-            ],
+            inputs=input_file,
             outputs=[file_msg],
             concurrency_limit=20,
         )
@@ -143,7 +143,7 @@ with gr.Blocks() as demo:
 
         file_btn.click(
             load_file,
-            inputs=
+            inputs=input_file,
             outputs=[file_msg, btn_msg, db_list],
             show_progress="full",
        )
@@ -164,4 +164,4 @@ with gr.Blocks() as demo:
 
 demo.title = title
 
-demo.launch()
+demo.launch(share=True)
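For reference, a minimal, self-contained sketch of the multiple-file-upload pattern this commit moves to: with `file_count="multiple"` and `type="filepath"`, Gradio passes the upload callback a list of local file paths instead of a single path. The handler name `handle_files` and its message text are illustrative only; the real app wires the same event to `load_document` and `load_file` as shown in the diff above.

```python
# Sketch of the multi-file gr.File pattern; component names are illustrative.
import gradio as gr


def handle_files(files):
    # files is a list of local file paths (or None when the component is cleared)
    if not files:
        return gr.Textbox(value="No file uploaded", visible=True)
    names = [path.split("/")[-1] for path in files]
    return gr.Textbox(value=f"Documents loaded: {', '.join(names)}", visible=True)


with gr.Blocks() as demo:
    input_file = gr.File(
        label="Load a pdf",
        file_types=[".pdf"],
        file_count="multiple",  # the key change: the callback now receives a list
        type="filepath",
        interactive=True,
    )
    file_msg = gr.Textbox(visible=False)
    input_file.upload(fn=handle_files, inputs=input_file, outputs=[file_msg])

demo.launch()
```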