Update app.py
app.py CHANGED
@@ -110,7 +110,6 @@
 # iface.launch()
 
 
-
 import torch
 import pandas as pd
 from sentence_transformers import SentenceTransformer, util
@@ -122,19 +121,14 @@ df2 = pd.read_csv("cleaned2.csv")
 embeddings = torch.load("embeddings1.pt")
 embeddings2 = torch.load("embeddings2.pt")
 
-def search_fatwa(query):
-
-    if isinstance(query, list):
-        query = query[0] if query else ""
-
-    if not query or query.strip() == "":
+def predict(text):
+    if not text or text.strip() == "":
         return "No query provided"
 
-    query_embedding = model.encode(query, convert_to_tensor=True)
+    query_embedding = model.encode(text, convert_to_tensor=True)
     top_idx = int(util.pytorch_cos_sim(query_embedding, embeddings)[0].argmax())
     top_idx2 = int(util.pytorch_cos_sim(query_embedding, embeddings2)[0].argmax())
 
-    # Return formatted text (like your working first app)
     result = f"""Question 1: {df.iloc[top_idx]["question"]}
 Link 1: {df.iloc[top_idx]["link"]}
 
@@ -143,13 +137,12 @@ Link 2: {df2.iloc[top_idx2]["link"]}"""
 
     return result
 
-#
+# Match the EXACT structure of your working translation app
+title = "Search CSV"
 iface = gr.Interface(
-    fn=search_fatwa,
+    fn=predict,  # Changed from search_fatwa to predict
    inputs=[gr.Textbox(label="text", lines=3)],
-    outputs='text',
-    title=
+    outputs='text',
+    title=title,
 )
-
-# Enable API access for curl requests
-iface.launch(share=False, show_api=True)
+iface.launch()
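The hunks use gr, model, and df without showing where they come from; those definitions live in a part of app.py this commit does not touch. A minimal sketch of what that setup presumably looks like; the model name, cleaned1.csv, and the embedding precomputation step are assumptions, not taken from this commit:

import gradio as gr
import pandas as pd
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("all-MiniLM-L6-v2")  # assumed model name, not shown in the diff

df = pd.read_csv("cleaned1.csv")   # assumed, by symmetry with cleaned2.csv
df2 = pd.read_csv("cleaned2.csv")  # visible in the second hunk header

# The tensors loaded in the hunks (embeddings1.pt / embeddings2.pt) were
# presumably precomputed once from each CSV's question column, roughly:
#   emb = model.encode(df["question"].tolist(), convert_to_tensor=True)
#   torch.save(emb, "embeddings1.pt")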
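Dropping show_api=True changes nothing functionally: show_api defaults to True, and a plain iface.launch() still serves the function as the default /predict endpoint, so the old "curl requests" use case still works. A minimal client-side sketch using gradio_client; the Space id below is a placeholder:

from gradio_client import Client

client = Client("username/space-name")  # placeholder for the actual Space id
# Returns the formatted "Question 1 / Link 1 / Question 2 / Link 2" string built by predict().
result = client.predict("your question here", api_name="/predict")
print(result)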