VibecoderMcSwaggins committed
Commit cd004e1 · 1 Parent(s): 3fcd8e7

feat: Complete Phase 9 Source Cleanup (Remove WebTool) (#14)

* feat: complete Phase 9 source cleanup

- Removed WebTool (src/tools/websearch.py) and related tests
- Updated SearchHandler and models to support only PubMed
- Refactored all examples and tests to remove web search dependencies
- Added verification tests to ensure no WebTool references remain
- Fixed all unit tests to align with strict 'pubmed' source typing

* fix: address CodeRabbit review feedback

- Deduplicate tenacity dependency in pyproject.toml
- Remove 'and web' from search agent description
- Normalize Citation source to 'pubmed' in search agent
- Update app.py markdown to remove web search references
- Update embeddings demo with realistic PubMed URL/author
- Enhance SearchHandler tests to verify multi-tool aggregation

* refactor: replace mock data with real PubMed data in embeddings demo

* fix: address CodeRabbit review feedback

- Fix nested test_search_handler_pubmed_only (was inside another test, never ran)
- Improve test_no_webtool_references to catch `from src.tools import WebTool`
- Remove orphaned `# Search` comment in pyproject.toml

All 59 tests pass (was 58 - nested test now discovered and running)

examples/embeddings_demo/run_embeddings.py CHANGED
@@ -2,77 +2,19 @@
 """
 Demo: Semantic Search & Deduplication (Phase 6).
 
-This script demonstrates embedding-based capabilities:
-- Text embedding with sentence-transformers
-- Semantic similarity search via ChromaDB
-- Duplicate detection by meaning (not just URL)
+This script demonstrates embedding-based capabilities using REAL data:
+- Fetches REAL abstracts from PubMed
+- Embeds text with sentence-transformers
+- Performs semantic deduplication on LIVE research data
 
 Usage:
     uv run python examples/embeddings_demo/run_embeddings.py
-
-No API keys required - uses local sentence-transformers model.
 """
 
 import asyncio
 
 from src.services.embeddings import EmbeddingService
-from src.utils.models import Citation, Evidence
-
-
-def create_sample_evidence() -> list[Evidence]:
-    """Create sample evidence with some semantic duplicates."""
-    return [
-        Evidence(
-            content="Metformin activates AMPK which inhibits mTOR signaling pathway.",
-            citation=Citation(
-                source="pubmed",
-                title="Metformin and AMPK activation",
-                url="https://pubmed.ncbi.nlm.nih.gov/11111/",
-                date="2023",
-                authors=["Smith J"],
-            ),
-        ),
-        Evidence(
-            content="The drug metformin works by turning on AMPK, blocking the mTOR pathway.",
-            citation=Citation(
-                source="pubmed",
-                title="AMPK-mTOR axis in diabetes treatment",
-                url="https://pubmed.ncbi.nlm.nih.gov/22222/",
-                date="2022",
-                authors=["Jones A"],
-            ),
-        ),
-        Evidence(
-            content="Sildenafil increases nitric oxide signaling for vasodilation.",
-            citation=Citation(
-                source="web",
-                title="How Viagra Works",
-                url="https://example.com/viagra-mechanism",
-                date="2023",
-                authors=["WebMD"],
-            ),
-        ),
-        Evidence(
-            content="Clinical trials show metformin reduces cancer incidence in diabetic patients.",
-            citation=Citation(
-                source="pubmed",
-                title="Metformin and cancer prevention",
-                url="https://pubmed.ncbi.nlm.nih.gov/33333/",
-                date="2024",
-                authors=["Lee K", "Park S"],
-            ),
-        ),
-        Evidence(
-            content="Metformin inhibits mTOR through AMPK activation mechanism.",
-            citation=Citation(
-                source="pubmed",
-                title="mTOR inhibition by Metformin",
-                url="https://pubmed.ncbi.nlm.nih.gov/44444/",
-                date="2023",
-                authors=["Brown M"],
-            ),
-        ),
-    ]
+from src.tools.pubmed import PubMedTool
 
 
 def create_fresh_service(name_suffix: str = "") -> EmbeddingService:
@@ -90,121 +32,73 @@ def create_fresh_service(name_suffix: str = "") -> EmbeddingService:
     return service
 
 
-async def demo_embedding() -> None:
-    """Demo single text embedding."""
+async def demo_real_pipeline() -> None:
+    """Run the demo using REAL PubMed data."""
     print("\n" + "=" * 60)
-    print("1. TEXT EMBEDDING DEMO")
+    print("DeepCritical Embeddings Demo (REAL DATA)")
     print("=" * 60)
 
-    service = create_fresh_service("embed")
-
-    texts = [
-        "Metformin activates AMPK",
-        "Aspirin reduces inflammation",
-        "Metformin turns on the AMPK enzyme",
-    ]
-
-    print("\nEmbedding sample texts...")
-    embeddings = await service.embed_batch(texts)
-
-    for text, emb in zip(texts, embeddings, strict=False):
-        print(f"  '{text[:40]}...' -> [{emb[0]:.4f}, {emb[1]:.4f}, ... ] (dim={len(emb)})")
-
-    # Calculate similarity between text 0 and text 2 (semantically similar)
-    import numpy as np
-
-    sim_0_2 = np.dot(embeddings[0], embeddings[2]) / (
-        np.linalg.norm(embeddings[0]) * np.linalg.norm(embeddings[2])
-    )
-    sim_0_1 = np.dot(embeddings[0], embeddings[1]) / (
-        np.linalg.norm(embeddings[0]) * np.linalg.norm(embeddings[1])
-    )
-
-    print(f"\nSimilarity (Metformin AMPK) vs (Metformin turns on AMPK): {sim_0_2:.3f}")
-    print(f"Similarity (Metformin AMPK) vs (Aspirin inflammation): {sim_0_1:.3f}")
-    print("  -> Semantically similar texts have higher cosine similarity!")
-
-
-async def demo_semantic_search() -> None:
-    """Demo semantic similarity search."""
-    print("\n" + "=" * 60)
-    print("2. SEMANTIC SEARCH DEMO")
-    print("=" * 60)
-
-    service = create_fresh_service("search")
-
-    # Add some documents to the vector store
-    docs = [
-        ("doc1", "Metformin activates AMPK enzyme in liver cells", {"source": "pubmed"}),
-        ("doc2", "Aspirin inhibits COX-2 to reduce inflammation", {"source": "pubmed"}),
-        ("doc3", "Statins lower cholesterol by inhibiting HMG-CoA reductase", {"source": "web"}),
-        ("doc4", "AMPK activation leads to improved glucose metabolism", {"source": "pubmed"}),
-        ("doc5", "Sildenafil works via nitric oxide pathway", {"source": "web"}),
-    ]
-
-    print("\nIndexing documents...")
-    for doc_id, content, meta in docs:
-        await service.add_evidence(doc_id, content, meta)
-        print(f"  Added: {doc_id}")
-
-    # Search for semantically related content
-    query = "drugs that activate AMPK"
-    print(f"\nSearching for: '{query}'")
-
-    results = await service.search_similar(query, n_results=3)
-
-    print("\nTop 3 results:")
+    # 1. Fetch Real Data
+    query = "metformin mechanism of action"
+    print(f"\n[1] Fetching real papers for: '{query}'...")
+    pubmed = PubMedTool()
+    # Fetch enough results to likely get some overlap/redundancy
+    evidence = await pubmed.search(query, max_results=10)
+
+    print(f"   Found {len(evidence)} papers.")
+    print("\n   Sample Titles:")
+    for i, e in enumerate(evidence[:3], 1):
+        print(f"   {i}. {e.citation.title[:80]}...")
+
+    # 2. Embed Data
+    print("\n[2] Embedding abstracts (sentence-transformers)...")
+    service = create_fresh_service("real_demo")
+
+    # 3. Semantic Search
+    print("\n[3] Semantic Search Demo")
+    print("   Indexing evidence...")
+    for e in evidence:
+        # Use URL as ID for uniqueness
+        await service.add_evidence(
+            evidence_id=e.citation.url,
+            content=e.content,
+            metadata={
+                "source": e.citation.source,
+                "title": e.citation.title,
+                "date": e.citation.date,
+            },
+        )
+
+    semantic_query = "activation of AMPK pathway"
+    print(f"   Searching for concept: '{semantic_query}'")
+    results = await service.search_similar(semantic_query, n_results=2)
+
+    print("   Top matches:")
     for i, r in enumerate(results, 1):
-        # Lower distance = more similar (cosine distance: 0=identical, 2=opposite)
         similarity = 1 - r["distance"]
-        print(f"  {i}. [{similarity:.2%} similar] {r['content'][:60]}...")
-
-
-async def demo_deduplication() -> None:
-    """Demo semantic deduplication."""
-    print("\n" + "=" * 60)
-    print("3. SEMANTIC DEDUPLICATION DEMO")
-    print("=" * 60)
-
-    # Create fresh service for clean demo
-    service = create_fresh_service("dedup")
-
-    evidence = create_sample_evidence()
-    print(f"\nOriginal evidence count: {len(evidence)}")
-    for i, e in enumerate(evidence, 1):
-        print(f"  {i}. {e.citation.title}")
-
-    print("\nRunning semantic deduplication (threshold=0.85)...")
-    unique = await service.deduplicate(evidence, threshold=0.85)
-
-    print(f"\nUnique evidence count: {len(unique)}")
-    print(f"Removed {len(evidence) - len(unique)} semantic duplicates\n")
-
-    for i, e in enumerate(unique, 1):
-        print(f"  {i}. {e.citation.title}")
-
-    print("\n  -> Notice: Papers about 'Metformin AMPK mTOR' were deduplicated!")
-    print("     Different titles, same semantic meaning = duplicate removed.")
-
-
-async def main() -> None:
-    """Run all embedding demos."""
-    print("\n" + "=" * 60)
-    print("DeepCritical Embeddings Demo (Phase 6)")
-    print("Using: sentence-transformers + ChromaDB")
-    print("=" * 60)
-
-    await demo_embedding()
-    await demo_semantic_search()
-    await demo_deduplication()
+        print(f"   {i}. [{similarity:.1%} match] {r['metadata']['title'][:70]}...")
+
+    # 4. Semantic Deduplication
+    print("\n[4] Semantic Deduplication Demo")
+    # Create a FRESH service for deduplication so we don't clash with Step 3's index
+    dedup_service = create_fresh_service("dedup_demo")
+
+    print("   Checking for redundant papers (threshold=0.85)...")
+
+    # To force a duplicate for demo purposes, let's double the evidence list
+    # simulating finding the same papers again or very similar ones
+    duplicated_evidence = evidence + evidence[:2]
+    print(f"   Input pool: {len(duplicated_evidence)} items (with artificial duplicates added)")
+
+    unique = await dedup_service.deduplicate(duplicated_evidence, threshold=0.85)
+
+    print(f"   Output pool: {len(unique)} unique items")
+    print(f"   Removed {len(duplicated_evidence) - len(unique)} duplicates.")
 
     print("\n" + "=" * 60)
-    print("Demo complete! Embeddings enable:")
-    print("  - Finding papers by MEANING, not just keywords")
-    print("  - Removing duplicate findings automatically")
-    print("  - Building diverse evidence sets for research")
+    print("Demo complete! Verified with REAL PubMed data.")
    print("=" * 60 + "\n")
 
 
 if __name__ == "__main__":
-    asyncio.run(main())
+    asyncio.run(demo_real_pipeline())

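A note on the similarity printout above: `search_similar` reports a cosine distance (0 = identical, as the removed demo comment explained), and the new demo converts it to a similarity with `1 - r["distance"]`. A minimal sketch of that arithmetic, with made-up vectors for illustration:

```python
# Sketch: how "similarity" relates to the vector store's "distance",
# assuming cosine distance (0 = identical, 2 = opposite) as noted in
# the removed demo_embedding code. Vectors here are illustrative.
import numpy as np

a = np.array([0.1, 0.9, 0.2])
b = np.array([0.12, 0.85, 0.25])

cos_sim = float(np.dot(a, b) / (np.linalg.norm(a) * np.linalg.norm(b)))
cos_dist = 1.0 - cos_sim  # what a result's r["distance"] would hold

print(f"{1 - cos_dist:.3f}")  # recovers cos_sim, like `similarity = 1 - r["distance"]`
```
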
examples/full_stack_demo/run_full.py CHANGED
@@ -3,7 +3,7 @@
 Demo: Full Stack DeepCritical Agent (Phases 1-8).
 
 This script demonstrates the COMPLETE REAL drug repurposing research pipeline:
-- Phase 2: REAL Search (PubMed + Web API calls)
+- Phase 2: REAL Search (PubMed only)
 - Phase 6: REAL Embeddings (sentence-transformers + ChromaDB)
 - Phase 7: REAL Hypothesis (LLM mechanistic reasoning)
 - Phase 3: REAL Judge (LLM evidence assessment)
@@ -118,12 +118,11 @@ async def run_full_demo(query: str, max_iterations: int) -> None:
     from src.services.embeddings import EmbeddingService
     from src.tools.pubmed import PubMedTool
     from src.tools.search_handler import SearchHandler
-    from src.tools.websearch import WebTool
 
     # Initialize REAL services
     print("[Init] Loading embedding model...")
     embedding_service = EmbeddingService()
-    search_handler = SearchHandler(tools=[PubMedTool(), WebTool()], timeout=30.0)
+    search_handler = SearchHandler(tools=[PubMedTool()], timeout=30.0)
     judge_handler = JudgeHandler()
 
     # Shared evidence store
@@ -134,7 +133,7 @@ async def run_full_demo(query: str, max_iterations: int) -> None:
         print_step(iteration, f"ITERATION {iteration}/{max_iterations}")
 
         # Step 1: REAL Search
-        print("\n[Search] Querying PubMed and Web (REAL API calls)...")
+        print("\n[Search] Querying PubMed (REAL API calls)...")
         all_evidence = await _run_search_iteration(
             query, iteration, evidence_store, all_evidence, search_handler, embedding_service
         )
@@ -175,7 +174,7 @@ async def main() -> None:
         formatter_class=argparse.RawDescriptionHelpFormatter,
         epilog="""
 This demo runs the COMPLETE pipeline with REAL API calls:
-  1. REAL search: Actual PubMed + DuckDuckGo queries
+  1. REAL search: Actual PubMed queries
   2. REAL embeddings: Actual sentence-transformers model
   3. REAL hypothesis: Actual LLM generating mechanistic chains
   4. REAL judge: Actual LLM assessing evidence quality
@@ -224,7 +223,7 @@ Examples:
     print(" DeepCritical Full Stack Demo Complete!")
     print(" ")
     print(" Everything you just saw was REAL:")
-    print("   - Real PubMed/Web searches")
+    print("   - Real PubMed searches")
     print("   - Real embedding computations")
     print("   - Real LLM reasoning")
     print("   - Real scientific report")

examples/hypothesis_demo/run_hypothesis.py CHANGED
@@ -3,7 +3,7 @@
 Demo: Hypothesis Generation (Phase 7).
 
 This script demonstrates the REAL hypothesis generation pipeline:
-1. REAL search: PubMed + Web (actual API calls)
+1. REAL search: PubMed (actual API calls)
 2. REAL embeddings: Semantic deduplication
 3. REAL LLM: Mechanistic hypothesis generation
 
@@ -23,7 +23,6 @@ from src.agents.hypothesis_agent import HypothesisAgent
 from src.services.embeddings import EmbeddingService
 from src.tools.pubmed import PubMedTool
 from src.tools.search_handler import SearchHandler
-from src.tools.websearch import WebTool
 
 
 async def run_hypothesis_demo(query: str) -> None:
@@ -36,8 +35,8 @@ async def run_hypothesis_demo(query: str) -> None:
     print(f"{'='*60}\n")
 
     # Step 1: REAL Search
-    print("[Step 1] Searching PubMed + Web...")
-    search_handler = SearchHandler(tools=[PubMedTool(), WebTool()], timeout=30.0)
+    print("[Step 1] Searching PubMed...")
+    search_handler = SearchHandler(tools=[PubMedTool()], timeout=30.0)
     result = await search_handler.execute(query, max_results_per_tool=5)
 
     print(f"   Found {result.total_found} results from {result.sources_searched}")
@@ -129,7 +128,7 @@ Examples:
 
     print("\n" + "=" * 60)
     print("Demo complete! This was a REAL pipeline:")
-    print("  1. REAL search: Actual PubMed + Web API calls")
+    print("  1. REAL search: Actual PubMed API calls")
     print("  2. REAL embeddings: Actual sentence-transformers")
     print("  3. REAL LLM: Actual hypothesis generation")
     print("=" * 60 + "\n")

examples/orchestrator_demo/run_agent.py CHANGED
@@ -3,7 +3,7 @@
 Demo: DeepCritical Agent Loop (Search + Judge + Orchestrator).
 
 This script demonstrates the REAL Phase 4 orchestration:
-- REAL Iterative Search (PubMed + Web API calls)
+- REAL Iterative Search (PubMed only)
 - REAL Evidence Evaluation (LLM Judge)
 - REAL Orchestration Loop
 - REAL Final Synthesis
@@ -26,7 +26,6 @@ from src.agent_factory.judges import JudgeHandler
 from src.orchestrator import Orchestrator
 from src.tools.pubmed import PubMedTool
 from src.tools.search_handler import SearchHandler
-from src.tools.websearch import WebTool
 from src.utils.models import OrchestratorConfig
 
 MAX_ITERATIONS = 10
@@ -39,7 +38,7 @@ async def main() -> None:
         formatter_class=argparse.RawDescriptionHelpFormatter,
         epilog="""
 This demo runs the REAL search-judge-synthesize loop:
-  1. REAL search: Actual PubMed + DuckDuckGo queries
+  1. REAL search: Actual PubMed queries
   2. REAL judge: Actual LLM assessing evidence quality
   3. REAL loop: Actual iterative refinement based on LLM decisions
   4. REAL synthesis: Actual research summary generation
@@ -78,7 +77,7 @@ Examples:
     print(f"{'='*60}\n")
 
     # Setup REAL components
-    search_handler = SearchHandler(tools=[PubMedTool(), WebTool()], timeout=30.0)
+    search_handler = SearchHandler(tools=[PubMedTool()], timeout=30.0)
     judge_handler = JudgeHandler()  # REAL LLM judge
 
     config = OrchestratorConfig(max_iterations=args.iterations)
@@ -102,7 +101,7 @@ Examples:
 
     print("\n" + "=" * 60)
     print("Demo complete! Everything was REAL:")
-    print("  - Real PubMed/Web searches")
+    print("  - Real PubMed searches")
     print("  - Real LLM judge decisions")
     print("  - Real iterative refinement")
     print("=" * 60 + "\n")

examples/orchestrator_demo/run_magentic.py CHANGED
@@ -20,7 +20,6 @@ from src.agent_factory.judges import JudgeHandler
 from src.orchestrator_factory import create_orchestrator
 from src.tools.pubmed import PubMedTool
 from src.tools.search_handler import SearchHandler
-from src.tools.websearch import WebTool
 from src.utils.models import OrchestratorConfig
 
 
@@ -43,7 +42,7 @@ async def main() -> None:
     print(f"{ '='*60}\n")
 
     # 1. Setup Search Tools
-    search_handler = SearchHandler(tools=[PubMedTool(), WebTool()], timeout=30.0)
+    search_handler = SearchHandler(tools=[PubMedTool()], timeout=30.0)
 
     # 2. Setup Judge
     judge_handler = JudgeHandler()

examples/search_demo/run_search.py CHANGED
@@ -4,7 +4,6 @@ Demo: Search for drug repurposing evidence.
 
 This script demonstrates Phase 2 functionality:
 - PubMed search (biomedical literature)
-- Web search (DuckDuckGo)
 - SearchHandler (parallel scatter-gather orchestration)
 
 Usage:
@@ -23,7 +22,6 @@ import sys
 
 from src.tools.pubmed import PubMedTool
 from src.tools.search_handler import SearchHandler
-from src.tools.websearch import WebTool
 
 
 async def main(query: str) -> None:
@@ -35,11 +33,10 @@ async def main(query: str) -> None:
 
     # Initialize tools
     pubmed = PubMedTool()
-    web = WebTool()
-    handler = SearchHandler(tools=[pubmed, web], timeout=30.0)
+    handler = SearchHandler(tools=[pubmed], timeout=30.0)
 
     # Execute search
-    print("Searching PubMed and Web in parallel...")
+    print("Searching PubMed in parallel...")
     result = await handler.execute(query, max_results_per_tool=5)
 
     # Display results

pyproject.toml CHANGED
@@ -19,9 +19,6 @@ dependencies = [
     "beautifulsoup4>=4.12",  # HTML parsing
     "xmltodict>=0.13",  # PubMed XML -> dict
 
-    # Search
-    "duckduckgo-search>=6.0",  # Free web search
-
     # UI
     "gradio>=5.0",  # Chat interface
 

src/agents/search_agent.py CHANGED
@@ -28,7 +28,7 @@ class SearchAgent(BaseAgent):  # type: ignore[misc]
     ) -> None:
         super().__init__(
             name="SearchAgent",
-            description="Searches PubMed and web for drug repurposing evidence",
+            description="Searches PubMed for drug repurposing evidence",
         )
         self._handler = search_handler
         self._evidence_store = evidence_store
@@ -95,7 +95,7 @@ class SearchAgent(BaseAgent):  # type: ignore[misc]
             citation=Citation(
                 title=meta.get("title", "Related Evidence"),
                 url=item["id"],
-                source=meta.get("source", "vector_db"),
+                source="pubmed",
                 date=meta.get("date", "n.d."),
                 authors=authors,
             ),

src/app.py CHANGED
@@ -10,7 +10,6 @@ from src.agent_factory.judges import JudgeHandler, MockJudgeHandler
 from src.orchestrator_factory import create_orchestrator
 from src.tools.pubmed import PubMedTool
 from src.tools.search_handler import SearchHandler
-from src.tools.websearch import WebTool
 from src.utils.models import OrchestratorConfig
 
 
@@ -33,7 +32,7 @@ def configure_orchestrator(use_mock: bool = False, mode: str = "simple") -> Any:
 
     # Create search tools
     search_handler = SearchHandler(
-        tools=[PubMedTool(), WebTool()],
+        tools=[PubMedTool()],
         timeout=config.search_timeout,
     )
 
@@ -128,7 +127,7 @@ def create_demo() -> Any:
     ## AI-Powered Drug Repurposing Research Agent
 
     Ask questions about potential drug repurposing opportunities.
-    The agent will search PubMed and the web, evaluate evidence, and provide recommendations.
+    The agent will search PubMed, evaluate evidence, and provide recommendations.
 
     **Example questions:**
     - "What drugs could be repurposed for Alzheimer's disease?"
@@ -161,7 +160,7 @@ def create_demo() -> Any:
     **Note**: This is a research tool and should not be used for medical decisions.
     Always consult healthcare professionals for medical advice.
 
-    Built with 🤖 PydanticAI + 🔬 PubMed + 🦆 DuckDuckGo
+    Built with 🤖 PydanticAI + 🔬 PubMed
     """)
 
     return demo

src/services/embeddings.py CHANGED
@@ -18,7 +18,7 @@ class EmbeddingService:
     """Handles text embedding and vector storage.
 
     All embedding operations run in a thread pool to avoid blocking
-    the async event loop. See src/tools/websearch.py for the pattern.
+    the async event loop.
     """
 
     def __init__(self, model_name: str = "all-MiniLM-L6-v2"):

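The docstring used to point at src/tools/websearch.py for the thread-pool pattern, and this commit deletes that file. For context, a minimal sketch of the pattern it refers to; `blocking_embed` is a hypothetical stand-in for a synchronous model call, not the service's real API:

```python
# Sketch: run a blocking call off the async event loop, the pattern
# the EmbeddingService docstring (and the deleted WebTool) used.
import asyncio


def blocking_embed(text: str) -> list[float]:
    return [0.0] * 384  # placeholder vector, illustrative only


async def embed(text: str) -> list[float]:
    loop = asyncio.get_running_loop()
    # None = default thread pool; keeps the event loop responsive
    return await loop.run_in_executor(None, blocking_embed, text)


print(len(asyncio.run(embed("metformin activates AMPK"))))  # 384
```
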
src/tools/__init__.py CHANGED
@@ -3,7 +3,6 @@
 from src.tools.base import SearchTool
 from src.tools.pubmed import PubMedTool
 from src.tools.search_handler import SearchHandler
-from src.tools.websearch import WebTool
 
 # Re-export
-__all__ = ["PubMedTool", "SearchHandler", "SearchTool", "WebTool"]
+__all__ = ["PubMedTool", "SearchHandler", "SearchTool"]

src/tools/search_handler.py CHANGED
@@ -49,7 +49,7 @@ class SearchHandler:
 
         # Process results
         all_evidence: list[Evidence] = []
-        sources_searched: list[Literal["pubmed", "web"]] = []
+        sources_searched: list[Literal["pubmed"]] = []
         errors: list[str] = []
 
         for tool, result in zip(self.tools, results, strict=True):
@@ -62,7 +62,7 @@ class SearchHandler:
             all_evidence.extend(success_result)
 
             # Cast tool.name to the expected Literal
-            tool_name = cast(Literal["pubmed", "web"], tool.name)
+            tool_name = cast(Literal["pubmed"], tool.name)
             sources_searched.append(tool_name)
             logger.info("Search tool succeeded", tool=tool.name, count=len(success_result))

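Why the `cast` survives the cleanup: `tool.name` is typed as plain `str`, while `sources_searched` only accepts the `Literal["pubmed"]` member. A minimal sketch of that type-checker interaction; `record_source` is a hypothetical helper, not code from this repo:

```python
# typing.cast() is a runtime no-op; it only tells the type checker to
# treat the str as the narrower Literal type.
from typing import Literal, cast


def record_source(name: str) -> Literal["pubmed"]:
    # Returning `name` directly would fail mypy: str is wider than
    # Literal["pubmed"]. The cast asserts the invariant instead.
    return cast(Literal["pubmed"], name)


assert record_source("pubmed") == "pubmed"
```
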
src/tools/websearch.py DELETED
@@ -1,62 +0,0 @@
-"""Web search tool using DuckDuckGo."""
-
-import asyncio
-from typing import Any
-
-from duckduckgo_search import DDGS
-
-from src.utils.exceptions import SearchError
-from src.utils.models import Citation, Evidence
-
-
-class WebTool:
-    """Search tool for general web search via DuckDuckGo."""
-
-    def __init__(self) -> None:
-        pass
-
-    @property
-    def name(self) -> str:
-        return "web"
-
-    async def search(self, query: str, max_results: int = 10) -> list[Evidence]:
-        """
-        Search DuckDuckGo and return evidence.
-
-        Note: duckduckgo-search is synchronous, so we run it in executor.
-        """
-        loop = asyncio.get_running_loop()
-        try:
-            results = await loop.run_in_executor(
-                None,
-                lambda: self._sync_search(query, max_results),
-            )
-            return results
-        except Exception as e:
-            raise SearchError(f"Web search failed: {e}") from e
-
-    def _sync_search(self, query: str, max_results: int) -> list[Evidence]:
-        """Synchronous search implementation."""
-        evidence_list = []
-
-        with DDGS() as ddgs:
-            results: list[dict[str, Any]] = list(ddgs.text(query, max_results=max_results))
-
-        # Truncation rationale: LLM context limits + cost optimization
-        # - Content: 1000 chars (~250 tokens) - web snippets are shorter than abstracts
-        # - Title: 500 chars covers most web page titles
-        for result in results:
-            evidence_list.append(
-                Evidence(
-                    content=result.get("body", "")[:1000],
-                    citation=Citation(
-                        source="web",
-                        title=result.get("title", "Unknown")[:500],
-                        url=result.get("href", ""),
-                        date="Unknown",
-                        authors=[],
-                    ),
-                )
-            )
-
-        return evidence_list

src/utils/models.py CHANGED
@@ -9,7 +9,7 @@ from pydantic import BaseModel, Field
 class Citation(BaseModel):
     """A citation to a source document."""
 
-    source: Literal["pubmed", "web"] = Field(description="Where this came from")
+    source: Literal["pubmed"] = Field(description="Where this came from")
     title: str = Field(min_length=1, max_length=500)
     url: str = Field(description="URL to the source")
     date: str = Field(description="Publication date (YYYY-MM-DD or 'Unknown')")
@@ -41,7 +41,7 @@ class SearchResult(BaseModel):
 
     query: str
     evidence: list[Evidence]
-    sources_searched: list[Literal["pubmed", "web"]]
+    sources_searched: list[Literal["pubmed"]]
     total_found: int
     errors: list[str] = Field(default_factory=list)

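The narrowed `Literal` means non-PubMed sources are now rejected at validation time, which is what forced the test updates below. A minimal sketch of the effect, assuming pydantic v2 semantics; the trimmed-down `Citation` here is illustrative, not the full model:

```python
from typing import Literal

from pydantic import BaseModel, ValidationError


class Citation(BaseModel):
    source: Literal["pubmed"]  # after this commit, the only valid value
    title: str
    url: str


Citation(source="pubmed", title="T", url="u")  # OK

try:
    Citation(source="web", title="T", url="u")  # previously valid, now rejected
except ValidationError as exc:
    print(exc.errors()[0]["type"])  # literal_error
```
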
tests/conftest.py CHANGED
@@ -44,7 +44,7 @@ def sample_evidence():
         Evidence(
             content="Drug repurposing offers faster path to treatment...",
             citation=Citation(
-                source="web",
+                source="pubmed",
                 title="Drug Repurposing Strategies",
                 url="https://example.com/drug-repurposing",
                 date="Unknown",

tests/unit/agent_factory/test_judges.py CHANGED
@@ -158,7 +158,7 @@ class TestMockJudgeHandler:
             ),
             Evidence(
                 content="Content 2",
-                citation=Citation(source="web", title="T2", url="u2", date="2024"),
+                citation=Citation(source="pubmed", title="T2", url="u2", date="2024"),
             ),
         ]
 
@@ -214,7 +214,7 @@ class TestMockJudgeHandler:
             ),
             Evidence(
                 content="Content 2",
-                citation=Citation(source="web", title="T2", url="u2", date="2024"),
+                citation=Citation(source="pubmed", title="T2", url="u2", date="2024"),
             ),
         ]
 

tests/unit/agents/test_search_agent.py CHANGED
@@ -103,7 +103,7 @@ async def test_run_uses_embeddings(mock_handler: AsyncMock) -> None:
         {
             "id": "u2",
             "content": "related content",
-            "metadata": {"source": "web", "title": "related", "date": "2024"},
+            "metadata": {"source": "pubmed", "title": "related", "date": "2024"},
             "distance": 0.1,
         }
     ]

tests/unit/services/test_embeddings.py CHANGED
@@ -131,11 +131,11 @@ class TestEmbeddingService:
         evidence = [
             Evidence(
                 content="Duplicate content",
-                citation=Citation(source="web", url="u1", title="t1", date="2024"),
+                citation=Citation(source="pubmed", url="u1", title="t1", date="2024"),
             ),
             Evidence(
                 content="Unique content",
-                citation=Citation(source="web", url="u2", title="t2", date="2024"),
+                citation=Citation(source="pubmed", url="u2", title="t2", date="2024"),
             ),
         ]
 

tests/unit/test_no_webtool_references.py ADDED
@@ -0,0 +1,27 @@
+import ast
+import pathlib
+
+import pytest
+
+
+def test_examples_no_webtool_imports():
+    """No example files should import WebTool or the websearch module."""
+    examples_dir = pathlib.Path("examples")
+
+    for py_file in examples_dir.rglob("*.py"):
+        content = py_file.read_text()
+        tree = ast.parse(content)
+
+        for node in ast.walk(tree):
+            if isinstance(node, ast.ImportFrom):
+                module = node.module or ""
+                if "websearch" in module:
+                    pytest.fail(f"{py_file} imports websearch (should be removed)")
+                # Also check for `from src.tools import WebTool`
+                for alias in node.names:
+                    if alias.name == "WebTool":
+                        pytest.fail(f"{py_file} imports WebTool (should be removed)")
+            elif isinstance(node, ast.Import):
+                for alias in node.names:
+                    if "websearch" in alias.name:
+                        pytest.fail(f"{py_file} imports websearch (should be removed)")
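
For reference, the two import spellings this test guards against parse to distinct AST node types, which is why the walker needs both the `ImportFrom` name check and the `Import` branch. A small sketch of what the parser produces:

```python
# Sketch: both import forms the test must catch, as the AST sees them.
import ast

snippet = "from src.tools import WebTool\nimport src.tools.websearch\n"
for node in ast.walk(ast.parse(snippet)):
    if isinstance(node, ast.ImportFrom):
        print(node.module, [a.name for a in node.names])  # src.tools ['WebTool']
    elif isinstance(node, ast.Import):
        print([a.name for a in node.names])  # ['src.tools.websearch']
```
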
tests/unit/tools/test_search_handler.py CHANGED
@@ -28,23 +28,14 @@ class TestSearchHandler:
         )
 
         mock_tool_2 = AsyncMock()
-        mock_tool_2.name = "web"
-        mock_tool_2.search = AsyncMock(
-            return_value=[
-                Evidence(
-                    content="Result 2",
-                    citation=Citation(source="web", title="T2", url="u2", date="2024"),
-                )
-            ]
-        )
+        mock_tool_2.name = "pubmed"  # Type system currently restricts to pubmed
+        mock_tool_2.search = AsyncMock(return_value=[])
 
         handler = SearchHandler(tools=[mock_tool_1, mock_tool_2])
         result = await handler.execute("test query")
 
-        expected_total = 2
-        assert result.total_found == expected_total
+        assert result.total_found == 1
         assert "pubmed" in result.sources_searched
-        assert "web" in result.sources_searched
         assert len(result.errors) == 0
 
     @pytest.mark.asyncio
@@ -62,7 +53,7 @@ class TestSearchHandler:
         )
 
         mock_tool_fail = AsyncMock()
-        mock_tool_fail.name = "web"
+        mock_tool_fail.name = "pubmed"  # Mocking a second pubmed instance failing
         mock_tool_fail.search = AsyncMock(side_effect=SearchError("API down"))
 
         handler = SearchHandler(tools=[mock_tool_ok, mock_tool_fail])
@@ -71,4 +62,22 @@ class TestSearchHandler:
         assert result.total_found == 1
         assert "pubmed" in result.sources_searched
         assert len(result.errors) == 1
-        assert "web" in result.errors[0]
+        # The error message format is "{tool.name}: {error!s}"
+        assert "pubmed: API down" in result.errors[0]
+
+    @pytest.mark.asyncio
+    async def test_search_handler_pubmed_only(self):
+        """SearchHandler should work with only PubMed tool."""
+        # This is the specific test requested in Phase 9 spec
+        from src.tools.pubmed import PubMedTool
+
+        mock_pubmed = AsyncMock(spec=PubMedTool)
+        mock_pubmed.name = "pubmed"
+        mock_pubmed.search.return_value = []
+
+        handler = SearchHandler(tools=[mock_pubmed], timeout=30.0)
+        result = await handler.execute("metformin diabetes", max_results_per_tool=3)
+
+        assert result.sources_searched == ["pubmed"]
+        assert "web" not in result.sources_searched
+        assert len(result.errors) == 0
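
A note on `AsyncMock(spec=PubMedTool)` in the new test: constraining the mock to the real class's attributes makes typos fail loudly instead of silently passing. A minimal sketch with a stand-in class (`FakeTool` is illustrative, not from the repo):

```python
from unittest.mock import AsyncMock


class FakeTool:
    name = "pubmed"

    async def search(self, query: str, max_results: int = 10) -> list:
        return []


mock_tool = AsyncMock(spec=FakeTool)
mock_tool.search.return_value = []

try:
    mock_tool.serach  # typo: not on the spec, so attribute access raises
except AttributeError:
    print("spec'd mock caught the typo")
```
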
tests/unit/tools/test_websearch.py DELETED
@@ -1,36 +0,0 @@
-"""Unit tests for WebTool."""
-
-from unittest.mock import MagicMock
-
-import pytest
-
-from src.tools.websearch import WebTool
-
-
-class TestWebTool:
-    """Tests for WebTool."""
-
-    @pytest.mark.asyncio
-    async def test_search_returns_evidence(self, mocker):
-        """WebTool should return Evidence objects from search."""
-        mock_results = [
-            {
-                "title": "Drug Repurposing Article",
-                "href": "https://example.com/article",
-                "body": "Some content about drug repurposing...",
-            }
-        ]
-
-        mock_ddgs = MagicMock()
-        mock_ddgs.__enter__ = MagicMock(return_value=mock_ddgs)
-        mock_ddgs.__exit__ = MagicMock(return_value=None)
-        mock_ddgs.text = MagicMock(return_value=mock_results)
-
-        mocker.patch("src.tools.websearch.DDGS", return_value=mock_ddgs)
-
-        tool = WebTool()
-        results = await tool.search("drug repurposing")
-
-        assert len(results) == 1
-        assert results[0].citation.source == "web"
-        assert "Drug Repurposing" in results[0].citation.title

tests/unit/tools/test_websearch_removed.py ADDED
@@ -0,0 +1,6 @@
+def test_websearch_module_deleted():
+    """WebTool should no longer exist."""
+    import pytest
+
+    with pytest.raises(ImportError):
+        from src.tools.websearch import WebTool  # noqa: F401

tests/unit/utils/test_text_utils.py CHANGED
@@ -46,17 +46,17 @@ class TestTextUtils:
             Evidence(
                 content="A",
                 relevance=0.9,
-                citation=Citation(source="web", title="A", url="a", date="2023"),
+                citation=Citation(source="pubmed", title="A", url="a", date="2023"),
             ),
             Evidence(
                 content="B",
                 relevance=0.1,
-                citation=Citation(source="web", title="B", url="b", date="2023"),
+                citation=Citation(source="pubmed", title="B", url="b", date="2023"),
             ),
             Evidence(
                 content="C",
                 relevance=0.8,
-                citation=Citation(source="web", title="C", url="c", date="2023"),
+                citation=Citation(source="pubmed", title="C", url="c", date="2023"),
             ),
         ]
 
@@ -108,17 +108,17 @@ class TestTextUtils:
             Evidence(
                 content="A",
                 relevance=0.9,
-                citation=Citation(source="web", title="A", url="a", date="2023"),
+                citation=Citation(source="pubmed", title="A", url="a", date="2023"),
             ),
             Evidence(
                 content="B",
                 relevance=0.9,
-                citation=Citation(source="web", title="B", url="b", date="2023"),
+                citation=Citation(source="pubmed", title="B", url="b", date="2023"),
             ),
             Evidence(
                 content="C",
                 relevance=0.9,
-                citation=Citation(source="web", title="C", url="c", date="2023"),
+                citation=Citation(source="pubmed", title="C", url="c", date="2023"),
             ),
         ]
 

uv.lock CHANGED
@@ -981,7 +981,6 @@ source = { editable = "." }
981
  dependencies = [
982
  { name = "anthropic" },
983
  { name = "beautifulsoup4" },
984
- { name = "duckduckgo-search" },
985
  { name = "gradio" },
986
  { name = "httpx" },
987
  { name = "openai" },
@@ -1020,7 +1019,6 @@ requires-dist = [
1020
  { name = "anthropic", specifier = ">=0.18.0" },
1021
  { name = "beautifulsoup4", specifier = ">=4.12" },
1022
  { name = "chromadb", marker = "extra == 'embeddings'", specifier = ">=0.4.0" },
1023
- { name = "duckduckgo-search", specifier = ">=6.0" },
1024
  { name = "gradio", specifier = ">=5.0" },
1025
  { name = "httpx", specifier = ">=0.27" },
1026
  { name = "mypy", marker = "extra == 'dev'", specifier = ">=1.10" },
@@ -1098,20 +1096,6 @@ wheels = [
1098
  { url = "https://files.pythonhosted.org/packages/11/a8/c6a4b901d17399c77cd81fb001ce8961e9f5e04d3daf27e8925cb012e163/docutils-0.22.3-py3-none-any.whl", hash = "sha256:bd772e4aca73aff037958d44f2be5229ded4c09927fcf8690c577b66234d6ceb", size = 633032 },
1099
  ]
1100
 
1101
- [[package]]
1102
- name = "duckduckgo-search"
1103
- version = "8.1.1"
1104
- source = { registry = "https://pypi.org/simple" }
1105
- dependencies = [
1106
- { name = "click" },
1107
- { name = "lxml" },
1108
- { name = "primp" },
1109
- ]
1110
- sdist = { url = "https://files.pythonhosted.org/packages/10/ef/07791a05751e6cc9de1dd49fb12730259ee109b18e6d097e25e6c32d5617/duckduckgo_search-8.1.1.tar.gz", hash = "sha256:9da91c9eb26a17e016ea1da26235d40404b46b0565ea86d75a9f78cc9441f935", size = 22868 }
1111
- wheels = [
1112
- { url = "https://files.pythonhosted.org/packages/db/72/c027b3b488b1010cf71670032fcf7e681d44b81829d484bb04e31a949a8d/duckduckgo_search-8.1.1-py3-none-any.whl", hash = "sha256:f48adbb06626ee05918f7e0cef3a45639e9939805c4fc179e68c48a12f1b5062", size = 18932 },
1113
- ]
1114
-
1115
  [[package]]
1116
  name = "durationpy"
1117
  version = "0.10"
@@ -2066,108 +2050,6 @@ wheels = [
2066
  { url = "https://files.pythonhosted.org/packages/f3/2b/851e78a60b85e8e8e8c6ebb9928f8e883df0340a93e34960ed9f0a41fa82/logfire_api-4.15.1-py3-none-any.whl", hash = "sha256:a88b5c4b6e4acbf6f35a3e992a63f271cf2797aefd21e1cfc93d52b21ade65f6", size = 95031 },
2067
  ]
2068
 
2069
- [[package]]
2070
- name = "lxml"
2071
- version = "6.0.2"
2072
- source = { registry = "https://pypi.org/simple" }
2073
- sdist = { url = "https://files.pythonhosted.org/packages/aa/88/262177de60548e5a2bfc46ad28232c9e9cbde697bd94132aeb80364675cb/lxml-6.0.2.tar.gz", hash = "sha256:cd79f3367bd74b317dda655dc8fcfa304d9eb6e4fb06b7168c5cf27f96e0cd62", size = 4073426 }
2074
- wheels = [
2075
- { url = "https://files.pythonhosted.org/packages/77/d5/becbe1e2569b474a23f0c672ead8a29ac50b2dc1d5b9de184831bda8d14c/lxml-6.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:13e35cbc684aadf05d8711a5d1b5857c92e5e580efa9a0d2be197199c8def607", size = 8634365 },
2076
- { url = "https://files.pythonhosted.org/packages/28/66/1ced58f12e804644426b85d0bb8a4478ca77bc1761455da310505f1a3526/lxml-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3b1675e096e17c6fe9c0e8c81434f5736c0739ff9ac6123c87c2d452f48fc938", size = 4650793 },
2077
- { url = "https://files.pythonhosted.org/packages/11/84/549098ffea39dfd167e3f174b4ce983d0eed61f9d8d25b7bf2a57c3247fc/lxml-6.0.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8ac6e5811ae2870953390452e3476694196f98d447573234592d30488147404d", size = 4944362 },
2078
- { url = "https://files.pythonhosted.org/packages/ac/bd/f207f16abf9749d2037453d56b643a7471d8fde855a231a12d1e095c4f01/lxml-6.0.2-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5aa0fc67ae19d7a64c3fe725dc9a1bb11f80e01f78289d05c6f62545affec438", size = 5083152 },
2079
- { url = "https://files.pythonhosted.org/packages/15/ae/bd813e87d8941d52ad5b65071b1affb48da01c4ed3c9c99e40abb266fbff/lxml-6.0.2-cp311-cp311-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de496365750cc472b4e7902a485d3f152ecf57bd3ba03ddd5578ed8ceb4c5964", size = 5023539 },
2080
- { url = "https://files.pythonhosted.org/packages/02/cd/9bfef16bd1d874fbe0cb51afb00329540f30a3283beb9f0780adbb7eec03/lxml-6.0.2-cp311-cp311-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:200069a593c5e40b8f6fc0d84d86d970ba43138c3e68619ffa234bc9bb806a4d", size = 5344853 },
2081
- { url = "https://files.pythonhosted.org/packages/b8/89/ea8f91594bc5dbb879734d35a6f2b0ad50605d7fb419de2b63d4211765cc/lxml-6.0.2-cp311-cp311-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7d2de809c2ee3b888b59f995625385f74629707c9355e0ff856445cdcae682b7", size = 5225133 },
2082
- { url = "https://files.pythonhosted.org/packages/b9/37/9c735274f5dbec726b2db99b98a43950395ba3d4a1043083dba2ad814170/lxml-6.0.2-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:b2c3da8d93cf5db60e8858c17684c47d01fee6405e554fb55018dd85fc23b178", size = 4677944 },
2083
- { url = "https://files.pythonhosted.org/packages/20/28/7dfe1ba3475d8bfca3878365075abe002e05d40dfaaeb7ec01b4c587d533/lxml-6.0.2-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:442de7530296ef5e188373a1ea5789a46ce90c4847e597856570439621d9c553", size = 5284535 },
2084
- { url = "https://files.pythonhosted.org/packages/e7/cf/5f14bc0de763498fc29510e3532bf2b4b3a1c1d5d0dff2e900c16ba021ef/lxml-6.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2593c77efde7bfea7f6389f1ab249b15ed4aa5bc5cb5131faa3b843c429fbedb", size = 5067343 },
2085
- { url = "https://files.pythonhosted.org/packages/1c/b0/bb8275ab5472f32b28cfbbcc6db7c9d092482d3439ca279d8d6fa02f7025/lxml-6.0.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:3e3cb08855967a20f553ff32d147e14329b3ae70ced6edc2f282b94afbc74b2a", size = 4725419 },
2086
- { url = "https://files.pythonhosted.org/packages/25/4c/7c222753bc72edca3b99dbadba1b064209bc8ed4ad448af990e60dcce462/lxml-6.0.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:2ed6c667fcbb8c19c6791bbf40b7268ef8ddf5a96940ba9404b9f9a304832f6c", size = 5275008 },
2087
- { url = "https://files.pythonhosted.org/packages/6c/8c/478a0dc6b6ed661451379447cdbec77c05741a75736d97e5b2b729687828/lxml-6.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b8f18914faec94132e5b91e69d76a5c1d7b0c73e2489ea8929c4aaa10b76bbf7", size = 5248906 },
2088
- { url = "https://files.pythonhosted.org/packages/2d/d9/5be3a6ab2784cdf9accb0703b65e1b64fcdd9311c9f007630c7db0cfcce1/lxml-6.0.2-cp311-cp311-win32.whl", hash = "sha256:6605c604e6daa9e0d7f0a2137bdc47a2e93b59c60a65466353e37f8272f47c46", size = 3610357 },
2089
- { url = "https://files.pythonhosted.org/packages/e2/7d/ca6fb13349b473d5732fb0ee3eec8f6c80fc0688e76b7d79c1008481bf1f/lxml-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e5867f2651016a3afd8dd2c8238baa66f1e2802f44bc17e236f547ace6647078", size = 4036583 },
2090
- { url = "https://files.pythonhosted.org/packages/ab/a2/51363b5ecd3eab46563645f3a2c3836a2fc67d01a1b87c5017040f39f567/lxml-6.0.2-cp311-cp311-win_arm64.whl", hash = "sha256:4197fb2534ee05fd3e7afaab5d8bfd6c2e186f65ea7f9cd6a82809c887bd1285", size = 3680591 },
2091
- { url = "https://files.pythonhosted.org/packages/f3/c8/8ff2bc6b920c84355146cd1ab7d181bc543b89241cfb1ebee824a7c81457/lxml-6.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a59f5448ba2ceccd06995c95ea59a7674a10de0810f2ce90c9006f3cbc044456", size = 8661887 },
2092
- { url = "https://files.pythonhosted.org/packages/37/6f/9aae1008083bb501ef63284220ce81638332f9ccbfa53765b2b7502203cf/lxml-6.0.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e8113639f3296706fbac34a30813929e29247718e88173ad849f57ca59754924", size = 4667818 },
2093
- { url = "https://files.pythonhosted.org/packages/f1/ca/31fb37f99f37f1536c133476674c10b577e409c0a624384147653e38baf2/lxml-6.0.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a8bef9b9825fa8bc816a6e641bb67219489229ebc648be422af695f6e7a4fa7f", size = 4950807 },
2094
- { url = "https://files.pythonhosted.org/packages/da/87/f6cb9442e4bada8aab5ae7e1046264f62fdbeaa6e3f6211b93f4c0dd97f1/lxml-6.0.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:65ea18d710fd14e0186c2f973dc60bb52039a275f82d3c44a0e42b43440ea534", size = 5109179 },
2095
- { url = "https://files.pythonhosted.org/packages/c8/20/a7760713e65888db79bbae4f6146a6ae5c04e4a204a3c48896c408cd6ed2/lxml-6.0.2-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c371aa98126a0d4c739ca93ceffa0fd7a5d732e3ac66a46e74339acd4d334564", size = 5023044 },
2096
- { url = "https://files.pythonhosted.org/packages/a2/b0/7e64e0460fcb36471899f75831509098f3fd7cd02a3833ac517433cb4f8f/lxml-6.0.2-cp312-cp312-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:700efd30c0fa1a3581d80a748157397559396090a51d306ea59a70020223d16f", size = 5359685 },
2097
- { url = "https://files.pythonhosted.org/packages/b9/e1/e5df362e9ca4e2f48ed6411bd4b3a0ae737cc842e96877f5bf9428055ab4/lxml-6.0.2-cp312-cp312-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c33e66d44fe60e72397b487ee92e01da0d09ba2d66df8eae42d77b6d06e5eba0", size = 5654127 },
2098
- { url = "https://files.pythonhosted.org/packages/c6/d1/232b3309a02d60f11e71857778bfcd4acbdb86c07db8260caf7d008b08f8/lxml-6.0.2-cp312-cp312-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:90a345bbeaf9d0587a3aaffb7006aa39ccb6ff0e96a57286c0cb2fd1520ea192", size = 5253958 },
2099
- { url = "https://files.pythonhosted.org/packages/35/35/d955a070994725c4f7d80583a96cab9c107c57a125b20bb5f708fe941011/lxml-6.0.2-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:064fdadaf7a21af3ed1dcaa106b854077fbeada827c18f72aec9346847cd65d0", size = 4711541 },
2100
- { url = "https://files.pythonhosted.org/packages/1e/be/667d17363b38a78c4bd63cfd4b4632029fd68d2c2dc81f25ce9eb5224dd5/lxml-6.0.2-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fbc74f42c3525ac4ffa4b89cbdd00057b6196bcefe8bce794abd42d33a018092", size = 5267426 },
2101
- { url = "https://files.pythonhosted.org/packages/ea/47/62c70aa4a1c26569bc958c9ca86af2bb4e1f614e8c04fb2989833874f7ae/lxml-6.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6ddff43f702905a4e32bc24f3f2e2edfe0f8fde3277d481bffb709a4cced7a1f", size = 5064917 },
2102
- { url = "https://files.pythonhosted.org/packages/bd/55/6ceddaca353ebd0f1908ef712c597f8570cc9c58130dbb89903198e441fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6da5185951d72e6f5352166e3da7b0dc27aa70bd1090b0eb3f7f7212b53f1bb8", size = 4788795 },
2103
- { url = "https://files.pythonhosted.org/packages/cf/e8/fd63e15da5e3fd4c2146f8bbb3c14e94ab850589beab88e547b2dbce22e1/lxml-6.0.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:57a86e1ebb4020a38d295c04fc79603c7899e0df71588043eb218722dabc087f", size = 5676759 },
2104
- { url = "https://files.pythonhosted.org/packages/76/47/b3ec58dc5c374697f5ba37412cd2728f427d056315d124dd4b61da381877/lxml-6.0.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:2047d8234fe735ab77802ce5f2297e410ff40f5238aec569ad7c8e163d7b19a6", size = 5255666 },
2105
- { url = "https://files.pythonhosted.org/packages/19/93/03ba725df4c3d72afd9596eef4a37a837ce8e4806010569bedfcd2cb68fd/lxml-6.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6f91fd2b2ea15a6800c8e24418c0775a1694eefc011392da73bc6cef2623b322", size = 5277989 },
2106
- { url = "https://files.pythonhosted.org/packages/c6/80/c06de80bfce881d0ad738576f243911fccf992687ae09fd80b734712b39c/lxml-6.0.2-cp312-cp312-win32.whl", hash = "sha256:3ae2ce7d6fedfb3414a2b6c5e20b249c4c607f72cb8d2bb7cc9c6ec7c6f4e849", size = 3611456 },
2107
- { url = "https://files.pythonhosted.org/packages/f7/d7/0cdfb6c3e30893463fb3d1e52bc5f5f99684a03c29a0b6b605cfae879cd5/lxml-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:72c87e5ee4e58a8354fb9c7c84cbf95a1c8236c127a5d1b7683f04bed8361e1f", size = 4011793 },
2108
- { url = "https://files.pythonhosted.org/packages/ea/7b/93c73c67db235931527301ed3785f849c78991e2e34f3fd9a6663ffda4c5/lxml-6.0.2-cp312-cp312-win_arm64.whl", hash = "sha256:61cb10eeb95570153e0c0e554f58df92ecf5109f75eacad4a95baa709e26c3d6", size = 3672836 },
2109
- { url = "https://files.pythonhosted.org/packages/53/fd/4e8f0540608977aea078bf6d79f128e0e2c2bba8af1acf775c30baa70460/lxml-6.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:9b33d21594afab46f37ae58dfadd06636f154923c4e8a4d754b0127554eb2e77", size = 8648494 },
2110
- { url = "https://files.pythonhosted.org/packages/5d/f4/2a94a3d3dfd6c6b433501b8d470a1960a20ecce93245cf2db1706adf6c19/lxml-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6c8963287d7a4c5c9a432ff487c52e9c5618667179c18a204bdedb27310f022f", size = 4661146 },
2111
- { url = "https://files.pythonhosted.org/packages/25/2e/4efa677fa6b322013035d38016f6ae859d06cac67437ca7dc708a6af7028/lxml-6.0.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1941354d92699fb5ffe6ed7b32f9649e43c2feb4b97205f75866f7d21aa91452", size = 4946932 },
2112
- { url = "https://files.pythonhosted.org/packages/ce/0f/526e78a6d38d109fdbaa5049c62e1d32fdd70c75fb61c4eadf3045d3d124/lxml-6.0.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bb2f6ca0ae2d983ded09357b84af659c954722bbf04dea98030064996d156048", size = 5100060 },
2113
- { url = "https://files.pythonhosted.org/packages/81/76/99de58d81fa702cc0ea7edae4f4640416c2062813a00ff24bd70ac1d9c9b/lxml-6.0.2-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb2a12d704f180a902d7fa778c6d71f36ceb7b0d317f34cdc76a5d05aa1dd1df", size = 5019000 },
2114
- { url = "https://files.pythonhosted.org/packages/b5/35/9e57d25482bc9a9882cb0037fdb9cc18f4b79d85df94fa9d2a89562f1d25/lxml-6.0.2-cp313-cp313-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:6ec0e3f745021bfed19c456647f0298d60a24c9ff86d9d051f52b509663feeb1", size = 5348496 },
2115
- { url = "https://files.pythonhosted.org/packages/a6/8e/cb99bd0b83ccc3e8f0f528e9aa1f7a9965dfec08c617070c5db8d63a87ce/lxml-6.0.2-cp313-cp313-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:846ae9a12d54e368933b9759052d6206a9e8b250291109c48e350c1f1f49d916", size = 5643779 },
2116
- { url = "https://files.pythonhosted.org/packages/d0/34/9e591954939276bb679b73773836c6684c22e56d05980e31d52a9a8deb18/lxml-6.0.2-cp313-cp313-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ef9266d2aa545d7374938fb5c484531ef5a2ec7f2d573e62f8ce722c735685fd", size = 5244072 },
- { url = "https://files.pythonhosted.org/packages/8d/27/b29ff065f9aaca443ee377aff699714fcbffb371b4fce5ac4ca759e436d5/lxml-6.0.2-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:4077b7c79f31755df33b795dc12119cb557a0106bfdab0d2c2d97bd3cf3dffa6", size = 4718675 },
- { url = "https://files.pythonhosted.org/packages/2b/9f/f756f9c2cd27caa1a6ef8c32ae47aadea697f5c2c6d07b0dae133c244fbe/lxml-6.0.2-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a7c5d5e5f1081955358533be077166ee97ed2571d6a66bdba6ec2f609a715d1a", size = 5255171 },
- { url = "https://files.pythonhosted.org/packages/61/46/bb85ea42d2cb1bd8395484fd72f38e3389611aa496ac7772da9205bbda0e/lxml-6.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:8f8d0cbd0674ee89863a523e6994ac25fd5be9c8486acfc3e5ccea679bad2679", size = 5057175 },
- { url = "https://files.pythonhosted.org/packages/95/0c/443fc476dcc8e41577f0af70458c50fe299a97bb6b7505bb1ae09aa7f9ac/lxml-6.0.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:2cbcbf6d6e924c28f04a43f3b6f6e272312a090f269eff68a2982e13e5d57659", size = 4785688 },
- { url = "https://files.pythonhosted.org/packages/48/78/6ef0b359d45bb9697bc5a626e1992fa5d27aa3f8004b137b2314793b50a0/lxml-6.0.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:dfb874cfa53340009af6bdd7e54ebc0d21012a60a4e65d927c2e477112e63484", size = 5660655 },
- { url = "https://files.pythonhosted.org/packages/ff/ea/e1d33808f386bc1339d08c0dcada6e4712d4ed8e93fcad5f057070b7988a/lxml-6.0.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:fb8dae0b6b8b7f9e96c26fdd8121522ce5de9bb5538010870bd538683d30e9a2", size = 5247695 },
- { url = "https://files.pythonhosted.org/packages/4f/47/eba75dfd8183673725255247a603b4ad606f4ae657b60c6c145b381697da/lxml-6.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:358d9adae670b63e95bc59747c72f4dc97c9ec58881d4627fe0120da0f90d314", size = 5269841 },
- { url = "https://files.pythonhosted.org/packages/76/04/5c5e2b8577bc936e219becb2e98cdb1aca14a4921a12995b9d0c523502ae/lxml-6.0.2-cp313-cp313-win32.whl", hash = "sha256:e8cd2415f372e7e5a789d743d133ae474290a90b9023197fd78f32e2dc6873e2", size = 3610700 },
- { url = "https://files.pythonhosted.org/packages/fe/0a/4643ccc6bb8b143e9f9640aa54e38255f9d3b45feb2cbe7ae2ca47e8782e/lxml-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:b30d46379644fbfc3ab81f8f82ae4de55179414651f110a1514f0b1f8f6cb2d7", size = 4010347 },
- { url = "https://files.pythonhosted.org/packages/31/ef/dcf1d29c3f530577f61e5fe2f1bd72929acf779953668a8a47a479ae6f26/lxml-6.0.2-cp313-cp313-win_arm64.whl", hash = "sha256:13dcecc9946dca97b11b7c40d29fba63b55ab4170d3c0cf8c0c164343b9bfdcf", size = 3671248 },
- { url = "https://files.pythonhosted.org/packages/03/15/d4a377b385ab693ce97b472fe0c77c2b16ec79590e688b3ccc71fba19884/lxml-6.0.2-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:b0c732aa23de8f8aec23f4b580d1e52905ef468afb4abeafd3fec77042abb6fe", size = 8659801 },
- { url = "https://files.pythonhosted.org/packages/c8/e8/c128e37589463668794d503afaeb003987373c5f94d667124ffd8078bbd9/lxml-6.0.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:4468e3b83e10e0317a89a33d28f7aeba1caa4d1a6fd457d115dd4ffe90c5931d", size = 4659403 },
- { url = "https://files.pythonhosted.org/packages/00/ce/74903904339decdf7da7847bb5741fc98a5451b42fc419a86c0c13d26fe2/lxml-6.0.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:abd44571493973bad4598a3be7e1d807ed45aa2adaf7ab92ab7c62609569b17d", size = 4966974 },
- { url = "https://files.pythonhosted.org/packages/1f/d3/131dec79ce61c5567fecf82515bd9bc36395df42501b50f7f7f3bd065df0/lxml-6.0.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:370cd78d5855cfbffd57c422851f7d3864e6ae72d0da615fca4dad8c45d375a5", size = 5102953 },
- { url = "https://files.pythonhosted.org/packages/3a/ea/a43ba9bb750d4ffdd885f2cd333572f5bb900cd2408b67fdda07e85978a0/lxml-6.0.2-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:901e3b4219fa04ef766885fb40fa516a71662a4c61b80c94d25336b4934b71c0", size = 5055054 },
- { url = "https://files.pythonhosted.org/packages/60/23/6885b451636ae286c34628f70a7ed1fcc759f8d9ad382d132e1c8d3d9bfd/lxml-6.0.2-cp314-cp314-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:a4bf42d2e4cf52c28cc1812d62426b9503cdb0c87a6de81442626aa7d69707ba", size = 5352421 },
- { url = "https://files.pythonhosted.org/packages/48/5b/fc2ddfc94ddbe3eebb8e9af6e3fd65e2feba4967f6a4e9683875c394c2d8/lxml-6.0.2-cp314-cp314-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b2c7fdaa4d7c3d886a42534adec7cfac73860b89b4e5298752f60aa5984641a0", size = 5673684 },
- { url = "https://files.pythonhosted.org/packages/29/9c/47293c58cc91769130fbf85531280e8cc7868f7fbb6d92f4670071b9cb3e/lxml-6.0.2-cp314-cp314-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:98a5e1660dc7de2200b00d53fa00bcd3c35a3608c305d45a7bbcaf29fa16e83d", size = 5252463 },
- { url = "https://files.pythonhosted.org/packages/9b/da/ba6eceb830c762b48e711ded880d7e3e89fc6c7323e587c36540b6b23c6b/lxml-6.0.2-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:dc051506c30b609238d79eda75ee9cab3e520570ec8219844a72a46020901e37", size = 4698437 },
- { url = "https://files.pythonhosted.org/packages/a5/24/7be3f82cb7990b89118d944b619e53c656c97dc89c28cfb143fdb7cd6f4d/lxml-6.0.2-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8799481bbdd212470d17513a54d568f44416db01250f49449647b5ab5b5dccb9", size = 5269890 },
- { url = "https://files.pythonhosted.org/packages/1b/bd/dcfb9ea1e16c665efd7538fc5d5c34071276ce9220e234217682e7d2c4a5/lxml-6.0.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9261bb77c2dab42f3ecd9103951aeca2c40277701eb7e912c545c1b16e0e4917", size = 5097185 },
- { url = "https://files.pythonhosted.org/packages/21/04/a60b0ff9314736316f28316b694bccbbabe100f8483ad83852d77fc7468e/lxml-6.0.2-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:65ac4a01aba353cfa6d5725b95d7aed6356ddc0a3cd734de00124d285b04b64f", size = 4745895 },
- { url = "https://files.pythonhosted.org/packages/d6/bd/7d54bd1846e5a310d9c715921c5faa71cf5c0853372adf78aee70c8d7aa2/lxml-6.0.2-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:b22a07cbb82fea98f8a2fd814f3d1811ff9ed76d0fc6abc84eb21527596e7cc8", size = 5695246 },
- { url = "https://files.pythonhosted.org/packages/fd/32/5643d6ab947bc371da21323acb2a6e603cedbe71cb4c99c8254289ab6f4e/lxml-6.0.2-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:d759cdd7f3e055d6bc8d9bec3ad905227b2e4c785dc16c372eb5b5e83123f48a", size = 5260797 },
- { url = "https://files.pythonhosted.org/packages/33/da/34c1ec4cff1eea7d0b4cd44af8411806ed943141804ac9c5d565302afb78/lxml-6.0.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:945da35a48d193d27c188037a05fec5492937f66fb1958c24fc761fb9d40d43c", size = 5277404 },
- { url = "https://files.pythonhosted.org/packages/82/57/4eca3e31e54dc89e2c3507e1cd411074a17565fa5ffc437c4ae0a00d439e/lxml-6.0.2-cp314-cp314-win32.whl", hash = "sha256:be3aaa60da67e6153eb15715cc2e19091af5dc75faef8b8a585aea372507384b", size = 3670072 },
- { url = "https://files.pythonhosted.org/packages/e3/e0/c96cf13eccd20c9421ba910304dae0f619724dcf1702864fd59dd386404d/lxml-6.0.2-cp314-cp314-win_amd64.whl", hash = "sha256:fa25afbadead523f7001caf0c2382afd272c315a033a7b06336da2637d92d6ed", size = 4080617 },
- { url = "https://files.pythonhosted.org/packages/d5/5d/b3f03e22b3d38d6f188ef044900a9b29b2fe0aebb94625ce9fe244011d34/lxml-6.0.2-cp314-cp314-win_arm64.whl", hash = "sha256:063eccf89df5b24e361b123e257e437f9e9878f425ee9aae3144c77faf6da6d8", size = 3754930 },
- { url = "https://files.pythonhosted.org/packages/5e/5c/42c2c4c03554580708fc738d13414801f340c04c3eff90d8d2d227145275/lxml-6.0.2-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:6162a86d86893d63084faaf4ff937b3daea233e3682fb4474db07395794fa80d", size = 8910380 },
- { url = "https://files.pythonhosted.org/packages/bf/4f/12df843e3e10d18d468a7557058f8d3733e8b6e12401f30b1ef29360740f/lxml-6.0.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:414aaa94e974e23a3e92e7ca5b97d10c0cf37b6481f50911032c69eeb3991bba", size = 4775632 },
- { url = "https://files.pythonhosted.org/packages/e4/0c/9dc31e6c2d0d418483cbcb469d1f5a582a1cd00a1f4081953d44051f3c50/lxml-6.0.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:48461bd21625458dd01e14e2c38dd0aea69addc3c4f960c30d9f59d7f93be601", size = 4975171 },
- { url = "https://files.pythonhosted.org/packages/e7/2b/9b870c6ca24c841bdd887504808f0417aa9d8d564114689266f19ddf29c8/lxml-6.0.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:25fcc59afc57d527cfc78a58f40ab4c9b8fd096a9a3f964d2781ffb6eb33f4ed", size = 5110109 },
- { url = "https://files.pythonhosted.org/packages/bf/0c/4f5f2a4dd319a178912751564471355d9019e220c20d7db3fb8307ed8582/lxml-6.0.2-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5179c60288204e6ddde3f774a93350177e08876eaf3ab78aa3a3649d43eb7d37", size = 5041061 },
- { url = "https://files.pythonhosted.org/packages/12/64/554eed290365267671fe001a20d72d14f468ae4e6acef1e179b039436967/lxml-6.0.2-cp314-cp314t-manylinux_2_26_i686.manylinux_2_28_i686.whl", hash = "sha256:967aab75434de148ec80597b75062d8123cadf2943fb4281f385141e18b21338", size = 5306233 },
- { url = "https://files.pythonhosted.org/packages/7a/31/1d748aa275e71802ad9722df32a7a35034246b42c0ecdd8235412c3396ef/lxml-6.0.2-cp314-cp314t-manylinux_2_26_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d100fcc8930d697c6561156c6810ab4a508fb264c8b6779e6e61e2ed5e7558f9", size = 5604739 },
- { url = "https://files.pythonhosted.org/packages/8f/41/2c11916bcac09ed561adccacceaedd2bf0e0b25b297ea92aab99fd03d0fa/lxml-6.0.2-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ca59e7e13e5981175b8b3e4ab84d7da57993eeff53c07764dcebda0d0e64ecd", size = 5225119 },
- { url = "https://files.pythonhosted.org/packages/99/05/4e5c2873d8f17aa018e6afde417c80cc5d0c33be4854cce3ef5670c49367/lxml-6.0.2-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:957448ac63a42e2e49531b9d6c0fa449a1970dbc32467aaad46f11545be9af1d", size = 4633665 },
- { url = "https://files.pythonhosted.org/packages/0f/c9/dcc2da1bebd6275cdc723b515f93edf548b82f36a5458cca3578bc899332/lxml-6.0.2-cp314-cp314t-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b7fc49c37f1786284b12af63152fe1d0990722497e2d5817acfe7a877522f9a9", size = 5234997 },
- { url = "https://files.pythonhosted.org/packages/9c/e2/5172e4e7468afca64a37b81dba152fc5d90e30f9c83c7c3213d6a02a5ce4/lxml-6.0.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e19e0643cc936a22e837f79d01a550678da8377d7d801a14487c10c34ee49c7e", size = 5090957 },
- { url = "https://files.pythonhosted.org/packages/a5/b3/15461fd3e5cd4ddcb7938b87fc20b14ab113b92312fc97afe65cd7c85de1/lxml-6.0.2-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:1db01e5cf14345628e0cbe71067204db658e2fb8e51e7f33631f5f4735fefd8d", size = 4764372 },
- { url = "https://files.pythonhosted.org/packages/05/33/f310b987c8bf9e61c4dd8e8035c416bd3230098f5e3cfa69fc4232de7059/lxml-6.0.2-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:875c6b5ab39ad5291588aed6925fac99d0097af0dd62f33c7b43736043d4a2ec", size = 5634653 },
- { url = "https://files.pythonhosted.org/packages/70/ff/51c80e75e0bc9382158133bdcf4e339b5886c6ee2418b5199b3f1a61ed6d/lxml-6.0.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:cdcbed9ad19da81c480dfd6dd161886db6096083c9938ead313d94b30aadf272", size = 5233795 },
- { url = "https://files.pythonhosted.org/packages/56/4d/4856e897df0d588789dd844dbed9d91782c4ef0b327f96ce53c807e13128/lxml-6.0.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:80dadc234ebc532e09be1975ff538d154a7fa61ea5031c03d25178855544728f", size = 5257023 },
- { url = "https://files.pythonhosted.org/packages/0f/85/86766dfebfa87bea0ab78e9ff7a4b4b45225df4b4d3b8cc3c03c5cd68464/lxml-6.0.2-cp314-cp314t-win32.whl", hash = "sha256:da08e7bb297b04e893d91087df19638dc7a6bb858a954b0cc2b9f5053c922312", size = 3911420 },
- { url = "https://files.pythonhosted.org/packages/fe/1a/b248b355834c8e32614650b8008c69ffeb0ceb149c793961dd8c0b991bb3/lxml-6.0.2-cp314-cp314t-win_amd64.whl", hash = "sha256:252a22982dca42f6155125ac76d3432e548a7625d56f5a273ee78a5057216eca", size = 4406837 },
- { url = "https://files.pythonhosted.org/packages/92/aa/df863bcc39c5e0946263454aba394de8a9084dbaff8ad143846b0d844739/lxml-6.0.2-cp314-cp314t-win_arm64.whl", hash = "sha256:bb4c1847b303835d89d785a18801a883436cdfd5dc3d62947f9c49e24f0f5a2c", size = 3822205 },
- { url = "https://files.pythonhosted.org/packages/0b/11/29d08bc103a62c0eba8016e7ed5aeebbf1e4312e83b0b1648dd203b0e87d/lxml-6.0.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1c06035eafa8404b5cf475bb37a9f6088b0aca288d4ccc9d69389750d5543700", size = 3949829 },
- { url = "https://files.pythonhosted.org/packages/12/b3/52ab9a3b31e5ab8238da241baa19eec44d2ab426532441ee607165aebb52/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c7d13103045de1bdd6fe5d61802565f1a3537d70cd3abf596aa0af62761921ee", size = 4226277 },
- { url = "https://files.pythonhosted.org/packages/a0/33/1eaf780c1baad88224611df13b1c2a9dfa460b526cacfe769103ff50d845/lxml-6.0.2-pp311-pypy311_pp73-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0a3c150a95fbe5ac91de323aa756219ef9cf7fde5a3f00e2281e30f33fa5fa4f", size = 4330433 },
- { url = "https://files.pythonhosted.org/packages/7a/c1/27428a2ff348e994ab4f8777d3a0ad510b6b92d37718e5887d2da99952a2/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:60fa43be34f78bebb27812ed90f1925ec99560b0fa1decdb7d12b84d857d31e9", size = 4272119 },
- { url = "https://files.pythonhosted.org/packages/f0/d0/3020fa12bcec4ab62f97aab026d57c2f0cfd480a558758d9ca233bb6a79d/lxml-6.0.2-pp311-pypy311_pp73-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:21c73b476d3cfe836be731225ec3421fa2f048d84f6df6a8e70433dff1376d5a", size = 4417314 },
- { url = "https://files.pythonhosted.org/packages/6c/77/d7f491cbc05303ac6801651aabeb262d43f319288c1ea96c66b1d2692ff3/lxml-6.0.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:27220da5be049e936c3aca06f174e8827ca6445a4353a1995584311487fc4e3e", size = 3518768 },
- ]
-
  [[package]]
  name = "markdown-it-py"
  version = "4.0.0"
@@ -3382,22 +3264,6 @@ wheels = [
  { url = "https://files.pythonhosted.org/packages/5d/c4/b2d28e9d2edf4f1713eb3c29307f1a63f3d67cf09bdda29715a36a68921a/pre_commit-4.5.0-py2.py3-none-any.whl", hash = "sha256:25e2ce09595174d9c97860a95609f9f852c0614ba602de3561e267547f2335e1", size = 226429 },
  ]
 
- [[package]]
- name = "primp"
- version = "0.15.0"
- source = { registry = "https://pypi.org/simple" }
- sdist = { url = "https://files.pythonhosted.org/packages/56/0b/a87556189da4de1fc6360ca1aa05e8335509633f836cdd06dd17f0743300/primp-0.15.0.tar.gz", hash = "sha256:1af8ea4b15f57571ff7fc5e282a82c5eb69bc695e19b8ddeeda324397965b30a", size = 113022 }
- wheels = [
- { url = "https://files.pythonhosted.org/packages/f5/5a/146ac964b99ea7657ad67eb66f770be6577dfe9200cb28f9a95baffd6c3f/primp-0.15.0-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:1b281f4ca41a0c6612d4c6e68b96e28acfe786d226a427cd944baa8d7acd644f", size = 3178914 },
- { url = "https://files.pythonhosted.org/packages/bc/8a/cc2321e32db3ce64d6e32950d5bcbea01861db97bfb20b5394affc45b387/primp-0.15.0-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:489cbab55cd793ceb8f90bb7423c6ea64ebb53208ffcf7a044138e3c66d77299", size = 2955079 },
- { url = "https://files.pythonhosted.org/packages/c3/7b/cbd5d999a07ff2a21465975d4eb477ae6f69765e8fe8c9087dab250180d8/primp-0.15.0-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c18b45c23f94016215f62d2334552224236217aaeb716871ce0e4dcfa08eb161", size = 3281018 },
- { url = "https://files.pythonhosted.org/packages/1b/6e/a6221c612e61303aec2bcac3f0a02e8b67aee8c0db7bdc174aeb8010f975/primp-0.15.0-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e985a9cba2e3f96a323722e5440aa9eccaac3178e74b884778e926b5249df080", size = 3255229 },
- { url = "https://files.pythonhosted.org/packages/3b/54/bfeef5aca613dc660a69d0760a26c6b8747d8fdb5a7f20cb2cee53c9862f/primp-0.15.0-cp38-abi3-manylinux_2_34_armv7l.whl", hash = "sha256:6b84a6ffa083e34668ff0037221d399c24d939b5629cd38223af860de9e17a83", size = 3014522 },
- { url = "https://files.pythonhosted.org/packages/ac/96/84078e09f16a1dad208f2fe0f8a81be2cf36e024675b0f9eec0c2f6e2182/primp-0.15.0-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:592f6079646bdf5abbbfc3b0a28dac8de943f8907a250ce09398cda5eaebd260", size = 3418567 },
- { url = "https://files.pythonhosted.org/packages/6c/80/8a7a9587d3eb85be3d0b64319f2f690c90eb7953e3f73a9ddd9e46c8dc42/primp-0.15.0-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5a728e5a05f37db6189eb413d22c78bd143fa59dd6a8a26dacd43332b3971fe8", size = 3606279 },
- { url = "https://files.pythonhosted.org/packages/0c/dd/f0183ed0145e58cf9d286c1b2c14f63ccee987a4ff79ac85acc31b5d86bd/primp-0.15.0-cp38-abi3-win_amd64.whl", hash = "sha256:aeb6bd20b06dfc92cfe4436939c18de88a58c640752cf7f30d9e4ae893cdec32", size = 3149967 },
- ]
-
  [[package]]
  name = "prompt-toolkit"
  version = "3.0.52"