JawadBenali committed on
Commit
174f3a1
·
1 Parent(s): 93a1ab1

new commit

Browse files
Files changed (1) hide show
  1. app.py +39 -1
app.py CHANGED
@@ -12,7 +12,6 @@ from pathlib import Path
12
  from PIL import Image
13
  from plantuml import PlantUML
14
 
15
- from server import LLMClientSingleton
16
  from services.pattern_detector import PatternDetectionService, PatternRecommendation
17
  from services.sequence_service import CallGraphVisitor, ProjectSequenceAnalyzer, SequenceDiagramService
18
  from services.usecase_service import UseCaseDiagramService
@@ -41,6 +40,45 @@ logger = logging.getLogger(__name__)
41
  PLANTUML_SERVER_URL = 'http://www.plantuml.com/plantuml/img/'
42
  plantuml_client = PlantUML(url=PLANTUML_SERVER_URL)
43
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
44
 
45
  # Global singleton instance
46
  _llm_singleton = LLMClientSingleton()
 
12
  from PIL import Image
13
  from plantuml import PlantUML
14
 
 
15
  from services.pattern_detector import PatternDetectionService, PatternRecommendation
16
  from services.sequence_service import CallGraphVisitor, ProjectSequenceAnalyzer, SequenceDiagramService
17
  from services.usecase_service import UseCaseDiagramService
 
40
  PLANTUML_SERVER_URL = 'http://www.plantuml.com/plantuml/img/'
41
  plantuml_client = PlantUML(url=PLANTUML_SERVER_URL)
42
 
43
# --- SINGLETON LLM CLIENT ---
class LLMClientSingleton:
    """Singleton holder for the LLM client, avoiding repeated connections.

    All state is class-level by design: there is only ever one instance,
    created lazily on first construction, and the connected client is
    cached across calls.
    """

    _instance = None          # the sole instance of this class
    _llm_client = None        # cached client from the last successful factory
    _current_provider = None  # provider name that produced the cached client

    def __new__(cls):
        # Classic singleton: create the instance once, then reuse it.
        if cls._instance is None:
            cls._instance = super().__new__(cls)
        return cls._instance

    def get_client(self, preferred_provider: str = "openai", temperature: float = 0.0):
        """Return a cached LLM client, or create one with provider fallback.

        Tries the preferred provider first, then falls back to the other
        providers in a fixed per-provider order. Each factory is expected
        to raise on connection failure.

        Args:
            preferred_provider: one of "openai", "sambanova", "nebius",
                "gemini"; unknown values fall back to the "nebius" chain.
            temperature: sampling temperature forwarded to the factory.

        Returns:
            The connected client, or None if every provider failed.
        """
        # Reuse the cached client only if it was built for this provider.
        # NOTE(review): if the preferred provider previously failed and a
        # fallback was cached, this check intentionally re-attempts the
        # preferred provider on the next call.
        if self._llm_client is not None and self._current_provider == preferred_provider:
            return self._llm_client

        # Fallback chains as (name, factory) pairs so the logged/stored
        # provider name always matches the factory actually used.
        # BUGFIX: the previous version zip()ed the reordered factory list
        # against a fixed name list, mislabelling every fallback provider
        # and corrupting _current_provider (which broke the cache check).
        strategies = {
            "openai": [("openai", create_openai_llm), ("sambanova", create_sambanova_llm), ("nebius", create_nebius_llm)],
            "sambanova": [("sambanova", create_sambanova_llm), ("openai", create_openai_llm), ("nebius", create_nebius_llm)],
            "nebius": [("nebius", create_nebius_llm), ("openai", create_openai_llm), ("sambanova", create_sambanova_llm)],
            "gemini": [("gemini", create_gemini_llm), ("sambanova", create_sambanova_llm), ("nebius", create_nebius_llm)],
        }

        attempts = strategies.get(preferred_provider, strategies["nebius"])

        for name, factory in attempts:
            try:
                self._llm_client = factory(temperature=temperature)
                self._current_provider = name
                logger.info(f"✅ Connected to {name}")
                return self._llm_client
            except Exception as e:
                # Best-effort fallback: log a truncated reason and try the
                # next provider in the chain.
                logger.warning(f"⚠️ {name} failed: {str(e)[:100]}")

        logger.error("❌ All LLM providers failed")
        return None
82
 
83
  # Global singleton instance, shared module-wide; import this rather than
  # constructing LLMClientSingleton again (construction returns it anyway).
84
  _llm_singleton = LLMClientSingleton()