destinyebuka committed
Commit fc4db52 · 1 Parent(s): 821f84a
Files changed (4)
  1. app/ai/graph.py +0 -101
  2. app/ai/routes/chat.py +44 -130
  3. app/ai/routes/image_upload_routes.py +164 -0
  4. main.py +54 -62
app/ai/graph.py DELETED
@@ -1,101 +0,0 @@
1
- # app/ai/graph.py - COMPLETE LANGGRAPH WORKFLOW
2
- from langgraph.graph import StateGraph, START, END
3
- from typing import Dict
4
- from structlog import get_logger
5
-
6
- from app.ai.state import ChatState
7
- from app.ai.nodes.intent_node import intent_node
8
- from app.ai.nodes.draft_node import draft_node
9
- from app.ai.nodes.search_node import search_node
10
- from app.ai.nodes.publish_node import publish_node
11
-
12
- logger = get_logger(__name__)
13
-
14
- # ============================================
15
- # BUILD WORKFLOW
16
- # ============================================
17
- workflow = StateGraph(ChatState)
18
-
19
- # ===== ADD NODES =====
20
- workflow.add_node("intent", intent_node)
21
- workflow.add_node("draft", draft_node)
22
- workflow.add_node("search", search_node)
23
- workflow.add_node("publish", publish_node)
24
-
25
-
26
- # ===== ROUTING LOGIC =====
27
- def route_from_intent(state: Dict) -> str:
28
- """Route based on intent"""
29
- intent = state.get("intent")
30
-
31
- logger.info(f"🔀 Routing from intent", intent=intent)
32
-
33
- if intent == "list":
34
- return "draft"
35
- elif intent == "search":
36
- return "search"
37
- elif intent == "my_listings":
38
- return "search" # Reuse search node
39
- else:
40
- return END
41
-
42
-
43
- def route_from_draft(state: Dict) -> str:
44
- """Route from draft"""
45
- status = state.get("status")
46
-
47
- logger.info(f"🔀 Routing from draft", status=status)
48
-
49
- if status == "publishing":
50
- return "publish"
51
- elif status == "discarded":
52
- return END
53
- elif status == "preview_shown":
54
- return END
55
- else:
56
- return END
57
-
58
-
59
- def route_after_publish(state: Dict) -> str:
60
- """Route after publish"""
61
- logger.info(f"🔀 Routing after publish")
62
- return END
63
-
64
-
65
- # ===== ADD EDGES =====
66
-
67
- # Start → Intent (always start here)
68
- workflow.add_edge(START, "intent")
69
-
70
- # Intent → Draft/Search/End (conditional)
71
- workflow.add_conditional_edges(
72
- "intent",
73
- route_from_intent,
74
- {
75
- "draft": "draft",
76
- "search": "search",
77
- END: END,
78
- }
79
- )
80
-
81
- # Draft → Publish/End (conditional)
82
- workflow.add_conditional_edges(
83
- "draft",
84
- route_from_draft,
85
- {
86
- "publish": "publish",
87
- END: END,
88
- }
89
- )
90
-
91
- # Publish → End
92
- workflow.add_edge("publish", END)
93
-
94
- # Search → End
95
- workflow.add_edge("search", END)
96
-
97
-
98
- # ===== COMPILE =====
99
- agent = workflow.compile()
100
-
101
- logger.info("✅ LangGraph workflow compiled successfully")
app/ai/routes/chat.py CHANGED
@@ -1,29 +1,17 @@
1
- # app/ai/routes/chat.py - FIXED: Better context handling & session management
2
- """
3
- Chat Endpoint - Main integration point.
4
-
5
- Flow:
6
- 1. Authenticate user
7
- 2. Validate/clear stale session if needed
8
- 3. Handle image upload (if present)
9
- 4. Process message with LLM tool calling (intent detection + routing)
10
- 5. Save to memory
11
- 6. Return response to frontend
12
- """
13
 
14
  from fastapi import APIRouter, Depends, HTTPException
15
  from fastapi.security import HTTPBearer
16
  from pydantic import BaseModel
17
- from typing import Optional, List, Dict, Any
18
  from structlog import get_logger
19
  from datetime import datetime, timedelta
20
 
21
  from app.guards.jwt_guard import decode_access_token
22
  from app.ai.memory.redis_context_memory import get_current_memory
23
  from app.ai.tools.intent_detector_tool import process_user_message
24
- from app.ai.tools.listing_tool import process_listing
25
- from app.ai.tools.casual_chat_tool import process_casual_chat
26
- from app.ai.services.image_service import upload_to_cloudflare
27
  from app.ai.memory.redis_memory import is_rate_limited
28
 
29
  logger = get_logger(__name__)
@@ -32,14 +20,12 @@ router = APIRouter()
32
  security = HTTPBearer()
33
 
34
 
35
- # ========== REQUEST/RESPONSE MODELS ==========
36
  class AskBody(BaseModel):
37
  message: str
38
  session_id: Optional[str] = None
39
  thread_id: Optional[str] = None
40
- image_file: Optional[bytes] = None
41
- image_mime: Optional[str] = None
42
- start_new_session: Optional[bool] = False # NEW: Allow client to start fresh
43
 
44
 
45
  class ChatResponse(BaseModel):
@@ -52,13 +38,9 @@ class ChatResponse(BaseModel):
52
  error: Optional[str] = None
53
 
54
 
55
- # ========== HELPER: Clear stale context ==========
56
  async def should_reset_context(context: Dict) -> bool:
57
- """
58
- Determine if context should be reset based on:
59
- - User has been idle for 30+ minutes
60
- - Session is in incomplete state for too long
61
- """
62
 
63
  if not context or not context.get("last_activity"):
64
  return False
@@ -69,7 +51,7 @@ async def should_reset_context(context: Dict) -> bool:
69
 
70
  # Reset if idle for more than 30 minutes
71
  if idle_time > timedelta(minutes=30):
72
- logger.info("⏱️ Session idle for 30+ minutes, resetting context")
73
  return True
74
 
75
  return False
@@ -90,13 +72,9 @@ def reset_context() -> Dict:
90
  }
91
 
92
 
93
- # ========== HELPER: Detect if user wants to start fresh ==========
94
  def user_wants_fresh_start(message: str) -> bool:
95
- """
96
- Detect if user is trying to start a new conversation:
97
- - "start fresh", "new listing", "clear", "reset"
98
- - "start over", "begin again", "clear session"
99
- """
100
 
101
  message_lower = message.lower().strip()
102
  fresh_start_keywords = [
@@ -113,13 +91,13 @@ def user_wants_fresh_start(message: str) -> bool:
113
  ]
114
 
115
  if any(keyword in message_lower for keyword in fresh_start_keywords):
116
- logger.info("🔄 User requested fresh start")
117
  return True
118
 
119
  return False
120
 
121
 
122
- # ========== MAIN CHAT ENDPOINT ==========
123
  @router.post("/ask", response_model=ChatResponse)
124
  async def ask_ai(
125
  body: AskBody,
@@ -128,47 +106,39 @@ async def ask_ai(
128
  """
129
  Main chat endpoint - Intent detection & tool routing.
130
 
131
- Flow:
132
- 1. Authenticate
133
- 2. Rate limit check
134
- 3. Get session memory
135
- 4. Check if context should be reset (idle/stale)
136
- 5. Handle image upload (if present)
137
- 6. Process message with LLM (intent detection + tool calling)
138
- 7. Save to memory
139
- 8. Return response
140
  """
141
 
142
  try:
143
- # ========== 1. AUTHENTICATE ==========
144
  payload = decode_access_token(token.credentials)
145
  if not payload:
146
- logger.warning("🔓 Invalid token")
147
  raise HTTPException(status_code=401, detail="Invalid token")
148
 
149
  user_id = payload["user_id"]
150
  user_role = payload.get("role", "renter")
151
 
152
- # ========== 2. RATE LIMIT CHECK ==========
153
  if await is_rate_limited(user_id):
154
- logger.warning("⚠️ Rate limit exceeded", user_id=user_id)
155
  raise HTTPException(status_code=429, detail="Rate limit exceeded")
156
 
157
- # ========== 3. GET SESSION MEMORY ==========
158
  session_id = body.session_id or "default"
159
  memory = await get_current_memory(user_id, session_id)
160
  context = await memory.get_context()
161
 
162
  logger.info(
163
- "💬 Chat message received",
164
  user_id=user_id,
165
  session_id=session_id,
166
  status=context.get("status"),
167
- has_image=body.image_file is not None,
168
  message_len=len(body.message) if body.message else 0
169
  )
170
 
171
- # ========== 4. CHECK IF CONTEXT SHOULD BE RESET ==========
172
  should_reset = (
173
  body.start_new_session or
174
  user_wants_fresh_start(body.message) or
@@ -176,15 +146,13 @@ async def ask_ai(
176
  )
177
 
178
  if should_reset:
179
- logger.info("🔄 Resetting context", user_id=user_id, session_id=session_id)
180
  context = reset_context()
181
  context["user_role"] = user_role
182
  await memory.update_context(context)
183
-
184
- # Clear message history too
185
  await memory.clear()
186
 
187
- response_msg = "Fresh start! What can I help you with? I can help you list a property, search for one, or just chat!"
188
 
189
  return ChatResponse(
190
  success=True,
@@ -193,70 +161,19 @@ async def ask_ai(
193
  state=context
194
  )
195
 
196
- # ========== 5. HANDLE IMAGE UPLOAD (if present) ==========
197
- if body.image_file:
198
- logger.info("📸 Image upload request")
199
-
200
- success, image_url, error = await upload_to_cloudflare(
201
- image_bytes=body.image_file,
202
- mime_type=body.image_mime or "image/jpeg",
203
- )
204
-
205
- if not success:
206
- logger.error("❌ Image upload failed", error=error)
207
- return ChatResponse(
208
- success=False,
209
- text=f"❌ Image upload failed: {error}",
210
- action="error",
211
- error=error
212
- )
213
-
214
- logger.info("✅ Image URL received from Cloudflare", url=image_url[:50] + "...")
215
-
216
- # Update state with image URL
217
- if "state" not in context:
218
- context["state"] = {}
219
-
220
- if "images" not in context["state"]:
221
- context["state"]["images"] = []
222
-
223
- context["state"]["images"].append(image_url)
224
- context["last_activity"] = datetime.utcnow().isoformat()
225
-
226
- # Add to message history
227
- user_message = f"[Image uploaded: {image_url}]"
228
- await memory.add_message("user", user_message)
229
- await memory.update_context(context)
230
-
231
- logger.info(
232
- "💾 Image URL saved to state",
233
- total_images=len(context["state"]["images"]),
234
- user_id=user_id
235
- )
236
-
237
- # Return success
238
- total = len(context["state"]["images"])
239
- return ChatResponse(
240
- success=True,
241
- text=f"✅ Image uploaded successfully!\n\nYou now have {total} image(s).\n\nYou can:\n• Add more images\n• Generate preview\n• Continue with next step",
242
- action="image_uploaded",
243
- state=context
244
- )
245
-
246
- # ========== 6. VALIDATE MESSAGE ==========
247
  if not body.message or body.message.strip() == "":
248
  return ChatResponse(
249
  success=False,
250
- text="Please provide a message or upload an image.",
251
  action="error",
252
  error="Empty message"
253
  )
254
 
255
- # ========== 7. PROCESS MESSAGE WITH LLM (INTENT DETECTION + TOOL CALLING) ==========
256
- logger.info("🚀 Processing user message with LLM", user_id=user_id)
257
 
258
  try:
259
- # Process with LLM - includes intent detection + tool calling
260
  reply, tool_result = await process_user_message(
261
  user_message=body.message,
262
  user_id=user_id,
@@ -265,25 +182,23 @@ async def ask_ai(
265
  )
266
 
267
  logger.info(
268
- "LLM processing complete",
269
  tool_called=tool_result.get("tool"),
270
  success=tool_result.get("success")
271
  )
272
 
273
- # Add user message to history
274
  await memory.add_message("user", body.message)
275
-
276
- # Add assistant reply to history
277
  await memory.add_message("assistant", reply)
278
 
279
- # Update context with latest state and activity timestamp
280
  context["last_activity"] = datetime.utcnow().isoformat()
281
  if "tool" in tool_result:
282
  context["last_tool"] = tool_result["tool"]
283
 
284
  await memory.update_context(context)
285
 
286
- # ========== 8. RETURN RESPONSE ==========
287
  return ChatResponse(
288
  success=tool_result.get("success", True),
289
  text=reply,
@@ -293,7 +208,7 @@ async def ask_ai(
293
  )
294
 
295
  except Exception as e:
296
- logger.error("LLM processing error", exc_info=e)
297
 
298
  fallback_reply = "Sorry, I had an error processing your request. Please try again."
299
  await memory.add_message("user", body.message)
@@ -313,14 +228,14 @@ async def ask_ai(
313
  except HTTPException:
314
  raise
315
  except Exception as e:
316
- logger.error("Chat endpoint error", exc_info=e)
317
  raise HTTPException(
318
  status_code=500,
319
  detail=f"Error processing message: {str(e)}"
320
  )
321
 
322
 
323
- # ========== HEALTH CHECK ==========
324
  @router.get("/health")
325
  async def health_check():
326
  """Health check for chat service"""
@@ -330,7 +245,7 @@ async def health_check():
330
  }
331
 
332
 
333
- # ========== GET CHAT HISTORY ==========
334
  @router.get("/history/{session_id}")
335
  async def get_chat_history(
336
  session_id: str,
@@ -349,7 +264,7 @@ async def get_chat_history(
349
  messages = await memory.get_messages()
350
  summary = await memory.get_summary()
351
 
352
- logger.info("📖 Retrieved chat history", user_id=user_id, session_id=session_id)
353
 
354
  return {
355
  "success": True,
@@ -360,11 +275,11 @@ async def get_chat_history(
360
  except HTTPException:
361
  raise
362
  except Exception as e:
363
- logger.error("Failed to get history", exc_info=e)
364
  raise HTTPException(status_code=500, detail="Failed to retrieve history")
365
 
366
 
367
- # ========== CLOSE SESSION ==========
368
  @router.post("/close-session/{session_id}")
369
  async def close_session(
370
  session_id: str,
@@ -383,7 +298,7 @@ async def close_session(
383
  manager = get_memory_manager()
384
  await manager.close_session(user_id, session_id)
385
 
386
- logger.info("Session closed", user_id=user_id, session_id=session_id)
387
 
388
  return {
389
  "success": True,
@@ -393,11 +308,11 @@ async def close_session(
393
  except HTTPException:
394
  raise
395
  except Exception as e:
396
- logger.error("Failed to close session", exc_info=e)
397
  raise HTTPException(status_code=500, detail="Failed to close session")
398
 
399
 
400
- # ========== NEW: START FRESH ENDPOINT ==========
401
  @router.post("/reset-session/{session_id}")
402
  async def reset_session_endpoint(
403
  session_id: str,
@@ -405,7 +320,6 @@ async def reset_session_endpoint(
405
  ):
406
  """
407
  Explicitly reset a session to fresh state.
408
- Useful for when user wants to start over without closing.
409
  """
410
 
411
  try:
@@ -424,7 +338,7 @@ async def reset_session_endpoint(
424
  fresh_context["user_role"] = user_role
425
  await memory.update_context(fresh_context)
426
 
427
- logger.info("Session reset to fresh state", user_id=user_id, session_id=session_id)
428
 
429
  return {
430
  "success": True,
@@ -435,5 +349,5 @@ async def reset_session_endpoint(
435
  except HTTPException:
436
  raise
437
  except Exception as e:
438
- logger.error("Failed to reset session", exc_info=e)
439
  raise HTTPException(status_code=500, detail="Failed to reset session")
 
1
+ # app/ai/routes/chat.py
2
+ # Chat Endpoint - Intent detection & tool routing
3
+ # Image uploads use SEPARATE endpoint /ai/upload-image
4
 
5
  from fastapi import APIRouter, Depends, HTTPException
6
  from fastapi.security import HTTPBearer
7
  from pydantic import BaseModel
8
+ from typing import Optional, Dict, Any
9
  from structlog import get_logger
10
  from datetime import datetime, timedelta
11
 
12
  from app.guards.jwt_guard import decode_access_token
13
  from app.ai.memory.redis_context_memory import get_current_memory
14
  from app.ai.tools.intent_detector_tool import process_user_message
 
 
 
15
  from app.ai.memory.redis_memory import is_rate_limited
16
 
17
  logger = get_logger(__name__)
 
20
  security = HTTPBearer()
21
 
22
 
23
+ # REQUEST/RESPONSE MODELS
24
  class AskBody(BaseModel):
25
  message: str
26
  session_id: Optional[str] = None
27
  thread_id: Optional[str] = None
28
+ start_new_session: Optional[bool] = False
 
 
29
 
30
 
31
  class ChatResponse(BaseModel):
 
38
  error: Optional[str] = None
39
 
40
 
41
+ # HELPER: Clear stale context
42
  async def should_reset_context(context: Dict) -> bool:
43
+ """Determine if context should be reset"""
44
 
45
  if not context or not context.get("last_activity"):
46
  return False
 
51
 
52
  # Reset if idle for more than 30 minutes
53
  if idle_time > timedelta(minutes=30):
54
+ logger.info("Session idle for 30+ minutes, resetting context")
55
  return True
56
 
57
  return False
 
72
  }
73
 
74
 
75
+ # HELPER: Detect if user wants to start fresh
76
  def user_wants_fresh_start(message: str) -> bool:
77
+ """Detect if user is trying to start a new conversation"""
78
 
79
  message_lower = message.lower().strip()
80
  fresh_start_keywords = [
 
91
  ]
92
 
93
  if any(keyword in message_lower for keyword in fresh_start_keywords):
94
+ logger.info("User requested fresh start")
95
  return True
96
 
97
  return False
98
 
99
 
100
+ # MAIN CHAT ENDPOINT
101
  @router.post("/ask", response_model=ChatResponse)
102
  async def ask_ai(
103
  body: AskBody,
 
106
  """
107
  Main chat endpoint - Intent detection & tool routing.
108
 
109
+ IMPORTANT: Images are NOT handled here.
110
+ Use POST /ai/upload-image for image uploads.
111
  """
112
 
113
  try:
114
+ # AUTHENTICATE
115
  payload = decode_access_token(token.credentials)
116
  if not payload:
117
+ logger.warning("Invalid token")
118
  raise HTTPException(status_code=401, detail="Invalid token")
119
 
120
  user_id = payload["user_id"]
121
  user_role = payload.get("role", "renter")
122
 
123
+ # RATE LIMIT CHECK
124
  if await is_rate_limited(user_id):
125
+ logger.warning("Rate limit exceeded", user_id=user_id)
126
  raise HTTPException(status_code=429, detail="Rate limit exceeded")
127
 
128
+ # GET SESSION MEMORY
129
  session_id = body.session_id or "default"
130
  memory = await get_current_memory(user_id, session_id)
131
  context = await memory.get_context()
132
 
133
  logger.info(
134
+ "Chat message received",
135
  user_id=user_id,
136
  session_id=session_id,
137
  status=context.get("status"),
 
138
  message_len=len(body.message) if body.message else 0
139
  )
140
 
141
+ # CHECK IF CONTEXT SHOULD BE RESET
142
  should_reset = (
143
  body.start_new_session or
144
  user_wants_fresh_start(body.message) or
 
146
  )
147
 
148
  if should_reset:
149
+ logger.info("Resetting context", user_id=user_id, session_id=session_id)
150
  context = reset_context()
151
  context["user_role"] = user_role
152
  await memory.update_context(context)
 
 
153
  await memory.clear()
154
 
155
+ response_msg = "Fresh start! What can I help you with? I can help you list a property, search for one, or just chat!"
156
 
157
  return ChatResponse(
158
  success=True,
 
161
  state=context
162
  )
163
 
164
+ # VALIDATE MESSAGE
165
  if not body.message or body.message.strip() == "":
166
  return ChatResponse(
167
  success=False,
168
+ text="Please provide a message.",
169
  action="error",
170
  error="Empty message"
171
  )
172
 
173
+ # PROCESS MESSAGE WITH LLM
174
+ logger.info("Processing user message with LLM", user_id=user_id)
175
 
176
  try:
 
177
  reply, tool_result = await process_user_message(
178
  user_message=body.message,
179
  user_id=user_id,
 
182
  )
183
 
184
  logger.info(
185
+ "LLM processing complete",
186
  tool_called=tool_result.get("tool"),
187
  success=tool_result.get("success")
188
  )
189
 
190
+ # Add messages to history
191
  await memory.add_message("user", body.message)
 
 
192
  await memory.add_message("assistant", reply)
193
 
194
+ # Update context
195
  context["last_activity"] = datetime.utcnow().isoformat()
196
  if "tool" in tool_result:
197
  context["last_tool"] = tool_result["tool"]
198
 
199
  await memory.update_context(context)
200
 
201
+ # RETURN RESPONSE
202
  return ChatResponse(
203
  success=tool_result.get("success", True),
204
  text=reply,
 
208
  )
209
 
210
  except Exception as e:
211
+ logger.error("LLM processing error", exc_info=e)
212
 
213
  fallback_reply = "Sorry, I had an error processing your request. Please try again."
214
  await memory.add_message("user", body.message)
 
228
  except HTTPException:
229
  raise
230
  except Exception as e:
231
+ logger.error("Chat endpoint error", exc_info=e)
232
  raise HTTPException(
233
  status_code=500,
234
  detail=f"Error processing message: {str(e)}"
235
  )
236
 
237
 
238
+ # HEALTH CHECK
239
  @router.get("/health")
240
  async def health_check():
241
  """Health check for chat service"""
 
245
  }
246
 
247
 
248
+ # GET CHAT HISTORY
249
  @router.get("/history/{session_id}")
250
  async def get_chat_history(
251
  session_id: str,
 
264
  messages = await memory.get_messages()
265
  summary = await memory.get_summary()
266
 
267
+ logger.info("Retrieved chat history", user_id=user_id, session_id=session_id)
268
 
269
  return {
270
  "success": True,
 
275
  except HTTPException:
276
  raise
277
  except Exception as e:
278
+ logger.error("Failed to get history", exc_info=e)
279
  raise HTTPException(status_code=500, detail="Failed to retrieve history")
280
 
281
 
282
+ # CLOSE SESSION
283
  @router.post("/close-session/{session_id}")
284
  async def close_session(
285
  session_id: str,
 
298
  manager = get_memory_manager()
299
  await manager.close_session(user_id, session_id)
300
 
301
+ logger.info("Session closed", user_id=user_id, session_id=session_id)
302
 
303
  return {
304
  "success": True,
 
308
  except HTTPException:
309
  raise
310
  except Exception as e:
311
+ logger.error("Failed to close session", exc_info=e)
312
  raise HTTPException(status_code=500, detail="Failed to close session")
313
 
314
 
315
+ # RESET SESSION
316
  @router.post("/reset-session/{session_id}")
317
  async def reset_session_endpoint(
318
  session_id: str,
 
320
  ):
321
  """
322
  Explicitly reset a session to fresh state.
 
323
  """
324
 
325
  try:
 
338
  fresh_context["user_role"] = user_role
339
  await memory.update_context(fresh_context)
340
 
341
+ logger.info("Session reset to fresh state", user_id=user_id, session_id=session_id)
342
 
343
  return {
344
  "success": True,
 
349
  except HTTPException:
350
  raise
351
  except Exception as e:
352
+ logger.error("Failed to reset session", exc_info=e)
353
  raise HTTPException(status_code=500, detail="Failed to reset session")
app/ai/routes/image_upload_routes.py ADDED
@@ -0,0 +1,164 @@
1
+ # app/ai/routes/image_upload_routes.py
2
+ # Dedicated image upload endpoint - handles image uploads to Cloudflare
3
+
4
+ import base64
5
+ from fastapi import APIRouter, Depends, HTTPException
6
+ from fastapi.security import HTTPBearer
7
+ from pydantic import BaseModel
8
+ from typing import Optional
9
+ from structlog import get_logger
10
+ from datetime import datetime
11
+
12
+ from app.guards.jwt_guard import decode_access_token
13
+ from app.ai.memory.redis_context_memory import get_current_memory
14
+ from app.ai.services.image_service import upload_to_cloudflare
15
+
16
+ logger = get_logger(__name__)
17
+
18
+ router = APIRouter()
19
+ security = HTTPBearer()
20
+
21
+
22
+ # REQUEST/RESPONSE MODELS
23
+ class UploadImageBody(BaseModel):
24
+ image_data: str # Base64 encoded image
25
+ mime_type: str # e.g., "image/jpeg"
26
+ session_id: Optional[str] = None
27
+
28
+
29
+ class ImageUploadResponse(BaseModel):
30
+ success: bool
31
+ image_url: Optional[str] = None
32
+ error: Optional[str] = None
33
+ message: Optional[str] = None
34
+
35
+
36
+ # UPLOAD IMAGE ENDPOINT
37
+ @router.post("/upload-image", response_model=ImageUploadResponse)
38
+ async def upload_image(
39
+ body: UploadImageBody,
40
+ token: str = Depends(security),
41
+ ) -> ImageUploadResponse:
42
+ """
43
+ Upload image to Cloudflare.
44
+
45
+ Flow:
46
+ 1. Authenticate user
47
+ 2. Decode base64 image
48
+ 3. Upload to Cloudflare Images API
49
+ 4. Return public URL
50
+ 5. Save URL to user's state (optional)
51
+ """
52
+
53
+ try:
54
+ # AUTHENTICATE
55
+ payload = decode_access_token(token.credentials)
56
+ if not payload:
57
+ logger.warning("Invalid token")
58
+ raise HTTPException(status_code=401, detail="Invalid token")
59
+
60
+ user_id = payload["user_id"]
61
+ session_id = body.session_id or "default"
62
+
63
+ logger.info(
64
+ "Image upload request received",
65
+ user_id=user_id,
66
+ session_id=session_id,
67
+ mime_type=body.mime_type,
68
+ data_len=len(body.image_data)
69
+ )
70
+
71
+ # DECODE BASE64
72
+ try:
73
+ image_bytes = base64.b64decode(body.image_data)
74
+ logger.info(
75
+ "Base64 decoded",
76
+ size_bytes=len(image_bytes),
77
+ size_mb=len(image_bytes) / (1024 * 1024)
78
+ )
79
+ except Exception as e:
80
+ logger.error("Failed to decode base64", exc_info=e)
81
+ return ImageUploadResponse(
82
+ success=False,
83
+ error="Invalid base64 data"
84
+ )
85
+
86
+ # VALIDATE IMAGE SIZE
87
+ max_size = 10 * 1024 * 1024 # 10 MB
88
+ if len(image_bytes) > max_size:
89
+ logger.warning(f"Image too large: {len(image_bytes)} bytes")
90
+ return ImageUploadResponse(
91
+ success=False,
92
+ error="Image too large (max 10 MB)"
93
+ )
94
+
95
+ # UPLOAD TO CLOUDFLARE
96
+ logger.info("Uploading to Cloudflare...")
97
+
98
+ success, image_url, error = await upload_to_cloudflare(
99
+ image_bytes=image_bytes,
100
+ mime_type=body.mime_type,
101
+ filename=f"property_{user_id}_{session_id}"
102
+ )
103
+
104
+ if not success:
105
+ logger.error("Cloudflare upload failed", error=error)
106
+ return ImageUploadResponse(
107
+ success=False,
108
+ error=f"Upload failed: {error}"
109
+ )
110
+
111
+ logger.info("Image uploaded to Cloudflare", url=image_url[:50] + "...")
112
+
113
+ # SAVE URL TO STATE (Optional)
114
+ try:
115
+ memory = await get_current_memory(user_id, session_id)
116
+ context = await memory.get_context()
117
+
118
+ # Add image URL to state
119
+ if "state" not in context:
120
+ context["state"] = {}
121
+ if "images" not in context["state"]:
122
+ context["state"]["images"] = []
123
+
124
+ context["state"]["images"].append(image_url)
125
+
126
+ # Update context
127
+ context["last_activity"] = datetime.utcnow().isoformat()
128
+ await memory.update_context(context)
129
+
130
+ logger.info(
131
+ "Image URL saved to state",
132
+ total_images=len(context["state"]["images"]),
133
+ user_id=user_id
134
+ )
135
+ except Exception as e:
136
+ logger.warning(f"Failed to save URL to state: {e}")
137
+
138
+ # RETURN SUCCESS
139
+ logger.info("Image upload complete", user_id=user_id)
140
+
141
+ return ImageUploadResponse(
142
+ success=True,
143
+ image_url=image_url,
144
+ message="Image uploaded successfully!"
145
+ )
146
+
147
+ except HTTPException:
148
+ raise
149
+ except Exception as e:
150
+ logger.error("Image upload endpoint error", exc_info=e)
151
+ return ImageUploadResponse(
152
+ success=False,
153
+ error=f"Server error: {str(e)}"
154
+ )
155
+
156
+
157
+ # HEALTH CHECK
158
+ @router.get("/upload-image/health")
159
+ async def image_upload_health():
160
+ """Health check for image upload service"""
161
+ return {
162
+ "status": "healthy",
163
+ "service": "Image Upload",
164
+ }
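
A matching client-side sketch for the new endpoint: read a file, base64-encode it, and POST it as UploadImageBody; the returned image_url is also appended to context["state"]["images"] on the server. BASE_URL, TOKEN, and the file path are placeholders.

import base64

import httpx

BASE_URL = "http://localhost:8000"  # placeholder
TOKEN = "<jwt-access-token>"        # issued by the auth routes


def upload_image(path: str, mime_type: str = "image/jpeg", session_id: str = "default") -> str:
    with open(path, "rb") as f:
        encoded = base64.b64encode(f.read()).decode("ascii")  # decoded size must stay under the 10 MB limit
    resp = httpx.post(
        f"{BASE_URL}/ai/upload-image",
        headers={"Authorization": f"Bearer {TOKEN}"},
        json={"image_data": encoded, "mime_type": mime_type, "session_id": session_id},
        timeout=60.0,
    )
    resp.raise_for_status()
    body = resp.json()  # ImageUploadResponse: success, image_url, error, message
    if not body["success"]:
        raise RuntimeError(body.get("error") or "upload failed")
    return body["image_url"]
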
main.py CHANGED
@@ -1,7 +1,5 @@
1
- # app/main.py - FastAPI Application (FIXED - No circular imports)
2
- """
3
- Lojiz Platform with Aida AI - Modular Architecture
4
- """
5
 
6
  from fastapi import FastAPI, Request
7
  from fastapi.middleware.cors import CORSMiddleware
@@ -14,13 +12,13 @@ import os
14
  logging.basicConfig(level=logging.INFO)
15
  logger = logging.getLogger(__name__)
16
 
17
- # ========== CORE IMPORTS ==========
18
  try:
19
  from app.config import settings
20
  from app.database import connect_db, disconnect_db, ensure_indexes as ensure_auth_indexes
21
  from app.routes import auth
22
  except ImportError as e:
23
- logger.error(f"Core import error: {e}")
24
  raise
25
 
26
  try:
@@ -28,8 +26,9 @@ try:
28
  except ImportError:
29
  AuthException = Exception
30
 
31
- # ========== AI IMPORTS (DIRECT - NO CIRCULAR DEPENDENCY) ==========
32
  from app.ai.routes.chat import router as ai_chat_router
 
33
  from app.ai.config import (
34
  validate_ai_startup,
35
  check_redis_health,
@@ -43,104 +42,104 @@ from app.models.listing import ensure_listing_indexes
43
 
44
  logger = logging.getLogger(__name__)
45
 
46
- # ========== ENVIRONMENT ==========
47
  environment = os.getenv("ENVIRONMENT", "development")
48
  is_production = environment == "production"
49
 
50
- # ========== LIFESPAN ==========
51
  @asynccontextmanager
52
  async def lifespan(app: FastAPI):
53
  """Application lifespan - startup and shutdown"""
54
 
55
  logger.info("=" * 70)
56
- logger.info("🚀 Starting Lojiz Platform + Aida AI (Modular Architecture)")
57
  logger.info("=" * 70)
58
 
59
- # ========== STARTUP ==========
60
  try:
61
- logger.info("📦 Connecting to MongoDB...")
62
  await connect_db()
63
  await ensure_auth_indexes()
64
  await ensure_listing_indexes()
65
- logger.info("MongoDB connected & indexed")
66
  except Exception as e:
67
- logger.critical(f"MongoDB connection failed - aborting startup: {e}")
68
  raise
69
 
70
  try:
71
- logger.info("🔴 Connecting to Redis...")
72
  if redis_client:
73
  await redis_client.ping()
74
- logger.info("Redis connected")
75
  else:
76
- logger.warning("⚠️ Redis not available (optional)")
77
  except Exception as e:
78
- logger.warning(f"⚠️ Redis connection failed (continuing without): {e}")
79
 
80
  try:
81
- logger.info("🟦 Connecting to Qdrant...")
82
  if qdrant_client:
83
  await qdrant_client.get_collections()
84
- logger.info("Qdrant connected")
85
  else:
86
- logger.warning("⚠️ Qdrant not available (optional)")
87
  except Exception as e:
88
- logger.warning(f"⚠️ Qdrant connection failed (continuing without): {e}")
89
 
90
  try:
91
- logger.info("🧠 Validating AI components...")
92
  ai_checks = await validate_ai_startup()
93
- logger.info("AI components validated")
94
  except Exception as e:
95
- logger.warning(f"⚠️ AI validation failed: {e}")
96
 
97
  try:
98
- logger.info("🤖 Initializing ML Extractor...")
99
  ml = get_ml_extractor()
100
- logger.info("ML Extractor ready")
101
  except Exception as e:
102
- logger.warning(f"⚠️ ML Extractor initialization failed: {e}")
103
 
104
  try:
105
- logger.info("💾 Initializing Memory Manager...")
106
  manager = get_memory_manager()
107
- logger.info("Memory Manager ready")
108
  except Exception as e:
109
- logger.warning(f"⚠️ Memory Manager initialization failed: {e}")
110
 
111
  logger.info("=" * 70)
112
- logger.info("APPLICATION READY - All systems operational!")
113
  logger.info("=" * 70)
114
 
115
  yield
116
 
117
- # ========== SHUTDOWN ==========
118
  logger.info("=" * 70)
119
- logger.info("🛑 Shutting down Lojiz Platform + Aida AI")
120
  logger.info("=" * 70)
121
 
122
  try:
123
  try:
124
  ml = get_ml_extractor()
125
  ml.currency_mgr.clear_cache()
126
- logger.info("ML caches cleared")
127
  except:
128
  pass
129
 
130
  from app.database import disconnect_db
131
  await disconnect_db()
132
- logger.info("MongoDB disconnected")
133
 
134
  if redis_client:
135
  await redis_client.close()
136
- logger.info("Redis closed")
137
 
138
- logger.info("Shutdown complete")
139
  except Exception as e:
140
- logger.warning(f"⚠️ Shutdown warning: {e}")
141
 
142
 
143
- # ========== FASTAPI SETUP ==========
144
  app = FastAPI(
145
  title="Lojiz Platform + Aida AI",
146
  description="Real-estate platform with conversational AI assistant",
@@ -148,7 +147,7 @@ app = FastAPI(
148
  lifespan=lifespan,
149
  )
150
 
151
- # ========== CORS ==========
152
  cors_origins = [
153
  "https://lojiz.onrender.com",
154
  "https://lojiz.com",
@@ -173,10 +172,10 @@ app.add_middleware(
173
  max_age=600,
174
  )
175
 
176
- # ========== EXCEPTION HANDLERS ==========
177
  @app.exception_handler(RequestValidationError)
178
  async def validation_exception_handler(request: Request, exc: RequestValidationError):
179
- logger.error(f"Validation error: {exc}")
180
  errors = []
181
  for error in exc.errors():
182
  field = ".".join(str(loc) for loc in error["loc"][1:])
@@ -194,23 +193,24 @@ async def validation_exception_handler(request: Request, exc: RequestValidationE
194
 
195
  @app.exception_handler(AuthException)
196
  async def auth_exception_handler(request: Request, exc: AuthException): # type: ignore
197
- logger.warning(f"🔐 Auth error [{exc.error_code}]: {exc.message}")
198
  response = {"success": False, "message": exc.message, "error_code": exc.error_code}
199
  if exc.data:
200
  response["data"] = exc.data
201
  return JSONResponse(status_code=exc.status_code, content=response)
202
 
203
 
204
- # ========== ROUTERS ==========
205
- logger.info("📡 Registering routers...")
206
 
207
  app.include_router(auth.router, prefix="/api/auth", tags=["Authentication"])
208
  app.include_router(ai_chat_router, prefix="/ai", tags=["Aida AI Chat"])
 
209
 
210
- logger.info("All routers registered")
211
 
212
 
213
- # ========== ENDPOINTS ==========
214
 
215
  @app.get("/health", tags=["Health"])
216
  async def health_check():
@@ -252,7 +252,7 @@ async def health_check():
252
  }
253
  }
254
  except Exception as e:
255
- logger.error(f"Health check failed: {e}")
256
  return {
257
  "status": "unhealthy",
258
  "error": str(e),
@@ -278,16 +278,8 @@ async def options_handler(full_path: str):
278
  return JSONResponse(status_code=200, content={})
279
 
280
 
281
- # ========== RUN ==========
282
- """
283
- To run this application:
284
-
285
- Development:
286
- uvicorn app.main:app --reload
287
-
288
- Production:
289
- gunicorn -w 4 -k uvicorn.workers.UvicornWorker app.main:app
290
-
291
- HF Spaces:
292
- python app.py
293
- """
 
1
+ # app/main.py
2
+ # Lojiz Platform with Aida AI - Modular Architecture
 
 
3
 
4
  from fastapi import FastAPI, Request
5
  from fastapi.middleware.cors import CORSMiddleware
 
12
  logging.basicConfig(level=logging.INFO)
13
  logger = logging.getLogger(__name__)
14
 
15
+ # CORE IMPORTS
16
  try:
17
  from app.config import settings
18
  from app.database import connect_db, disconnect_db, ensure_indexes as ensure_auth_indexes
19
  from app.routes import auth
20
  except ImportError as e:
21
+ logger.error(f"Core import error: {e}")
22
  raise
23
 
24
  try:
 
26
  except ImportError:
27
  AuthException = Exception
28
 
29
+ # AI IMPORTS
30
  from app.ai.routes.chat import router as ai_chat_router
31
+ from app.ai.routes.image_upload_routes import router as image_upload_router
32
  from app.ai.config import (
33
  validate_ai_startup,
34
  check_redis_health,
 
42
 
43
  logger = logging.getLogger(__name__)
44
 
45
+ # ENVIRONMENT
46
  environment = os.getenv("ENVIRONMENT", "development")
47
  is_production = environment == "production"
48
 
49
+ # LIFESPAN
50
  @asynccontextmanager
51
  async def lifespan(app: FastAPI):
52
  """Application lifespan - startup and shutdown"""
53
 
54
  logger.info("=" * 70)
55
+ logger.info("Starting Lojiz Platform + Aida AI (Modular Architecture)")
56
  logger.info("=" * 70)
57
 
58
+ # STARTUP
59
  try:
60
+ logger.info("Connecting to MongoDB...")
61
  await connect_db()
62
  await ensure_auth_indexes()
63
  await ensure_listing_indexes()
64
+ logger.info("MongoDB connected and indexed")
65
  except Exception as e:
66
+ logger.critical(f"MongoDB connection failed - aborting startup: {e}")
67
  raise
68
 
69
  try:
70
+ logger.info("Connecting to Redis...")
71
  if redis_client:
72
  await redis_client.ping()
73
+ logger.info("Redis connected")
74
  else:
75
+ logger.warning("Redis not available (optional)")
76
  except Exception as e:
77
+ logger.warning(f"Redis connection failed (continuing without): {e}")
78
 
79
  try:
80
+ logger.info("Connecting to Qdrant...")
81
  if qdrant_client:
82
  await qdrant_client.get_collections()
83
+ logger.info("Qdrant connected")
84
  else:
85
+ logger.warning("Qdrant not available (optional)")
86
  except Exception as e:
87
+ logger.warning(f"Qdrant connection failed (continuing without): {e}")
88
 
89
  try:
90
+ logger.info("Validating AI components...")
91
  ai_checks = await validate_ai_startup()
92
+ logger.info("AI components validated")
93
  except Exception as e:
94
+ logger.warning(f"AI validation failed: {e}")
95
 
96
  try:
97
+ logger.info("Initializing ML Extractor...")
98
  ml = get_ml_extractor()
99
+ logger.info("ML Extractor ready")
100
  except Exception as e:
101
+ logger.warning(f"ML Extractor initialization failed: {e}")
102
 
103
  try:
104
+ logger.info("Initializing Memory Manager...")
105
  manager = get_memory_manager()
106
+ logger.info("Memory Manager ready")
107
  except Exception as e:
108
+ logger.warning(f"Memory Manager initialization failed: {e}")
109
 
110
  logger.info("=" * 70)
111
+ logger.info("APPLICATION READY - All systems operational!")
112
  logger.info("=" * 70)
113
 
114
  yield
115
 
116
+ # SHUTDOWN
117
  logger.info("=" * 70)
118
+ logger.info("Shutting down Lojiz Platform + Aida AI")
119
  logger.info("=" * 70)
120
 
121
  try:
122
  try:
123
  ml = get_ml_extractor()
124
  ml.currency_mgr.clear_cache()
125
+ logger.info("ML caches cleared")
126
  except:
127
  pass
128
 
129
  from app.database import disconnect_db
130
  await disconnect_db()
131
+ logger.info("MongoDB disconnected")
132
 
133
  if redis_client:
134
  await redis_client.close()
135
+ logger.info("Redis closed")
136
 
137
+ logger.info("Shutdown complete")
138
  except Exception as e:
139
+ logger.warning(f"Shutdown warning: {e}")
140
 
141
 
142
+ # FASTAPI SETUP
143
  app = FastAPI(
144
  title="Lojiz Platform + Aida AI",
145
  description="Real-estate platform with conversational AI assistant",
 
147
  lifespan=lifespan,
148
  )
149
 
150
+ # CORS
151
  cors_origins = [
152
  "https://lojiz.onrender.com",
153
  "https://lojiz.com",
 
172
  max_age=600,
173
  )
174
 
175
+ # EXCEPTION HANDLERS
176
  @app.exception_handler(RequestValidationError)
177
  async def validation_exception_handler(request: Request, exc: RequestValidationError):
178
+ logger.error(f"Validation error: {exc}")
179
  errors = []
180
  for error in exc.errors():
181
  field = ".".join(str(loc) for loc in error["loc"][1:])
 
193
 
194
  @app.exception_handler(AuthException)
195
  async def auth_exception_handler(request: Request, exc: AuthException): # type: ignore
196
+ logger.warning(f"Auth error [{exc.error_code}]: {exc.message}")
197
  response = {"success": False, "message": exc.message, "error_code": exc.error_code}
198
  if exc.data:
199
  response["data"] = exc.data
200
  return JSONResponse(status_code=exc.status_code, content=response)
201
 
202
 
203
+ # ROUTERS
204
+ logger.info("Registering routers...")
205
 
206
  app.include_router(auth.router, prefix="/api/auth", tags=["Authentication"])
207
  app.include_router(ai_chat_router, prefix="/ai", tags=["Aida AI Chat"])
208
+ app.include_router(image_upload_router, prefix="/ai", tags=["Image Upload"])
209
 
210
+ logger.info("All routers registered")
211
 
212
 
213
+ # ENDPOINTS
214
 
215
  @app.get("/health", tags=["Health"])
216
  async def health_check():
 
252
  }
253
  }
254
  except Exception as e:
255
+ logger.error(f"Health check failed: {e}")
256
  return {
257
  "status": "unhealthy",
258
  "error": str(e),
 
278
  return JSONResponse(status_code=200, content={})
279
 
280
 
281
+ # RUN
282
+ # To run this application:
283
+ # Development: uvicorn app.main:app --reload
284
+ # Production: gunicorn -w 4 -k uvicorn.workers.UvicornWorker app.main:app
285
+ # HF Spaces: python app.py
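
Once the app is up, the registered routers can be smoke-tested against their health endpoints. A small sketch, assuming the server is reachable on localhost:8000; the paths follow the prefixes registered above (/health from main.py, /ai/health from the chat router, /ai/upload-image/health from the image upload router).

import httpx

for path in ("/health", "/ai/health", "/ai/upload-image/health"):
    r = httpx.get(f"http://localhost:8000{path}")
    print(path, r.status_code, r.json())
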