This is page 2 of 2. Use http://codebase.md/jotaderodriguez/bonsai_mcp?lines=true&page={x} to view the full context.

# Directory Structure

```
├── .gitignore
├── addon.py
├── bc3_writer.py
├── dockerfile
├── LICENSE.md
├── pyproject.toml
├── README.md
├── resources
│   ├── bc3_helper_files
│   │   ├── element_categories.json
│   │   ├── precios_unitarios.json
│   │   ├── spatial_labels_en.json
│   │   ├── spatial_labels_es.json
│   │   └── unit_prices.json
│   └── table_of_contents.json
├── tools.py
└── uv.lock
```

# Files

--------------------------------------------------------------------------------
/addon.py:
--------------------------------------------------------------------------------

```python
   1 | import bpy
   2 | import mathutils
   3 | import json
   4 | import threading
   5 | import socket
   6 | import time
   7 | import requests
   8 | import tempfile
   9 | import traceback
  10 | import os
  11 | import shutil
  12 | from bpy.props import StringProperty, IntProperty, BoolProperty, EnumProperty
  13 | import base64
  14 | 
  15 | 
  16 | 
  17 | import ifcopenshell.util.element  # also binds ifcopenshell; util.element provides get_psets/get_container/get_type used below
  18 | from bonsai.bim.ifc import IfcStore
  19 | 
  20 | bl_info = {
  21 |     "name": "Bonsai MCP",
  22 |     "author": "JotaDeRodriguez",
  23 |     "version": (0, 2),
  24 |     "blender": (3, 0, 0),
  25 |     "location": "View3D > Sidebar > Bonsai MCP",
  26 |     "description": "Connect Claude to Blender via MCP. Aimed at IFC projects",
  27 |     "category": "Interface",
  28 | }
  29 | 
  30 | 
  31 | class BlenderMCPServer:
  32 |     def __init__(self, host='localhost', port=9876):
  33 |         self.host = host
  34 |         self.port = port
  35 |         self.running = False
  36 |         self.socket = None
  37 |         self.server_thread = None
  38 |     
  39 |     def start(self):
  40 |         if self.running:
  41 |             print("Server is already running")
  42 |             return
  43 |             
  44 |         self.running = True
  45 |         
  46 |         try:
  47 |             # Create socket
  48 |             self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
  49 |             self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
  50 |             self.socket.bind((self.host, self.port))
  51 |             self.socket.listen(1)
  52 |             
  53 |             # Start server thread
  54 |             self.server_thread = threading.Thread(target=self._server_loop)
  55 |             self.server_thread.daemon = True
  56 |             self.server_thread.start()
  57 |             
  58 |             print(f"BlenderMCP server started on {self.host}:{self.port}")
  59 |         except Exception as e:
  60 |             print(f"Failed to start server: {str(e)}")
  61 |             self.stop()
  62 |             
  63 |     def stop(self):
  64 |         self.running = False
  65 |         
  66 |         # Close socket
  67 |         if self.socket:
  68 |             try:
  69 |                 self.socket.close()
  70 |             except:
  71 |                 pass
  72 |             self.socket = None
  73 |         
  74 |         # Wait for thread to finish
  75 |         if self.server_thread:
  76 |             try:
  77 |                 if self.server_thread.is_alive():
  78 |                     self.server_thread.join(timeout=1.0)
  79 |             except:
  80 |                 pass
  81 |             self.server_thread = None
  82 |         
  83 |         print("BlenderMCP server stopped")
  84 |     
  85 |     def _server_loop(self):
  86 |         """Main server loop in a separate thread"""
  87 |         print("Server thread started")
  88 |         self.socket.settimeout(1.0)  # Timeout to allow for stopping
  89 |         
  90 |         while self.running:
  91 |             try:
  92 |                 # Accept new connection
  93 |                 try:
  94 |                     client, address = self.socket.accept()
  95 |                     print(f"Connected to client: {address}")
  96 |                     
  97 |                     # Handle client in a separate thread
  98 |                     client_thread = threading.Thread(
  99 |                         target=self._handle_client,
 100 |                         args=(client,)
 101 |                     )
 102 |                     client_thread.daemon = True
 103 |                     client_thread.start()
 104 |                 except socket.timeout:
 105 |                     # Just check running condition
 106 |                     continue
 107 |                 except Exception as e:
 108 |                     print(f"Error accepting connection: {str(e)}")
 109 |                     time.sleep(0.5)
 110 |             except Exception as e:
 111 |                 print(f"Error in server loop: {str(e)}")
 112 |                 if not self.running:
 113 |                     break
 114 |                 time.sleep(0.5)
 115 |         
 116 |         print("Server thread stopped")
 117 |     
 118 |     def _handle_client(self, client):
 119 |         """Handle connected client"""
 120 |         print("Client handler started")
 121 |         client.settimeout(None)  # No timeout
 122 |         buffer = b''
 123 |         
 124 |         try:
 125 |             while self.running:
 126 |                 # Receive data
 127 |                 try:
 128 |                     data = client.recv(8192)
 129 |                     if not data:
 130 |                         print("Client disconnected")
 131 |                         break
 132 |                     
 133 |                     buffer += data
 134 |                     try:
 135 |                         # Try to parse command
 136 |                         command = json.loads(buffer.decode('utf-8'))
 137 |                         buffer = b''
 138 |                         
 139 |                         # Execute command in Blender's main thread
 140 |                         def execute_wrapper():
 141 |                             try:
 142 |                                 response = self.execute_command(command)
 143 |                                 response_json = json.dumps(response)
 144 |                                 try:
 145 |                                     client.sendall(response_json.encode('utf-8'))
 146 |                                 except:
 147 |                                     print("Failed to send response - client disconnected")
 148 |                             except Exception as e:
 149 |                                 print(f"Error executing command: {str(e)}")
 150 |                                 traceback.print_exc()
 151 |                                 try:
 152 |                                     error_response = {
 153 |                                         "status": "error",
 154 |                                         "message": str(e)
 155 |                                     }
 156 |                                     client.sendall(json.dumps(error_response).encode('utf-8'))
 157 |                                 except:
 158 |                                     pass
 159 |                             return None
 160 |                         
 161 |                         # Schedule execution in main thread
 162 |                         bpy.app.timers.register(execute_wrapper, first_interval=0.0)
 163 |                     except json.JSONDecodeError:
 164 |                         # Incomplete data, wait for more
 165 |                         pass
 166 |                 except Exception as e:
 167 |                     print(f"Error receiving data: {str(e)}")
 168 |                     break
 169 |         except Exception as e:
 170 |             print(f"Error in client handler: {str(e)}")
 171 |         finally:
 172 |             try:
 173 |                 client.close()
 174 |             except:
 175 |                 pass
 176 |             print("Client handler stopped")
 177 | 
 178 |     def execute_command(self, command):
 179 |         """Execute a command in the main Blender thread"""
 180 |         try:
 181 |             cmd_type = command.get("type")
 182 |             params = command.get("params", {})
 183 |             
 184 |             # Ensure we're in the right context
 185 |             if cmd_type in ["create_object", "modify_object", "delete_object"]:
 186 |                 override = bpy.context.copy()
 187 |                 override['area'] = [area for area in bpy.context.screen.areas if area.type == 'VIEW_3D'][0]
 188 |                 with bpy.context.temp_override(**override):
 189 |                     return self._execute_command_internal(command)
 190 |             else:
 191 |                 return self._execute_command_internal(command)
 192 |                 
 193 |         except Exception as e:
 194 |             print(f"Error executing command: {str(e)}")
 195 |             traceback.print_exc()
 196 |             return {"status": "error", "message": str(e)}
 197 | 
 198 |     def _execute_command_internal(self, command):
 199 |         """Internal command execution with proper context"""
 200 |         cmd_type = command.get("type")
 201 |         params = command.get("params", {})
 202 | 
 203 |         
 204 |         # Base handlers that are always available
 205 |         handlers = {
 206 |             "execute_code": self.execute_code,
 207 |             "get_ifc_project_info": self.get_ifc_project_info,
 208 |             "list_ifc_entities": self.list_ifc_entities,
 209 |             "get_ifc_properties": self.get_ifc_properties,
 210 |             "get_ifc_spatial_structure": self.get_ifc_spatial_structure,
 211 |             "get_ifc_total_structure": self.get_ifc_total_structure,
 212 |             "get_ifc_relationships": self.get_ifc_relationships,
 213 |             "get_selected_ifc_entities": self.get_selected_ifc_entities,
 214 |             "get_current_view": self.get_current_view,
 215 |             "export_ifc_data": self.export_ifc_data,
 216 |             "place_ifc_object": self.place_ifc_object,
 217 |             "get_ifc_quantities": self.get_ifc_quantities,
 218 |             "export_drawing_png": self.export_drawing_png,
 219 |             "get_ifc_georeferencing_info": self.get_ifc_georeferencing_info,
 220 |             "georeference_ifc_model": self.georeference_ifc_model,
 221 |             "generate_ids": self.generate_ids,
 222 |         }
 223 |         
 224 | 
 225 |         handler = handlers.get(cmd_type)
 226 |         if handler:
 227 |             try:
 228 |                 print(f"Executing handler for {cmd_type}")
 229 |                 result = handler(**params)
 230 |                 print(f"Handler execution complete")
 231 |                 return {"status": "success", "result": result}
 232 |             except Exception as e:
 233 |                 print(f"Error in handler: {str(e)}")
 234 |                 traceback.print_exc()
 235 |                 return {"status": "error", "message": str(e)}
 236 |         else:
 237 |             return {"status": "error", "message": f"Unknown command type: {cmd_type}"}
 238 | 
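# --- Usage sketch (annotation, not part of addon.py) ---------------------------------------
# Any handler above can be driven by sending one JSON object per request over the TCP socket
# this add-on opens (localhost:9876 by default). A minimal client, assuming the server is
# running inside Blender, might look like this; very large responses may need a recv() loop:
#
#     import json, socket
#
#     with socket.create_connection(("localhost", 9876)) as s:
#         s.sendall(json.dumps({"type": "get_ifc_project_info", "params": {}}).encode("utf-8"))
#         print(json.loads(s.recv(64 * 1024).decode("utf-8")))  # {"status": "success", "result": {...}}
# --------------------------------------------------------------------------------------------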
 239 |     
 240 |     def execute_code(self, code):
 241 |         """Execute arbitrary Blender Python code"""
 242 |         # This is powerful but potentially dangerous - use with caution
 243 |         try:
 244 |             # Create a local namespace for execution
 245 |             namespace = {"bpy": bpy}
 246 |             exec(code, namespace)
 247 |             return {"executed": True}
 248 |         except Exception as e:
 249 |             raise Exception(f"Code execution error: {str(e)}")
 250 |         
 251 | 
 252 |     @staticmethod
 253 |     def get_selected_ifc_entities():
 254 |         """
 255 |         Get the IFC entities corresponding to the currently selected Blender objects.
 256 |         
 257 |         Returns:
 258 |             List of IFC entities for the selected objects
 259 |         """
 260 |         try:
 261 |             file = IfcStore.get_file()
 262 |             if file is None:
 263 |                 return {"error": "No IFC file is currently loaded"}
 264 |             
 265 |             # Get currently selected objects
 266 |             selected_objects = bpy.context.selected_objects
 267 |             if not selected_objects:
 268 |                 return {"selected_count": 0, "message": "No objects selected in Blender"}
 269 |             
 270 |             # Collect IFC entities from selected objects
 271 |             selected_entities = []
 272 |             for obj in selected_objects:
 273 |                 if hasattr(obj, "BIMObjectProperties") and obj.BIMObjectProperties.ifc_definition_id:
 274 |                     entity_id = obj.BIMObjectProperties.ifc_definition_id
 275 |                     entity = file.by_id(entity_id)
 276 |                     if entity:
 277 |                         entity_info = {
 278 |                             "id": entity.GlobalId if hasattr(entity, "GlobalId") else f"Entity_{entity.id()}",
 279 |                             "ifc_id": entity.id(),
 280 |                             "type": entity.is_a(),
 281 |                             "name": entity.Name if hasattr(entity, "Name") else None,
 282 |                             "blender_name": obj.name
 283 |                         }
 284 |                         selected_entities.append(entity_info)
 285 |             
 286 |             return {
 287 |                 "selected_count": len(selected_entities),
 288 |                 "selected_entities": selected_entities
 289 |             }
 290 |         except Exception as e:
 291 |             import traceback
 292 |             return {"error": str(e), "traceback": traceback.format_exc()}
 293 |         
 294 |     ### SPECIFIC IFC METHODS ###
 295 |         
 296 |     @staticmethod
 297 |     def get_ifc_project_info():
 298 |         """
 299 |         Get basic information about the IFC project.
 300 |         
 301 |         Returns:
 302 |             Dictionary with project name, description, and basic metrics
 303 |         """
 304 |         try:
 305 |             file = IfcStore.get_file()
 306 |             if file is None:
 307 |                 return {"error": "No IFC file is currently loaded"}
 308 |             
 309 |             # Get project information
 310 |             projects = file.by_type("IfcProject")
 311 |             if not projects:
 312 |                 return {"error": "No IfcProject found in the model"}
 313 |             
 314 |             project = projects[0]
 315 |             
 316 |             # Basic project info
 317 |             info = {
 318 |                 "id": project.GlobalId,
 319 |                 "name": project.Name if hasattr(project, "Name") else "Unnamed Project",
 320 |                 "description": project.Description if hasattr(project, "Description") else None,
 321 |                 "entity_counts": {}
 322 |             }
 323 |             
 324 |             # Count entities by type
 325 |             entity_types = ["IfcWall", "IfcDoor", "IfcWindow", "IfcSlab", "IfcBeam", "IfcColumn", "IfcSpace", "IfcBuildingStorey"]
 326 |             for entity_type in entity_types:
 327 |                 entities = file.by_type(entity_type)
 328 |                 info["entity_counts"][entity_type] = len(entities)
 329 |             
 330 |             return info
 331 |         except Exception as e:
 332 |             import traceback
 333 |             return {"error": str(e), "traceback": traceback.format_exc()}
 334 | 
 335 |     @staticmethod
 336 |     def list_ifc_entities(entity_type=None, limit=50, selected_only=False):
 337 |         """
 338 |         List IFC entities of a specific type.
 339 |         
 340 |         Parameters:
 341 |             entity_type: Type of IFC entity to list (e.g., "IfcWall")
 342 |             limit: Maximum number of entities to return
 343 |         
 344 |         Returns:
 345 |             List of entities with basic properties
 346 |         """
 347 |         try:
 348 |             file = IfcStore.get_file()
 349 |             if file is None:
 350 |                 return {"error": "No IFC file is currently loaded"}
 351 |             
 352 |             # If we're only looking at selected objects
 353 |             if selected_only:
 354 |                 selected_result = BlenderMCPServer.get_selected_ifc_entities()
 355 |                 
 356 |                 # Check for errors
 357 |                 if "error" in selected_result:
 358 |                     return selected_result
 359 |                     
 360 |                 # If no objects are selected, return early
 361 |                 if selected_result["selected_count"] == 0:
 362 |                     return selected_result
 363 |                     
 364 |                 # If entity_type is specified, filter the selected entities
 365 |                 if entity_type:
 366 |                     filtered_entities = [
 367 |                         entity for entity in selected_result["selected_entities"]
 368 |                         if entity["type"] == entity_type
 369 |                     ]
 370 |                     
 371 |                     return {
 372 |                         "type": entity_type,
 373 |                         "selected_count": len(filtered_entities),
 374 |                         "entities": filtered_entities[:limit]
 375 |                     }
 376 |                 else:
 377 |                     # Group selected entities by type
 378 |                     entity_types = {}
 379 |                     for entity in selected_result["selected_entities"]:
 380 |                         entity_type = entity["type"]
 381 |                         if entity_type in entity_types:
 382 |                             entity_types[entity_type].append(entity)
 383 |                         else:
 384 |                             entity_types[entity_type] = [entity]
 385 |                     
 386 |                     return {
 387 |                         "selected_count": selected_result["selected_count"],
 388 |                         "entity_types": [
 389 |                             {"type": t, "count": len(entities), "entities": entities[:limit]}
 390 |                             for t, entities in entity_types.items()
 391 |                         ]
 392 |                     }
 393 |             
 394 |             # Original functionality for non-selected mode
 395 |             if not entity_type:
 396 |                 # If no type specified, list available entity types
 397 |                 entity_types = {}
 398 |                 for entity in file.wrapped_data.entities:
 399 |                     entity_type = entity.is_a()
 400 |                     if entity_type in entity_types:
 401 |                         entity_types[entity_type] += 1
 402 |                     else:
 403 |                         entity_types[entity_type] = 1
 404 |                 
 405 |                 return {
 406 |                     "available_types": [{"type": k, "count": v} for k, v in entity_types.items()]
 407 |                 }
 408 |             
 409 |             # Get entities of the specified type
 410 |             entities = file.by_type(entity_type)
 411 |             
 412 |             # Prepare the result
 413 |             result = {
 414 |                 "type": entity_type,
 415 |                 "total_count": len(entities),
 416 |                 "entities": []
 417 |             }
 418 |             
 419 |             # Add entity data (limited)
 420 |             for i, entity in enumerate(entities):
 421 |                 if i >= limit:
 422 |                     break
 423 |                     
 424 |                 entity_data = {
 425 |                     "id": entity.GlobalId if hasattr(entity, "GlobalId") else f"Entity_{entity.id()}",
 426 |                     "name": entity.Name if hasattr(entity, "Name") else None
 427 |                 }
 428 |                 
 429 |                 result["entities"].append(entity_data)
 430 |             
 431 |             return result
 432 |         except Exception as e:
 433 |             import traceback
 434 |             return {"error": str(e), "traceback": traceback.format_exc()}
 435 | 
 436 |     @staticmethod
 437 |     def get_ifc_properties(global_id=None, selected_only=False):
 438 |         """
 439 |         Get all properties of a specific IFC entity.
 440 |         
 441 |         Parameters:
 442 |             global_id: GlobalId of the IFC entity
 443 |         
 444 |         Returns:
 445 |             Dictionary with entity information and properties
 446 |         """
 447 |         try:
 448 |             file = IfcStore.get_file()
 449 |             if file is None:
 450 |                 return {"error": "No IFC file is currently loaded"}
 451 |             
 452 |             # If we're only looking at selected objects
 453 |             if selected_only:
 454 |                 selected_result = BlenderMCPServer.get_selected_ifc_entities()
 455 |                 
 456 |                 # Check for errors
 457 |                 if "error" in selected_result:
 458 |                     return selected_result
 459 |                 
 460 |                 # If no objects are selected, return early
 461 |                 if selected_result["selected_count"] == 0:
 462 |                     return selected_result
 463 |                 
 464 |                 # Process each selected entity
 465 |                 result = {
 466 |                     "selected_count": selected_result["selected_count"],
 467 |                     "entities": []
 468 |                 }
 469 |                 
 470 |                 for entity_info in selected_result["selected_entities"]:
 471 |                     # Find entity by GlobalId
 472 |                     entity = file.by_guid(entity_info["id"])
 473 |                     if not entity:
 474 |                         continue
 475 |                     
 476 |                     # Get basic entity info
 477 |                     entity_data = {
 478 |                         "id": entity.GlobalId,
 479 |                         "type": entity.is_a(),
 480 |                         "name": entity.Name if hasattr(entity, "Name") else None,
 481 |                         "description": entity.Description if hasattr(entity, "Description") else None,
 482 |                         "blender_name": entity_info["blender_name"],
 483 |                         "property_sets": {}
 484 |                     }
 485 |                     
 486 |                     # Get all property sets
 487 |                     psets = ifcopenshell.util.element.get_psets(entity)
 488 |                     for pset_name, pset_data in psets.items():
 489 |                         entity_data["property_sets"][pset_name] = pset_data
 490 |                     
 491 |                     result["entities"].append(entity_data)
 492 |                 
 493 |                 return result
 494 |                 
 495 |             # If we're looking at a specific entity
 496 |             elif global_id:
 497 |                 # Find entity by GlobalId
 498 |                 entity = file.by_guid(global_id)
 499 |                 if not entity:
 500 |                     return {"error": f"No entity found with GlobalId: {global_id}"}
 501 |                 
 502 |                 # Get basic entity info
 503 |                 entity_info = {
 504 |                     "id": entity.GlobalId,
 505 |                     "type": entity.is_a(),
 506 |                     "name": entity.Name if hasattr(entity, "Name") else None,
 507 |                     "description": entity.Description if hasattr(entity, "Description") else None,
 508 |                     "property_sets": {}
 509 |                 }
 510 |                 
 511 |                 # Get all property sets
 512 |                 psets = ifcopenshell.util.element.get_psets(entity)
 513 |                 for pset_name, pset_data in psets.items():
 514 |                     entity_info["property_sets"][pset_name] = pset_data
 515 |                 
 516 |                 return entity_info
 517 |             else:
 518 |                 return {"error": "Either global_id or selected_only must be specified"}
 519 |         except Exception as e:
 520 |             import traceback
 521 |             return {"error": str(e), "traceback": traceback.format_exc()}
 522 | 
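# --- Usage sketch (annotation, not part of addon.py) ---------------------------------------
# get_ifc_properties above is a thin wrapper around ifcopenshell's pset helper. The same data
# can be read directly from an IFC file; "model.ifc" and "Pset_WallCommon" below are only
# illustrative placeholders:
#
#     import ifcopenshell
#     import ifcopenshell.util.element
#
#     model = ifcopenshell.open("model.ifc")             # hypothetical file path
#     wall = model.by_type("IfcWall")[0]
#     psets = ifcopenshell.util.element.get_psets(wall)  # {pset_name: {prop: value, ...}}
#     print(psets.get("Pset_WallCommon", {}))
# --------------------------------------------------------------------------------------------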
 523 |     @staticmethod
 524 |     def get_ifc_spatial_structure():
 525 |         """
 526 |         Get the spatial structure of the IFC model (site, building, storey, space hierarchy).
 527 |         
 528 |         Returns:
 529 |             Hierarchical structure of the IFC model's spatial elements
 530 |         """
 531 |         try:
 532 |             file = IfcStore.get_file()
 533 |             if file is None:
 534 |                 return {"error": "No IFC file is currently loaded"}
 535 |             
 536 |             # Start with projects
 537 |             projects = file.by_type("IfcProject")
 538 |             if not projects:
 539 |                 return {"error": "No IfcProject found in the model"}
 540 |             
 541 |             def get_children(parent):
 542 |                 """Get immediate children of the given element"""
 543 |                 if hasattr(parent, "IsDecomposedBy"):
 544 |                     rel_aggregates = parent.IsDecomposedBy
 545 |                     children = []
 546 |                     for rel in rel_aggregates:
 547 |                         children.extend(rel.RelatedObjects)
 548 |                     return children
 549 |                 return []
 550 |                 
 551 |             def create_structure(element):
 552 |                 """Recursively create the structure for an element"""
 553 |                 result = {
 554 |                     "id": element.GlobalId,
 555 |                     "type": element.is_a(),
 556 |                     "name": element.Name if hasattr(element, "Name") else None,
 557 |                     "children": []
 558 |                 }
 559 |                 
 560 |                 for child in get_children(element):
 561 |                     result["children"].append(create_structure(child))
 562 |                 
 563 |                 return result
 564 |             
 565 |             # Create the structure starting from the project
 566 |             structure = create_structure(projects[0])
 567 |             
 568 |             return structure
 569 |         except Exception as e:
 570 |             import traceback
 571 |             return {"error": str(e), "traceback": traceback.format_exc()}
 572 | 
 573 |     @staticmethod
 574 |     def get_ifc_total_structure():
 575 |         """
 576 |         Get the complete IFC structure including spatial hierarchy and building elements.
 577 |         This function extends the spatial structure to include building elements like walls,
 578 |         doors, windows, etc. that are contained in each spatial element.
 579 | 
 580 |         Returns:
 581 |             Complete hierarchical structure with spatial elements and their contained building elements
 582 |         """
 583 |         try:
 584 |             file = IfcStore.get_file()
 585 |             if file is None:
 586 |                 return {"error": "No IFC file is currently loaded"}
 587 | 
 588 |             # Start with projects
 589 |             projects = file.by_type("IfcProject")
 590 |             if not projects:
 591 |                 return {"error": "No IfcProject found in the model"}
 592 | 
 593 |             def get_spatial_children(parent):
 594 |                 """Get immediate spatial children of the given element"""
 595 |                 if hasattr(parent, "IsDecomposedBy"):
 596 |                     rel_aggregates = parent.IsDecomposedBy
 597 |                     children = []
 598 |                     for rel in rel_aggregates:
 599 |                         children.extend(rel.RelatedObjects)
 600 |                     return children
 601 |                 return []
 602 | 
 603 |             def get_contained_elements(spatial_element):
 604 |                 """Get building elements contained in this spatial element"""
 605 |                 contained_elements = []
 606 | 
 607 |                 # Check for IfcRelContainedInSpatialStructure relationships
 608 |                 if hasattr(spatial_element, "ContainsElements"):
 609 |                     for rel in spatial_element.ContainsElements:
 610 |                         for element in rel.RelatedElements:
 611 |                             element_info = {
 612 |                                 "id": element.GlobalId,
 613 |                                 "type": element.is_a(),
 614 |                                 "name": element.Name if hasattr(element, "Name") else None,
 615 |                                 "description": element.Description if hasattr(element, "Description") else None
 616 |                             }
 617 |                             contained_elements.append(element_info)
 618 | 
 619 |                 return contained_elements
 620 | 
 621 |             def create_total_structure(element):
 622 |                 """Recursively create the complete structure for an element"""
 623 |                 result = {
 624 |                     "id": element.GlobalId,
 625 |                     "type": element.is_a(),
 626 |                     "name": element.Name if hasattr(element, "Name") else None,
 627 |                     "description": element.Description if hasattr(element, "Description") else None,
 628 |                     "children": [],
 629 |                     "building_elements": []
 630 |                 }
 631 | 
 632 |                 # Add spatial children (other spatial elements)
 633 |                 for child in get_spatial_children(element):
 634 |                     result["children"].append(create_total_structure(child))
 635 | 
 636 |                 # Add contained building elements (walls, doors, windows, etc.)
 637 |                 result["building_elements"] = get_contained_elements(element)
 638 | 
 639 |                 return result
 640 | 
 641 |             # Create the complete structure starting from the project
 642 |             total_structure = create_total_structure(projects[0])
 643 | 
 644 |             return total_structure
 645 | 
 646 |         except Exception as e:
 647 |             import traceback
 648 |             return {"error": str(e), "traceback": traceback.format_exc()}
 649 | 
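# --- Shape of the structure returned above (illustrative values, not from a real model) ----
#     {
#         "id": "2O2Fr$t4X7Zf8NOew3FLOH", "type": "IfcBuildingStorey", "name": "Level 1",
#         "description": None,
#         "children": [],                      # nested spatial elements, same shape
#         "building_elements": [
#             {"id": "...", "type": "IfcWall", "name": "Wall-001", "description": None},
#         ],
#     }
# --------------------------------------------------------------------------------------------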
 650 |     @staticmethod
 651 |     def get_ifc_relationships(global_id):
 652 |         """
 653 |         Get all relationships for a specific IFC entity.
 654 |         
 655 |         Parameters:
 656 |             global_id: GlobalId of the IFC entity
 657 |         
 658 |         Returns:
 659 |             Dictionary with all relationships the entity participates in
 660 |         """
 661 |         try:
 662 |             file = IfcStore.get_file()
 663 |             if file is None:
 664 |                 return {"error": "No IFC file is currently loaded"}
 665 |             
 666 |             # Find entity by GlobalId
 667 |             entity = file.by_guid(global_id)
 668 |             if not entity:
 669 |                 return {"error": f"No entity found with GlobalId: {global_id}"}
 670 |             
 671 |             # Basic entity info
 672 |             entity_info = {
 673 |                 "id": entity.GlobalId,
 674 |                 "type": entity.is_a(),
 675 |                 "name": entity.Name if hasattr(entity, "Name") else None,
 676 |                 "relationships": {
 677 |                     "contains": [],
 678 |                     "contained_in": [],
 679 |                     "connects": [],
 680 |                     "connected_by": [],
 681 |                     "defines": [],
 682 |                     "defined_by": []
 683 |                 }
 684 |             }
 685 |             
 686 |             # Check if entity contains other elements
 687 |             if hasattr(entity, "IsDecomposedBy"):
 688 |                 for rel in entity.IsDecomposedBy:
 689 |                     for obj in rel.RelatedObjects:
 690 |                         entity_info["relationships"]["contains"].append({
 691 |                             "id": obj.GlobalId,
 692 |                             "type": obj.is_a(),
 693 |                             "name": obj.Name if hasattr(obj, "Name") else None
 694 |                         })
 695 |             
 696 |             # Check if entity is contained in other elements
 697 |             if hasattr(entity, "Decomposes"):
 698 |                 for rel in entity.Decomposes:
 699 |                     rel_obj = rel.RelatingObject
 700 |                     entity_info["relationships"]["contained_in"].append({
 701 |                         "id": rel_obj.GlobalId,
 702 |                         "type": rel_obj.is_a(),
 703 |                         "name": rel_obj.Name if hasattr(rel_obj, "Name") else None
 704 |                     })
 705 |             
 706 |             # For physical connections (depends on entity type)
 707 |             if hasattr(entity, "ConnectedTo"):
 708 |                 for rel in entity.ConnectedTo:
 709 |                     for obj in rel.RelatedElement:
 710 |                         entity_info["relationships"]["connects"].append({
 711 |                             "id": obj.GlobalId,
 712 |                             "type": obj.is_a(),
 713 |                             "name": obj.Name if hasattr(obj, "Name") else None,
 714 |                             "connection_type": rel.ConnectionType if hasattr(rel, "ConnectionType") else None
 715 |                         })
 716 |             
 717 |             if hasattr(entity, "ConnectedFrom"):
 718 |                 for rel in entity.ConnectedFrom:
 719 |                     obj = rel.RelatingElement
 720 |                     entity_info["relationships"]["connected_by"].append({
 721 |                         "id": obj.GlobalId,
 722 |                         "type": obj.is_a(),
 723 |                         "name": obj.Name if hasattr(obj, "Name") else None,
 724 |                         "connection_type": rel.ConnectionType if hasattr(rel, "ConnectionType") else None
 725 |                     })
 726 |             
 727 |             return entity_info
 728 |         except Exception as e:
 729 |             import traceback
 730 |             return {"error": str(e), "traceback": traceback.format_exc()}
 731 |         
 732 | 
 733 |     @staticmethod
 734 |     def export_ifc_data(entity_type=None, level_name=None, output_format="csv"):
 735 |         """Export IFC data to a structured file"""
 736 |         try:
 737 |             file = IfcStore.get_file()
 738 |             if file is None:
 739 |                 return {"error": "No IFC file is currently loaded"}
 740 |             
 741 |             data_list = []
 742 |             
 743 |             # Filter objects based on type
 744 |             if entity_type:
 745 |                 objects = file.by_type(entity_type)
 746 |             else:
 747 |                 objects = file.by_type("IfcElement")
 748 |             
 749 |             # Create a data dictionary for each object
 750 |             for obj in objects:
 751 |                 obj_data = {}
 752 |                 
 753 |                 # Get level/storey information
 754 |                 container_level = None
 755 |                 try:
 756 |                     containing_structure = ifcopenshell.util.element.get_container(obj)
 757 |                     if containing_structure and containing_structure.is_a("IfcBuildingStorey"):
 758 |                         container_level = containing_structure.Name
 759 |                 except Exception as e:
 760 |                     pass
 761 |                 
 762 |                 # Skip if we're filtering by level and this doesn't match
 763 |                 if level_name and container_level != level_name:
 764 |                     continue
 765 |                     
 766 |                 # Basic information
 767 |                 obj_data['ExpressId'] = obj.id()
 768 |                 obj_data['GlobalId'] = obj.GlobalId if hasattr(obj, "GlobalId") else None
 769 |                 obj_data['IfcClass'] = obj.is_a()
 770 |                 obj_data['Name'] = obj.Name if hasattr(obj, "Name") else None
 771 |                 obj_data['Description'] = obj.Description if hasattr(obj, "Description") else None
 772 |                 obj_data['LevelName'] = container_level
 773 |                 
 774 |                 # Get predefined type if available
 775 |                 try:
 776 |                     obj_data['PredefinedType'] = ifcopenshell.util.element.get_predefined_type(obj)
 777 |                 except:
 778 |                     obj_data['PredefinedType'] = None
 779 |                     
 780 |                 # Get type information
 781 |                 try:
 782 |                     type_obj = ifcopenshell.util.element.get_type(obj)
 783 |                     obj_data['TypeName'] = type_obj.Name if type_obj and hasattr(type_obj, "Name") else None
 784 |                     obj_data['TypeClass'] = type_obj.is_a() if type_obj else None
 785 |                 except:
 786 |                     obj_data['TypeName'] = None
 787 |                     obj_data['TypeClass'] = None
 788 |                 
 789 |                 # Get property sets (simplify structure for export)
 790 |                 try:
 791 |                     property_sets = ifcopenshell.util.element.get_psets(obj)
 792 |                     # Flatten property sets for better export compatibility
 793 |                     for pset_name, pset_data in property_sets.items():
 794 |                         for prop_name, prop_value in pset_data.items():
 795 |                             obj_data[f"{pset_name}.{prop_name}"] = prop_value
 796 |                 except Exception as e:
 797 |                     pass
 798 |                     
 799 |                 data_list.append(obj_data)
 800 |             
 801 |             if not data_list:
 802 |                 return "No data found matching the specified criteria"
 803 |             
 804 |             # Determine output directory - try multiple options to ensure it works in various environments
 805 |             output_dirs = [
 806 |                 "C:\\Users\\Public\\Documents" if os.name == "nt" else None,  # Public Documents
 807 |                 "/usr/share" if os.name != "nt" else None,  # Unix share directory
 808 |                 "/tmp",  # Unix temp directory
 809 |                 "C:\\Temp" if os.name == "nt" else None,  # Windows temp directory
 810 |             ]
 811 |             
 812 |             output_dir = None
 813 |             for dir_path in output_dirs:
 814 |                 if dir_path and os.path.exists(dir_path) and os.access(dir_path, os.W_OK):
 815 |                     output_dir = dir_path
 816 |                     break
 817 |                     
 818 |             if not output_dir:
 819 |                 return {"error": "Could not find a writable directory for output"}
 820 |             
 821 |             # Create filename based on filters
 822 |             filters = []
 823 |             if entity_type:
 824 |                 filters.append(entity_type)
 825 |             if level_name:
 826 |                 filters.append(level_name)
 827 |             filter_str = "_".join(filters) if filters else "all"
 828 |             
 829 |             timestamp = time.strftime("%Y%m%d_%H%M%S")
 830 |             filename = f"ifc_export_{filter_str}_{timestamp}.{output_format}"
 831 |             filepath = os.path.join(output_dir, filename)
 832 |             
 833 |             # Export based on format
 834 |             if output_format == "json":
 835 |                 with open(filepath, 'w') as f:
 836 |                     json.dump(data_list, f, indent=2)
 837 |             elif output_format == "csv":
 838 |                 import pandas as pd
 839 |                 df = pd.DataFrame(data_list)
 840 |                 df.to_csv(filepath, index=False)
 841 |             
 842 |             # Summary info for the response
 843 |             entity_count = len(data_list)
 844 |             entity_types = set(item['IfcClass'] for item in data_list)
 845 |             levels = set(item['LevelName'] for item in data_list if item['LevelName'])
 846 |             
 847 |             return {
 848 |                 "success": True,
 849 |                 "message": f"Data exported successfully to {filepath}",
 850 |                 "filepath": filepath,
 851 |                 "format": output_format,
 852 |                 "summary": {
 853 |                     "entity_count": entity_count,
 854 |                     "entity_types": list(entity_types),
 855 |                     "levels": list(levels)
 856 |                 }
 857 |             }
 858 |         
 859 |         except Exception as e:
 860 |             import traceback
 861 |             return {"error": str(e), "traceback": traceback.format_exc()}
 862 |         
 863 |     
 864 |     @staticmethod
 865 |     def place_ifc_object(type_name, location, rotation=None):
 866 |         """
 867 |         Place an IFC object at specified location with optional rotation
 868 |         
 869 |         Args:
 870 |             type_name: Name of the IFC element type
 871 |             location: [x, y, z] list or tuple for position
 872 |             rotation: Value in degrees for rotation around Z axis (optional)
 873 |         
 874 |         Returns:
 875 |             Dictionary with information about the created object
 876 |         """
 877 |         try:
 878 |             import ifcopenshell
 879 |             from bonsai.bim.ifc import IfcStore
 880 |             import math
 881 |             
 882 |             # Convert location to tuple if it's not already
 883 |             if isinstance(location, list):
 884 |                 location = tuple(location)
 885 |                 
 886 |             def find_type_by_name(name):
 887 |                 file = IfcStore.get_file()
 888 |                 for element in file.by_type("IfcElementType"):
 889 |                     if element.Name == name:
 890 |                         return element.id()
 891 |                 return None
 892 | 
 893 |             # Find the type ID
 894 |             type_id = find_type_by_name(type_name)
 895 |             if not type_id:
 896 |                 return {"error": f"Type '{type_name}' not found. Please check if this type exists in the model."}
 897 |                 
 898 |             # Store original context
 899 |             original_context = bpy.context.copy()
 900 |             
 901 |             # Ensure we're in 3D View context
 902 |             override = bpy.context.copy()
 903 |             for area in bpy.context.screen.areas:
 904 |                 if area.type == 'VIEW_3D':
 905 |                     override["area"] = area
 906 |                     override["region"] = area.regions[-1]
 907 |                     break
 908 |             
 909 |             # Set cursor location
 910 |             bpy.context.scene.cursor.location = location
 911 |             
 912 |             # Get properties to set up parameters
 913 |             props = bpy.context.scene.BIMModelProperties
 914 |             
 915 |             # Store original rl_mode and set to CURSOR to use cursor's Z position
 916 |             original_rl_mode = props.rl_mode
 917 |             props.rl_mode = 'CURSOR'
 918 |             
 919 |             # Create the object using the override context
 920 |             with bpy.context.temp_override(**override):
 921 |                 bpy.ops.bim.add_occurrence(relating_type_id=type_id)
 922 |             
 923 |             # Get the newly created object
 924 |             obj = bpy.context.active_object
 925 |             if not obj:
 926 |                 props.rl_mode = original_rl_mode
 927 |                 return {"error": "Failed to create object"}
 928 |             
 929 |             # Force the Z position explicitly
 930 |             obj.location.z = location[2]
 931 |             
 932 |             # Apply rotation if provided
 933 |             if rotation is not None:
 934 |                 # Convert degrees to radians for Blender's rotation_euler
 935 |                 full_rotation = (0, 0, math.radians(float(rotation)))
 936 |                 obj.rotation_euler = full_rotation
 937 |             
 938 |             # Sync the changes back to IFC
 939 |             # Use the appropriate method depending on what's available
 940 |             if hasattr(bpy.ops.bim, "update_representation"):
 941 |                 bpy.ops.bim.update_representation(obj=obj.name)
 942 |             
 943 |             # Restore original rl_mode
 944 |             props.rl_mode = original_rl_mode
 945 |             
 946 |             # Get the IFC entity for the new object
 947 |             entity_id = obj.BIMObjectProperties.ifc_definition_id
 948 |             if entity_id:
 949 |                 file = IfcStore.get_file()
 950 |                 entity = file.by_id(entity_id)
 951 |                 global_id = entity.GlobalId if hasattr(entity, "GlobalId") else None
 952 |             else:
 953 |                 global_id = None
 954 |             
 955 |             # Return information about the created object
 956 |             return {
 957 |                 "success": True,
 958 |                 "blender_name": obj.name,
 959 |                 "global_id": global_id,
 960 |                 "location": list(obj.location),
 961 |                 "rotation": list(obj.rotation_euler),
 962 |                 "type_name": type_name
 963 |             }
 964 |             
 965 |         except Exception as e:
 966 |             import traceback
 967 |             return {"error": str(e), "traceback": traceback.format_exc()}
 968 |     
 969 | 
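# --- Usage sketch (annotation, not part of addon.py) ---------------------------------------
# place_ifc_object is reached through the same JSON protocol as the other handlers; the type
# name below ("WAL100") is a hypothetical IfcElementType name that would have to exist in the
# loaded model:
#
#     cmd = {
#         "type": "place_ifc_object",
#         "params": {"type_name": "WAL100", "location": [2.0, 4.5, 0.0], "rotation": 90},
#     }
#     # ...send json.dumps(cmd) over the socket as in the earlier sketch...
# --------------------------------------------------------------------------------------------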
 970 |     ### Ability to see
 971 |     @staticmethod
 972 |     def get_current_view():
 973 |         """Capture and return the current viewport as an image"""
 974 |         try:
 975 |             # Find a 3D View
 976 |             for area in bpy.context.screen.areas:
 977 |                 if area.type == 'VIEW_3D':
 978 |                     break
 979 |             else:
 980 |                 return {"error": "No 3D View available"}
 981 |             
 982 |             # Create temporary file to save the viewport screenshot
 983 |             temp_file = tempfile.NamedTemporaryFile(suffix='.png', delete=False)
 984 |             temp_path = temp_file.name
 985 |             temp_file.close()
 986 |             
 987 |             # Find appropriate region
 988 |             for region in area.regions:
 989 |                 if region.type == 'WINDOW':
 990 |                     break
 991 |             else:
 992 |                 return {"error": "No appropriate region found in 3D View"}
 993 |             
 994 |             # Use temp_override instead of the old override dictionary
 995 |             with bpy.context.temp_override(area=area, region=region):
 996 |                 # Save screenshot
 997 |                 bpy.ops.screen.screenshot(filepath=temp_path)
 998 |             
 999 |             # Read the image data and encode as base64
1000 |             with open(temp_path, 'rb') as f:
1001 |                 image_data = f.read()
1002 |             
1003 |             # Clean up
1004 |             os.unlink(temp_path)
1005 |             
1006 |             # Return base64 encoded image
1007 |             return {
1008 |                 "width": area.width,
1009 |                 "height": area.height,
1010 |                 "format": "png",
1011 |                 "data": base64.b64encode(image_data).decode('utf-8')
1012 |             }
1013 |         except Exception as e:
1014 |             import traceback
1015 |             return {"error": str(e), "traceback": traceback.format_exc()}
1016 | 
1017 | 
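# --- Usage sketch (annotation, not part of addon.py) ---------------------------------------
# The screenshot comes back base64-encoded inside the "result" payload; a client could write
# it to disk like this ("viewport.png" is just an example path):
#
#     import base64
#
#     resp = ...  # parsed JSON response of a {"type": "get_current_view"} command
#     with open("viewport.png", "wb") as fh:
#         fh.write(base64.b64decode(resp["result"]["data"]))
# --------------------------------------------------------------------------------------------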
1018 |     @staticmethod
1019 |     def get_ifc_quantities(entity_type=None, selected_only=False):
1020 |         """
1021 |         Calculate and get quantities (m2, m3, etc.) for IFC elements.
1022 |         
1023 |         Parameters:
1024 |             entity_type: Type of IFC entity to get quantities for (e.g., "IfcWall", "IfcSlab")
1025 |             selected_only: If True, only get quantities for selected objects
1026 |         
1027 |         Returns:
1028 |             Dictionary with quantities for the specified elements
1029 |         """
1030 |         try:
1031 |             file = IfcStore.get_file()
1032 |             if file is None:
1033 |                 return {"error": "No IFC file is currently loaded"}
1034 | 
1035 |             # Check if BaseQuantities already exist to avoid re-calculating
1036 |             quantities_exist = False
1037 |             sample_elements = file.by_type("IfcElement")[:10] if file.by_type("IfcElement") else []
1038 | 
1039 |             for elem in sample_elements:
1040 |                 psets = ifcopenshell.util.element.get_psets(elem)
1041 |                 if any(qset in psets for qset in ["BaseQuantities", "Qto_WallBaseQuantities",
1042 |                                                    "Qto_SlabBaseQuantities", "Qto_BeamBaseQuantities"]):
1043 |                     quantities_exist = True
1044 |                     break
1045 | 
1046 |             # Only calculate quantities if they don't exist yet
1047 |             if not quantities_exist:
1048 |                 try:
1049 |                     bpy.ops.bim.perform_quantity_take_off()
1050 |                 except Exception as e:
1051 |                     return {"error": f"Failed to calculate quantities: {str(e)}"}
1052 | 
1053 |             elements_data = []
1054 |             
1055 |             # If we're only looking at selected objects
1056 |             if selected_only:
1057 |                 selected_result = BlenderMCPServer.get_selected_ifc_entities()
1058 |                 
1059 |                 # Check for errors
1060 |                 if "error" in selected_result:
1061 |                     return selected_result
1062 |                 
1063 |                 # If no objects are selected, return early
1064 |                 if selected_result["selected_count"] == 0:
1065 |                     return selected_result
1066 |                 
1067 |                 # Process each selected entity
1068 |                 for entity_info in selected_result["selected_entities"]:
1069 |                     # Find entity by GlobalId
1070 |                     entity = file.by_guid(entity_info["id"])
1071 |                     if not entity:
1072 |                         continue
1073 |                     
1074 |                     # Filter by type if specified
1075 |                     if entity_type and entity.is_a() != entity_type:
1076 |                         continue
1077 |                     
1078 |                     # Extract quantities
1079 |                     element_data = extract_quantities(entity, entity_info["blender_name"])
1080 |                     if element_data:
1081 |                         elements_data.append(element_data)
1082 |                         
1083 |             else:
1084 |                 # Get entities based on type or default to common element types
1085 |                 if entity_type:
1086 |                     entities = file.by_type(entity_type)
1087 |                 else:
1088 |                     # Get common element types that have quantities
1089 |                     entity_types = ["IfcWall", "IfcSlab", "IfcBeam", "IfcColumn", "IfcDoor", "IfcWindow"]
1090 |                     entities = []
1091 |                     for etype in entity_types:
1092 |                         entities.extend(file.by_type(etype))
1093 |                 
1094 |                 # Process each entity
1095 |                 for entity in entities:
1096 |                     element_data = extract_quantities(entity)
1097 |                     if element_data:
1098 |                         elements_data.append(element_data)
1099 |             
1100 |             # Summary statistics
1101 |             summary = {
1102 |                 "total_elements": len(elements_data),
1103 |                 "element_types": {}
1104 |             }
1105 |             
1106 |             # Group by element type for summary
1107 |             for element in elements_data:
1108 |                 etype = element["type"]
1109 |                 if etype not in summary["element_types"]:
1110 |                     summary["element_types"][etype] = {"count": 0, "total_area": 0, "total_volume": 0}
1111 |                 
1112 |                 summary["element_types"][etype]["count"] += 1
1113 |                 if element["quantities"].get("area"):
1114 |                     summary["element_types"][etype]["total_area"] += element["quantities"]["area"]
1115 |                 if element["quantities"].get("volume"):
1116 |                     summary["element_types"][etype]["total_volume"] += element["quantities"]["volume"]
1117 |             
1118 |             return {
1119 |                 "success": True,
1120 |                 "elements": elements_data,
1121 |                 "summary": summary
1122 |             }
1123 |             
1124 |         except Exception as e:
1125 |             import traceback
1126 |             return {"error": str(e), "traceback": traceback.format_exc()}
1127 |     
1128 |     @staticmethod
1129 |     def export_drawing_png(view_type="top", height_offset=0.5, resolution_x=1920, 
1130 |                              resolution_y=1080, storey_name=None, output_path=None):
1131 |         """
1132 |         Export drawings as PNG images with custom resolution.
1133 |         
1134 |         Creates 2D and 3D views of IFC building, particularly useful for architectural drawings.
1135 |         
1136 |         Args:
1137 |             view_type: "top" for plan view, "front", "right", "left" for elevations, "isometric" for 3D view
1138 |             height_offset: Height in meters above storey level for camera position  
1139 |             resolution_x: Horizontal resolution in pixels
1140 |             resolution_y: Vertical resolution in pixels
1141 |             storey_name: Specific storey name to render (None for all/ground floor)
1142 |             output_path: File path to save PNG (None for temp file)
1143 |         
1144 |         Returns:
1145 |             Dict with base64 encoded image data and metadata
1146 |         """
1147 |         try:
1148 |             import tempfile
1149 |             import os
1150 |             
1151 |             # Validate parameters
1152 |             if resolution_x > 4096 or resolution_y > 4096:
1153 |                 return {"error": "Resolution too high. Maximum: 4096x4096"}
1154 |             
1155 |             if resolution_x < 100 or resolution_y < 100:
1156 |                 return {"error": "Resolution too low. Minimum: 100x100"}
1157 |             
1158 |             # Check if IFC file is loaded
1159 |             file = IfcStore.get_file()
1160 |             if file is None:
1161 |                 return {"error": "No IFC file is currently loaded"}
1162 |             
1163 |             # Store original render settings
1164 |             scene = bpy.context.scene
1165 |             original_engine = scene.render.engine
1166 |             original_res_x = scene.render.resolution_x
1167 |             original_res_y = scene.render.resolution_y
1168 |             original_filepath = scene.render.filepath
1169 |             
1170 |             # Set up render settings for drawing
1171 |             scene.render.engine = 'BLENDER_WORKBENCH'  # Fast, good for architectural drawings
1172 |             scene.render.resolution_x = resolution_x
1173 |             scene.render.resolution_y = resolution_y
1174 |             scene.render.resolution_percentage = 100
1175 |             
1176 |             # Store original camera if exists
1177 |             original_camera = bpy.context.scene.camera
1178 |             
1179 |             # Create temporary camera for orthographic rendering
1180 |             bpy.ops.object.camera_add()
1181 |             camera = bpy.context.object
1182 |             camera.name = "TempDrawingCamera"
1183 |             bpy.context.scene.camera = camera
1184 |             
1185 |             # Set camera to orthographic
1186 |             camera.data.type = 'ORTHO'
1187 |             camera.data.ortho_scale = 50  # Adjust based on building size
1188 |             
1189 |             # Position camera based on view type and storey
1190 |             if view_type == "top":
1191 |                 # Find building bounds to position camera appropriately
1192 |                 all_objects = [obj for obj in bpy.context.scene.objects 
1193 |                               if obj.type == 'MESH' and obj.visible_get()]
1194 |                 
1195 |                 if all_objects:
1196 |                     # Calculate bounding box of all visible objects
1197 |                     min_x = min_y = min_z = float('inf')
1198 |                     max_x = max_y = max_z = float('-inf')
1199 |                     
1200 |                     for obj in all_objects:
1201 |                         bbox = [obj.matrix_world @ mathutils.Vector(corner) for corner in obj.bound_box]
1202 |                         for corner in bbox:
1203 |                             min_x = min(min_x, corner.x)
1204 |                             max_x = max(max_x, corner.x)
1205 |                             min_y = min(min_y, corner.y)  
1206 |                             max_y = max(max_y, corner.y)
1207 |                             min_z = min(min_z, corner.z)
1208 |                             max_z = max(max_z, corner.z)
1209 |                     
1210 |                     # Position camera above the building
1211 |                     center_x = (min_x + max_x) / 2
1212 |                     center_y = (min_y + max_y) / 2
1213 |                     
1214 |                     # For plan view, position camera above
1215 |                     camera_height = max_z + height_offset
1216 |                     camera.location = (center_x, center_y, camera_height)
1217 |                     camera.rotation_euler = (0, 0, 0)  # Look down
1218 |                     
1219 |                     # Adjust orthographic scale based on building size
1220 |                     building_width = max(max_x - min_x, max_y - min_y) * 1.2  # Add 20% margin
1221 |                     camera.data.ortho_scale = building_width
1222 |                 else:
1223 |                     # Default position if no objects found
1224 |                     camera.location = (0, 0, 10)
1225 |                     camera.rotation_euler = (0, 0, 0)
1226 |             
1227 |             elif view_type in ["front", "right", "left"]:
1228 |                 # For elevations, position camera accordingly
1229 |                 # This is a simplified implementation - could be enhanced
1230 |                 all_objects = [obj for obj in bpy.context.scene.objects 
1231 |                               if obj.type == 'MESH' and obj.visible_get()]
1232 |                 
1233 |                 if all_objects:
1234 |                     # Calculate bounds
1235 |                     min_x = min_y = min_z = float('inf')
1236 |                     max_x = max_y = max_z = float('-inf')
1237 |                     
1238 |                     for obj in all_objects:
1239 |                         bbox = [obj.matrix_world @ mathutils.Vector(corner) for corner in obj.bound_box]
1240 |                         for corner in bbox:
1241 |                             min_x = min(min_x, corner.x)
1242 |                             max_x = max(max_x, corner.x)
1243 |                             min_y = min(min_y, corner.y)
1244 |                             max_y = max(max_y, corner.y)
1245 |                             min_z = min(min_z, corner.z)
1246 |                             max_z = max(max_z, corner.z)
1247 |                     
1248 |                     center_x = (min_x + max_x) / 2
1249 |                     center_y = (min_y + max_y) / 2
1250 |                     center_z = (min_z + max_z) / 2
1251 |                     
1252 |                     building_depth = max(max_x - min_x, max_y - min_y) * 2
1253 |                     
1254 |                     if view_type == "front":
1255 |                         camera.location = (center_x, center_y - building_depth, center_z)
1256 |                         camera.rotation_euler = (1.5708, 0, 0)  # 90 degrees X rotation
1257 |                     elif view_type == "right":
1258 |                         camera.location = (center_x + building_depth, center_y, center_z)
1259 |                         camera.rotation_euler = (1.5708, 0, 1.5708)  # Look from right
1260 |                     elif view_type == "left":
1261 |                         camera.location = (center_x - building_depth, center_y, center_z)
1262 |                         camera.rotation_euler = (1.5708, 0, -1.5708)  # Look from left
1263 |                     
1264 |                     # Adjust scale for elevations
1265 |                     building_height = max_z - min_z
1266 |                     building_width = max(max_x - min_x, max_y - min_y)
1267 |                     camera.data.ortho_scale = max(building_height, building_width) * 1.2
1268 |             
1269 |             elif view_type == "isometric":
1270 |                 # For isometric view, use perspective camera positioned diagonally
1271 |                 camera.data.type = 'PERSP'
1272 |                 camera.data.lens = 35  # 35mm lens for nice perspective
1273 |                 
1274 |                 all_objects = [obj for obj in bpy.context.scene.objects 
1275 |                               if obj.type == 'MESH' and obj.visible_get()]
1276 |                 
1277 |                 if all_objects:
1278 |                     # Calculate bounds
1279 |                     min_x = min_y = min_z = float('inf')
1280 |                     max_x = max_y = max_z = float('-inf')
1281 |                     
1282 |                     for obj in all_objects:
1283 |                         bbox = [obj.matrix_world @ mathutils.Vector(corner) for corner in obj.bound_box]
1284 |                         for corner in bbox:
1285 |                             min_x = min(min_x, corner.x)
1286 |                             max_x = max(max_x, corner.x)
1287 |                             min_y = min(min_y, corner.y)
1288 |                             max_y = max(max_y, corner.y)
1289 |                             min_z = min(min_z, corner.z)
1290 |                             max_z = max(max_z, corner.z)
1291 |                     
1292 |                     center_x = (min_x + max_x) / 2
1293 |                     center_y = (min_y + max_y) / 2
1294 |                     center_z = (min_z + max_z) / 2
1295 |                     
1296 |                     # Calculate distance to frame the building nicely
1297 |                     building_size = max(max_x - min_x, max_y - min_y, max_z - min_z)
1298 |                     distance = building_size * 1.2  # Distance multiplier for good framing
1299 |                     
1300 |                     # Position camera for an isometric-style view: rotated 45° around the vertical axis,
1301 |                     # slightly elevated (0.3 × distance) and aimed at the building centre
1302 |                     import math
1303 |                     angle_rad = math.radians(45)
1304 |                     
1305 |                     camera_x = center_x + distance * math.cos(angle_rad)
1306 |                     camera_y = center_y - distance * math.sin(angle_rad)
1307 |                     camera_z = center_z + distance * 0.3  # Lower elevation for better facade view
1308 |                     
1309 |                     camera.location = (camera_x, camera_y, camera_z)
1310 |                     
1311 |                     # Point camera at building center
1312 |                     direction = mathutils.Vector((center_x - camera_x, center_y - camera_y, center_z - camera_z))
1313 |                     camera.rotation_euler = direction.to_track_quat('-Z', 'Y').to_euler()
1314 |                 else:
1315 |                     # Default isometric position
1316 |                     camera.location = (15, -15, 10)
1317 |                     camera.rotation_euler = (1.1, 0, 0.785)  # ~63°, 0°, ~45°
1318 |             
1319 |             # Set up output file path
1320 |             if output_path:
1321 |                 render_path = output_path
1322 |             else:
1323 |                 temp_dir = tempfile.gettempdir()
1324 |                 render_path = os.path.join(temp_dir, f"drawing_{view_type}_{int(time.time())}.png")
1325 |             
1326 |             scene.render.filepath = render_path
1327 |             scene.render.image_settings.file_format = 'PNG'
1328 |             
1329 |             # Render the image
1330 |             bpy.ops.render.render(write_still=True)
1331 |             
1332 |             # Read the rendered image and encode as base64
1333 |             if os.path.exists(render_path):
1334 |                 with open(render_path, 'rb') as f:
1335 |                     image_data = f.read()
1336 |                 
1337 |                 # Clean up temporary file if we created it
1338 |                 if not output_path:
1339 |                     os.remove(render_path)
1340 |                 
1341 |                 # Restore original settings
1342 |                 scene.render.engine = original_engine
1343 |                 scene.render.resolution_x = original_res_x
1344 |                 scene.render.resolution_y = original_res_y
1345 |                 scene.render.filepath = original_filepath
1346 |                 bpy.context.scene.camera = original_camera
1347 |                 
1348 |                 # Delete temporary camera
1349 |                 bpy.data.objects.remove(camera, do_unlink=True)
1350 |                 
1351 |                 # Return base64 encoded image
1352 |                 import base64
1353 |                 return {
1354 |                     "success": True,
1355 |                     "data": base64.b64encode(image_data).decode('utf-8'),
1356 |                     "format": "png",
1357 |                     "resolution": f"{resolution_x}x{resolution_y}",
1358 |                     "view_type": view_type,
1359 |                     "output_path": render_path if output_path else None
1360 |                 }
            else:
                # Restore the original settings and remove the temporary camera before reporting the failure
                scene.render.engine = original_engine
                scene.render.resolution_x = original_res_x
                scene.render.resolution_y = original_res_y
                scene.render.filepath = original_filepath
                bpy.context.scene.camera = original_camera
                bpy.data.objects.remove(camera, do_unlink=True)
                return {"error": "Failed to create render file"}
1363 |                 
1364 |         except Exception as e:
1365 |             # Restore settings on error
1366 |             try:
1367 |                 scene = bpy.context.scene
1368 |                 scene.render.engine = original_engine
1369 |                 scene.render.resolution_x = original_res_x
1370 |                 scene.render.resolution_y = original_res_y 
1371 |                 scene.render.filepath = original_filepath
1372 |                 bpy.context.scene.camera = original_camera
1373 |                 
1374 |                 # Clean up camera if it exists
1375 |                 if 'camera' in locals() and camera:
1376 |                     bpy.data.objects.remove(camera, do_unlink=True)
1377 |             except:
1378 |                 pass
1379 |                 
1380 |             import traceback
1381 |             return {"error": f"Error creating drawing: {str(e)}", 
1382 |                     "traceback": traceback.format_exc()}
1383 | 
1384 |     @staticmethod
1385 |     def get_ifc_georeferencing_info(include_contexts: bool = False):
1386 |         """
1387 |         Retrieves georeferencing information from the currently opened IFC file (CRS, MapConversion, WCS, TrueNorth, IfcSite).
1388 | 
1389 |         Args:
1390 |             include_contexts (bool): If True, adds the breakdown of RepresentationContexts and operations
1391 | 
1392 |         Returns:
1393 |             dict: Structure with:
1394 |             {
1395 |             "georeferenced": bool,
1396 |             "crs": {
1397 |                 "name": str|None,
1398 |                 "geodetic_datum": str|None,
1399 |                 "vertical_datum": str|None,
1400 |                 "map_unit": str|None
1401 |             },
1402 |             "map_conversion": {
1403 |                 "eastings": float|None,
1404 |                 "northings": float|None,
1405 |                 "orthogonal_height": float|None,
1406 |                 "scale": float|None,
1407 |                 "x_axis_abscissa": float|None,
1408 |                 "x_axis_ordinate": float|None
1409 |             },
1410 |             "world_coordinate_system": {"origin": [x,y,z]|None},
1411 |             "true_north": {"direction_ratios": [x,y]|None},
1412 |             "site": {
1413 |                 "local_placement_origin": [x,y,z]|None,
1414 |                 "ref_latitude": [deg,min,sec,millionth]|None,
1415 |                 "ref_longitude": [deg,min,sec,millionth]|None,
1416 |                 "ref_elevation": float|None
1417 |             },
1418 |             "contexts": [...],     # only if include_contexts=True
1419 |             "warnings": [...]
1420 |             }
1421 |         """
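        # Illustrative shape of the result for a model without georeferencing (values are examples only):
        #   {"georeferenced": False, "crs": {"name": None, ...}, "map_conversion": {"eastings": None, ...},
        #    "site": {"ref_latitude": None, ...}, "warnings": ["IfcSite was not found."], "debug": {...}}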
1422 |         try:
1423 |                         
1424 |             file = IfcStore.get_file()
1425 |             debug = {"entered": True, "has_ifc": file is not None, "projects": 0, "sites": 0, "contexts": 0}
1426 |             if file is None:
1427 |                 return {"error": "No IFC file is currently loaded", "debug": debug}
1428 | 
1429 |             warnings = []
1430 |             result = {
1431 |                 "georeferenced": False,
1432 |                 "crs": {
1433 |                     "name": None,
1434 |                     "geodetic_datum": None,
1435 |                     "vertical_datum": None,
1436 |                     "map_unit": None
1437 |                 },
1438 |                 "map_conversion": {
1439 |                     "eastings": None,
1440 |                     "northings": None,
1441 |                     "orthogonal_height": None,
1442 |                     "scale": None,
1443 |                     "x_axis_abscissa": None,
1444 |                     "x_axis_ordinate": None
1445 |                 },
1446 |                 "world_coordinate_system": {"origin": None},
1447 |                 "true_north": {"direction_ratios": None},
1448 |                 "site": {
1449 |                     "local_placement_origin": None,
1450 |                     "ref_latitude": None,
1451 |                     "ref_longitude": None,
1452 |                     "ref_elevation": None
1453 |                 },
1454 |                 "contexts": [],
1455 |                 "warnings": warnings,
1456 |                 "debug": debug,
1457 |             }
1458 | 
1459 |             # --- IfcProject & RepresentationContexts ---
1460 |             projects = file.by_type("IfcProject")
1461 |             debug["projects"] = len(projects)
1462 |             if projects:
1463 |                 project = projects[0]
1464 |                 contexts = getattr(project, "RepresentationContexts", None) or []
1465 |                 debug["contexts"] = len(contexts)
1466 |                 for ctx in contexts:
1467 |                     ctx_entry = {
1468 |                         "context_identifier": getattr(ctx, "ContextIdentifier", None),
1469 |                         "context_type": getattr(ctx, "ContextType", None),
1470 |                         "world_origin": None,
1471 |                         "true_north": None,
1472 |                         "has_coordinate_operation": []
1473 |                     }
1474 | 
1475 |                     # WorldCoordinateSystem → Local origin
1476 |                     try:
1477 |                         wcs = getattr(ctx, "WorldCoordinateSystem", None)
1478 |                         if wcs and getattr(wcs, "Location", None):
1479 |                             loc = wcs.Location
1480 |                             if getattr(loc, "Coordinates", None):
1481 |                                 coords = list(loc.Coordinates)
1482 |                                 result["world_coordinate_system"]["origin"] = coords
1483 |                                 ctx_entry["world_origin"] = coords
1484 |                     except Exception as e:
1485 |                         warnings.append(f"WorldCoordinateSystem read error: {str(e)}")
1486 | 
1487 |                     # TrueNorth
1488 |                     try:
1489 |                         if hasattr(ctx, "TrueNorth") and ctx.TrueNorth:
1490 |                             tn = ctx.TrueNorth
1491 |                             ratios = list(getattr(tn, "DirectionRatios", []) or [])
1492 |                             result["true_north"]["direction_ratios"] = ratios
1493 |                             ctx_entry["true_north"] = ratios
1494 |                     except Exception as e:
1495 |                         warnings.append(f"TrueNorth read error: {str(e)}")
1496 | 
1497 |                     # HasCoordinateOperation → IfcMapConversion / TargetCRS
1498 |                     try:
1499 |                         if hasattr(ctx, "HasCoordinateOperation") and ctx.HasCoordinateOperation:
1500 |                             for op in ctx.HasCoordinateOperation:
1501 |                                 op_entry = {"type": op.is_a(), "target_crs": None, "map_conversion": None}
1502 | 
1503 |                                 # TargetCRS
1504 |                                 crs = getattr(op, "TargetCRS", None)
1505 |                                 if crs:
1506 |                                     result["crs"]["name"] = getattr(crs, "Name", None)
1507 |                                     result["crs"]["geodetic_datum"] = getattr(crs, "GeodeticDatum", None)
1508 |                                     result["crs"]["vertical_datum"] = getattr(crs, "VerticalDatum", None)
1509 |                                     try:
1510 |                                         map_unit = getattr(crs, "MapUnit", None)
1511 |                                         result["crs"]["map_unit"] = map_unit.Name if map_unit else None
1512 |                                     except Exception:
1513 |                                         result["crs"]["map_unit"] = None
1514 | 
1515 |                                     op_entry["target_crs"] = {
1516 |                                         "name": result["crs"]["name"],
1517 |                                         "geodetic_datum": result["crs"]["geodetic_datum"],
1518 |                                         "vertical_datum": result["crs"]["vertical_datum"],
1519 |                                         "map_unit": result["crs"]["map_unit"]
1520 |                                     }
1521 | 
1522 |                                 # IfcMapConversion
1523 |                                 if op.is_a("IfcMapConversion"):
1524 |                                     mc = {
1525 |                                         "eastings": getattr(op, "Eastings", None),
1526 |                                         "northings": getattr(op, "Northings", None),
1527 |                                         "orthogonal_height": getattr(op, "OrthogonalHeight", None),
1528 |                                         "scale": getattr(op, "Scale", None),
1529 |                                         "x_axis_abscissa": getattr(op, "XAxisAbscissa", None),
1530 |                                         "x_axis_ordinate": getattr(op, "XAxisOrdinate", None)
1531 |                                     }
1532 |                                     result["map_conversion"].update(mc)
1533 |                                     op_entry["map_conversion"] = mc
1534 | 
1535 |                                 ctx_entry["has_coordinate_operation"].append(op_entry)
1536 |                     except Exception as e:
1537 |                         warnings.append(f"HasCoordinateOperation read error: {str(e)}")
1538 | 
1539 |                     if include_contexts:
1540 |                         result["contexts"].append(ctx_entry)
1541 |             else:
1542 |                 warnings.append("IfcProject entity was not found.")
1543 | 
1544 |             # --- IfcSite (lat/long/alt local origin of placement) ---
1545 |             try:
1546 |                 sites = file.by_type("IfcSite")
1547 |                 debug["sites"] = len(sites)
1548 |                 if sites:
1549 |                     site = sites[0]
1550 |                     # LocalPlacement
1551 |                     try:
1552 |                         if getattr(site, "ObjectPlacement", None):
1553 |                             placement = site.ObjectPlacement
1554 |                             axisPlacement = getattr(placement, "RelativePlacement", None)
1555 |                             if axisPlacement and getattr(axisPlacement, "Location", None):
1556 |                                 loc = axisPlacement.Location
1557 |                                 if getattr(loc, "Coordinates", None):
1558 |                                     result["site"]["local_placement_origin"] = list(loc.Coordinates)
1559 |                     except Exception as e:
1560 |                         warnings.append(f"IfcSite.ObjectPlacement read error: {str(e)}")
1561 | 
1562 |                     # Lat/Long/Alt
1563 |                     try:
1564 |                         lat = getattr(site, "RefLatitude", None)
1565 |                         lon = getattr(site, "RefLongitude", None)
1566 |                         ele = getattr(site, "RefElevation", None)
1567 |                         result["site"]["ref_latitude"]  = list(lat) if lat else None
1568 |                         result["site"]["ref_longitude"] = list(lon) if lon else None
1569 |                         result["site"]["ref_elevation"] = ele
1570 |                     except Exception as e:
1571 |                         warnings.append(f"IfcSite (lat/long/elev) read error: {str(e)}")
1572 |                 else:
1573 |                     warnings.append("IfcSite was not found.")
1574 |             except Exception as e:
1575 |                 warnings.append(f"Error while querying IfcSite: {str(e)}")
1576 | 
1577 |             # --- Heuristic to determine georeferencing ---
1578 |             geo_flags = [
1579 |                 any(result["crs"].values()),
1580 |                 any(v is not None for v in result["map_conversion"].values())          
1581 |             ]
1582 |             result["georeferenced"] = all(geo_flags)
1583 | 
1584 |             return result
1585 | 
1586 |         except Exception as e:
1587 |             import traceback
1588 |             return {"error": str(e), "traceback": traceback.format_exc()} 
1589 |     
1590 |     @staticmethod
1591 |     def georeference_ifc_model(
1592 |         crs_mode: str,
1593 |         epsg: int = None,
1594 |         crs_name: str = None,
1595 |         geodetic_datum: str = None,
1596 |         map_projection: str = None,
1597 |         map_zone: str = None,
1598 |         eastings: float = None,
1599 |         northings: float = None,
1600 |         orthogonal_height: float = 0.0,
1601 |         scale: float = 1.0,
1602 |         x_axis_abscissa: float = None,
1603 |         x_axis_ordinate: float = None,
1604 |         true_north_azimuth_deg: float = None,
1605 |         context_filter: str = "Model",
1606 |         context_index: int = None,
1607 |         site_ref_latitude: list = None,         # IFC format [deg, min, sec, millionth]
1608 |         site_ref_longitude: list = None,        # IFC format [deg, min, sec, millionth]
1609 |         site_ref_elevation: float = None,
1610 |         site_ref_latitude_dd: float = None,     # Decimal degrees (optional)
1611 |         site_ref_longitude_dd: float = None,    # Decimal degrees (optional)
1612 |         overwrite: bool = False,
1613 |         dry_run: bool = False,
1614 |         write_path: str = None,
1615 |     ):
1616 |         """
1617 |         Usage:
1618 |         Creates/updates IfcProjectedCRS + IfcMapConversion in the opened IFC.
1619 |         Optionally updates IfcSite.RefLatitude/RefLongitude/RefElevation.
1620 |         If `pyproj` is available, it can convert Lat/Long (degrees) ⇄ E/N (meters)
1621 |         according to the given EPSG.
1622 | 
1623 |         Requirements:
1624 |         CRS declaration is ALWAYS required:
1625 |         - crs_mode="epsg" + epsg=XXXX    OR
1626 |         - crs_mode="custom" + (crs_name, geodetic_datum, map_projection [, map_zone])
1627 | 
1628 |         Minimum MapConversion information:
1629 |         - eastings + northings
1630 |         (if missing but lat/long + EPSG + pyproj are available, they are computed)
1631 |         """
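        # Illustrative call, not part of the original code; the EPSG code and coordinates are made up:
        #   georeference_ifc_model(crs_mode="epsg", epsg=25830,
        #                          eastings=440000.0, northings=4474000.0,
        #                          orthogonal_height=650.0, overwrite=False, dry_run=True)
        # With dry_run=True the CRS/MapConversion entities are still created in the in-memory model,
        # but nothing is written to disk (write_path is ignored).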
1632 |         import math
1633 |         from bonsai.bim.ifc import IfcStore
1634 |         file = IfcStore.get_file()
1635 |         if file is None:
1636 |             return {"success": False, "error": "No IFC file is currently loaded"}
1637 | 
1638 |         warnings = []
1639 |         actions = {"created_crs": False, "created_map_conversion": False,
1640 |                 "updated_map_conversion": False, "updated_site": False,
1641 |                 "overwrote": False, "wrote_file": False}
1642 |         debug = {}
1643 | 
1644 |         # ---------- helpers ----------
1645 |         def dd_to_ifc_dms(dd: float):
1646 |             """Converts decimal degrees to [deg, min, sec, millionth] (sign carried by degrees)."""
1647 |             if dd is None:
1648 |                 return None
1649 |             sign = -1 if dd < 0 else 1
1650 |             v = abs(dd)
1651 |             deg = int(v)
1652 |             rem = (v - deg) * 60
1653 |             minutes = int(rem)
1654 |             sec_float = (rem - minutes) * 60
1655 |             seconds = int(sec_float)
1656 |             millionth = int(round((sec_float - seconds) * 1_000_000))
1657 |             # Normalizes rounding (e.g. 59.999999 → 60)
1658 |             if millionth == 1_000_000:
1659 |                 seconds += 1
1660 |                 millionth = 0
1661 |             if seconds == 60:
1662 |                 minutes += 1
1663 |                 seconds = 0
1664 |             if minutes == 60:
1665 |                 deg += 1
1666 |                 minutes = 0
1667 |             return [sign * deg, minutes, seconds, millionth]
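        # e.g. dd_to_ifc_dms(40.5) -> [40, 30, 0, 0]; dd_to_ifc_dms(-3.7038) -> approximately [-3, 42, 13, 680000]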
1668 | 
1669 |         def select_context():
1670 |             ctxs = file.by_type("IfcGeometricRepresentationContext") or []
1671 |             if not ctxs:
1672 |                 return None, "No IfcGeometricRepresentationContext found"
1673 |             if context_index is not None and 0 <= context_index < len(ctxs):
1674 |                 return ctxs[context_index], None
1675 |             # By filter (default "Model", case-insensitive)
1676 |             if context_filter:
1677 |                 for c in ctxs:
1678 |                     if (getattr(c, "ContextType", None) or "").lower() == context_filter.lower():
1679 |                         return c, None
1680 |             # Fallback to the first one
1681 |             return ctxs[0], None
1682 | 
1683 |         # ---------- 1) CRS Validation ----------
1684 |         if crs_mode not in ("epsg", "custom"):
1685 |             return {"success": False, "error": "crs_mode must be 'epsg' or 'custom'"}
1686 | 
1687 |         if crs_mode == "epsg":
1688 |             if not epsg:
1689 |                 return {"success": False, "error": "epsg code required when crs_mode='epsg'"}
1690 |             crs_name_final = f"EPSG:{epsg}"
1691 |             geodetic_datum = geodetic_datum or "WGS84"
1692 |             map_projection = map_projection or "TransverseMercator"  # typical for UTM-based EPSG codes
1693 |             # map_zone is optional
1694 |         else:
1695 |             # custom
1696 |             missing = [k for k, v in (("crs_name", crs_name), ("geodetic_datum", geodetic_datum), ("map_projection", map_projection)) if v in (None, "")]
1697 |             if missing:
1698 |                 return {"success": False, "error": f"Missing fields for custom CRS: {', '.join(missing)}"}
1699 |             crs_name_final = crs_name
1700 | 
1701 |         # ---------- 2) Complete E/N from Lat/Long (if missing and pyproj is available) ----------
1702 |         proj_used = None
1703 |         try:
1704 |             if (eastings is None or northings is None) and (site_ref_latitude_dd is not None and site_ref_longitude_dd is not None) and crs_mode == "epsg":
1705 |                 try:
1706 |                     from pyproj import Transformer
1707 |                     # Assume lat/long in WGS84; if the EPSG is not WGS84-derived, pyproj handles the conversion
1708 |                     transformer = Transformer.from_crs("EPSG:4326", f"EPSG:{epsg}", always_xy=True)
1709 |                     e, n = transformer.transform(site_ref_longitude_dd, site_ref_latitude_dd)
1710 |                     eastings = e if eastings is None else eastings
1711 |                     northings = n if northings is None else northings
1712 |                     proj_used = f"EPSG:4326->EPSG:{epsg}"
1713 |                 except Exception as _e:
1714 |                     warnings.append(f"Could not convert Lat/Long to E/N: {_e}. Provide eastings/northings manually.")
1715 |         except Exception as _e:
1716 |             warnings.append(f"pyproj not available to compute E/N: {_e}. Provide eastings/northings manually.")
1717 | 
1718 |         # ---------- E/N Validation ----------
1719 |         if eastings is None or northings is None:
1720 |             return {"success": False, "error": "eastings and northings are required (or provide lat/long + EPSG with pyproj installed)"}
1721 | 
1722 |         # ---------- 3) Select context ----------
1723 |         context, ctx_err = select_context()
1724 |         if not context:
1725 |             return {"success": False, "error": ctx_err or "No context found"}
1726 | 
1727 |         # ---------- 4) Detect existing ones and handle overwrite ----------
1728 |         # Inverse: context.HasCoordinateOperation is already handled by ifcopenshell as an attribute
1729 |         existing_ops = list(getattr(context, "HasCoordinateOperation", []) or [])
1730 |         existing_map = None
1731 |         existing_crs = None
1732 |         for op in existing_ops:
1733 |             if op.is_a("IfcMapConversion"):
1734 |                 existing_map = op
1735 |                 existing_crs = getattr(op, "TargetCRS", None)
1736 |                 break
1737 | 
1738 |         if existing_map and not overwrite:
1739 |             return {
1740 |                 "success": True,
1741 |                 "georeferenced": True,
1742 |                 "message": "MapConversion already exists. Use overwrite=True to replace it.",
1743 |                 "context_used": {"identifier": getattr(context, "ContextIdentifier", None), "type": getattr(context, "ContextType", None)},
1744 |                 "map_conversion": {
1745 |                     "eastings": getattr(existing_map, "Eastings", None),
1746 |                     "northings": getattr(existing_map, "Northings", None),
1747 |                     "orthogonal_height": getattr(existing_map, "OrthogonalHeight", None),
1748 |                     "scale": getattr(existing_map, "Scale", None),
1749 |                     "x_axis_abscissa": getattr(existing_map, "XAxisAbscissa", None),
1750 |                     "x_axis_ordinate": getattr(existing_map, "XAxisOrdinate", None),
1751 |                 },
1752 |                 "crs": {
1753 |                     "name": getattr(existing_crs, "Name", None) if existing_crs else None,
1754 |                     "geodetic_datum": getattr(existing_crs, "GeodeticDatum", None) if existing_crs else None,
1755 |                     "map_projection": getattr(existing_crs, "MapProjection", None) if existing_crs else None,
1756 |                     "map_zone": getattr(existing_crs, "MapZone", None) if existing_crs else None,
1757 |                 },
1758 |                 "warnings": warnings,
1759 |                 "actions": actions,
1760 |             }
1761 | 
1762 |         # ---------- 5) Build/Update CRS ----------
1763 |         if existing_crs and overwrite:
1764 |             actions["overwrote"] = True
1765 |             try:
1766 |                 file.remove(existing_crs)
1767 |             except Exception:
1768 |                 warnings.append("Could not remove the existing CRS; a new one will be created anyway.")
1769 | 
1770 |         # If custom, use the provided values; if EPSG, build the name and defaults
1771 |         crs_kwargs = {
1772 |             "Name": crs_name_final,
1773 |             "GeodeticDatum": geodetic_datum,
1774 |             "MapProjection": map_projection,
1775 |         }
1776 |         if map_zone:
1777 |             crs_kwargs["MapZone"] = map_zone
1778 | 
1779 |         crs_entity = file.create_entity("IfcProjectedCRS", **crs_kwargs)
1780 |         actions["created_crs"] = True
1781 | 
1782 |         # ---------- 6) Calculate orientation (optional) ----------
1783 |         # If true_north_azimuth_deg is given as the azimuth from North (model +Y axis) towards East (clockwise),
1784 |         # we can derive an approximate X vector: X = (cos(az+90°), sin(az+90°)).
1785 |         if (x_axis_abscissa is None or x_axis_ordinate is None) and (true_north_azimuth_deg is not None):
1786 |             az = math.radians(true_north_azimuth_deg)
1787 |             # Estimated X vector rotated 90° from North:
1788 |             x_axis_abscissa = math.cos(az + math.pi / 2.0)
1789 |             x_axis_ordinate = math.sin(az + math.pi / 2.0)
1790 | 
1791 |         # Defaults if still missing
1792 |         x_axis_abscissa = 1.0 if x_axis_abscissa is None else float(x_axis_abscissa)
1793 |         x_axis_ordinate = 0.0 if x_axis_ordinate is None else float(x_axis_ordinate)
1794 |         scale = 1.0 if scale is None else float(scale)
1795 |         orthogonal_height = 0.0 if orthogonal_height is None else float(orthogonal_height)
1796 | 
1797 |         # ---------- 7) Build/Update IfcMapConversion ----------
1798 |         if existing_map and overwrite:
1799 |             try:
1800 |                 file.remove(existing_map)
1801 |             except Exception:
1802 |                 warnings.append("Could not remove the existing MapConversion; another one will be created anyway.")
1803 | 
1804 |         map_kwargs = {
1805 |             "SourceCRS": context,
1806 |             "TargetCRS": crs_entity,
1807 |             "Eastings": float(eastings),
1808 |             "Northings": float(northings),
1809 |             "OrthogonalHeight": float(orthogonal_height),
1810 |             "XAxisAbscissa": float(x_axis_abscissa),
1811 |             "XAxisOrdinate": float(x_axis_ordinate),
1812 |             "Scale": float(scale),
1813 |         }
1814 |         map_entity = file.create_entity("IfcMapConversion", **map_kwargs)
1815 |         actions["created_map_conversion"] = True
1816 | 
1817 |         # ---------- 8) (Optional) Update IfcSite ----------
1818 |         try:
1819 |             sites = file.by_type("IfcSite") or []
1820 |             if sites:
1821 |                 site = sites[0]
1822 |                 # If no IFC lists are provided but decimal degrees are, convert them
1823 |                 if site_ref_latitude is None and site_ref_latitude_dd is not None:
1824 |                     site_ref_latitude = dd_to_ifc_dms(site_ref_latitude_dd)
1825 |                 if site_ref_longitude is None and site_ref_longitude_dd is not None:
1826 |                     site_ref_longitude = dd_to_ifc_dms(site_ref_longitude_dd)
1827 | 
1828 |                 changed = False
1829 |                 if site_ref_latitude is not None:
1830 |                     site.RefLatitude = site_ref_latitude
1831 |                     changed = True
1832 |                 if site_ref_longitude is not None:
1833 |                     site.RefLongitude = site_ref_longitude
1834 |                     changed = True
1835 |                 if site_ref_elevation is not None:
1836 |                     site.RefElevation = float(site_ref_elevation)
1837 |                     changed = True
1838 |                 if changed:
1839 |                     actions["updated_site"] = True
1840 |             else:
1841 |                 warnings.append("No IfcSite found; lat/long/elevation were not updated.")
1842 |         except Exception as e:
1843 |             warnings.append(f"Could not update IfcSite: {e}")
1844 | 
1845 |         # ---------- 9) (Optional) Save ----------
1846 |         if write_path and not dry_run:
1847 |             try:
1848 |                 file.write(write_path)
1849 |                 actions["wrote_file"] = True
1850 |             except Exception as e:
1851 |                 warnings.append(f"Could not write IFC to '{write_path}': {e}")
1852 | 
1853 |         # ---------- 10) Response ----------
1854 |         return {
1855 |             "success": True,
1856 |             "georeferenced": True,
1857 |             "crs": {
1858 |                 "name": getattr(crs_entity, "Name", None),
1859 |                 "geodetic_datum": getattr(crs_entity, "GeodeticDatum", None),
1860 |                 "map_projection": getattr(crs_entity, "MapProjection", None),
1861 |                 "map_zone": getattr(crs_entity, "MapZone", None),
1862 |             },
1863 |             "map_conversion": {
1864 |                 "eastings": float(eastings),
1865 |                 "northings": float(northings),
1866 |                 "orthogonal_height": float(orthogonal_height),
1867 |                 "scale": float(scale),
1868 |                 "x_axis_abscissa": float(x_axis_abscissa),
1869 |                 "x_axis_ordinate": float(x_axis_ordinate),
1870 |             },
1871 |             "context_used": {
1872 |                 "identifier": getattr(context, "ContextIdentifier", None),
1873 |                 "type": getattr(context, "ContextType", None),
1874 |             },
1875 |             "site": {
1876 |                 "ref_latitude": site_ref_latitude,
1877 |                 "ref_longitude": site_ref_longitude,
1878 |                 "ref_elevation": site_ref_elevation,
1879 |             },
1880 |             "proj_used": proj_used,
1881 |             "warnings": warnings,
1882 |             "actions": actions,
1883 |         }
1884 |     
1885 |     @staticmethod
1886 |     def generate_ids(
1887 |         title: str,
1888 |         specs: list,
1889 |         description: str = "",
1890 |         author: str = "",
1891 |         ids_version: str = "",
1892 |         purpose: str = "",
1893 |         milestone: str = "",
1894 |         output_path: str = None,
1895 |         date_iso: str = None,
1896 |     ):
1897 |         """
1898 |         Generates an .ids file with robust handling of:
1899 |             - Synonyms: 'name' → 'baseName', 'minValue/maxValue' + inclusivity, 'minOccurs/maxOccurs' → cardinality.
1900 |             - Operators inside 'value' ("> 30", "≤0.45"), in keys (op/target/threshold/limit), and extracted from 'description'
1901 |             (ONLY within requirements; never in applicability).
1902 |             - Correct restriction mapping:
1903 |                 * Numeric → ids.Restriction(base="double" | "integer", options={...})
1904 |                 * Textual (IFCLABEL/TEXT) → ids.Restriction(base="string", options={"pattern": [anchored regexes]})
1905 |             - Automatic dataType inference with hints 
1906 |             (ThermalTransmittance → IFCTHERMALTRANSMITTANCEMEASURE, IsExternal → IFCBOOLEAN, etc.).
1907 |             - PredefinedType remains as an Attribute within APPLICABILITY 
1908 |             (NOT absorbed into Entity.predefinedType).
1909 |         """
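        # Illustrative (hypothetical) spec entry, only to show the synonyms described above; the exact
        # keys accepted are parsed further below. A property requirement such as
        #   {"name": "ThermalTransmittance", "value": "<= 0.45"}
        # would have 'name' mapped to 'baseName' and the operator embedded in 'value' turned into a restriction.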
1910 |         
1911 |         # Libraries / dependencies
1912 |         # -----------------------------------------------------------------------------------------------------------
1913 |         try:
1914 |             from ifctester import ids
1915 |         except Exception as e:
1916 |             return {"ok": False, "error": "Could not import ifctester.ids", "details": str(e)}
1917 | 
1918 |         import os, datetime, re
1919 |         from numbers import Number
1920 | 
1921 |         # Validations
1922 |         # -----------------------------------------------------------------------------------------------------------    
1923 |         if not isinstance(title, str) or not title.strip():
1924 |             return {"ok": False, "error": "Invalid or empty 'title' parameter."}
1925 |         if not isinstance(specs, list) or len(specs) == 0:
1926 |             return {"ok": False, "error": "You must provide at least one specification in 'specs'."}
1927 | 
1928 |         # Utils
1929 |         # -----------------------------------------------------------------------------------------------------------
1930 |         def _norm_card(c):
1931 |             """
1932 |             Usage:
1933 |                 Normalizes the given cardinality value, ensuring it matches one of the valid terms.
1934 |             Inputs:
1935 |                 c (str | None): Cardinality value to normalize. Can be 'required', 'optional', or 'prohibited'.
1936 |             Output:
1937 |                 str | None: Normalized lowercase value if valid, or None if not provided.
1938 |             Exceptions:
1939 |                 ValueError: Raised if the input value does not correspond to a valid cardinality.
1940 |             """
1941 |             if c is None: return None
1942 |             c = str(c).strip().lower()
1943 |             if c in ("required", "optional", "prohibited"): return c
1944 |             raise ValueError("Invalid cardinality: use 'required', 'optional', or 'prohibited'.")
1945 | 
1946 |         def _card_from_occurs(minOccurs, maxOccurs):
1947 |             """
1948 |             Usage:
1949 |                 Derives the cardinality ('required' or 'optional') based on the values of minOccurs and maxOccurs.
1950 |             Inputs:
1951 |                 minOccurs (int | str | None): Minimum number of occurrences. If greater than 0, the field is considered 'required'.
1952 |                 maxOccurs (int | str | None): Maximum number of occurrences. Not used directly, included for completeness.
1953 |             Output:
1954 |                 str | None: Returns 'required' if minOccurs > 0, 'optional' if minOccurs == 0, or None if conversion fails.
1955 |             """
1956 |             try:
1957 |                 if minOccurs is None: return None
1958 |                 m = int(minOccurs)
1959 |                 return "required" if m > 0 else "optional"
1960 |             except Exception:
1961 |                 return None
1962 | 
1963 |         def _is_bool_like(v):
1964 |             """
1965 |             Usage:
1966 |                 Checks whether a given value can be interpreted as a boolean.
1967 |             Inputs:
1968 |                 v (any): Value to evaluate. Can be of any type (bool, str, int, etc.).
1969 |             Output:
1970 |                 bool: Returns True if the value represents a boolean-like token 
1971 |                     (e.g., True, False, "yes", "no", "1", "0", "y", "n", "t", "f"), 
1972 |                     otherwise returns False.
1973 |             """
1974 |             if isinstance(v, bool): return True
1975 |             if v is None: return False
1976 |             s = str(v).strip().lower()
1977 |             return s in ("true", "false", "1", "0", "yes", "no", "y", "n", "t", "f")
1978 | 
1979 |         def _to_bool_token(v):
1980 |             """
1981 |             Usage:
1982 |                 Converts a boolean-like value into a standardized string token ("TRUE" or "FALSE").
1983 |             Inputs:
1984 |                 v (any): Value to convert. Can be a boolean, string, or numeric value representing truthiness.
1985 |             Output:
1986 |                 str | None: Returns "TRUE" or "FALSE" if the value matches a recognized boolean pattern,
1987 |                             or None if it cannot be interpreted as boolean.
1988 |             """        
1989 |             if isinstance(v, bool): return "TRUE" if v else "FALSE"
1990 |             s = str(v).strip().lower()
1991 |             if s in ("true", "1", "yes", "y", "t"): return "TRUE"
1992 |             if s in ("false", "0", "no", "n", "f"): return "FALSE"
1993 |             return None
1994 | 
1995 |         # Hints for *MEASURE* types and by property name
1996 |         MEASURE_HINTS = {
1997 |             "THERMALTRANSMITTANCE": "IFCTHERMALTRANSMITTANCEMEASURE",
1998 |             "UVALUE": "IFCTHERMALTRANSMITTANCEMEASURE",
1999 |             "RATIOMEASURE": "IFCRATIOMEASURE",
2000 |             "AREAMEASURE": "IFCAREAMEASURE",
2001 |             "LENGTHMEASURE": "IFCLENGTHMEASURE",
2002 |             "SOUNDPRESSURELEVELMEASURE": "IFCSOUNDPRESSURELEVELMEASURE",
2003 |         }
2004 |         PROPERTY_DATATYPE_HINTS = {
2005 |             "THERMALTRANSMITTANCE": "IFCTHERMALTRANSMITTANCEMEASURE",
2006 |             "ISEXTERNAL": "IFCBOOLEAN",
2007 |             "ACOUSTICRATING": "IFCLABEL",
2008 |         }
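        # e.g. a property named "ThermalTransmittance" with no explicit dataType is inferred as
        # IFCTHERMALTRANSMITTANCEMEASURE, and "IsExternal" as IFCBOOLEAN (lookups use the upper-cased name).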
2009 | 
2010 |         def _norm_ifc_version(v: str | None) -> str | None:
2011 |             """
2012 |             Usage:
2013 |                 Normalizes the given IFC schema version string to a standardized format.
2014 |             Inputs:
2015 |                 v (str | None): Input version value (e.g., "4", "IFC 4", "2x3", "IFC4.3").
2016 |             Output:
2017 |                 str | None: Returns the normalized IFC version (e.g., "IFC4", "IFC2X3", "IFC4X3"),
2018 |                             or None if the input is empty or invalid.
2019 |             """
2020 |             if not v: return None
2021 |             s = str(v).strip().upper()
2022 |             m = {"4": "IFC4", "IFC 4": "IFC4", "2X3": "IFC2X3", "IFC 2X3": "IFC2X3", "IFC4.3": "IFC4X3"}
2023 |             return m.get(s, s)
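        # e.g. _norm_ifc_version("2x3") -> "IFC2X3"; _norm_ifc_version("ifc4") -> "IFC4"; unknown values pass through upper-cased.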
2024 | 
2025 |         def _strip_ifc_prefix(dt: str | None) -> str | None:
2026 |             """
2027 |             Usage:
2028 |                 Removes leading and trailing spaces from the given string and converts it to uppercase.
2029 |                 Typically used to normalize IFC data type names.
2030 |             Inputs:
2031 |                 dt (str | None): Data type string to normalize (e.g., " ifcreal ").
2032 |             Output:
2033 |                 str | None: Uppercase, trimmed string (e.g., "IFCREAL"), or None if the input is empty or None.
2034 |             """       
2035 |             return dt.strip().upper() if dt else None
2036 | 
2037 |         def _is_number_like(v) -> bool:
2038 |             """
2039 |             Usage:
2040 |                 Checks whether the given value can be interpreted as a numeric value.
2041 |             Inputs:
2042 |                 v (any): Value to evaluate. Can be of any type (int, float, str, etc.).
2043 |             Output:
2044 |                 bool: Returns True if the value represents a number (including numeric strings like "3.5" or "2,7"),
2045 |                     otherwise returns False.
2046 |             """
2047 |             if isinstance(v, Number): return True
2048 |             if v is None: return False
2049 |             try:
2050 |                 float(str(v).strip().replace(",", "."))
2051 |                 return True
2052 |             except Exception:
2053 |                 return False
2054 | 
2055 |         def _guess_numeric_base_from_ifc(dt_upper: str | None) -> str:
2056 |             """
2057 |             Usage:
2058 |                 Determines the numeric base type ('integer' or 'double') from an IFC data type string.
2059 |             Inputs:
2060 |                 dt_upper (str | None): Uppercase IFC data type name (e.g., "IFCINTEGER", "IFCREAL").
2061 |             Output:
2062 |                 str: Returns "integer" if the type contains "INTEGER"; otherwise returns "double".
2063 |                     Defaults to "double" when no input is provided.
2064 |             """
2065 |             if not dt_upper: return "double"
2066 |             if "INTEGER" in dt_upper: return "integer"
2067 |             return "double"
2068 | 
2069 |         # comparators in string ("> 30", "<=0.45", "≥3", "≤ 3")
2070 |         _cmp_regex = re.compile(r"^\s*(>=|=>|≤|<=|≥|>|<)\s*([0-9]+(?:[.,][0-9]+)?)\s*$")
2071 |         _normalize_op = {">=":">=", "=>":">=", "≥":">=", "<=":"<=", "≤":"<="}
2072 |         
2073 |         def _extract_op_target_from_string(s: str):
2074 |             """
2075 |                 Usage:
2076 |                     Extracts a comparison operator and its numeric target value from a string expression.
2077 |                 Inputs:
2078 |                     s (str): String containing a comparison, e.g., "> 30", "<=0.45", "≥3", or "≤ 3".
2079 |                 Output:
2080 |                     tuple(str | None, float | None): Returns a tuple (operator, target_value),
2081 |                                                     where operator is one of ">", ">=", "<", or "<=".
2082 |                                                     Returns (None, None) if the string does not match a valid pattern.
2083 |             """
2084 |             m = _cmp_regex.match(s)
2085 |             if not m: return None, None
2086 |             op, num = m.group(1), m.group(2)
2087 |             op = _normalize_op.get(op, op)
2088 |             try: tgt = float(num.replace(",", "."))
2089 |             except Exception: return None, None
2090 |             return op, tgt
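        # e.g. _extract_op_target_from_string("> 30") -> (">", 30.0); "≤0,45" -> ("<=", 0.45); anything else -> (None, None)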
2091 | 
2092 |         # Operator phrases in English descriptions (the >= / <= patterns must be checked before > / <)
2093 |         _desc_ops = [
2094 |             (r"(greater\s+than\s+or\s+equal\s+to|greater\s+or\s+equal\s+to|equal\s+or\s+greater\s+than|≥)", ">="),
2095 |             (r"(less\s+than\s+or\s+equal\s+to|not\s+greater\s+than|≤|at\s+most|maximum)", "<="),
2096 |             (r"(greater\s+than|more\s+than|>)", ">"),
2097 |             (r"(less\s+than|fewer\s+than|<)", "<"),
2098 |         ]
2099 |         _num_regex = re.compile(r"([0-9]+(?:[.,][0-9]+)?)")
2100 | 
2101 |         
2102 |         def _extract_from_description(desc: str):
2103 |             """
2104 |             Usage:
2105 |                 Extracts a comparison operator and numeric target value from a descriptive text.
2106 |                 Designed to interpret expressions such as "greater than 30" or "less than or equal to 0.45".
2107 |             Inputs:
2108 |                 desc (str): Description text potentially containing a numeric comparison.
2109 |             Output:
2110 |                 tuple(str | None, float | None): Returns a tuple (operator, target_value),
2111 |                                                 where operator is one of ">", ">=", "<", or "<=",
2112 |                                                 and target_value is the numeric value extracted.
2113 |                                                 Returns (None, None) if no valid pattern is found.
2114 |             """
2115 |             if not desc: return None, None
2116 |             text = desc.strip().lower()
2117 |             for pat, op in _desc_ops:
2118 |                 if re.search(pat, text):
2119 |                     m = _num_regex.search(text)
2120 |                     if m:
2121 |                         try:
2122 |                             tgt = float(m.group(1).replace(",", "."))
2123 |                             return op, tgt
2124 |                         except Exception:
2125 |                             pass
2126 |             return None, None
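        # e.g. _extract_from_description("acoustic rating greater than 30") -> (">", 30.0)
        #      _extract_from_description("U-value less than or equal to 0.45") -> ("<=", 0.45)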
2127 | 
2128 |         # anchored regexes for integers (numeric fallback for decimals)
2129 |         def _regex_for_threshold(threshold: float, op: str) -> list[str]:
2130 |             """
2131 |                 Usage:
2132 |                     Builds one or more anchored regular expressions to validate integer values 
2133 |                     against a numeric threshold and comparison operator.
2134 |                     For non-integer thresholds, returns a generic numeric pattern as fallback.
2135 |                 Inputs:
2136 |                     threshold (float): Numeric limit used for the comparison (e.g., 30, 10.5).
2137 |                     op (str): Comparison operator, one of ">", ">=", "<", or "<=".
2138 |                 Output:
2139 |                     list[str]: A list containing one or more anchored regex patterns that match 
2140 |                             integer strings satisfying the given condition.
2141 |                             Returns a generic numeric regex pattern as fallback for decimals.
2142 |             """
2143 |             if abs(threshold - round(threshold)) < 1e-9:
2144 |                 t = int(round(threshold))
2145 |                 def gt_int(n):
2146 |                     if n <= 8:  return rf"^([{n+1}-9]|[1-9]\d|[1-9]\d{{2,}})$"
2147 |                     if n <= 98:
2148 |                         tens, units = divmod(n + 1, 10)
2149 |                         p1 = rf"{tens}[{units}-9]" if units > 0 else rf"{tens}\d"
2150 |                         p2 = rf"[{tens+1}-9]\d" if tens < 9 else ""
2151 |                         parts = [p1, p2, r"[1-9]\d{2,}"]
2152 |                         return "^(" + "|".join([p for p in parts if p]) + ")$"
2153 |                     return r"^[1-9]\d{2,}$"
2154 |                 def ge_int(n):
2155 |                     if n <= 9:  return rf"^([{n}-9]|[1-9]\d|[1-9]\d{{2,}})$"
2156 |                     if n <= 99:
2157 |                         tens, units = divmod(n, 10)
2158 |                         p1 = rf"{tens}[{units}-9]"
2159 |                         p2 = rf"[{tens+1}-9]\d" if tens < 9 else ""
2160 |                         parts = [p1, p2, r"[1-9]\d{2,}"]
2161 |                         return "^(" + "|".join([p for p in parts if p]) + ")$"
2162 |                     return r"^[1-9]\d{2,}$"
2163 |                 def lt_int(n):
2164 |                     if n <= 0: return r"^(?!)$"
2165 |                     if n <= 10: return rf"^[0-9]$" if n == 10 else rf"^[0-{n-1}]$"
2166 |                     tens, units = divmod(n - 1, 10)
2167 |                     if tens == 1: return r"^([0-9]|1[0-9])$" if units == 9 else rf"^([0-9]|1[0-{units}])$"
2168 |                     return rf"^([0-9]|[1-{tens-1}]\d|{tens}[0-{units}])$"
2169 |                 def le_int(n):
2170 |                     if n < 10: return rf"^[0-{n}]$"
2171 |                     tens, units = divmod(n, 10)
2172 |                     if tens == 1:
2173 |                         return r"^([0-9]|1[0-9])$" if units == 9 else rf"^([0-9]|1[0-{units}])$"
2174 |                     parts = [r"[0-9]"]
2175 |                     if tens > 1: parts.append(rf"[1-{tens-1}]\d")
2176 |                     parts.append(rf"{tens}[0-{units}]")
2177 |                     return "^(" + "|".join(parts) + ")$"
2178 |                 if   op == ">":  return [gt_int(t)]
2179 |                 elif op == ">=": return [ge_int(t)]
2180 |                 elif op == "<":  return [lt_int(t)]
2181 |                 elif op == "<=": return [le_int(t)]
2182 |             return [r"^\d+(?:[.,]\d+)?$"]  # fallback for decimals (plain numeric string)
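        # e.g. _regex_for_threshold(30, ">") -> ["^(3[1-9]|[4-9]\d|[1-9]\d{2,})$"] (integer strings 31, 32, ... 99, 100, ...);
        # non-integer thresholds such as _regex_for_threshold(0.45, "<=") fall back to the generic numeric pattern.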
2183 | 
2184 |         def _build_restriction_for_text(op: str | None, target, bounds: dict):
2185 |             """
2186 |             Usage:
2187 |                 Builds a text-based IDS restriction (ids.Restriction) using regex patterns derived 
2188 |                 from numeric thresholds and comparison operators. 
2189 |                 Used when a property has textual dataType (e.g., IFCLABEL) but represents numeric conditions.
2190 |             Inputs:
2191 |                 op (str | None): Comparison operator (">", ">=", "<", "<=") if explicitly provided.
2192 |                 target (any): Target value for the comparison. Can be numeric or string.
2193 |                 bounds (dict): Dictionary of limit values such as 
2194 |                             {"minInclusive": ..., "maxExclusive": ..., "maxInclusive": ...}.
2195 |             Output:
2196 |                 ids.Restriction | None: Returns an ids.Restriction object with regex patterns 
2197 |                                         for matching the specified numeric range in string form,
2198 |                                         or None if no valid pattern can be built.
2199 |             """
2200 |             if op and target is not None and _is_number_like(target):
2201 |                 return ids.Restriction(base="string", options={"pattern": _regex_for_threshold(float(target), op)})
2202 |             patterns = []
2203 |             if bounds.get("minExclusive") is not None:
2204 |                 patterns += _regex_for_threshold(float(bounds["minExclusive"]), ">")
2205 |             if bounds.get("minInclusive") is not None:
2206 |                 patterns += _regex_for_threshold(float(bounds["minInclusive"]), ">=")
2207 |             if bounds.get("maxExclusive") is not None:
2208 |                 patterns += _regex_for_threshold(float(bounds["maxExclusive"]), "<")
2209 |             if bounds.get("maxInclusive") is not None:
2210 |                 patterns += _regex_for_threshold(float(bounds["maxInclusive"]), "<=")
2211 |             return ids.Restriction(base="string", options={"pattern": patterns}) if patterns else None
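        # Hedged usage sketch (assumes _is_number_like, defined earlier, accepts "30"):
        #   _build_restriction_for_text(">=", "30", {})
        # would return ids.Restriction(base="string",
        #                              options={"pattern": _regex_for_threshold(30.0, ">=")}),
        # so a text-typed property (e.g. IFCLABEL) can still be validated against ">= 30".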
2212 | 
2213 |         def _build_numeric_restriction(dt_upper: str | None, op: str | None, target, bounds: dict):
2214 |             """
2215 |             Usage:
2216 |                 Builds a numeric IDS restriction (ids.Restriction) from a data type, comparison operator, 
2217 |                 target value, and optional numeric bounds.
2218 |             Inputs:
2219 |                 dt_upper (str | None): Uppercase IFC data type name (e.g., "IFCREAL", "IFCINTEGER").
2220 |                 op (str | None): Comparison operator (">", ">=", "<", "<=") if provided.
2221 |                 target (any): Target value for the comparison. Converted to float when applicable.
2222 |                 bounds (dict): Dictionary containing optional boundary values such as
2223 |                             {"minInclusive": ..., "minExclusive": ..., "maxInclusive": ..., "maxExclusive": ...}.
2224 |             Output:
2225 |                 ids.Restriction | None: Returns an ids.Restriction object with the appropriate numeric limits,
2226 |                                         or None if no valid restriction can be created.
2227 |             """
2228 |             if not (op or any(v is not None for v in bounds.values())): return None
2229 |             base_num = _guess_numeric_base_from_ifc(dt_upper)
2230 |             opts = {}
2231 |             if op and target is not None:
2232 |                 v = float(str(target).replace(",", "."))
2233 |                 if   op == ">":  opts["minExclusive"] = v
2234 |                 elif op == ">=": opts["minInclusive"] = v
2235 |                 elif op == "<":  opts["maxExclusive"] = v
2236 |                 elif op == "<=": opts["maxInclusive"] = v
2237 |             for k in ("minInclusive","maxInclusive","minExclusive","maxExclusive"):
2238 |                 if bounds.get(k) is not None:
2239 |                     opts[k] = float(str(bounds[k]).replace(",", "."))
2240 |             if not opts: return None
2241 |             return ids.Restriction(base=base_num, options=opts)
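        # Hedged usage sketch: with dt_upper="IFCREAL", op=">=", target="0,30" and empty bounds,
        # the comma decimal is normalised to 0.3 and the call returns approximately
        #   ids.Restriction(base=<base from _guess_numeric_base_from_ifc>, options={"minInclusive": 0.3})
        # The exact base string depends on _guess_numeric_base_from_ifc, defined earlier in this file.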
2242 | 
2243 |         def _infer_ids_datatype(pset: str | None, baseName: str | None,
2244 |                                 provided_dt: str | None, value, op: str | None, bounds: dict) -> str:
2245 |             """
2246 |             Usage:
2247 |                 Infers the appropriate IFC data type (e.g., IFCREAL, IFCINTEGER, IFCBOOLEAN, IFCLABEL)
2248 |                 for a given property based on its name, provided data type, value, and restrictions.
2249 |             Inputs:
2250 |                 pset (str | None): Name of the property set to which the property belongs.
2251 |                 baseName (str | None): Base name of the property (e.g., "ThermalTransmittance", "IsExternal").
2252 |                 provided_dt (str | None): Data type explicitly provided in the input, if any.
2253 |                 value (any): Property value or an ids.Restriction object.
2254 |                 op (str | None): Comparison operator (">", ">=", "<", "<=") if defined.
2255 |                 bounds (dict): Dictionary containing limit values such as
2256 |                             {"minInclusive": ..., "minExclusive": ..., "maxInclusive": ..., "maxExclusive": ...}.
2257 |             Output:
2258 |                 str: Returns the inferred IFC data type string, such as "IFCREAL", "IFCINTEGER", 
2259 |                     "IFCBOOLEAN", or "IFCLABEL".
2260 |             """
2261 |             # if a dataType is provided, normalize and promote it if applicable
2262 |             if provided_dt:
2263 |                 dtU = _strip_ifc_prefix(provided_dt)
2264 |                 if baseName and dtU in ("IFCREAL", "IFCNUMBER", "NUMBER", "REAL"):
2265 |                     hint = PROPERTY_DATATYPE_HINTS.get(str(baseName).strip().upper())
2266 |                     if hint: return hint
2267 |                 if dtU in MEASURE_HINTS: return MEASURE_HINTS[dtU]
2268 |                 return dtU
2269 |             # hints by name
2270 |             if baseName:
2271 |                 hint = PROPERTY_DATATYPE_HINTS.get(str(baseName).strip().upper())
2272 |                 if hint: return hint
2273 |             # value = Restriction
2274 |             if isinstance(value, ids.Restriction):
2275 |                 base = getattr(value, "base", "").lower()
2276 |                 if base in ("integer",): return "IFCINTEGER"
2277 |                 if base in ("double","number","real","float"): return "IFCREAL"
2278 |                 return "IFCLABEL"
2279 |             # if op/bounds -> numeric
2280 |             if op or any(v is not None for v in bounds.values()):
2281 |                 return "IFCREAL"
2282 |             # booleans
2283 |             if _is_bool_like(value): return "IFCBOOLEAN"
2284 |             # literal numbers
2285 |             if _is_number_like(value):
2286 |                 try:
2287 |                     iv = int(str(value))
2288 |                     if float(str(value)) == float(iv): return "IFCINTEGER"
2289 |                 except Exception:
2290 |                     pass
2291 |                 return "IFCREAL"
2292 |             # text
2293 |             return "IFCLABEL"
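        # Hedged examples (they assume the property name is NOT listed in PROPERTY_DATATYPE_HINTS
        # and that the _is_bool_like/_is_number_like helpers behave as their names suggest):
        #   _infer_ids_datatype(None, "SomeCustomProp", None, "42", None, {})       -> "IFCINTEGER"
        #   _infer_ids_datatype(None, "SomeCustomProp", None, "Concrete", None, {}) -> "IFCLABEL"
        #   _infer_ids_datatype(None, "SomeCustomProp", None, None, ">", {})        -> "IFCREAL"
        # "SomeCustomProp" is a hypothetical name used only for illustration.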
2294 | 
2295 |         # (optional) Absorption of PredefinedType into Entity.predefinedType — DISABLED
2296 |         def _absorb_predefined_type(applicability_list: list):
2297 |             """
2298 |             Usage:
2299 |                 Transfers the value of a PREDEFINEDTYPE attribute into the corresponding Entity's 
2300 |                 predefinedType field within the applicability list. 
2301 |                 This operation effectively absorbs the PREDEFINEDTYPE entry into the Entity definition.
2302 |             Inputs:
2303 |                 applicability_list (list): List of facet dictionaries containing 'Entity' and 'Attribute' definitions.
2304 |             Output:
2305 |                 list: The updated applicability list where the PREDEFINEDTYPE value has been moved 
2306 |                     to the Entity's 'predefinedType' field, if applicable. 
2307 |                     Returns the original list if no valid Entity or PREDEFINEDTYPE attribute is found.
2308 |             """
2309 |             if not isinstance(applicability_list, list): return applicability_list
2310 |             idx = next((i for i,f in enumerate(applicability_list) if (f.get("type") == "Entity")), None)
2311 |             if idx is None: return applicability_list
2312 |             for i,f in enumerate(list(applicability_list)):
2313 |                 if f.get("type") == "Attribute" and str(f.get("name","")).strip().upper() == "PREDEFINEDTYPE":
2314 |                     val = f.get("value")
2315 |                     if val not in (None, ""):
2316 |                         applicability_list[idx]["predefinedType"] = val
2317 |                         applicability_list.pop(i)
2318 |                         break
2319 |             return applicability_list
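        # Example of what this currently unused helper would do if re-enabled:
        #   [{"type": "Entity", "name": "IFCWALL"},
        #    {"type": "Attribute", "name": "PredefinedType", "value": "SOLIDWALL"}]
        # becomes
        #   [{"type": "Entity", "name": "IFCWALL", "predefinedType": "SOLIDWALL"}]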
2320 | 
2321 |         # IDS Root 
2322 |         # -----------------------------------------------------------------------------------------------------------
2323 |         try:
2324 |             ids_root = ids.Ids(
2325 |                 title=(title or "Untitled"),
2326 |                 description=(description or None),
2327 |                 author=(author or None),
2328 |                 version=(str(ids_version) if ids_version else None),
2329 |                 purpose=(purpose or None),
2330 |                 milestone=(milestone or None),
2331 |                 date=(date_iso or datetime.date.today().isoformat()),
2332 |             )
2333 |             try: ids_root.title = (title or "Untitled")
2334 |             except Exception: pass
2335 |             try: ids_root.info.title = (title or "Untitled")
2336 |             except Exception: pass
2337 |         except Exception as e:
2338 |             return {"ok": False, "error": "Could not initialize the IDS", "details": str(e)}
2339 | 
2340 |         # Facets (with context)
2341 |         # -----------------------------------------------------------------------------------------------------------
2342 |         def _facet_from_dict(f, spec_desc: str | None, context: str):
2343 |             """
2344 |             Usage:
2345 |                 Builds an IDS facet object (e.g., Entity, Attribute, Property, Material, Classification, or PartOf)
2346 |                 from a dictionary definition. Handles data normalization, type inference, comparison extraction,
2347 |                 and restriction creation for both applicability and requirements contexts.
2348 |             Inputs:
2349 |                 f (dict): Dictionary describing a facet, including its type and relevant attributes.
2350 |                 spec_desc (str | None): Optional specification description used to infer operators or targets
2351 |                                         when not explicitly provided.
2352 |                 context (str): Indicates the facet context, either 'applicability' or 'requirements'.
2353 |                             Only in 'requirements' can operator/target be extracted from the description.
2354 |             Output:
2355 |                 ids.Entity | ids.Attribute | ids.Property | ids.Material | ids.Classification | ids.PartOf:
2356 |                     Returns the corresponding ids.* object based on the facet type.
2357 |             Exceptions:
2358 |                 ValueError: Raised if the facet type is unsupported or required fields are missing
2359 |                             (e.g., Property without propertySet or baseName, Attribute without name).
2360 |             """
2361 |
2362 |             t = (f.get("type") or "").strip()
2363 | 
2364 |             if t == "Entity":
2365 |                 ent_name = f.get("name", "") or f.get("entity", "") or f.get("Name", "")
2366 |                 ent_name = ent_name.strip()
2367 |                 if ent_name.lower().startswith("ifc") and not ent_name.isupper():
2368 |                     ent_name = ent_name.upper()  # 'IfcWall' -> 'IFCWALL'
2369 |                 return ids.Entity(
2370 |                     name=ent_name,
2371 |                     predefinedType=f.get("predefinedType", ""),  # we keep it separate (not absorbed)
2372 |                     instructions=f.get("instructions", ""),
2373 |                 )
2374 | 
2375 |             elif t == "Attribute":
2376 |                 name = f.get("name") or f.get("Name")
2377 |                 if not name: raise ValueError("Attribute requires 'name'.")
2378 |                 kwargs = dict(name=name)
2379 |                 if f.get("value") not in (None, ""):
2380 |                     val = f["value"]
2381 |                     if _is_bool_like(val):
2382 |                         tok = _to_bool_token(val)
2383 |                         kwargs["value"] = tok if tok else val
2384 |                     else:
2385 |                         kwargs["value"] = val
2386 |                 # Cardinality from occurs
2387 |                 card = _card_from_occurs(f.get("minOccurs"), f.get("maxOccurs"))
2388 |                 if card: kwargs["cardinality"] = card
2389 |                 if f.get("cardinality"): kwargs["cardinality"] = _norm_card(f.get("cardinality"))
2390 |                 if f.get("instructions"): kwargs["instructions"] = f["instructions"]
2391 |                 return ids.Attribute(**kwargs)
2392 | 
2393 |             elif t == "Property":
2394 |                 pset = f.get("propertySet") or f.get("pset") or f.get("psetName")
2395 |                 base = f.get("baseName") or f.get("name") or f.get("Name")
2396 |                 if not pset or not base: raise ValueError("Property requires 'propertySet' and 'baseName'.")
2397 | 
2398 |                 val_in = f.get("value", None)
2399 |                 bounds = {
2400 |                     "minInclusive": f.get("minInclusive"),
2401 |                     "maxInclusive": f.get("maxInclusive"),
2402 |                     "minExclusive": f.get("minExclusive"),
2403 |                     "maxExclusive": f.get("maxExclusive"),
2404 |                 }
2405 |                 # minValue/maxValue + inclusivity
2406 |                 if f.get("minValue") is not None:
2407 |                     if bool(f.get("minInclusive")): bounds["minInclusive"] = f.get("minValue")
2408 |                     else:                            bounds["minExclusive"] = f.get("minValue")
2409 |                 if f.get("maxValue") is not None:
2410 |                     if bool(f.get("maxInclusive")): bounds["maxInclusive"] = f.get("maxValue")
2411 |                     else:                            bounds["maxExclusive"] = f.get("maxValue")
2412 | 
2413 |                 if isinstance(val_in, dict):
2414 |                     for k in ("minInclusive","maxInclusive","minExclusive","maxExclusive"):
2415 |                         if k in val_in and bounds.get(k) is None:
2416 |                             bounds[k] = val_in[k]
2417 | 
2418 |                 # explicit operator
2419 |                 op = f.get("op") or f.get("operator") or f.get("comparison") or f.get("cmp") or f.get("relation")
2420 |                 target = f.get("target") or f.get("threshold") or f.get("limit")
2421 | 
2422 |                 # operator in 'value' string ("> 30")
2423 |                 if target is None and isinstance(val_in, str):
2424 |                     _op2, _tg2 = _extract_op_target_from_string(val_in)
2425 |                     if _op2 and _tg2 is not None:
2426 |                         op, target, val_in = _op2, _tg2, None
2427 | 
2428 |                 # ONLY IN REQUIREMENTS: extract from description
2429 |                 if context == "requirements" and (not op and all(v is None for v in bounds.values()) and target is None and spec_desc):
2430 |                     _op3, _tg3 = _extract_from_description(spec_desc)
2431 |                     if _op3 and _tg3 is not None:
2432 |                         op, target = _op3, _tg3
2433 | 
2434 |                 # cardinality from occurs
2435 |                 card = _card_from_occurs(f.get("minOccurs"), f.get("maxOccurs"))
2436 | 
2437 |                 dt = _infer_ids_datatype(pset, base, f.get("dataType"), val_in, op, bounds)
2438 | 
2439 |                 # boolean normalization
2440 |                 if _is_bool_like(val_in):
2441 |                     tok = _to_bool_token(val_in)
2442 |                     if tok is not None:
2443 |                         val_in = tok
2444 |                         if not dt: dt = "IFCBOOLEAN"
2445 | 
2446 |                 # Restriction when applicable
2447 |                 restriction_obj = None
2448 |                 if op or any(v is not None for v in bounds.values()):
2449 |                     if dt in ("IFCLABEL","IFCTEXT"):
2450 |                         restriction_obj = _build_restriction_for_text(op, target if target is not None else val_in, bounds)
2451 |                     else:
2452 |                         restriction_obj = _build_numeric_restriction(dt, op, target if target is not None else val_in, bounds)
2453 |                 if isinstance(val_in, ids.Restriction):
2454 |                     restriction_obj = val_in
2455 | 
2456 |                 kwargs = dict(propertySet=pset, baseName=base)
2457 |                 if restriction_obj is not None:
2458 |                     kwargs["value"] = restriction_obj
2459 |                     if dt: kwargs["dataType"] = dt
2460 |                 else:
2461 |                     if val_in not in (None, ""): kwargs["value"] = val_in
2462 |                     if dt: kwargs["dataType"] = dt
2463 | 
2464 |                 if f.get("uri"): kwargs["uri"] = f["uri"]
2465 |                 if f.get("instructions"): kwargs["instructions"] = f["instructions"]
2466 |                 if card: kwargs["cardinality"] = card
2467 |                 if f.get("cardinality"): kwargs["cardinality"] = _norm_card(f.get("cardinality"))
2468 |                 if (op or any(v is not None for v in bounds.values())) and "cardinality" not in kwargs:
2469 |                     kwargs["cardinality"] = "required"
2470 | 
2471 |                 return ids.Property(**kwargs)
2472 | 
2473 |             elif t == "Material":
2474 |                 kwargs = {}
2475 |                 if f.get("value"): kwargs["value"] = f["value"]
2476 |                 if f.get("uri"): kwargs["uri"] = f["uri"]
2477 |                 if f.get("cardinality"): kwargs["cardinality"] = _norm_card(f["cardinality"])
2478 |                 if f.get("instructions"): kwargs["instructions"] = f["instructions"]
2479 |                 return ids.Material(**kwargs)
2480 | 
2481 |             elif t == "Classification":
2482 |                 return ids.Classification(
2483 |                     value=f.get("value", ""),
2484 |                     system=f.get("system", ""),
2485 |                     uri=f.get("uri", ""),
2486 |                     cardinality=_norm_card(f.get("cardinality")),
2487 |                     instructions=f.get("instructions", ""),
2488 |                 )
2489 | 
2490 |             elif t == "PartOf":
2491 |                 return ids.PartOf(
2492 |                     name=f.get("name", ""),
2493 |                     predefinedType=f.get("predefinedType", ""),
2494 |                     relation=f.get("relation", ""),
2495 |                     cardinality=_norm_card(f.get("cardinality")),
2496 |                     instructions=f.get("instructions", ""),
2497 |                 )
2498 | 
2499 |             else:
2500 |                 raise ValueError(f"Unsupported or empty facet type: '{t}'.")
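        # Hedged examples of the facet mapping above:
        #   {"type": "Entity", "name": "IfcWall"}
        #     -> ids.Entity(name="IFCWALL", predefinedType="", instructions="")
        # A Property dict whose value string carries an operator (e.g. "value": "> 30") is converted
        # into a Restriction by the helpers above and, unless a cardinality is given explicitly, is
        # forced to cardinality="required".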
2501 | 
2502 |         # Construction
2503 |         # -----------------------------------------------------------------------------------------------------------
2504 |         total_specs = total_app = total_req = 0
2505 |         try:
2506 |             for s in specs:
2507 |                 if not isinstance(s, dict):
2508 |                     raise ValueError("Each 'spec' must be a dict.")
2509 |                 applicability = s.get("applicability", [])
2510 |                 requirements  = s.get("requirements", [])
2511 |                 if not isinstance(applicability, list) or not isinstance(requirements, list):
2512 |                     raise ValueError("'applicability' and 'requirements' must be lists.")
2513 | 
2514 |                 # Do NOT absorb PredefinedType (it remains as an Attribute in applicability)
2515 |                 # applicability = _absorb_predefined_type(applicability)
2516 | 
2517 |                 spec_obj = ids.Specification()
2518 |                 if s.get("name"):
2519 |                     try: spec_obj.name = s["name"]
2520 |                     except Exception: pass
2521 |                 if s.get("description"):
2522 |                     try: spec_obj.description = s["description"]
2523 |                     except Exception: pass
2524 | 
2525 |                 # ifcVersion: use the provided one; if not, default to IFC4
2526 |                 canon = _norm_ifc_version(s.get("ifcVersion") or "IFC4")
2527 |                 try: spec_obj.ifcVersion = canon
2528 |                 except Exception: pass
2529 | 
2530 |                 for f in applicability:
2531 |                     facet = _facet_from_dict(f, s.get("description"), context="applicability")
2532 |                     spec_obj.applicability.append(facet); total_app += 1
2533 | 
2534 |                 for f in requirements:
2535 |                     facet = _facet_from_dict(f, s.get("description"), context="requirements")
2536 |                     spec_obj.requirements.append(facet); total_req += 1
2537 | 
2538 |                 ids_root.specifications.append(spec_obj); total_specs += 1
2539 | 
2540 |         except Exception as e:
2541 |             return {"ok": False, "error": "Error while building the IDS specifications", "details": str(e)}
2542 | 
2543 |         if total_specs == 0:
2544 |             return {"ok": False, "error": "No Specification was created. Check 'specs'."}
2545 | 
2546 |         # Save the IDS file
2547 |         # -----------------------------------------------------------------------------------------------------------
2548 |         try:
2549 |             if not output_path:
2550 |                 safe_title = "".join(c for c in title if c.isalnum() or c in (" ","-","_")).rstrip() or "ids"
2551 |                 today = (date_iso if date_iso else datetime.date.today().isoformat())
2552 |                 output_path = os.path.abspath(f"{safe_title}_{today}.ids")
2553 |             os.makedirs(os.path.dirname(output_path) or ".", exist_ok=True)
2554 |             ids_root.to_xml(output_path)
2555 |         except Exception as e:
2556 |             return {"ok": False, "error": "Could not save the IDS file", "details": str(e)}
2557 | 
2558 |         return {
2559 |             "ok": True,
2560 |             "output_path": output_path,
2561 |             "message": f"IDS '{title}' generated. Specs: {total_specs}, facets: {total_app} appl. / {total_req} req."
2562 |         }
2563 |     
2564 |     #endregion
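    # Illustrative shape of the 'specs' payload consumed above (field names follow the parsing
    # logic in _facet_from_dict; the concrete values are only an example):
    # specs = [{
    #     "name": "External walls must declare IsExternal",
    #     "ifcVersion": "IFC4",
    #     "applicability": [{"type": "Entity", "name": "IfcWall"}],
    #     "requirements": [{"type": "Property", "propertySet": "Pset_WallCommon",
    #                       "baseName": "IsExternal", "value": "TRUE",
    #                       "dataType": "IFCBOOLEAN", "cardinality": "required"}],
    # }]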
2565 | 
2566 | 
2567 | def extract_quantities(entity, blender_name=None):
2568 |     """
2569 |     Extract quantity information from an IFC entity.
2570 |     
2571 |     Parameters:
2572 |         entity: IFC entity object
2573 |         blender_name: Optional Blender object name
2574 |     
2575 |     Returns:
2576 |         Dictionary with element info and quantities
2577 |     """
2578 |     try:
2579 |         # Get all property sets
2580 |         psets = ifcopenshell.util.element.get_psets(entity)
2581 |         
2582 |         # Basic element info
2583 |         element_data = {
2584 |             "id": entity.GlobalId if hasattr(entity, "GlobalId") else f"Entity_{entity.id()}",
2585 |             "name": entity.Name if hasattr(entity, "Name") else None,
2586 |             "type": entity.is_a(),
2587 |             "blender_name": blender_name,
2588 |             "quantities": {}
2589 |         }
2590 |         
2591 |         # Look for quantity information in different property sets
2592 |         quantity_sources = ["BaseQuantities", "ArchiCADQuantities", "Qto_WallBaseQuantities", 
2593 |                            "Qto_SlabBaseQuantities", "Qto_BeamBaseQuantities", "Qto_ColumnBaseQuantities"]
2594 |         
2595 |         # Extract quantities from property sets - keep original names
2596 |         for pset_name in quantity_sources:
2597 |             if pset_name in psets:
2598 |                 pset_data = psets[pset_name]
2599 |                 for prop_name, prop_value in pset_data.items():
2600 |                     # Only include numeric values and skip the 'id' field
2601 |                     if isinstance(prop_value, (int, float)) and prop_name != 'id':
2602 |                         element_data["quantities"][prop_name] = prop_value
2603 |             
2604 |         return element_data if element_data["quantities"] else None
2605 |         
2606 |     except Exception:
2607 |         return None  # best-effort: skip elements whose psets or quantities cannot be read
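    # Illustrative return value (example figures only):
    #   {"id": "2O2Fr$t4X7Zf8NOew3FL9r", "name": "Wall-001", "type": "IfcWall",
    #    "blender_name": "IfcWall/Wall-001",
    #    "quantities": {"Length": 4.0, "NetSideArea": 11.2, "NetVolume": 2.3}}
    # None is returned when no numeric quantities are found or the psets cannot be read.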
2608 | 
2609 | 
2610 | # Blender UI Panel
2611 | class BLENDERMCP_PT_Panel(bpy.types.Panel):
2612 |     bl_label = "Bonsai MCP"
2613 |     bl_idname = "BLENDERMCP_PT_Panel"
2614 |     bl_space_type = 'VIEW_3D'
2615 |     bl_region_type = 'UI'
2616 |     bl_category = 'Bonsai MCP'
2617 |     
2618 |     def draw(self, context):
2619 |         layout = self.layout
2620 |         scene = context.scene
2621 |         
2622 |         layout.prop(scene, "blendermcp_port")
2623 |         
2624 |         if not scene.blendermcp_server_running:
2625 |             layout.operator("blendermcp.start_server", text="Start MCP Server")
2626 |         else:
2627 |             layout.operator("blendermcp.stop_server", text="Stop MCP Server")
2628 |             layout.label(text=f"Running on port {scene.blendermcp_port}")
2629 | 
2630 | 
2631 | # Operator to start the server
2632 | class BLENDERMCP_OT_StartServer(bpy.types.Operator):
2633 |     bl_idname = "blendermcp.start_server"
2634 |     bl_label = "Connect to Claude"
2635 |     bl_description = "Start the BlenderMCP server to connect with Claude"
2636 |     
2637 |     def execute(self, context):
2638 |         scene = context.scene
2639 |         
2640 |         # Create a new server instance
2641 |         if not hasattr(bpy.types, "blendermcp_server") or not bpy.types.blendermcp_server:
2642 |             bpy.types.blendermcp_server = BlenderMCPServer(port=scene.blendermcp_port)
2643 |         
2644 |         # Start the server
2645 |         bpy.types.blendermcp_server.start()
2646 |         scene.blendermcp_server_running = True
2647 |         
2648 |         return {'FINISHED'}
2649 | 
2650 | # Operator to stop the server
2651 | class BLENDERMCP_OT_StopServer(bpy.types.Operator):
2652 |     bl_idname = "blendermcp.stop_server"
2653 |     bl_label = "Stop the connection to Claude"
2654 |     bl_description = "Stop the connection to Claude"
2655 |     
2656 |     def execute(self, context):
2657 |         scene = context.scene
2658 |         
2659 |         # Stop the server if it exists
2660 |         if hasattr(bpy.types, "blendermcp_server") and bpy.types.blendermcp_server:
2661 |             bpy.types.blendermcp_server.stop()
2662 |             del bpy.types.blendermcp_server
2663 |         
2664 |         scene.blendermcp_server_running = False
2665 |         
2666 |         return {'FINISHED'}
2667 | 
2668 | # Registration functions
2669 | def register():
2670 |     bpy.types.Scene.blendermcp_port = IntProperty(
2671 |         name="Port",
2672 |         description="Port for the BlenderMCP server",
2673 |         default=9876,
2674 |         min=1024,
2675 |         max=65535
2676 |     )
2677 |     
2678 |     bpy.types.Scene.blendermcp_server_running = bpy.props.BoolProperty(
2679 |         name="Server Running",
2680 |         default=False
2681 |     )
2682 |     
2683 |     
2684 |     bpy.utils.register_class(BLENDERMCP_PT_Panel)
2685 |     bpy.utils.register_class(BLENDERMCP_OT_StartServer)
2686 |     bpy.utils.register_class(BLENDERMCP_OT_StopServer)
2687 |     
2688 |     print("BlenderMCP addon registered")
2689 | 
2690 | def unregister():
2691 |     # Stop the server if it's running
2692 |     if hasattr(bpy.types, "blendermcp_server") and bpy.types.blendermcp_server:
2693 |         bpy.types.blendermcp_server.stop()
2694 |         del bpy.types.blendermcp_server
2695 |     
2696 |     bpy.utils.unregister_class(BLENDERMCP_PT_Panel)
2697 |     bpy.utils.unregister_class(BLENDERMCP_OT_StartServer)
2698 |     bpy.utils.unregister_class(BLENDERMCP_OT_StopServer)
2699 |     
2700 |     del bpy.types.Scene.blendermcp_port
2701 |     del bpy.types.Scene.blendermcp_server_running
2702 | 
2703 |     print("BlenderMCP addon unregistered")
2704 | 
2705 | if __name__ == "__main__":
2706 |     register()
2707 | 
```