Coverage for  / home / runner / work / netbox-data-import-plugin / netbox-data-import-plugin / netbox-data-import-plugin / netbox_data_import / engine.py: 98%

432 statements  

« prev     ^ index     » next       coverage.py v7.13.5, created at 2026-04-01 12:13 +0000

1# SPDX-License-Identifier: Apache-2.0 

2# Copyright (C) 2025 Marcin Zieba <marcinpsk@gmail.com> 

3"""Import engine: parse Excel files and run (or preview) imports into NetBox. 

4 

5Public API 

6---------- 

7parse_file(file_obj, profile) -> list[dict] 

8run_import(rows, profile, context, dry_run=True) -> ImportResult 

9""" 

10 

11from __future__ import annotations 

12 

13import logging 

14import re 

15from dataclasses import dataclass, field 

16from typing import Literal 

17from io import BytesIO 

18 

19from django.utils.text import slugify 

20import openpyxl 

21 

22from .models import ImportProfile 

23 

24logger = logging.getLogger(__name__) 

25 

26 

class ParseError(Exception):
    """Raised when the source file cannot be parsed.

    Covers unreadable workbooks, a missing worksheet, and invalid regex
    patterns in column transform rules.
    """

29 

30 

31# --------------------------------------------------------------------------- 

32# Result dataclasses 

33# --------------------------------------------------------------------------- 

34 

35 

@dataclass
class RowResult:
    """Outcome of processing a single source row.

    One instance records the action taken (or previewed) for one NetBox
    object, plus enough context for the preview/result templates.
    """

    row_number: int
    source_id: str
    name: str
    action: Literal["create", "update", "skip", "error", "ignore"]
    object_type: Literal["rack", "device", "manufacturer", "device_type", ""]
    detail: str
    netbox_url: str = ""
    rack_name: str = ""
    # Contextual metadata used by the preview template for inline quick-fix actions
    extra_data: dict = field(default_factory=dict)

    def to_dict(self) -> dict:
        """Serialize this result to a plain dict."""
        return dict(self.__dict__)

    @classmethod
    def from_dict(cls, d: dict) -> "RowResult":
        """Deserialize a RowResult from a plain dict."""
        payload = {**d}
        if "extra_data" not in payload:
            payload["extra_data"] = {}
        return cls(**payload)

61 

62 

@dataclass
class ImportResult:
    """Aggregates all RowResult objects and summary counts for an import run."""

    rows: list[RowResult] = field(default_factory=list)
    counts: dict = field(default_factory=dict)
    has_errors: bool = False

    def _recompute_counts(self):
        """Rebuild the summary counters and the error flag from self.rows."""
        tally: dict = {}
        for entry in self.rows:
            if entry.action == "error":
                bucket = "errors"
            elif entry.action == "skip":
                bucket = "skipped"
            elif entry.action == "ignore":
                bucket = "ignored"
            elif entry.action in ("create", "update"):
                # e.g. "racks_created", "devices_updated"
                bucket = f"{entry.object_type}s_{entry.action}d"
            else:
                continue
            tally[bucket] = tally.get(bucket, 0) + 1
        self.counts = tally
        self.has_errors = tally.get("errors", 0) > 0

    def to_session_dict(self) -> dict:
        """Serialize this result to a session-safe dict."""
        # Store parsed rows so the execute step can re-use them
        return {
            "rows": [entry.to_dict() for entry in self.rows],
            "counts": self.counts,
            "has_errors": self.has_errors,
        }

    @classmethod
    def from_session_dict(cls, d: dict) -> "ImportResult":
        """Deserialize an ImportResult from a session-stored dict."""
        return cls(
            rows=[RowResult.from_dict(r) for r in d.get("rows", [])],
            counts=d.get("counts", {}),
            has_errors=d.get("has_errors", False),
        )

    @property
    def rack_groups(self) -> dict:
        """Return rows grouped by rack name for the rack view template."""
        groups: dict = {}
        for entry in self.rows:
            if entry.object_type == "rack":
                groups.setdefault(entry.name, {"rack_row": None, "devices": []})["rack_row"] = entry
            elif entry.object_type == "device":
                bucket = entry.rack_name or "(No rack)"
                groups.setdefault(bucket, {"rack_row": None, "devices": []})["devices"].append(entry)
        return groups

120 

121 

122# --------------------------------------------------------------------------- 

123# Parsing 

124# --------------------------------------------------------------------------- 

125 

126 

127def _build_header_index_map(ws) -> dict[str, int]: 

128 """Build a header-name → column-index map from the first worksheet row. 

129 

130 First occurrence wins when duplicate headers exist. 

131 """ 

132 raw_headers: dict[str, int] = {} 

133 for idx, cell in enumerate(ws[1]): 

134 if cell.value is not None: 

135 header = str(cell.value).strip() 

136 if header not in raw_headers: 

137 raw_headers[header] = idx 

138 return raw_headers 

139 

140 

141def _apply_transform_rules(row_dict: dict, raw_row, raw_headers: dict, transform_rules) -> None: 

142 """Apply column transform rules in-place to *row_dict*.""" 

143 for rule in transform_rules: 

144 idx = raw_headers.get(rule.source_column) 

145 if idx is None: 

146 continue 

147 raw_value = raw_row[idx] if idx < len(raw_row) else None 

148 if raw_value is None: 

149 continue 

150 raw_str = str(raw_value).strip() 

151 try: 

152 m = re.fullmatch(rule.pattern, raw_str) 

153 except re.error as exc: 

154 raise ParseError( 

155 f"Invalid regex pattern '{rule.pattern}' in transform rule for column " 

156 f"'{rule.source_column}' (value: {raw_str!r}): {exc}" 

157 ) from exc 

158 if m and rule.group_1_target and len(m.groups()) >= 1: 

159 row_dict[rule.group_1_target] = m.group(1) 

160 if m and rule.group_2_target and len(m.groups()) >= 2: 

161 row_dict[rule.group_2_target] = m.group(2) 

162 

163 

def parse_file(file_obj, profile: ImportProfile) -> list[dict]:
    """Read the Excel file and return a list of row-dicts keyed by target_field name.

    Each dict carries the mapped cell values plus a "_row_number" key (the
    1-based worksheet row).  Fully empty rows are skipped, transform rules
    are applied, and any saved source-ID resolutions overlay the parsed
    fields.

    Raises ParseError if the file or sheet is invalid.
    """
    try:
        content = file_obj.read()
        wb = openpyxl.load_workbook(BytesIO(content), data_only=True)
    except Exception as exc:
        raise ParseError(f"Cannot open Excel file: {exc}") from exc

    if profile.sheet_name not in wb.sheetnames:
        available = ", ".join(wb.sheetnames)
        raise ParseError(f"Sheet '{profile.sheet_name}' not found. Available sheets: {available}")

    ws = wb[profile.sheet_name]
    raw_headers = _build_header_index_map(ws)

    # Build source_column→target_field map from profile
    col_map: dict[str, str] = {cm.source_column: cm.target_field for cm in profile.column_mappings.all()}

    # Pre-fetch transform rules for efficiency
    transform_rules = list(profile.column_transform_rules.all())

    # Pre-group saved resolutions by source ID so we don't issue one DB
    # query per spreadsheet row (previously an N+1 pattern).
    resolutions_by_source: dict[str, list] = {}
    for res in profile.source_resolutions.all():
        resolutions_by_source.setdefault(str(res.source_id), []).append(res)

    rows = []
    for row_num, row in enumerate(ws.iter_rows(min_row=2, values_only=True), start=2):
        # Skip fully empty rows
        if all(v is None for v in row):
            continue

        row_dict: dict[str, object] = {"_row_number": row_num}
        for source_col, target_field in col_map.items():
            idx = raw_headers.get(source_col)
            if idx is None:
                continue
            value = row[idx] if idx < len(row) else None
            if isinstance(value, str):
                value = value.strip()
            row_dict[target_field] = value

        _apply_transform_rules(row_dict, row, raw_headers, transform_rules)

        # Apply saved resolutions (rerere)
        source_id = row_dict.get("source_id", "")
        if source_id:
            for res in resolutions_by_source.get(str(source_id), []):
                row_dict.update(res.resolved_fields)

        rows.append(row_dict)

    return rows

215 

216 

217# --------------------------------------------------------------------------- 

218# Device-type slug resolution 

219# --------------------------------------------------------------------------- 

220 

221 

def _resolve_device_type_slugs(make: str, model: str, profile: ImportProfile) -> tuple[str, str, bool]:
    """Return (manufacturer_slug, device_type_slug, is_explicit_mapping).

    Check DeviceTypeMapping first; fall back to auto-slugify.
    Both make and model are expected to be whitespace-normalized.
    """

    def _normalize(s: str) -> str:
        r"""Normalize whitespace and decode JS-style \uXXXX escape sequences."""
        # Uses the module-level ``re`` (the previous local ``import re as _re``
        # shadowed an import the module already has).
        s = re.sub(r"\\u([0-9a-fA-F]{4})", lambda m: chr(int(m.group(1), 16)), s)
        return " ".join(s.split())

    # Direct lookup (fast path — matches normalized stored records)
    mapping = profile.device_type_mappings.filter(source_make=make, source_model=model).first()
    # Fallback: stored records may have un-normalized whitespace or JS unicode escapes
    if not mapping:
        for m in profile.device_type_mappings.filter(source_make__iexact=make):
            if _normalize(m.source_model) == model:
                mapping = m
                break
    if mapping:
        return mapping.netbox_manufacturer_slug, mapping.netbox_device_type_slug, True

    # Check manufacturer-only mapping (maps source make to existing mfg slug)
    mfg_mapping = profile.manufacturer_mappings.filter(source_make=make).first()
    if not mfg_mapping:
        for m in profile.manufacturer_mappings.all():
            if _normalize(m.source_make) == make:
                mfg_mapping = m
                break
    # Auto-generated slugs are truncated to 50 characters.
    manufacturer_slug = mfg_mapping.netbox_manufacturer_slug if mfg_mapping else slugify(make)[:50]
    device_type_slug = slugify(f"{make}-{model}")[:50]
    return manufacturer_slug, device_type_slug, False

256 

257 

258# --------------------------------------------------------------------------- 

259# Main import runner — pass helpers 

260# --------------------------------------------------------------------------- 

261 

262# Value-translation maps (shared across passes) 

# Maps lower-cased source status strings to NetBox status slugs.  Values not
# present here fall back to "active" (see _pass3_process_devices).
_STATUS_MAP = {
    "live": "active",
    "production": "active",
    "planned": "planned",
    "staged": "staged",
    "failed": "failed",
    "offline": "offline",
    "decommissioning": "decommissioning",
}

272 

273 

def _get_translation_maps():
    """Return (SIDE_MAP, AIRFLOW_MAP, STATUS_MAP) with lazy-imported choice values.

    The dcim import is deferred to call time — presumably so this module can
    be imported before the NetBox/Django apps are fully set up (confirm).
    """
    from dcim.choices import DeviceAirflowChoices, DeviceFaceChoices

    side_map = {
        "front": DeviceFaceChoices.FACE_FRONT,
        "back": DeviceFaceChoices.FACE_REAR,
        "rear": DeviceFaceChoices.FACE_REAR,
    }
    airflow_map = {
        "front to back": DeviceAirflowChoices.AIRFLOW_FRONT_TO_REAR,
        "back to front": DeviceAirflowChoices.AIRFLOW_REAR_TO_FRONT,
        "passive": DeviceAirflowChoices.AIRFLOW_PASSIVE,
    }
    return side_map, airflow_map, _STATUS_MAP

289 

290 

291def _ensure_manufacturer(mfg_slug, make, seen_manufacturers, profile, result, dry_run, row, Manufacturer): 

292 """Create (or preview-create) a manufacturer if not yet seen.""" 

293 if mfg_slug in seen_manufacturers: 

294 return 

295 seen_manufacturers.add(mfg_slug) 

296 if not dry_run: 

297 if profile.create_missing_device_types: 

298 Manufacturer.objects.get_or_create(slug=mfg_slug, defaults={"name": make}) 

299 elif not Manufacturer.objects.filter(slug=mfg_slug).exists() and profile.create_missing_device_types: 

300 result.rows.append( 

301 RowResult( 

302 row_number=row["_row_number"], 

303 source_id=str(row.get("source_id", "")), 

304 name=make, 

305 action="create", 

306 object_type="manufacturer", 

307 detail=f"Would create manufacturer '{make}' (slug: {mfg_slug})", 

308 extra_data={"source_make": make, "mfg_slug": mfg_slug}, 

309 ) 

310 ) 

311 

312 

def _ensure_device_type(
    mfg_slug, dt_slug, make, model, u_height, seen_device_types, profile, result, dry_run, row, Manufacturer, DeviceType
):  # noqa: E501
    """Create (or preview-create) a device type if not yet seen.

    Deduplicates on the (manufacturer slug, device-type slug) pair so each
    distinct type is processed once per run.  In write mode the type is
    created only when the profile allows it; in dry-run mode either a
    "would create" row or — when creation is disabled and the type is
    missing — an error row is appended to *result*.
    """
    dt_key = (mfg_slug, dt_slug)
    if dt_key in seen_device_types:
        return
    seen_device_types.add(dt_key)
    if not dry_run:
        if profile.create_missing_device_types:
            # get_or_create keeps repeated imports idempotent.
            mfg, _ = Manufacturer.objects.get_or_create(slug=mfg_slug, defaults={"name": make})
            DeviceType.objects.get_or_create(
                manufacturer=mfg, slug=dt_slug, defaults={"model": model, "u_height": u_height}
            )
        return
    # Dry-run path: report what would happen without writing to the DB.
    exists = DeviceType.objects.filter(manufacturer__slug=mfg_slug, slug=dt_slug).exists()
    if exists:
        return
    if profile.create_missing_device_types:
        result.rows.append(
            RowResult(
                row_number=row["_row_number"],
                source_id=str(row.get("source_id", "")),
                name=f"{make} / {model}",
                action="create",
                object_type="device_type",
                detail=f"Would create device type '{model}' under '{make}'",
                # extra_data feeds the preview template's inline quick-fix actions.
                extra_data={
                    "source_make": make,
                    "source_model": model,
                    "mfg_slug": mfg_slug,
                    "dt_slug": dt_slug,
                    "u_height": u_height,
                },
            )
        )
    else:
        result.rows.append(
            RowResult(
                row_number=row["_row_number"],
                source_id=str(row.get("source_id", "")),
                name=f"{make} / {model}",
                action="error",
                object_type="device_type",
                detail=f"Device type not found: {make} / {model} — add a mapping or enable 'Create missing device types'",
                extra_data={
                    "source_make": make,
                    "source_model": model,
                    "mfg_slug": mfg_slug,
                    "dt_slug": dt_slug,
                    "u_height": u_height,
                },
            )
        )

367 

368 

369def _ensure_device_role(crm, seen_roles, dry_run, DeviceRole): 

370 """Create a device role if not yet seen.""" 

371 if not (crm and crm.role_slug and crm.role_slug not in seen_roles): 

372 return 

373 seen_roles.add(crm.role_slug) 

374 if not dry_run: 

375 DeviceRole.objects.get_or_create( 

376 slug=crm.role_slug, 

377 defaults={"name": crm.role_slug.replace("-", " ").title(), "color": "9e9e9e"}, 

378 ) 

379 

380 

def _pass1_ensure_types(rows, profile, class_role_map, result, dry_run):
    """Pass 1: ensure Manufacturer, DeviceType, and DeviceRole objects exist.

    Rack-creating classes, ignored classes, and unmapped classes contribute
    no types here; each remaining row's make/model is resolved to slugs and
    ensured exactly once per run via the seen-sets.
    """
    from dcim.models import DeviceRole, DeviceType, Manufacturer

    seen_manufacturers: set[str] = set()
    seen_device_types: set[tuple] = set()
    seen_roles: set[str] = set()

    for row in rows:
        crm = class_role_map.get(str(row.get("device_class", "")).strip())
        if crm is None or crm.creates_rack or crm.ignore:
            continue

        # Collapse internal whitespace; blank values become "Unknown".
        make = " ".join(str(row.get("make", "Unknown")).split()) or "Unknown"
        model = " ".join(str(row.get("model", "Unknown")).split()) or "Unknown"
        try:
            u_height = max(1, int(float(row.get("u_height", 1))))
        except (TypeError, ValueError):
            u_height = 1

        mfg_slug, dt_slug, _ = _resolve_device_type_slugs(make, model, profile)
        _ensure_manufacturer(mfg_slug, make, seen_manufacturers, profile, result, dry_run, row, Manufacturer)
        _ensure_device_type(
            mfg_slug,
            dt_slug,
            make,
            model,
            u_height,
            seen_device_types,
            profile,
            result,
            dry_run,
            row,
            Manufacturer,
            DeviceType,
        )
        _ensure_device_role(crm, seen_roles, dry_run, DeviceRole)

420 

421 

def _write_rack_to_db(
    rack_name,
    site,
    location,
    tenant,
    u_height,
    serial,
    profile,
    source_id,
    row,
    rack_map,
    result,
    update_existing,
    Rack,
):  # noqa: E501
    """Write or update a rack in the database and record the result.

    Looks the rack up by (site, name).  An existing rack is modified only
    when *update_existing* is true; otherwise a "skip" row is recorded.
    New racks get the source ID stamped via _store_source_id.  The rack is
    always registered in *rack_map* so later device rows can attach to it.
    """
    try:
        rack = Rack.objects.get(site=site, name=rack_name)
        if update_existing:
            rack.u_height = u_height
            # Keep the existing serial when the source row has none.
            rack.serial = serial or rack.serial
            if location:
                rack.location = location
            if tenant:
                rack.tenant = tenant
            rack.save()
            rack_map[rack_name] = rack
            result.rows.append(
                RowResult(
                    row_number=row["_row_number"],
                    source_id=source_id,
                    name=rack_name,
                    action="update",
                    object_type="rack",
                    detail=f"Updated rack '{rack_name}'",
                    netbox_url=rack.get_absolute_url(),
                )
            )
        else:
            rack_map[rack_name] = rack
            result.rows.append(
                RowResult(
                    row_number=row["_row_number"],
                    source_id=source_id,
                    name=rack_name,
                    action="skip",
                    object_type="rack",
                    detail=f"Rack '{rack_name}' already exists (update_existing=False)",
                )
            )
    except Rack.DoesNotExist:
        rack = Rack.objects.create(
            site=site, location=location, name=rack_name, tenant=tenant, u_height=u_height, serial=serial
        )
        _store_source_id(rack, profile, source_id)
        rack_map[rack_name] = rack
        result.rows.append(
            RowResult(
                row_number=row["_row_number"],
                source_id=source_id,
                name=rack_name,
                action="create",
                object_type="rack",
                detail=f"Created rack '{rack_name}' ({u_height}U)",
                netbox_url=rack.get_absolute_url(),
            )
        )

489 

490 

def _pass2_process_racks(rows, profile, site, location, tenant, class_role_map, result, dry_run):
    """Pass 2: create or update Rack objects. Returns rack_name→Rack map.

    Only rows whose class→role mapping has creates_rack=True are handled;
    all other rows are left for the device pass.  In dry-run mode the
    returned map holds rack *names* (no Rack instances are created); in
    write mode it holds real Rack objects from _write_rack_to_db.
    """
    from dcim.models import Rack

    rack_map: dict[str, object] = {}

    for row in rows:
        device_class = str(row.get("device_class", "")).strip()
        crm = class_role_map.get(device_class)
        if not (crm and crm.creates_rack):
            continue

        rack_name = str(row.get("rack_name", "")).strip()
        source_id = str(row.get("source_id", ""))
        u_height_raw = row.get("u_height", 42)
        serial = str(row.get("serial", "")).strip()

        # Missing/invalid heights fall back to the 42U default.
        try:
            u_height = max(1, int(float(u_height_raw)))
        except (TypeError, ValueError):
            u_height = 42

        if not rack_name:
            result.rows.append(
                RowResult(
                    row_number=row["_row_number"],
                    source_id=source_id,
                    name="",
                    action="error",
                    object_type="rack",
                    detail="Missing rack name",
                )
            )
            continue

        if dry_run:
            try:
                Rack.objects.get(site=site, name=rack_name)
                action = "update" if profile.update_existing else "skip"
                detail = f"Rack '{rack_name}' already exists"
            except Rack.DoesNotExist:
                action = "create"
                detail = f"Would create rack '{rack_name}' ({u_height}U) at site '{site}'"
            # Preview: map the name to itself so device rows can still
            # reference the (not yet created) rack.
            rack_map[rack_name] = rack_name
            result.rows.append(
                RowResult(
                    row_number=row["_row_number"],
                    source_id=source_id,
                    name=rack_name,
                    action=action,
                    object_type="rack",
                    detail=detail,
                )
            )
        else:
            _write_rack_to_db(
                rack_name,
                site,
                location,
                tenant,
                u_height,
                serial,
                profile,
                source_id,
                row,
                rack_map,
                result,
                profile.update_existing,
                Rack,
            )

    return rack_map

563 

564 

565def _find_existing_device(profile, source_id, site, device_name, serial, asset_tag, Device): 

566 """Look up a pre-existing NetBox device by source-ID link, serial, asset_tag, or name. 

567 

568 Returns (device, match_method) or (None, None). 

569 Matching priority: source-ID link → serial → asset_tag → name. 

570 When *site* is provided the name lookup is scoped to that site; without a 

571 site the name is matched globally (any site). 

572 """ 

573 existing_match = profile.device_matches.filter(source_id=source_id).first() if source_id else None 

574 matched_device = None 

575 match_method = None 

576 if existing_match: 

577 try: 

578 matched_device = Device.objects.get(pk=existing_match.netbox_device_id) 

579 match_method = "source ID link" 

580 except Device.DoesNotExist: 

581 pass 

582 

583 if matched_device is None and serial: 

584 try: 

585 matched_device = Device.objects.get(serial=serial) 

586 match_method = "serial" 

587 except Device.DoesNotExist: 

588 pass 

589 except Device.MultipleObjectsReturned: 

590 logger.warning("Ambiguous serial match for serial=%r; skipping auto-match", serial) 

591 

592 if matched_device is None and asset_tag: 

593 try: 

594 matched_device = Device.objects.get(asset_tag=asset_tag) 

595 match_method = "asset tag" 

596 except Device.DoesNotExist: 

597 pass 

598 except Device.MultipleObjectsReturned: 

599 logger.warning("Ambiguous asset_tag match for asset_tag=%r; skipping auto-match", asset_tag) 

600 

601 if matched_device is None and device_name: 

602 name_filter = {"name": device_name} 

603 if site is not None: 

604 name_filter["site"] = site 

605 try: 

606 matched_device = Device.objects.get(**name_filter) 

607 match_method = "name" 

608 except Device.DoesNotExist: 

609 pass 

610 except Device.MultipleObjectsReturned: 

611 logger.warning("Ambiguous name match for device_name=%r; skipping auto-match", device_name) 

612 

613 return matched_device, match_method 

614 

615 

def _preview_device_row(
    row,
    profile,
    site,
    rack_map,
    make,
    model,
    mfg_slug,
    dt_slug,
    source_id,
    device_name,
    serial,
    asset_tag,
    DeviceType,
    Device,
):  # noqa: E501
    """Return a RowResult for *dry_run* mode (no DB writes).

    Mirrors the decisions _write_device_row would take: errors when the
    device type is missing and cannot be auto-created, otherwise predicts
    create/update/skip based on whether an existing device matches.
    """
    dt_exists = DeviceType.objects.filter(manufacturer__slug=mfg_slug, slug=dt_slug).exists()
    if not dt_exists and not profile.create_missing_device_types:
        return RowResult(
            row_number=row["_row_number"],
            source_id=source_id,
            name=device_name,
            action="error",
            object_type="device",
            detail=f"Device type not found: {make} / {model} (slug: {mfg_slug}/{dt_slug})",
        )

    rack_name = str(row.get("rack_name", "")).strip()
    if rack_name:
        # rack_map only contains racks seen in pass 2; anything else is flagged.
        rack_label = rack_name if rack_name in rack_map else f"{rack_name} (not found)"
    else:
        rack_label = "(no rack)"
    raw_position = row.get("u_position")
    try:
        position = int(float(raw_position)) if raw_position is not None and str(raw_position).strip() != "" else None
    except (TypeError, ValueError):
        position = None

    # _find_existing_device checks DeviceExistingMatch → serial → asset_tag → name in that order,
    # ensuring explicit operator mappings always take precedence over coincidental name matches.
    matched_device, match_method = _find_existing_device(
        profile, source_id, site, device_name, serial, asset_tag, Device
    )
    if matched_device is not None:
        action = "update" if profile.update_existing else "skip"
        if match_method == "name":
            detail = f"Device '{device_name}' already exists"
        else:
            detail = f"Matched to existing device '{matched_device.name}' (by {match_method})"
    else:
        action = "create"
        _pos_label = f" U{position}" if position is not None else ""
        detail = f"Would create device '{device_name}' in {rack_label}{_pos_label}"

    return RowResult(
        row_number=row["_row_number"],
        source_id=source_id,
        name=device_name,
        action=action,
        object_type="device",
        detail=detail,
        rack_name=rack_name,
        extra_data={"source_make": make, "source_model": model, "asset_tag": asset_tag or ""},
    )

681 

682 

def _write_device_row(
    row,
    profile,
    site,
    location,
    tenant,
    rack_map,
    make,
    model,
    crm,
    mfg_slug,
    dt_slug,
    source_id,
    device_name,
    serial,
    asset_tag,
    position,
    face,
    airflow,
    status,
    DeviceType,
    DeviceRole,
    Rack,
    Device,
):  # noqa: E501
    """Write or update a device in the DB and return a RowResult.

    Resolves the device type and role first (either missing yields an error
    row), then either updates a matched existing device (when the profile
    allows updates), records a skip, or creates a new device.  Created and
    updated devices get the source ID stamped via _store_source_id.
    """
    rack_name = str(row.get("rack_name", "")).strip()
    try:
        device_type = DeviceType.objects.get(manufacturer__slug=mfg_slug, slug=dt_slug)
    except DeviceType.DoesNotExist:
        return RowResult(
            row_number=row["_row_number"],
            source_id=source_id,
            name=device_name,
            action="error",
            object_type="device",
            detail=f"Device type not found: {mfg_slug}/{dt_slug}",
        )

    try:
        device_role = DeviceRole.objects.get(slug=crm.role_slug)
    except DeviceRole.DoesNotExist:
        return RowResult(
            row_number=row["_row_number"],
            source_id=source_id,
            name=device_name,
            action="error",
            object_type="device",
            detail=f"Device role not found: {crm.role_slug}",
        )

    # Prefer the rack created earlier this run; otherwise query by name.
    rack = rack_map.get(rack_name) if rack_name else None
    if rack_name and rack is None:
        rack = Rack.objects.filter(site=site, name=rack_name).first()

    # _find_existing_device checks DeviceExistingMatch → serial → asset_tag → name in that order,
    # ensuring explicit operator mappings always take precedence over coincidental name matches.
    device, match_method = _find_existing_device(profile, source_id, site, device_name, serial, asset_tag, Device)

    if device is not None:
        if profile.update_existing:
            device.device_type = device_type
            device.role = device_role
            # rack_map may hold plain names (dry-run racks); only attach real Rack objects.
            device.rack = rack if isinstance(rack, Rack) else None
            device.position = position
            device.face = face
            device.airflow = airflow
            device.status = status
            # Keep the existing serial when the source row has none.
            device.serial = serial or device.serial
            if asset_tag:
                device.asset_tag = asset_tag
            if tenant:
                device.tenant = tenant
            device.save()
            _store_source_id(device, profile, source_id)
            return RowResult(
                row_number=row["_row_number"],
                source_id=source_id,
                name=device_name,
                action="update",
                object_type="device",
                detail=f"Updated device '{device.name}' (matched by {match_method})",
                netbox_url=device.get_absolute_url(),
                rack_name=rack_name,
                extra_data={"source_make": make, "source_model": model, "asset_tag": asset_tag or ""},
            )
        return RowResult(
            row_number=row["_row_number"],
            source_id=source_id,
            name=device_name,
            action="skip",
            object_type="device",
            detail=f"Device '{device.name}' already exists (update_existing=False)",
            rack_name=rack_name,
            extra_data={"source_make": make, "source_model": model, "asset_tag": asset_tag or ""},
        )

    device = Device.objects.create(
        site=site,
        location=location,
        name=device_name,
        device_type=device_type,
        role=device_role,
        rack=rack if isinstance(rack, Rack) else None,
        position=position,
        face=face,
        airflow=airflow,
        status=status,
        serial=serial,
        asset_tag=asset_tag,
        tenant=tenant,
    )
    _store_source_id(device, profile, source_id)
    _rack_label = rack_name if rack_name else "(no rack)"
    _pos_label = f" U{position}" if position is not None else ""
    return RowResult(
        row_number=row["_row_number"],
        source_id=source_id,
        name=device_name,
        action="create",
        object_type="device",
        detail=f"Created device '{device_name}' in {_rack_label}{_pos_label}",
        netbox_url=device.get_absolute_url(),
        rack_name=rack_name,
        extra_data={"source_make": make, "source_model": model, "asset_tag": asset_tag or ""},
    )

809 

810 

def _pass3_process_devices(rows, profile, site, location, tenant, class_role_map, rack_map, result, dry_run):
    """Pass 3: create or update Device objects.

    Skips rack rows (handled in pass 2), rows on the profile's ignore list,
    rows whose class maps to "ignore", and rows without a usable name or
    class mapping — each recorded as an appropriate RowResult.  Remaining
    rows are previewed (dry_run) or written to the DB.
    """
    from dcim.models import Device, DeviceRole, DeviceType, Rack
    from .models import IgnoredDevice

    side_map, airflow_map, status_map = _get_translation_maps()

    for row in rows:
        device_class = str(row.get("device_class", "")).strip()
        crm = class_role_map.get(device_class)
        # Rack-creating classes were fully handled by pass 2.
        if crm and crm.creates_rack:
            continue

        source_id = str(row.get("source_id", ""))
        device_name = str(row.get("device_name", "")).strip()
        rack_name = str(row.get("rack_name", "")).strip()
        # Collapse internal whitespace; blank values become "Unknown".
        make = " ".join(str(row.get("make", "Unknown")).split()) or "Unknown"
        model = " ".join(str(row.get("model", "Unknown")).split()) or "Unknown"
        serial = str(row.get("serial", "")).strip()
        # Asset tags are truncated to 50 characters.
        asset_tag_raw = str(row.get("asset_tag", "")).strip() or None
        asset_tag = asset_tag_raw[:50] if asset_tag_raw else None

        u_position_raw = row.get("u_position")
        try:
            position = int(float(u_position_raw))
            if position < 1:
                result.rows.append(
                    RowResult(
                        row_number=row["_row_number"],
                        source_id=source_id,
                        name=device_name,
                        action="skip",
                        object_type="device",
                        detail=f"Skipped: position {position} < 1 (under-rack/blanking panel)",
                        rack_name=rack_name,
                    )
                )
                continue
        except (TypeError, ValueError):
            # Missing/non-numeric position → unpositioned device.
            position = None

        if not device_name:
            result.rows.append(
                RowResult(
                    row_number=row["_row_number"],
                    source_id=source_id,
                    name="",
                    action="error",
                    object_type="device",
                    detail="Missing device name",
                )
            )
            continue

        # Operator-ignored source IDs are reported but never imported.
        if IgnoredDevice.objects.filter(profile=profile, source_id=source_id).exists():
            result.rows.append(
                RowResult(
                    row_number=row["_row_number"],
                    source_id=source_id,
                    name=device_name,
                    action="ignore",
                    object_type="device",
                    detail="Ignored device",
                    rack_name=rack_name,
                )
            )
            continue

        if not crm:
            result.rows.append(
                RowResult(
                    row_number=row["_row_number"],
                    source_id=source_id,
                    name=device_name,
                    action="error",
                    object_type="device",
                    detail=f"No class→role mapping for class '{device_class}'",
                    # extra_data feeds the preview template's inline quick-fix actions.
                    extra_data={
                        "source_class": device_class,
                        "profile_id": profile.pk,
                        "source_make": make,
                        "source_model": model,
                        "asset_tag": asset_tag or "",
                    },
                )
            )
            continue

        if crm.ignore:
            result.rows.append(
                RowResult(
                    row_number=row["_row_number"],
                    source_id=source_id,
                    name=device_name,
                    action="ignore",
                    object_type="device",
                    detail=f"Ignored: class '{device_class}'",
                    rack_name=rack_name,
                )
            )
            continue

        mfg_slug, dt_slug, _ = _resolve_device_type_slugs(make, model, profile)
        # Unmapped status values default to "active".
        device_status = status_map.get(str(row.get("status", "")).strip().lower(), "active")
        device_face = side_map.get(str(row.get("face", "")).strip().lower())
        device_airflow = airflow_map.get(str(row.get("airflow", "")).strip().lower())

        if dry_run:
            row_result = _preview_device_row(
                row,
                profile,
                site,
                rack_map,
                make,
                model,
                mfg_slug,
                dt_slug,
                source_id,
                device_name,
                serial,
                asset_tag,
                DeviceType,
                Device,
            )  # noqa: E501
        else:
            row_result = _write_device_row(
                row,
                profile,
                site,
                location,
                tenant,
                rack_map,
                make,
                model,
                crm,
                mfg_slug,
                dt_slug,
                source_id,
                device_name,
                serial,
                asset_tag,
                position,
                device_face,
                device_airflow,
                device_status,
                DeviceType,
                DeviceRole,
                Rack,
                Device,
            )  # noqa: E501
        result.rows.append(row_result)

962 

963 

964# --------------------------------------------------------------------------- 

965# Main import runner 

966# --------------------------------------------------------------------------- 

967 

968 

def run_import(rows: list[dict], profile: ImportProfile, context: dict, dry_run: bool = True) -> ImportResult:
    """Run (or preview) the import.

    context keys: site, location (optional), tenant (optional)
    dry_run=True → no DB writes, returns what *would* happen
    dry_run=False → writes to DB
    """
    site = context["site"]
    location = context.get("location")
    tenant = context.get("tenant")

    mapping_by_class = {m.source_class: m for m in profile.class_role_mappings.all()}
    result = ImportResult()

    # Three ordered passes: types first, then racks, then the devices that
    # depend on both.
    _pass1_ensure_types(rows, profile, mapping_by_class, result, dry_run)
    rack_map = _pass2_process_racks(rows, profile, site, location, tenant, mapping_by_class, result, dry_run)
    _pass3_process_devices(rows, profile, site, location, tenant, mapping_by_class, rack_map, result, dry_run)

    result._recompute_counts()
    return result

989 

990 

991# --------------------------------------------------------------------------- 

992# Helpers 

993# --------------------------------------------------------------------------- 

994 

995 

996def _store_source_id(obj, profile: ImportProfile, source_id: str): 

997 """Store source ID in the configured custom field and in the plugin's JSON metadata field.""" 

998 changed = False 

999 

1000 # Per-profile custom field (e.g. cans_id → plain string) 

1001 if profile.custom_field_name and source_id: 

1002 try: 

1003 obj.custom_field_data[profile.custom_field_name] = source_id 

1004 changed = True 

1005 except (AttributeError, KeyError): 

1006 pass 

1007 

1008 # Plugin-managed JSON field: data_import_source 

1009 try: 

1010 obj.custom_field_data["data_import_source"] = { 

1011 "source_id": source_id or "", 

1012 "profile_id": profile.pk, 

1013 "profile_name": profile.name, 

1014 } 

1015 changed = True 

1016 except (AttributeError, KeyError): 

1017 pass 

1018 

1019 if changed: 

1020 try: 

1021 obj.save(update_fields=["custom_field_data"]) 

1022 except Exception: 

1023 pass