 #include "mlir/IR/Location.h"
 #include "llvm/ADT/STLExtras.h"
 #include "llvm/ADT/ScopeExit.h"
+#include "llvm/ADT/SetOperations.h"
 #include "llvm/ADT/TypeSwitch.h"
 #include "llvm/BinaryFormat/Dwarf.h"
 #include "llvm/IR/Constants.h"
@@ -25,6 +26,10 @@ using namespace mlir;
 using namespace mlir::LLVM;
 using namespace mlir::LLVM::detail;
 
+DebugImporter::DebugImporter(ModuleOp mlirModule)
+    : recursionPruner(mlirModule.getContext()),
+      context(mlirModule.getContext()), mlirModule(mlirModule) {}
+
 Location DebugImporter::translateFuncLocation(llvm::Function *func) {
   llvm::DISubprogram *subprogram = func->getSubprogram();
   if (!subprogram)
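
The constructor added above initializes a `recursionPruner` member whose declaration is not part of this file. Below is a rough sketch of what the corresponding DebugImporter.h additions presumably look like; the method names match the definitions later in this diff, but the exact containers and struct layouts are inferred from usage and should be treated as assumptions.

// Hypothetical sketch of the new declarations in DebugImporter.h (not shown in
// this diff), nested inside class DebugImporter. Container choices are guesses
// based on the calls made in the .cpp hunks below.
class RecursionPruner {
public:
  RecursionPruner(MLIRContext *context) : context(context) {}

  /// Returns a translation for `node` (a cached attribute or a recursive
  /// self-reference) if no recursion into it is needed; otherwise pushes it
  /// onto the translation stack and returns null.
  DINodeAttr pruneOrPushTranslationStack(llvm::DINode *node);

  /// Binds the pending recursive ID (if any) onto `result` and reports whether
  /// the translation is fully self-contained.
  std::pair<DINodeAttr, bool> finalizeTranslation(llvm::DINode *node,
                                                  DINodeAttr result);

  /// Pops `node` off the translation stack, propagating its unbound
  /// self-references to the enclosing layer.
  void popTranslationStack(llvm::DINode *node);

private:
  /// Returns a cached translation whose unbound self-references are all still
  /// live in the current layer, or null.
  DINodeAttr lookup(llvm::DINode *node);

  /// A translated attribute that still contains unbound self-references.
  struct DependentTranslation {
    DINodeAttr attr;
    DenseSet<DIRecursiveTypeAttrInterface> unboundSelfRefs;
  };

  /// Per-frame state for a recursion-capable node being translated.
  struct TranslationState {
    DIRecursiveTypeAttrInterface recSelf;
    DenseSet<DIRecursiveTypeAttrInterface> unboundSelfRefs;
  };

  MLIRContext *context;
  llvm::MapVector<llvm::DINode *, TranslationState> translationStack;
  DenseMap<llvm::DINode *, DistinctAttr> nodeToRecId;
  DenseMap<llvm::DINode *, DependentTranslation> dependentCache;
};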
@@ -246,42 +251,13 @@ DINodeAttr DebugImporter::translate(llvm::DINode *node) {
   if (DINodeAttr attr = nodeToAttr.lookup(node))
     return attr;
 
-  // If the node type is capable of being recursive, check if it's seen before.
-  auto recSelfCtor = getRecSelfConstructor(node);
-  if (recSelfCtor) {
-    // If a cyclic dependency is detected since the same node is being traversed
-    // twice, emit a recursive self type, and mark the duplicate node on the
-    // translationStack so it can emit a recursive decl type.
-    auto [iter, inserted] = translationStack.try_emplace(node, nullptr);
-    if (!inserted) {
-      // The original node may have already been assigned a recursive ID from
-      // a different self-reference. Use that if possible.
-      DistinctAttr recId = iter->second;
-      if (!recId) {
-        recId = DistinctAttr::create(UnitAttr::get(context));
-        iter->second = recId;
-      }
-      unboundRecursiveSelfRefs.back().insert(recId);
-      return cast<DINodeAttr>(recSelfCtor(recId));
-    }
-  }
-
-  unboundRecursiveSelfRefs.emplace_back();
-
-  auto guard = llvm::make_scope_exit([&]() {
-    if (recSelfCtor)
-      translationStack.pop_back();
+  // Register with the recursive translator. If it can be handled without
+  // recursing into it, return the result immediately.
+  if (DINodeAttr attr = recursionPruner.pruneOrPushTranslationStack(node))
+    return attr;
 
-    // Copy unboundRecursiveSelfRefs down to the previous level.
-    if (unboundRecursiveSelfRefs.size() == 1)
-      assert(unboundRecursiveSelfRefs.back().empty() &&
-             "internal error: unbound recursive self reference at top level.");
-    else
-      unboundRecursiveSelfRefs[unboundRecursiveSelfRefs.size() - 2].insert(
-          unboundRecursiveSelfRefs.back().begin(),
-          unboundRecursiveSelfRefs.back().end());
-    unboundRecursiveSelfRefs.pop_back();
-  });
+  auto guard = llvm::make_scope_exit(
+      [&]() { recursionPruner.popTranslationStack(node); });
 
   // Convert the debug metadata if possible.
   auto translateNode = [this](llvm::DINode *node) -> DINodeAttr {
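
For intuition, the kind of input that exercises this new path is a self-referential composite type. The C struct below is purely illustrative (not taken from this patch or its tests): compiled with debug info, it produces a DICompositeType whose member list points back at itself, so translate() re-enters the same node and the pruner must short-circuit the cycle instead of recursing forever.

// Illustrative only: a self-referential aggregate whose debug metadata forms a
// cycle (DICompositeType "Node" -> member "next" -> pointer type -> "Node").
struct Node {
  int value;
  struct Node *next; // back-edge; translating this member revisits "Node"
};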
@@ -318,22 +294,130 @@ DINodeAttr DebugImporter::translate(llvm::DINode *node) {
     return nullptr;
   };
   if (DINodeAttr attr = translateNode(node)) {
-    // If this node was marked as recursive, set its recId.
-    if (auto recType = dyn_cast<DIRecursiveTypeAttrInterface>(attr)) {
-      if (DistinctAttr recId = translationStack.lookup(node)) {
-        attr = cast<DINodeAttr>(recType.withRecId(recId));
-        // Remove the unbound recursive ID from the set of unbound self
-        // references in the translation stack.
-        unboundRecursiveSelfRefs.back().erase(recId);
+    auto [result, isSelfContained] =
+        recursionPruner.finalizeTranslation(node, attr);
+    // Only cache fully self-contained nodes.
+    if (isSelfContained)
+      nodeToAttr.try_emplace(node, result);
+    return result;
+  }
+  return nullptr;
+}
+
+//===----------------------------------------------------------------------===//
+// RecursionPruner
+//===----------------------------------------------------------------------===//
+
+/// Get the `getRecSelf` constructor for the translated type of `node` if its
+/// translated DITypeAttr supports recursion. Otherwise, returns nullptr.
+static function_ref<DIRecursiveTypeAttrInterface(DistinctAttr)>
+getRecSelfConstructor(llvm::DINode *node) {
+  using CtorType = function_ref<DIRecursiveTypeAttrInterface(DistinctAttr)>;
+  return TypeSwitch<llvm::DINode *, CtorType>(node)
+      .Case([&](llvm::DICompositeType *) {
+        return CtorType(DICompositeTypeAttr::getRecSelf);
+      })
+      .Default(CtorType());
+}
+
+DINodeAttr DebugImporter::RecursionPruner::pruneOrPushTranslationStack(
+    llvm::DINode *node) {
+  // If the node type is capable of being recursive, check if it's seen
+  // before.
+  auto recSelfCtor = getRecSelfConstructor(node);
+  if (recSelfCtor) {
+    // If a cyclic dependency is detected since the same node is being
+    // traversed twice, emit a recursive self type, and mark the duplicate
+    // node on the translationStack so it can emit a recursive decl type.
+    auto [iter, inserted] = translationStack.try_emplace(node);
+    if (!inserted) {
+      // The original node may have already been assigned a recursive ID from
+      // a different self-reference. Use that if possible.
+      DIRecursiveTypeAttrInterface recSelf = iter->second.recSelf;
+      if (!recSelf) {
+        DistinctAttr recId = nodeToRecId.lookup(node);
+        if (!recId) {
+          recId = DistinctAttr::create(UnitAttr::get(context));
+          nodeToRecId[node] = recId;
+        }
+        recSelf = recSelfCtor(recId);
+        iter->second.recSelf = recSelf;
       }
+      // Inject the self-ref into the previous layer.
+      translationStack.back().second.unboundSelfRefs.insert(recSelf);
+      return cast<DINodeAttr>(recSelf);
     }
+  }
 
-    // Only cache fully self-contained nodes.
-    if (unboundRecursiveSelfRefs.back().empty())
-      nodeToAttr.try_emplace(node, attr);
-    return attr;
+  return lookup(node);
+}
+
+std::pair<DINodeAttr, bool>
+DebugImporter::RecursionPruner::finalizeTranslation(llvm::DINode *node,
+                                                    DINodeAttr result) {
+  // If `node` is not a potentially recursive type, it will not be on the
+  // translation stack. Nothing to set in this case.
+  if (translationStack.empty())
+    return {result, true};
+  if (translationStack.back().first != node)
+    return {result, translationStack.back().second.unboundSelfRefs.empty()};
+
+  TranslationState &state = translationStack.back().second;
+
+  // If this node is actually recursive, set the recId onto `result`.
+  if (DIRecursiveTypeAttrInterface recSelf = state.recSelf) {
+    auto recType = cast<DIRecursiveTypeAttrInterface>(result);
+    result = cast<DINodeAttr>(recType.withRecId(recSelf.getRecId()));
+    // Remove this recSelf from the set of unbound selfRefs.
+    state.unboundSelfRefs.erase(recSelf);
   }
-  return nullptr;
+
+  // Insert the result into our internal cache if it's not self-contained.
+  if (!state.unboundSelfRefs.empty()) {
+    auto [_, inserted] = dependentCache.try_emplace(
+        node, DependentTranslation{result, state.unboundSelfRefs});
+    assert(inserted && "invalid state: caching the same DINode twice");
+    return {result, false};
+  }
+  return {result, true};
+}
+
+void DebugImporter::RecursionPruner::popTranslationStack(llvm::DINode *node) {
+  // If `node` is not a potentially recursive type, it will not be on the
+  // translation stack. Nothing to handle in this case.
+  if (translationStack.empty() || translationStack.back().first != node)
+    return;
+
+  // At the end of the stack, all unbound self-refs must be resolved already,
+  // and the entire cache should be accounted for.
+  TranslationState &currLayerState = translationStack.back().second;
+  if (translationStack.size() == 1) {
+    assert(currLayerState.unboundSelfRefs.empty() &&
+           "internal error: unbound recursive self reference at top level.");
+    translationStack.pop_back();
+    return;
+  }
+
+  // Copy unboundSelfRefs down to the previous level.
+  TranslationState &nextLayerState = (++translationStack.rbegin())->second;
+  nextLayerState.unboundSelfRefs.insert(currLayerState.unboundSelfRefs.begin(),
+                                        currLayerState.unboundSelfRefs.end());
+  translationStack.pop_back();
+}
+
+DINodeAttr DebugImporter::RecursionPruner::lookup(llvm::DINode *node) {
+  auto cacheIter = dependentCache.find(node);
+  if (cacheIter == dependentCache.end())
+    return {};
+
+  DependentTranslation &entry = cacheIter->second;
+  if (llvm::set_is_subset(entry.unboundSelfRefs,
+                          translationStack.back().second.unboundSelfRefs))
+    return entry.attr;
+
+  // Stale cache entry.
+  dependentCache.erase(cacheIter);
+  return {};
 }
 
 //===----------------------------------------------------------------------===//
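
lookup() reuses a cached, dependent translation only when every self-reference it depends on is still unbound at the current top of the stack, which is what the llvm::set_is_subset call (hence the new SetOperations.h include) checks. Below is a minimal standalone sketch of that predicate, using plain unsigned keys instead of the real attribute interface type; it is an illustration of the condition, not code from this patch.

// Standalone sketch of the reuse condition in RecursionPruner::lookup();
// the element type is simplified to unsigned for illustration.
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/SetOperations.h"

static bool isCacheEntryReusable(const llvm::DenseSet<unsigned> &entryRefs,
                                 const llvm::DenseSet<unsigned> &liveRefs) {
  // Reusable iff the self-refs recorded with the cache entry form a subset of
  // the refs still unbound in the current translation layer.
  return llvm::set_is_subset(entryRefs, liveRefs);
}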
@@ -394,13 +478,3 @@ DistinctAttr DebugImporter::getOrCreateDistinctID(llvm::DINode *node) {
   id = DistinctAttr::create(UnitAttr::get(context));
   return id;
 }
-
-function_ref<DIRecursiveTypeAttrInterface(DistinctAttr)>
-DebugImporter::getRecSelfConstructor(llvm::DINode *node) {
-  using CtorType = function_ref<DIRecursiveTypeAttrInterface(DistinctAttr)>;
-  return TypeSwitch<llvm::DINode *, CtorType>(node)
-      .Case([&](llvm::DICompositeType *concreteNode) {
-        return CtorType(DICompositeTypeAttr::getRecSelf);
-      })
-      .Default(CtorType());
-}