LLZK 0.1.0
Veridise's ZK Language IR
LLZKInlineStructsPass.cpp
Go to the documentation of this file.
1//===-- LLZKInlineStructsPass.cpp -------------------------------*- C++ -*-===//
2//
3// Part of the LLZK Project, under the Apache License v2.0.
4// See LICENSE.txt for license information.
5// Copyright 2025 Veridise Inc.
6// SPDX-License-Identifier: Apache-2.0
7//
8//===----------------------------------------------------------------------===//
19//===----------------------------------------------------------------------===//
20
30#include "llzk/Util/Debug.h"
33
34#include <mlir/IR/BuiltinOps.h>
35#include <mlir/Transforms/InliningUtils.h>
36#include <mlir/Transforms/WalkPatternRewriteDriver.h>
37
38#include <llvm/ADT/PostOrderIterator.h>
39#include <llvm/ADT/SmallPtrSet.h>
40#include <llvm/ADT/SmallVector.h>
41#include <llvm/ADT/StringMap.h>
42#include <llvm/ADT/TypeSwitch.h>
43#include <llvm/Support/Debug.h>
44
45#include <concepts>
46
47// Include the generated base pass class definitions.
48namespace llzk {
49// when a pass has options, the *DECL* macro is required to declare the option struct
50#define GEN_PASS_DECL_INLINESTRUCTSPASS
51#define GEN_PASS_DEF_INLINESTRUCTSPASS
53} // namespace llzk
54
55using namespace mlir;
56using namespace llzk;
57using namespace llzk::component;
58using namespace llzk::function;
59
60#define DEBUG_TYPE "llzk-inline-structs"
61
62namespace {
63
64using DestFieldWithSrcStructType = FieldDefOp;
65using DestCloneOfSrcStructField = FieldDefOp;
69using SrcStructFieldToCloneInDest = std::map<StringRef, DestCloneOfSrcStructField>;
72using DestToSrcToClonedSrcInDest =
73 DenseMap<DestFieldWithSrcStructType, SrcStructFieldToCloneInDest>;
74
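// Illustrative sketch (not from the original source) of how the nested map above gets
// populated, using hypothetical struct/field names: if the destination struct has a field
// `@sub` whose type is the source struct `@Inner`, and `@Inner` declares a field `@f1`,
// then after cloning the map holds roughly
//   { @sub (FieldDefOp in dest) -> { "f1" -> clone named @"sub:!s<@Inner>+f1" in dest } }
// i.e. outer key = dest field with struct type, inner key = source field name, value = the
// FieldDefOp cloned into the destination struct by `cloneFields()` below.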
77static inline Value getSelfValue(FuncDefOp f) {
78 if (f.nameIsCompute()) {
79 return f.getSelfValueFromCompute();
80 } else if (f.nameIsConstrain()) {
81 return f.getSelfValueFromConstrain();
82 } else {
83 llvm_unreachable("expected \"@compute\" or \"@constrain\" function");
84 }
85}
86
89static inline FieldDefOp getDef(SymbolTableCollection &tables, FieldRefOpInterface fRef) {
90 auto r = fRef.getFieldDefOp(tables);
91 assert(succeeded(r));
92 return r->get();
93}
94
97static FailureOr<FieldWriteOp>
98findOpThatStoresSubcmp(Value writtenValue, function_ref<InFlightDiagnostic()> emitError) {
99 FieldWriteOp foundWrite = nullptr;
100 for (Operation *user : writtenValue.getUsers()) {
101 if (FieldWriteOp writeOp = llvm::dyn_cast<FieldWriteOp>(user)) {
102 // Find the write op that stores the created value
103 if (writeOp.getVal() == writtenValue) {
104 if (foundWrite) {
105 // Note: There is no reason for a subcomponent to be stored to more than one field.
106 auto diag = emitError().append("result should not be written to more than one field.");
107 diag.attachNote(foundWrite.getLoc()).append("written here");
108 diag.attachNote(writeOp.getLoc()).append("written here");
109 return diag;
110 } else {
111 foundWrite = writeOp;
112 }
113 }
114 }
115 }
116 if (!foundWrite) {
117 // Note: There is no reason to construct a subcomponent and not store it to a field.
118 return emitError().append("result should be written to a field.");
119 }
120 return foundWrite;
121}
122
123/// If the given `FieldReadOp` reads a field from a subcomponent that was cloned into the dest (as
124/// described in `combineReadChain()` or `combineNewThenReadChain()`), replace it with a
125/// new `FieldReadOp` that directly reads from the given cloned field and delete it.
126static bool combineHelper(
127 FieldReadOp readOp, SymbolTableCollection &tables,
128 const DestToSrcToClonedSrcInDest &destToSrcToClone, FieldRefOpInterface destFieldRefOp
129) {
130 LLVM_DEBUG({ llvm::dbgs() << "[combineHelper] " << readOp << " => " << destFieldRefOp << '\n'; });
131
132 auto srcToClone = destToSrcToClone.find(getDef(tables, destFieldRefOp));
133 if (srcToClone == destToSrcToClone.end()) {
134 return false;
135 }
136 SrcStructFieldToCloneInDest oldToNewFields = srcToClone->second;
137 auto resNewField = oldToNewFields.find(readOp.getFieldName());
138 if (resNewField == oldToNewFields.end()) {
139 return false;
140 }
141
142 // Replace this FieldReadOp with a new one that targets the cloned field.
143 OpBuilder builder(readOp);
144 FieldReadOp newRead = builder.create<FieldReadOp>(
145 readOp.getLoc(), readOp.getType(), destFieldRefOp.getComponent(),
146 resNewField->second.getNameAttr()
147 );
148 readOp.replaceAllUsesWith(newRead.getOperation());
149 readOp.erase(); // delete the original FieldReadOp
150 return true;
151}
152
164
166static bool combineReadChain(
167 FieldReadOp readOp, SymbolTableCollection &tables,
168 const DestToSrcToClonedSrcInDest &destToSrcToClone
169) {
170 LLVM_DEBUG({ llvm::dbgs() << "[combineReadChain] " << readOp << '\n'; });
171
172 FieldReadOp readThatDefinesBaseComponent =
173 llvm::dyn_cast_if_present<FieldReadOp>(readOp.getComponent().getDefiningOp());
174 if (!readThatDefinesBaseComponent) {
175 return false;
176 }
177 return combineHelper(readOp, tables, destToSrcToClone, readThatDefinesBaseComponent);
178}
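// Illustrative effect of the compression above (hypothetical names; IR shown in the same
// abbreviated style as the FieldRefRewriter example below):
//   before: %0 = struct.readf %self[@sub] : <@Outer>, !struct.type<@Inner>
//           %1 = struct.readf %0[@f1] : <@Inner>, !felt.type
//   after:  %1 = struct.readf %self[@"sub:!s<@Inner>+f1"] : <@Outer>, !felt.type
// The outer read of @sub is left in place for later erasure once all of its uses are gone.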
179
196static LogicalResult combineNewThenReadChain(
197 FieldReadOp readOp, SymbolTableCollection &tables,
198 const DestToSrcToClonedSrcInDest &destToSrcToClone
199) {
200 LLVM_DEBUG({ llvm::dbgs() << "[combineNewThenReadChain] " << readOp << '\n'; });
201
202 CreateStructOp createThatDefinesBaseComponent =
203 llvm::dyn_cast_if_present<CreateStructOp>(readOp.getComponent().getDefiningOp());
204 if (!createThatDefinesBaseComponent) {
205 return success(); // No error. The pattern simply doesn't match.
206 }
207 FailureOr<FieldWriteOp> foundWrite =
208 findOpThatStoresSubcmp(createThatDefinesBaseComponent, [&createThatDefinesBaseComponent]() {
209 return createThatDefinesBaseComponent.emitOpError();
210 });
211 if (failed(foundWrite)) {
212 return failure(); // error already printed within findOpThatStoresSubcmp()
213 }
214 return success(combineHelper(readOp, tables, destToSrcToClone, foundWrite.value()));
215}
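// Illustrative "new then read" case handled above (hypothetical names, schematic op syntax):
//   %n = struct.new : !struct.type<@Inner>
//   struct.writef %self[@sub] = %n ...
//   %1 = struct.readf %n[@f1] : <@Inner>, !felt.type
// The read through %n is rewritten to read the cloned field directly:
//   %1 = struct.readf %self[@"sub:!s<@Inner>+f1"] : <@Outer>, !felt.type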
216
217static inline FieldReadOp getFieldReadThatDefinesSelfValuePassedToConstrain(CallOp callOp) {
218 Value selfArgFromCall = callOp.getSelfValueFromConstrain();
219 return llvm::dyn_cast_if_present<FieldReadOp>(selfArgFromCall.getDefiningOp());
220}
221
224struct PendingErasure {
225 SmallPtrSet<Operation *, 8> fieldReadOps;
226 SmallPtrSet<Operation *, 8> fieldWriteOps;
227 SmallVector<CreateStructOp> newStructOps;
228 SmallVector<DestFieldWithSrcStructType> fieldDefs;
229};
230
232class StructInliner {
233 SymbolTableCollection &tables;
234 PendingErasure &toDelete;
236 StructDefOp srcStruct;
238 StructDefOp destStruct;
239
240 inline FieldDefOp getDef(FieldRefOpInterface fRef) const { return ::getDef(tables, fRef); }
241
242 // Update field read/write ops that target the "self" value of the FuncDefOp plus some key in
243 // `oldToNewFieldDef` to instead target the new base Value provided to the constructor plus the
244 // mapped Value from `oldToNewFieldDef`.
245 // Example:
246 // old: %1 = struct.readf %0[@f1] : <@Component1A>, !felt.type
247 // new: %1 = struct.readf %self[@"f2:!s<@Component1A>+f1"] : <@Component1B>, !felt.type
248 class FieldRefRewriter final : public OpInterfaceRewritePattern<FieldRefOpInterface> {
251 FuncDefOp funcRef;
253 Value oldBaseVal;
255 Value newBaseVal;
256 const SrcStructFieldToCloneInDest &oldToNewFields;
257
258 public:
259 FieldRefRewriter(
260 FuncDefOp originalFunc, Value newRefBase,
261 const SrcStructFieldToCloneInDest &oldToNewFieldDef
262 )
263 : OpInterfaceRewritePattern(originalFunc.getContext()), funcRef(originalFunc),
264 oldBaseVal(nullptr), newBaseVal(newRefBase), oldToNewFields(oldToNewFieldDef) {}
265
266 LogicalResult match(FieldRefOpInterface op) const final {
267 assert(oldBaseVal); // ensure it's used via `cloneWithFieldRefUpdate()` only
268 // Check if the FieldRef accesses a field of "self" within the `oldToNewFields` map.
269 // Per `cloneWithFieldRefUpdate()`, `oldBaseVal` is the "self" value of `funcRef` so
270 // check for a match there and then check that the referenced field name is in the map.
271 return success(op.getComponent() == oldBaseVal && oldToNewFields.contains(op.getFieldName()));
272 }
273
274 void rewrite(FieldRefOpInterface op, PatternRewriter &rewriter) const final {
275 rewriter.modifyOpInPlace(op, [this, &op]() {
276 DestCloneOfSrcStructField newF = oldToNewFields.at(op.getFieldName());
277 op.setFieldName(newF.getSymName());
278 op.getComponentMutable().set(this->newBaseVal);
279 });
280 }
281
284 static FuncDefOp cloneWithFieldRefUpdate(std::unique_ptr<FieldRefRewriter> thisPat) {
285 IRMapping mapper;
286 FuncDefOp srcFuncClone = thisPat->funcRef.clone(mapper);
287 // Update some data in the `FieldRefRewriter` instance before moving it.
288 thisPat->funcRef = srcFuncClone;
289 thisPat->oldBaseVal = getSelfValue(srcFuncClone);
290 // Run the rewriter to replace read/write ops
291 MLIRContext *ctx = thisPat->getContext();
292 RewritePatternSet patterns(ctx, std::move(thisPat));
293 walkAndApplyPatterns(srcFuncClone, std::move(patterns));
294
295 return srcFuncClone;
296 }
297 };
298
300 class ImplBase {
301 protected:
302 const StructInliner &data;
303 const DestToSrcToClonedSrcInDest &destToSrcToClone;
304
307 virtual FieldRefOpInterface getSelfRefField(CallOp callOp) = 0;
308 virtual void processCloneBeforeInlining(FuncDefOp func) {}
309 virtual ~ImplBase() = default;
310
311 public:
312 ImplBase(const StructInliner &inliner, const DestToSrcToClonedSrcInDest &destToSrcToCloneRef)
313 : data(inliner), destToSrcToClone(destToSrcToCloneRef) {}
314
315 LogicalResult doInlining(FuncDefOp srcFunc, FuncDefOp destFunc) {
316 LLVM_DEBUG({
317 llvm::dbgs() << "[doInlining] SOURCE FUNCTION:\n";
318 srcFunc.dump();
319 llvm::dbgs() << "[doInlining] DESTINATION FUNCTION:\n";
320 destFunc.dump();
321 });
322
323 InlinerInterface inliner(destFunc.getContext());
324
326 auto callHandler = [this, &inliner, &srcFunc](CallOp callOp) {
327 // Ensure the CallOp targets `srcFunc`
328 auto callOpTarget = callOp.getCalleeTarget(this->data.tables);
329 assert(succeeded(callOpTarget));
330 if (callOpTarget->get() != srcFunc) {
331 return WalkResult::advance();
332 }
333
334 // Get the "self" struct parameter from the CallOp and determine which field that struct
335 // was stored in within the caller (i.e. `destFunc`).
336 FieldRefOpInterface selfFieldRefOp = this->getSelfRefField(callOp);
337 if (!selfFieldRefOp) {
338 // Note: error message was already printed within `getSelfRefField()`
339 return WalkResult::interrupt(); // use interrupt to signal failure
340 }
341
342 // Create a clone of the source function (must do the whole function not just the body
343 // region because `inlineCall()` expects the Region to have a parent op) and update field
344 // references to the old struct fields to instead use the new struct fields.
345 FuncDefOp srcFuncClone = FieldRefRewriter::cloneWithFieldRefUpdate(
346 std::make_unique<FieldRefRewriter>(
347 srcFunc, selfFieldRefOp.getComponent(),
348 this->destToSrcToClone.at(this->data.getDef(selfFieldRefOp))
349 )
350 );
351 this->processCloneBeforeInlining(srcFuncClone);
352
353 // Inline the cloned function in place of `callOp`
354 LogicalResult inlineCallRes =
355 inlineCall(inliner, callOp, srcFuncClone, &srcFuncClone.getBody(), false);
356 if (failed(inlineCallRes)) {
357 callOp.emitError().append("Failed to inline ", srcFunc.getFullyQualifiedName()).report();
358 return WalkResult::interrupt(); // use interrupt to signal failure
359 }
360 srcFuncClone.erase(); // delete what's left after transferring the body elsewhere
361 callOp.erase(); // delete the original CallOp
362 return WalkResult::skip(); // Must skip because the CallOp was erased.
363 };
364
365 auto fieldWriteHandler = [this](FieldWriteOp writeOp) {
366 // Check if the field ref op should be deleted in the end
367 if (this->destToSrcToClone.contains(this->data.getDef(writeOp))) {
368 this->data.toDelete.fieldWriteOps.insert(writeOp);
369 }
370 return WalkResult::advance();
371 };
372
375 auto fieldReadHandler = [this](FieldReadOp readOp) {
376 // Check if the field ref op should be deleted in the end
377 if (this->destToSrcToClone.contains(this->data.getDef(readOp))) {
378 this->data.toDelete.fieldReadOps.insert(readOp);
379 }
380 // If the FieldReadOp was replaced/erased, must skip.
381 return combineReadChain(readOp, this->data.tables, destToSrcToClone)
382 ? WalkResult::skip()
383 : WalkResult::advance();
384 };
385
386 WalkResult walkRes = destFunc.getBody().walk<WalkOrder::PreOrder>([&](Operation *op) {
387 return TypeSwitch<Operation *, WalkResult>(op)
388 .Case<CallOp>(callHandler)
389 .Case<FieldWriteOp>(fieldWriteHandler)
390 .Case<FieldReadOp>(fieldReadHandler)
391 .Default([](Operation *) { return WalkResult::advance(); });
392 });
393
394 return failure(walkRes.wasInterrupted());
395 }
396 };
397
398 class ConstrainImpl : public ImplBase {
399 using ImplBase::ImplBase;
400
401 FieldRefOpInterface getSelfRefField(CallOp callOp) override {
402 LLVM_DEBUG({ llvm::dbgs() << "[ConstrainImpl::getSelfRefField] " << callOp << '\n'; });
403
404 // The typical pattern is to read a struct instance from a field and then call "constrain()"
405 // on it. Get the Value passed as the "self" struct to the CallOp and determine which field it
406 // was read from in the current struct (i.e., `destStruct`).
407 FieldRefOpInterface selfFieldRef = getFieldReadThatDefinesSelfValuePassedToConstrain(callOp);
408 if (selfFieldRef &&
409 selfFieldRef.getComponent().getType() == this->data.destStruct.getType()) {
410 return selfFieldRef;
411 }
412 callOp.emitError()
413 .append(
414 "expected \"self\" parameter to \"@", FUNC_NAME_CONSTRAIN,
415 "\" to be passed a value read from a field in the current stuct."
416 )
417 .report();
418 return nullptr;
419 }
420 };
421
422 class ComputeImpl : public ImplBase {
423 using ImplBase::ImplBase;
424
425 FieldRefOpInterface getSelfRefField(CallOp callOp) override {
426 LLVM_DEBUG({ llvm::dbgs() << "[ComputeImpl::getSelfRefField] " << callOp << '\n'; });
427
428 // The typical pattern is to write the return value of "compute()" to a field in
429 // the current struct (i.e., `destStruct`).
430 // It doesn't really make sense (although there is no semantic restriction against it) to just
431 // pass the "compute()" result into another function and never write it to a field since that
432 // leaves no way for the "constrain()" function to call "constrain()" on that result struct.
433 FailureOr<FieldWriteOp> foundWrite =
434 findOpThatStoresSubcmp(callOp.getSelfValueFromCompute(), [&callOp]() {
435 return callOp.emitOpError().append("\"@", FUNC_NAME_COMPUTE, "\" ");
436 });
437 return static_cast<FieldRefOpInterface>(foundWrite.value_or(nullptr));
438 }
439
440 void processCloneBeforeInlining(FuncDefOp func) override {
441 // Within the compute function, find `CreateStructOp` with `srcStruct` type and mark them
442 // for later deletion. The deletion must occur later because these values may still have
443 // uses until ALL callees of a function have been inlined.
444 func.getBody().walk([this](CreateStructOp newStructOp) {
445 if (newStructOp.getType() == this->data.srcStruct.getType()) {
446 this->data.toDelete.newStructOps.push_back(newStructOp);
447 }
448 });
449 }
450 };
451
452 // Find any field(s) in `destStruct` whose type matches `srcStruct` (allowing any parameters, if
453 // applicable). For each such field, clone all fields from `srcStruct` into `destStruct` and cache
454 // the mapping of `destStruct` to `srcStruct` to cloned fields in the return value.
455 DestToSrcToClonedSrcInDest cloneFields() {
456 DestToSrcToClonedSrcInDest destToSrcToClone;
457
458 SymbolTable &destStructSymTable = tables.getSymbolTable(destStruct);
459 StructType srcStructType = srcStruct.getType();
460 for (FieldDefOp destField : destStruct.getFieldDefs()) {
461 if (StructType destFieldType = llvm::dyn_cast<StructType>(destField.getType())) {
462 UnificationMap unifications;
463 if (!structTypesUnify(srcStructType, destFieldType, {}, &unifications)) {
464 continue;
465 }
466 assert(unifications.empty()); // `makePlan()` reports failure earlier
467 // Mark the original `destField` for deletion
468 toDelete.fieldDefs.push_back(destField);
469 // Clone each field from 'srcStruct' into 'destStruct'. Add an entry to `destToSrcToClone`
470 // even if there are no fields in `srcStruct` so its presence can be used as a marker.
471 SrcStructFieldToCloneInDest &srcToClone = destToSrcToClone[destField];
472 std::vector<FieldDefOp> srcFields = srcStruct.getFieldDefs();
473 if (srcFields.empty()) {
474 continue;
475 }
476 OpBuilder builder(destField);
477 std::string newNameBase =
478 destField.getName().str() + ':' + BuildShortTypeString::from(destFieldType);
479 for (FieldDefOp srcField : srcFields) {
480 DestCloneOfSrcStructField newF = llvm::cast<FieldDefOp>(builder.clone(*srcField));
481 newF.setName(builder.getStringAttr(newNameBase + '+' + newF.getName()));
482 srcToClone[srcField.getSymNameAttr()] = newF;
483 // Also update the cached SymbolTable
484 destStructSymTable.insert(newF);
485 }
486 }
487 }
488 return destToSrcToClone;
489 }
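// Sketch of the effect of cloneFields() on a hypothetical pair of structs (schematic, not
// from the original file): if @Outer has a field @sub of type @Inner, and @Inner declares
// fields @f1 and @f2, then @Outer gains cloned fields
//   @"sub:!s<@Inner>+f1" and @"sub:!s<@Inner>+f2"
// with the same types as the originals, while @sub itself is only recorded in
// `toDelete.fieldDefs` and erased later in `finalizeStruct()`.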
490
492 inline LogicalResult inlineConstrainCall(const DestToSrcToClonedSrcInDest &destToSrcToClone) {
493 return ConstrainImpl(*this, destToSrcToClone)
494 .doInlining(srcStruct.getConstrainFuncOp(), destStruct.getConstrainFuncOp());
495 }
496
498 inline LogicalResult inlineComputeCall(const DestToSrcToClonedSrcInDest &destToSrcToClone) {
499 return ComputeImpl(*this, destToSrcToClone)
500 .doInlining(srcStruct.getComputeFuncOp(), destStruct.getComputeFuncOp());
501 }
502
503public:
504 StructInliner(
505 SymbolTableCollection &tbls, PendingErasure &opsToDelete, StructDefOp from, StructDefOp into
506 )
507 : tables(tbls), toDelete(opsToDelete), srcStruct(from), destStruct(into) {}
508
509 FailureOr<DestToSrcToClonedSrcInDest> doInline() {
510 LLVM_DEBUG(
511 llvm::dbgs() << "[StructInliner] merge " << srcStruct.getSymNameAttr() << " into "
512 << destStruct.getSymNameAttr() << '\n'
513 );
514
515 DestToSrcToClonedSrcInDest destToSrcToClone = cloneFields();
516 if (failed(inlineConstrainCall(destToSrcToClone)) ||
517 failed(inlineComputeCall(destToSrcToClone))) {
518 return failure(); // error already printed within doInlining()
519 }
520 return destToSrcToClone;
521 }
522};
523
524template <typename T>
525concept HasContainsOp = requires(const T &t, Operation *p) {
526 { t.contains(p) } -> std::convertible_to<bool>;
527};
528
530template <typename... PendingDeletionSets>
531 requires(HasContainsOp<PendingDeletionSets> && ...)
532class DanglingUseHandler {
533 SymbolTableCollection &tables;
534 const DestToSrcToClonedSrcInDest &destToSrcToClone;
535 std::tuple<const PendingDeletionSets &...> otherRefsToBeDeleted;
536
537public:
538 DanglingUseHandler(
539 SymbolTableCollection &symTables, const DestToSrcToClonedSrcInDest &destToSrcToCloneRef,
540 const PendingDeletionSets &...otherRefsPendingDeletion
541 )
542 : tables(symTables), destToSrcToClone(destToSrcToCloneRef),
543 otherRefsToBeDeleted(otherRefsPendingDeletion...) {}
544
550 LogicalResult handle(Operation *op) const {
551 if (op->use_empty()) {
552 return success(); // safe to erase
553 }
554
555 LLVM_DEBUG({
556 llvm::dbgs() << "[DanglingUseHandler::handle] op: " << *op << '\n';
557 llvm::dbgs() << "[DanglingUseHandler::handle] in function: "
558 << op->getParentOfType<FuncDefOp>() << '\n';
559 });
560 for (OpOperand &use : llvm::make_early_inc_range(op->getUses())) {
561 if (CallOp c = llvm::dyn_cast<CallOp>(use.getOwner())) {
562 if (failed(handleUseInCallOp(use, c, op))) {
563 return failure();
564 }
565 } else {
566 Operation *user = use.getOwner();
567 // Report an error for any user other than some field ref that will be deleted anyway.
568 if (!opWillBeDeleted(user)) {
569 return op->emitOpError()
570 .append(
571 "with use in '", user->getName().getStringRef(),
572 "' is not (currently) supported by this pass."
573 )
574 .attachNote(user->getLoc())
575 .append("used by this operation");
576 }
577 }
578 }
579 // Ensure that all users of the 'op' were deleted above, or will be per 'otherRefsToBeDeleted'.
580 if (!op->use_empty()) {
581 for (Operation *user : op->getUsers()) {
582 if (!opWillBeDeleted(user)) {
583 llvm::errs() << "Op has remaining use(s) that could not be removed: " << *op << '\n';
584 llvm_unreachable("Expected all uses to be removed");
585 }
586 }
587 }
588 return success();
589 }
590
591private:
592 inline LogicalResult handleUseInCallOp(OpOperand &use, CallOp inCall, Operation *origin) const {
593 LLVM_DEBUG(
594 llvm::dbgs() << "[DanglingUseHandler::handleUseInCallOp] use in call: " << inCall << '\n'
595 );
596 unsigned argIdx = use.getOperandNumber() - inCall.getArgOperands().getBeginOperandIndex();
597 LLVM_DEBUG(
598 llvm::dbgs() << "[DanglingUseHandler::handleUseInCallOp] at index: " << argIdx << '\n'
599 );
600
601 auto tgtFuncRes = inCall.getCalleeTarget(tables);
602 if (failed(tgtFuncRes)) {
603 return origin
604 ->emitOpError("as argument to an unknown function is not supported by this pass.")
605 .attachNote(inCall.getLoc())
606 .append("used by this call");
607 }
608 FuncDefOp tgtFunc = tgtFuncRes->get();
609 LLVM_DEBUG(
610 llvm::dbgs() << "[DanglingUseHandler::handleUseInCallOp] call target: " << tgtFunc << '\n'
611 );
612 if (tgtFunc.isExternal()) {
613 // Those without a body (i.e. external implementation) present a problem because LLZK does
614 // not define a memory layout for the external implementation to interpret the struct.
615 return origin
616 ->emitOpError("as argument to a no-body free function is not supported by this pass.")
617 .attachNote(inCall.getLoc())
618 .append("used by this call");
619 }
620
621 FieldRefOpInterface paramFromField = TypeSwitch<Operation *, FieldRefOpInterface>(origin)
622 .template Case<FieldReadOp>([](auto p) { return p; })
623 .template Case<CreateStructOp>([](auto p) {
624 return findOpThatStoresSubcmp(p, [&p]() { return p.emitOpError(); }).value_or(nullptr);
625 }).Default([](Operation *p) {
626 llvm::errs() << "Encountered unexpected op: "
627 << (p ? p->getName().getStringRef() : "<<null>>") << '\n';
628 llvm_unreachable("Unexpected op kind");
629 return nullptr;
630 });
631 LLVM_DEBUG({
632 llvm::dbgs() << "[DanglingUseHandler::handleUseInCallOp] field ref op for param: "
633 << (paramFromField ? debug::toStringOne(paramFromField) : "<<null>>") << '\n';
634 });
635 if (!paramFromField) {
636 return failure(); // error already printed within findOpThatStoresSubcmp()
637 }
638 const SrcStructFieldToCloneInDest &newFields =
639 destToSrcToClone.at(getDef(tables, paramFromField));
640 LLVM_DEBUG({
641 llvm::dbgs() << "[DanglingUseHandler::handleUseInCallOp] fields to split: "
642 << debug::toStringList(newFields) << '\n';
643 });
644
645 // Convert the FuncDefOp side first (to use the easier builder for the new CallOp).
646 splitFunctionParam(tgtFunc, argIdx, newFields);
647 LLVM_DEBUG({
648 llvm::dbgs() << "[DanglingUseHandler::handleUseInCallOp] UPDATED call target: " << tgtFunc
649 << '\n';
650 llvm::dbgs() << "[DanglingUseHandler::handleUseInCallOp] UPDATED call target type: "
651 << tgtFunc.getFunctionType() << '\n';
652 });
653
654 // Convert the CallOp side. Add a FieldReadOp for each value from the struct and pass them
655 // individually in place of the struct parameter.
656 OpBuilder builder(inCall);
657 SmallVector<Value> splitArgs;
658 // Before the CallOp, insert a read from every new field. These Values will replace the
659 // original argument in the CallOp.
660 Value originalBaseVal = paramFromField.getComponent();
661 for (auto [origName, newFieldRef] : newFields) {
662 splitArgs.push_back(builder.create<FieldReadOp>(
663 inCall.getLoc(), newFieldRef.getType(), originalBaseVal, newFieldRef.getNameAttr()
664 ));
665 }
666 // Generate the new argument list from the original but replace 'argIdx'
667 SmallVector<Value> newOpArgs(inCall.getArgOperands());
668 newOpArgs.insert(
669 newOpArgs.erase(newOpArgs.begin() + argIdx), splitArgs.begin(), splitArgs.end()
670 );
671 // Create the new CallOp, replace uses of the old with the new, delete the old
672 inCall.replaceAllUsesWith(builder.create<CallOp>(
673 inCall.getLoc(), tgtFunc, CallOp::toVectorOfValueRange(inCall.getMapOperands()),
674 inCall.getNumDimsPerMapAttr(), newOpArgs
675 ));
676 inCall.erase();
677 LLVM_DEBUG({
678 llvm::dbgs() << "[DanglingUseHandler::handleUseInCallOp] UPDATED function: "
679 << origin->getParentOfType<FuncDefOp>() << '\n';
680 });
681 return success();
682 }
683
685 inline bool opWillBeDeleted(Operation *otherOp) const {
686 return std::apply([&](const auto &...sets) {
687 return ((sets.contains(otherOp)) || ...);
688 }, otherRefsToBeDeleted);
689 }
690
695 static void splitFunctionParam(
696 FuncDefOp func, unsigned paramIdx, const SrcStructFieldToCloneInDest &nameToNewField
697 ) {
698 class Impl : public FunctionTypeConverter {
699 unsigned inputIdx;
700 const SrcStructFieldToCloneInDest &newFields;
701
702 public:
703 Impl(unsigned paramIdx, const SrcStructFieldToCloneInDest &nameToNewField)
704 : inputIdx(paramIdx), newFields(nameToNewField) {}
705
706 protected:
707 SmallVector<Type> convertInputs(ArrayRef<Type> origTypes) override {
708 SmallVector<Type> newTypes(origTypes);
709 auto it = newTypes.erase(newTypes.begin() + inputIdx);
710 for (auto [_, newField] : newFields) {
711 it = newTypes.insert(it, newField.getType());
712 ++it;
713 }
714 return newTypes;
715 }
716 SmallVector<Type> convertResults(ArrayRef<Type> origTypes) override {
717 return SmallVector<Type>(origTypes);
718 }
719 ArrayAttr convertInputAttrs(ArrayAttr origAttrs, SmallVector<Type>) override {
720 if (origAttrs) {
721 // Replicate the value at `origAttrs[inputIdx]` so there are `newFields.size()` copies in total.
722 SmallVector<Attribute> newAttrs(origAttrs.getValue());
723 newAttrs.insert(newAttrs.begin() + inputIdx, newFields.size() - 1, origAttrs[inputIdx]);
724 return ArrayAttr::get(origAttrs.getContext(), newAttrs);
725 }
726 return nullptr;
727 }
728 ArrayAttr convertResultAttrs(ArrayAttr origAttrs, SmallVector<Type>) override {
729 return origAttrs;
730 }
731
732 void processBlockArgs(Block &entryBlock, RewriterBase &rewriter) override {
733 Value oldStructRef = entryBlock.getArgument(inputIdx);
734
735 // Insert new Block arguments, one per field, following the original one. Keep a map
736 // of field name to the associated block argument for replacing FieldReadOp.
737 llvm::StringMap<BlockArgument> fieldNameToNewArg;
738 Location loc = oldStructRef.getLoc();
739 unsigned idx = inputIdx;
740 for (auto [fieldName, newField] : newFields) {
741 // note: pre-increment so the original to be erased is still at `inputIdx`
742 BlockArgument newArg = entryBlock.insertArgument(++idx, newField.getType(), loc);
743 fieldNameToNewArg[fieldName] = newArg;
744 }
745
746 // Find all field reads from the original Block argument and replace uses of those
747 // reads with the appropriate new Block argument.
748 for (OpOperand &oldBlockArgUse : llvm::make_early_inc_range(oldStructRef.getUses())) {
749 if (FieldReadOp readOp = llvm::dyn_cast<FieldReadOp>(oldBlockArgUse.getOwner())) {
750 if (readOp.getComponent() == oldStructRef) {
751 BlockArgument newArg = fieldNameToNewArg.at(readOp.getFieldName());
752 rewriter.replaceAllUsesWith(readOp, newArg);
753 rewriter.eraseOp(readOp);
754 continue;
755 }
756 }
757 // Currently, there's no other way in which a StructType parameter can be used.
758 llvm::errs() << "Unexpected use of " << oldBlockArgUse.get() << " in "
759 << *oldBlockArgUse.getOwner() << '\n';
760 llvm_unreachable("Not yet implemented");
761 }
762
763 // Delete the original Block argument
764 entryBlock.eraseArgument(inputIdx);
765 }
766 };
767 IRRewriter rewriter(func.getContext());
768 Impl(paramIdx, nameToNewField).convert(func, rewriter);
769 }
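// Sketch of splitFunctionParam() on a hypothetical free function (schematic signatures):
//   before: a parameter %a : !struct.type<@Inner> at `paramIdx`
//   after:  one parameter per cloned field of @Inner, in the same position, with the cloned
//           fields' types (e.g. %a_f1 : !felt.type, %a_f2 : !felt.type)
// Inside the body, `struct.readf %a[@f1]`-style reads of the old argument are replaced by the
// matching new block argument in processBlockArgs(), and the old argument is then erased.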
770};
771
772static LogicalResult finalizeStruct(
773 SymbolTableCollection &tables, StructDefOp caller, PendingErasure &&toDelete,
774 DestToSrcToClonedSrcInDest &&destToSrcToClone
775) {
776 LLVM_DEBUG({
777 llvm::dbgs() << "[finalizeStruct] dumping 'caller' struct before compressing chains:\n";
778 caller.print(llvm::dbgs(), OpPrintingFlags().assumeVerified());
779 llvm::dbgs() << '\n';
780 });
781
782 // Compress chains of reads that result after inlining multiple callees.
783 caller.getConstrainFuncOp().walk([&tables, &destToSrcToClone](FieldReadOp readOp) {
784 combineReadChain(readOp, tables, destToSrcToClone);
785 });
786 FuncDefOp computeFn = caller.getComputeFuncOp();
787 Value computeSelfVal = computeFn.getSelfValueFromCompute();
788 auto res = computeFn.walk([&tables, &destToSrcToClone, &computeSelfVal](FieldReadOp readOp) {
789 combineReadChain(readOp, tables, destToSrcToClone);
790 // Reads targeting the "self" value from "compute()" are not eligible for the compression
791 // provided in `combineNewThenReadChain()` and will actually cause an error within.
792 if (readOp.getComponent() == computeSelfVal) {
793 return WalkResult::advance();
794 }
795 LogicalResult innerRes = combineNewThenReadChain(readOp, tables, destToSrcToClone);
796 return failed(innerRes) ? WalkResult::interrupt() : WalkResult::advance();
797 });
798 if (res.wasInterrupted()) {
799 return failure(); // error already printed within combineNewThenReadChain()
800 }
801
802 LLVM_DEBUG({
803 llvm::dbgs() << "[finalizeStruct] dumping 'caller' struct before deleting ops:\n";
804 caller.print(llvm::dbgs(), OpPrintingFlags().assumeVerified());
805 llvm::dbgs() << '\n';
806 llvm::dbgs() << "[finalizeStruct] ops marked for deletion:\n";
807 for (Operation *op : toDelete.fieldReadOps) {
808 llvm::dbgs().indent(2) << *op << '\n';
809 }
810 for (Operation *op : toDelete.fieldWriteOps) {
811 llvm::dbgs().indent(2) << *op << '\n';
812 }
813 for (CreateStructOp op : toDelete.newStructOps) {
814 llvm::dbgs().indent(2) << op << '\n';
815 }
816 for (DestFieldWithSrcStructType op : toDelete.fieldDefs) {
817 llvm::dbgs().indent(2) << op << '\n';
818 }
819 });
820
821 // Handle remaining uses of CreateStructOp before deleting anything because this process
822 // needs to be able to find the FieldWriteOp instances that store the result of these ops.
823 DanglingUseHandler<SmallPtrSet<Operation *, 8>, SmallPtrSet<Operation *, 8>> useHandler(
824 tables, destToSrcToClone, toDelete.fieldWriteOps, toDelete.fieldReadOps
825 );
826 for (CreateStructOp op : toDelete.newStructOps) {
827 if (failed(useHandler.handle(op))) {
828 return failure(); // error already printed within handle()
829 }
830 }
831 // Next, to avoid "still has uses" errors, must erase FieldWriteOp first, then FieldReadOp, before
832 // erasing the CreateStructOp or FieldDefOp.
833 for (Operation *op : toDelete.fieldWriteOps) {
834 if (failed(useHandler.handle(op))) {
835 return failure(); // error already printed within handle()
836 }
837 op->erase();
838 }
839 for (Operation *op : toDelete.fieldReadOps) {
840 if (failed(useHandler.handle(op))) {
841 return failure(); // error already printed within handle()
842 }
843 op->erase();
844 }
845 for (CreateStructOp op : toDelete.newStructOps) {
846 op.erase();
847 }
848 // Finally, erase FieldDefOp via SymbolTable so table itself is updated too.
849 SymbolTable &callerSymTab = tables.getSymbolTable(caller);
850 for (DestFieldWithSrcStructType op : toDelete.fieldDefs) {
851 assert(op.getParentOp() == caller); // using correct SymbolTable
852 callerSymTab.erase(op);
853 }
854
855 return success();
856}
857
858} // namespace
859
860LogicalResult performInlining(SymbolTableCollection &tables, InliningPlan &plan) {
861 for (auto &[caller, callees] : plan) {
862 // Cache operations that should be deleted but must wait until all callees are processed
863 // to ensure that all uses of the values defined by these operations are replaced.
864 PendingErasure toDelete;
865 // Cache old-to-new field mappings across all callees inlined for the current struct.
866 DestToSrcToClonedSrcInDest aggregateReplacements;
867 // Inline callees/subcomponents of the current struct
868 for (StructDefOp toInline : callees) {
869 FailureOr<DestToSrcToClonedSrcInDest> res =
870 StructInliner(tables, toDelete, toInline, caller).doInline();
871 if (failed(res)) {
872 return failure();
873 }
874 // Add current field replacements to the aggregate
875 for (auto &[k, v] : res.value()) {
876 assert(!aggregateReplacements.contains(k) && "duplicate not possible");
877 aggregateReplacements[k] = std::move(v);
878 }
879 }
880 // Complete steps to finalize/cleanup the caller
881 LogicalResult finalizeResult =
882 finalizeStruct(tables, caller, std::move(toDelete), std::move(aggregateReplacements));
883 if (failed(finalizeResult)) {
884 return failure();
885 }
886 }
887 return success();
888}
889
890namespace {
891
892class InlineStructsPass : public llzk::impl::InlineStructsPassBase<InlineStructsPass> {
893 static uint64_t complexity(FuncDefOp f) {
894 uint64_t complexity = 0;
895 f.getBody().walk([&complexity](Operation *op) {
896 if (llvm::isa<felt::MulFeltOp>(op)) {
897 ++complexity;
898 } else if (auto ee = llvm::dyn_cast<constrain::EmitEqualityOp>(op)) {
899 complexity += computeEmitEqCardinality(ee.getLhs().getType());
900 } else if (auto ec = llvm::dyn_cast<constrain::EmitContainmentOp>(op)) {
901 // TODO: increment based on dimension sizes in the operands
902 // Pending update to implementation/semantics of EmitContainmentOp.
903 ++complexity;
904 }
905 });
906 return complexity;
907 }
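// Rough illustration of the metric above (assumptions noted): a constrain function whose body
// contains two felt multiplication ops and one emit-equality over a scalar felt value would
// score 2 + computeEmitEqCardinality(felt type), i.e. 3 if the cardinality of a scalar type
// is 1 (an assumption here; see computeEmitEqCardinality for the exact rule).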
908
909 static FailureOr<FuncDefOp>
910 getIfStructConstrain(const SymbolUseGraphNode *node, SymbolTableCollection &tables) {
911 auto lookupRes = node->lookupSymbol(tables, false);
912 assert(succeeded(lookupRes) && "graph contains node with invalid path");
913 if (FuncDefOp f = llvm::dyn_cast<FuncDefOp>(lookupRes->get())) {
914 if (f.isStructConstrain()) {
915 return f;
916 }
917 }
918 return failure();
919 }
920
923 static inline StructDefOp getParentStruct(FuncDefOp func) {
924 assert(func.isStructConstrain()); // pre-condition
925 FailureOr<StructDefOp> currentNodeParentStruct = getParentOfType<StructDefOp>(func);
926 assert(succeeded(currentNodeParentStruct)); // follows from ODS definition
927 return currentNodeParentStruct.value();
928 }
929
931 inline bool exceedsMaxComplexity(uint64_t check) {
932 return maxComplexity > 0 && check > maxComplexity;
933 }
934
937 static inline bool canInline(FuncDefOp currentFunc, FuncDefOp successorFunc) {
938 // Find CallOp for `successorFunc` within `currentFunc` and check the condition used by
939 // `ConstrainImpl::getSelfRefField()`.
940 //
941 // Implementation Note: There is a possibility that the "self" value is not from a field read.
942 // It could be a parameter to the current/destination function or a global read. Inlining a
943 // struct stored to a global would probably require splitting up the global into multiple
944 // globals, one for each field in the successor/source struct. That may not be a good idea. The
945 // parameter case could be handled, but it will not have a mapping in `destToSrcToClone` in
946 // `getSelfRefField()` and new fields will still need to be added. They can be prefixed with the
947 // parameter index since there is no current field name to use as the unique prefix. Handling
948 // that would require refactoring the inlining process a bit.
949 WalkResult res = currentFunc.walk([](CallOp c) {
950 return getFieldReadThatDefinesSelfValuePassedToConstrain(c)
951 ? WalkResult::interrupt() // use interrupt to indicate success
952 : WalkResult::advance();
953 });
954 LLVM_DEBUG({
955 llvm::dbgs() << "[canInline] " << successorFunc.getFullyQualifiedName() << " into "
956 << currentFunc.getFullyQualifiedName() << "? " << res.wasInterrupted() << '\n';
957 });
958 return res.wasInterrupted();
959 }
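// The call pattern checked above, schematically (hypothetical names, abbreviated syntax):
//   %sub = struct.readf %self[@sub] : <@Outer>, !struct.type<@Inner>
//   function.call @Inner::@constrain(%sub, ...)
// i.e. the "self" argument of the constrain call is produced by a field read, which is the
// same condition later relied upon by ConstrainImpl::getSelfRefField().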
960
965 inline FailureOr<InliningPlan>
966 makePlan(const SymbolUseGraph &useGraph, SymbolTableCollection &tables) {
967 LLVM_DEBUG({
968 llvm::dbgs() << "Running InlineStructsPass with max complexity ";
969 if (maxComplexity == 0) {
970 llvm::dbgs() << "unlimited";
971 } else {
972 llvm::dbgs() << maxComplexity;
973 }
974 llvm::dbgs() << '\n';
975 });
976 InliningPlan retVal;
977 DenseMap<const SymbolUseGraphNode *, uint64_t> complexityMemo;
978
979 // NOTE: The assumption that the use graph has no cycles allows `complexityMemo` to only
980 // store the result for relevant nodes and assume nodes without a mapped value are `0`. This
981 // must be true of the "compute"/"constrain" function uses and field defs because circuits
982 // must be acyclic. This is likely true for the symbol use graph in general, but if a
983 // counterexample is ever found, the algorithm below must be re-evaluated.
984 assert(!hasCycle(&useGraph));
985
986 // Traverse "constrain" function nodes to compute their complexity and an inlining plan. Use
987 // post-order traversal so the complexity of all successor nodes is computed before computing
988 // the current node's complexity.
989 for (const SymbolUseGraphNode *currentNode : llvm::post_order(&useGraph)) {
990 LLVM_DEBUG(llvm::dbgs() << "\ncurrentNode = " << currentNode->toString());
991 if (!currentNode->isRealNode()) {
992 continue;
993 }
994 if (currentNode->isStructParam()) {
995 // Try to get the location of the StructDefOp to report an error.
996 Operation *lookupFrom = currentNode->getSymbolPathRoot().getOperation();
997 SymbolRefAttr prefix = getPrefixAsSymbolRefAttr(currentNode->getSymbolPath());
998 auto res = lookupSymbolIn<StructDefOp>(tables, prefix, lookupFrom, lookupFrom, false);
999 // If that lookup didn't work for some reason, report at the path root location.
1000 Operation *reportLoc = succeeded(res) ? res->get() : lookupFrom;
1001 return reportLoc->emitError("Cannot inline structs with parameters.");
1002 }
1003 FailureOr<FuncDefOp> currentFuncOpt = getIfStructConstrain(currentNode, tables);
1004 if (failed(currentFuncOpt)) {
1005 continue;
1006 }
1007 FuncDefOp currentFunc = currentFuncOpt.value();
1008 uint64_t currentComplexity = complexity(currentFunc);
1009 // If the current complexity is already too high, store it and continue.
1010 if (exceedsMaxComplexity(currentComplexity)) {
1011 complexityMemo[currentNode] = currentComplexity;
1012 continue;
1013 }
1014 // Otherwise, make a plan that adds successor "constrain" functions unless the
1015 // complexity becomes too high by adding that successor.
1016 SmallVector<StructDefOp> successorsToMerge;
1017 for (const SymbolUseGraphNode *successor : currentNode->successorIter()) {
1018 LLVM_DEBUG(llvm::dbgs().indent(2) << "successor: " << successor->toString() << '\n');
1019 // Note: all "constrain" function nodes will have a value, and all other nodes will not.
1020 auto memoResult = complexityMemo.find(successor);
1021 if (memoResult == complexityMemo.end()) {
1022 continue; // inner loop
1023 }
1024 uint64_t sComplexity = memoResult->second;
1025 assert(
1026 sComplexity <= (std::numeric_limits<uint64_t>::max() - currentComplexity) &&
1027 "addition will overflow"
1028 );
1029 uint64_t potentialComplexity = currentComplexity + sComplexity;
1030 if (!exceedsMaxComplexity(potentialComplexity)) {
1031 currentComplexity = potentialComplexity;
1032 FailureOr<FuncDefOp> successorFuncOpt = getIfStructConstrain(successor, tables);
1033 assert(succeeded(successorFuncOpt)); // follows from the Note above
1034 FuncDefOp successorFunc = successorFuncOpt.value();
1035 if (canInline(currentFunc, successorFunc)) {
1036 successorsToMerge.push_back(getParentStruct(successorFunc));
1037 }
1038 }
1039 }
1040 complexityMemo[currentNode] = currentComplexity;
1041 if (!successorsToMerge.empty()) {
1042 retVal.emplace_back(getParentStruct(currentFunc), std::move(successorsToMerge));
1043 }
1044 }
1045 LLVM_DEBUG({
1046 llvm::dbgs() << "-----------------------------------------------------------------\n";
1047 llvm::dbgs() << "InlineStructsPass plan:\n";
1048 for (auto &[caller, callees] : retVal) {
1049 llvm::dbgs().indent(2) << "inlining the following into \"" << caller.getSymName() << "\"\n";
1050 for (StructDefOp c : callees) {
1051 llvm::dbgs().indent(4) << "\"" << c.getSymName() << "\"\n";
1052 }
1053 }
1054 llvm::dbgs() << "-----------------------------------------------------------------\n";
1055 });
1056 return retVal;
1057 }
1058
1059public:
1060 void runOnOperation() override {
1061 const SymbolUseGraph &useGraph = getAnalysis<SymbolUseGraph>();
1062 LLVM_DEBUG(useGraph.dumpToDotFile());
1063
1064 SymbolTableCollection tables;
1065 FailureOr<InliningPlan> plan = makePlan(useGraph, tables);
1066 if (failed(plan)) {
1067 signalPassFailure(); // error already printed w/in makePlan()
1068 return;
1069 }
1070
1071 if (failed(performInlining(tables, plan.value()))) {
1072 signalPassFailure();
1073 return;
1074 };
1075 }
1076};
1077
1078} // namespace
1079
1080std::unique_ptr<mlir::Pass> llzk::createInlineStructsPass() {
1081 return std::make_unique<InlineStructsPass>();
1082};
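// Usage sketch (an assumption about the surrounding driver, not part of this file): the
// factory above is typically handed to a standard mlir::PassManager, e.g.
//   mlir::PassManager pm(&context);
//   pm.addPass(llzk::createInlineStructsPass());
//   if (mlir::failed(pm.run(moduleOp))) { /* planning or inlining failed */ }
// where `context` and `moduleOp` are a hypothetical MLIRContext and loaded ModuleOp.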