diff --git a/lib/SILOptimizer/Mandatory/SemanticARCOpts.cpp b/lib/SILOptimizer/Mandatory/SemanticARCOpts.cpp
index 9915714f5c575..0640a2af98fb1 100644
--- a/lib/SILOptimizer/Mandatory/SemanticARCOpts.cpp
+++ b/lib/SILOptimizer/Mandatory/SemanticARCOpts.cpp
@@ -17,6 +17,7 @@
 #include "swift/SIL/DebugUtils.h"
 #include "swift/SIL/MemAccessUtils.h"
 #include "swift/SIL/OwnershipUtils.h"
+#include "swift/SIL/Projection.h"
 #include "swift/SIL/SILArgument.h"
 #include "swift/SIL/SILBuilder.h"
 #include "swift/SIL/SILInstruction.h"
@@ -150,6 +151,129 @@ LiveRange::LiveRange(SILValue value)
   }
 }
 
+//===----------------------------------------------------------------------===//
+//                       Address Written To Analysis
+//===----------------------------------------------------------------------===//
+
+namespace {
+
+/// A simple analysis that checks whether a specific def (in our case an inout
+/// argument) is ever written to. It is conservative, local, and processes
+/// recursively downwards from def->use.
+struct IsAddressWrittenToDefUseAnalysis {
+  llvm::SmallDenseMap<SILValue, bool, 8> isWrittenToCache;
+
+  bool operator()(SILValue value) {
+    auto iter = isWrittenToCache.try_emplace(value, true);
+
+    // If we are already in the map, just return that.
+    if (!iter.second)
+      return iter.first->second;
+
+    // Otherwise, compute our value, cache it, and return.
+    bool result = isWrittenToHelper(value);
+    iter.first->second = result;
+    return result;
+  }
+
+private:
+  bool isWrittenToHelper(SILValue value);
+};
+
+} // end anonymous namespace
+
+bool IsAddressWrittenToDefUseAnalysis::isWrittenToHelper(
+    SILValue initialValue) {
+  SmallVector<Operand *, 8> worklist(initialValue->getUses());
+  while (!worklist.empty()) {
+    auto *op = worklist.pop_back_val();
+    SILInstruction *user = op->getUser();
+
+    if (Projection::isAddressProjection(user) ||
+        isa<ProjectBlockStorageInst>(user)) {
+      for (SILValue r : user->getResults()) {
+        llvm::copy(r->getUses(), std::back_inserter(worklist));
+      }
+      continue;
+    }
+
+    if (auto *oeai = dyn_cast<OpenExistentialAddrInst>(user)) {
+      // Mutable access!
+      if (oeai->getAccessKind() != OpenedExistentialAccess::Immutable) {
+        return true;
+      }
+
+      // Otherwise, look through it and continue.
+      llvm::copy(oeai->getUses(), std::back_inserter(worklist));
+      continue;
+    }
+
+    // load_borrow and incidental uses are fine as well.
+    if (isa<LoadBorrowInst>(user) || isIncidentalUse(user)) {
+      continue;
+    }
+
+    // Look through immutable begin_access.
+    if (auto *bai = dyn_cast<BeginAccessInst>(user)) {
+      // If we do not have a read, return true.
+      if (bai->getAccessKind() != SILAccessKind::Read) {
+        return true;
+      }
+
+      // Otherwise, add the users to the worklist and continue.
+      llvm::copy(bai->getUses(), std::back_inserter(worklist));
+      continue;
+    }
+
+    // As long as we do not have a load [take], we are fine.
+    if (auto *li = dyn_cast<LoadInst>(user)) {
+      if (li->getOwnershipQualifier() == LoadOwnershipQualifier::Take) {
+        return true;
+      }
+      continue;
+    }
+
+    // If we have a FullApplySite, see if we use the value as an
+    // indirect_guaranteed parameter. If we use it as inout, we would need
+    // interprocedural analysis that we do not perform here.
+    if (auto fas = FullApplySite::isa(user)) {
+      if (fas.getArgumentConvention(*op) ==
+          SILArgumentConvention::Indirect_In_Guaranteed)
+        continue;
+
+      // Otherwise, be conservative and return true.
+      return true;
+    }
+
+    // A copy_addr that only reads from our address is just a load.
+    if (auto *cai = dyn_cast<CopyAddrInst>(user)) {
+      // If our value is the destination, this is a write.
+      if (cai->getDest() == op->get()) {
+        return true;
+      }
+
+      // Ok, so we are the source by process of elimination. Make sure we are
+      // not being taken.
+      if (cai->isTakeOfSrc()) {
+        return true;
+      }
+
+      // Otherwise, we are safe and can continue.
+      continue;
+    }
+
+    // If we did not recognize the user, conservatively assume that the
+    // address is written to.
+    LLVM_DEBUG(llvm::dbgs()
+               << "Function: " << user->getFunction()->getName() << "\n");
+    LLVM_DEBUG(llvm::dbgs() << "Value: " << op->get());
+    LLVM_DEBUG(llvm::dbgs() << "Unknown instruction!: " << *user);
+    return true;
+  }
+
+  // Ok, we finished our worklist and this address is never written to.
+  return false;
+}
+
 //===----------------------------------------------------------------------===//
 //                               Implementation
 //===----------------------------------------------------------------------===//
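Editor's note: the analysis above is a memoized def->use worklist walk. Below is a minimal standalone sketch of that pattern, not the compiler's actual types; `Value` and `Use` are hypothetical stand-ins for `SILValue` and `Operand`. The key detail it mirrors is seeding the cache with the conservative answer before computing, so a re-entrant query for the same value terminates safely.

```cpp
// Minimal sketch of the memoized worklist analysis, with hypothetical types.
#include <unordered_map>
#include <vector>

struct Use;

struct Value {
  std::vector<Use *> uses; // outgoing def->use edges
};

struct Use {
  Value *projectedResult = nullptr; // non-null if the user forwards the address
  bool isWrite = false;             // true if the user mutates the memory
};

struct IsWrittenToAnalysis {
  std::unordered_map<Value *, bool> cache;

  bool operator()(Value *v) {
    // Seed the cache with the conservative answer `true` before computing,
    // mirroring try_emplace(value, true) above: a re-entrant query for the
    // same value terminates with the safe answer instead of looping.
    auto [it, inserted] = cache.try_emplace(v, true);
    if (!inserted)
      return it->second; // cache hit
    bool result = isWrittenToHelper(v);
    it->second = result;
    return result;
  }

private:
  bool isWrittenToHelper(Value *v) {
    std::vector<Use *> worklist(v->uses.begin(), v->uses.end());
    while (!worklist.empty()) {
      Use *u = worklist.back();
      worklist.pop_back();
      // Any mutating use anywhere poisons the whole def (flow-insensitive).
      if (u->isWrite)
        return true;
      // Look through projections by also visiting their results' uses.
      if (u->projectedResult)
        worklist.insert(worklist.end(), u->projectedResult->uses.begin(),
                        u->projectedResult->uses.end());
    }
    return false; // exhausted all transitive uses without seeing a write
  }
};
```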
@@ -185,6 +309,7 @@ struct SemanticARCOptVisitor
   SILFunction &F;
   Optional<DeadEndBlocks> TheDeadEndBlocks;
   ValueLifetimeAnalysis::Frontier lifetimeFrontier;
+  IsAddressWrittenToDefUseAnalysis isAddressWrittenToDefUseAnalysis;
 
   explicit SemanticARCOptVisitor(SILFunction &F) : F(F) {}
 
@@ -712,12 +837,15 @@ class StorageGuaranteesLoadVisitor
       return answer(false);
     }
 
+    // If we have an inout parameter that is never actually written to, answer
+    // false.
+    if (arg->getKnownParameterInfo().isIndirectMutating()) {
+      return answer(ARCOpt.isAddressWrittenToDefUseAnalysis(arg));
+    }
+
     // TODO: This should be extended:
     //
-    // 1. We should be able to analyze inout arguments and see if the inout
-    // argument is never actually written to in a flow insensitive way.
-    //
-    // 2. We should be able to analyze in arguments and see if they are only
+    // 1. We should be able to analyze in arguments and see if they are only
     // ever destroyed at the end of the function. In such a case, we may be
     // able to also promote load [copy] from such args to load_borrow.
     return answer(true);
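Editor's note: conceptually, a `load [copy]` from an address-typed function argument can become a `load_borrow` only when the underlying storage is known immutable while the loaded value is used. Below is a hypothetical, simplified model of the decision the visitor makes above; the enum and function names are illustrative, not the actual SIL APIs.

```cpp
// Hypothetical model of the visitor's answer for function arguments.
// In the real code, answer(true) means "the storage may be written to,
// so keep the load [copy]".
enum class ArgConvention { IndirectInGuaranteed, IndirectInout, Other };

bool storageMayBeWritten(ArgConvention conv, bool inoutIsEverWrittenTo) {
  if (conv == ArgConvention::IndirectInGuaranteed)
    return false; // caller guarantees immutability for the entire call
  if (conv == ArgConvention::IndirectInout)
    return inoutIsEverWrittenTo; // new in this patch: ask the def->use analysis
  return true; // conservative default: assume the storage can be written
}
```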
diff --git a/test/SILOptimizer/semantic-arc-opts.sil b/test/SILOptimizer/semantic-arc-opts.sil
index 2c4e0e9bafeed..bb6824f988d66 100644
--- a/test/SILOptimizer/semantic-arc-opts.sil
+++ b/test/SILOptimizer/semantic-arc-opts.sil
@@ -1085,3 +1085,58 @@
   %9999 = tuple()
   return %9999 : $()
 }
+
+// CHECK-LABEL: sil [ossa] @inout_argument_never_written_to_1 : $@convention(thin) (@inout NativeObjectPair) -> () {
+// CHECK-NOT: load [copy]
+// CHECK: load_borrow
+// CHECK-NOT: load [copy]
+// CHECK: } // end sil function 'inout_argument_never_written_to_1'
+sil [ossa] @inout_argument_never_written_to_1 : $@convention(thin) (@inout NativeObjectPair) -> () {
+bb0(%0 : $*NativeObjectPair):
+  %2 = load [copy] %0 : $*NativeObjectPair
+  (%3, %4) = destructure_struct %2 : $NativeObjectPair
+
+  %5 = function_ref @guaranteed_user : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
+  apply %5(%3) : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
+  apply %5(%4) : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
+
+  destroy_value %3 : $Builtin.NativeObject
+  destroy_value %4 : $Builtin.NativeObject
+
+  %9999 = tuple()
+  return %9999 : $()
+}
+
+// CHECK-LABEL: sil [ossa] @inout_argument_never_written_to_2 : $@convention(thin) (@inout NativeObjectPair) -> () {
+// CHECK-NOT: load [copy]
+// CHECK: load_borrow
+// CHECK-NOT: load [copy]
+// CHECK: } // end sil function 'inout_argument_never_written_to_2'
+sil [ossa] @inout_argument_never_written_to_2 : $@convention(thin) (@inout NativeObjectPair) -> () {
+bb0(%0 : $*NativeObjectPair):
+  %2 = struct_element_addr %0 : $*NativeObjectPair, #NativeObjectPair.obj1
+  %3 = load [copy] %2 : $*Builtin.NativeObject
+  %5 = function_ref @guaranteed_user : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
+  apply %5(%3) : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
+  destroy_value %3 : $Builtin.NativeObject
+  %9999 = tuple()
+  return %9999 : $()
+}
+
+// We should be able to handle this case as well, but we cannot do so until we
+// teach the ARC optimizer to ignore writes that do not occur within the
+// load [copy]'s region.
+//
+// CHECK-LABEL: sil [ossa] @inout_argument_never_written_to_3 : $@convention(thin) (@inout NativeObjectPair, @owned Builtin.NativeObject) -> () {
+// CHECK: load [copy]
+// CHECK: } // end sil function 'inout_argument_never_written_to_3'
+sil [ossa] @inout_argument_never_written_to_3 : $@convention(thin) (@inout NativeObjectPair, @owned Builtin.NativeObject) -> () {
+bb0(%0 : $*NativeObjectPair, %1 : @owned $Builtin.NativeObject):
+  %2 = struct_element_addr %0 : $*NativeObjectPair, #NativeObjectPair.obj1
+  %3 = load [copy] %2 : $*Builtin.NativeObject
+  %5 = function_ref @guaranteed_user : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
+  apply %5(%3) : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
+  destroy_value %3 : $Builtin.NativeObject
+  store %1 to [assign] %2 : $*Builtin.NativeObject
+  %9999 = tuple()
+  return %9999 : $()
+}
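Editor's note on the third test: the `store` occurs after every use of the loaded value, yet the optimization still bails, because the analysis is flow-insensitive. A small self-contained sketch of that behavior, using hypothetical types rather than the compiler's:

```cpp
// Hypothetical illustration of why inout_argument_never_written_to_3 keeps
// its load [copy]: one write anywhere in the function marks the whole
// address as written to, regardless of where it occurs relative to the load.
#include <cassert>
#include <vector>

enum class UseKind { Read, Write };

// Flow-insensitive check: any write poisons the address.
bool isWrittenTo(const std::vector<UseKind> &uses) {
  for (UseKind u : uses)
    if (u == UseKind::Write)
      return true;
  return false;
}

int main() {
  // Tests 1 and 2: only reads, so load_borrow is safe.
  assert(!isWrittenTo({UseKind::Read, UseKind::Read}));
  // Test 3: the trailing store [assign] defeats the optimization, even
  // though a flow-sensitive analysis could still permit the borrow here.
  assert(isWrittenTo({UseKind::Read, UseKind::Write}));
  return 0;
}
```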