Skip to content

[pull] master from apple:master #50

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits into from
Feb 3, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
136 changes: 132 additions & 4 deletions lib/SILOptimizer/Mandatory/SemanticARCOpts.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@
#include "swift/SIL/DebugUtils.h"
#include "swift/SIL/MemAccessUtils.h"
#include "swift/SIL/OwnershipUtils.h"
#include "swift/SIL/Projection.h"
#include "swift/SIL/SILArgument.h"
#include "swift/SIL/SILBuilder.h"
#include "swift/SIL/SILInstruction.h"
Expand Down Expand Up @@ -150,6 +151,129 @@ LiveRange::LiveRange(SILValue value)
}
}

//===----------------------------------------------------------------------===//
// Address Written To Analysis
//===----------------------------------------------------------------------===//

namespace {

/// A simple, conservative, function-local analysis that answers whether a
/// given address def (in our case an inout argument) is ever written to.
/// The walk proceeds recursively downwards from def->use and memoizes its
/// answer per value.
struct IsAddressWrittenToDefUseAnalysis {
  llvm::SmallDenseMap<SILValue, bool, 8> isWrittenToCache;

  bool operator()(SILValue value) {
    // Seed the cache with the conservative answer "written to". If the value
    // is already present, that entry is authoritative — return it.
    auto cacheResult = isWrittenToCache.try_emplace(value, true);
    auto &cachedAnswer = cacheResult.first->second;
    if (cacheResult.second) {
      // First query for this value: compute the real answer and memoize it.
      // NOTE: the helper never inserts into the cache, so the reference into
      // the map stays valid across this call.
      cachedAnswer = isWrittenToHelper(value);
    }
    return cachedAnswer;
  }

private:
  bool isWrittenToHelper(SILValue value);
};

} // end anonymous namespace

/// Walk the def->use graph rooted at \p initialValue and return true if any
/// use may write to (or take from) the underlying address.
///
/// The walk is flow-insensitive and purely local: we look through address
/// projections and read-only accesses, and conservatively return true for any
/// use we do not recognize as a pure read.
bool IsAddressWrittenToDefUseAnalysis::isWrittenToHelper(SILValue initialValue) {
  SmallVector<Operand *, 8> worklist(initialValue->getUses());
  while (!worklist.empty()) {
    auto *op = worklist.pop_back_val();
    SILInstruction *user = op->getUser();

    // Address projections (and project_block_storage) do not themselves write;
    // recurse into the uses of their results instead.
    if (Projection::isAddressProjection(user) ||
        isa<ProjectBlockStorageInst>(user)) {
      for (SILValue r : user->getResults()) {
        llvm::copy(r->getUses(), std::back_inserter(worklist));
      }
      continue;
    }

    if (auto *oeai = dyn_cast<OpenExistentialAddrInst>(user)) {
      // Mutable access! A non-immutable open may expose the address for
      // writing, so conservatively treat it as a write.
      if (oeai->getAccessKind() != OpenedExistentialAccess::Immutable) {
        return true;
      }

      // Otherwise, the open is immutable; look through it and continue.
      llvm::copy(oeai->getUses(), std::back_inserter(worklist));
      continue;
    }

    // load_borrow and incidental uses (debug info, end-of-scope markers, ...)
    // only read, so they are fine as well.
    if (isa<LoadBorrowInst>(user) || isIncidentalUse(user)) {
      continue;
    }

    // Look through read-only begin_access scopes.
    if (auto *bai = dyn_cast<BeginAccessInst>(user)) {
      // Any non-read access kind ([modify], [init], [deinit]) may write;
      // return true.
      if (bai->getAccessKind() != SILAccessKind::Read) {
        return true;
      }

      // Otherwise, add the uses of the access scope to the worklist and
      // continue.
      llvm::copy(bai->getUses(), std::back_inserter(worklist));
      continue;
    }

    // Plain loads only read; a load [take], however, consumes (and thus
    // invalidates) the memory, so treat it as a write.
    if (auto *li = dyn_cast<LoadInst>(user)) {
      if (li->getOwnershipQualifier() == LoadOwnershipQualifier::Take) {
        return true;
      }
      continue;
    }

    // If we have a FullApplySite, see if we use the value as an
    // @in_guaranteed parameter, which only reads. If we use it as inout, we
    // would need interprocedural analysis that we do not perform here.
    if (auto fas = FullApplySite::isa(user)) {
      if (fas.getArgumentConvention(*op) ==
          SILArgumentConvention::Indirect_In_Guaranteed)
        continue;

      // Otherwise, be conservative and return true.
      return true;
    }

    // A copy_addr that merely reads its source acts like a load.
    if (auto *cai = dyn_cast<CopyAddrInst>(user)) {
      // If our value is the destination, this is a write.
      if (cai->getDest() == op->get()) {
        return true;
      }

      // Ok, so we are Src by process of elimination. Make sure we are not
      // being taken (consumed).
      if (cai->isTakeOfSrc()) {
        return true;
      }

      // Otherwise, this is a plain copy out of the source — a read — and we
      // can continue.
      continue;
    }

    // If we did not recognize the user, conservatively report that the
    // address is written to.
    LLVM_DEBUG(llvm::dbgs()
               << "Function: " << user->getFunction()->getName() << "\n");
    LLVM_DEBUG(llvm::dbgs() << "Value: " << op->get());
    LLVM_DEBUG(llvm::dbgs() << "Unknown instruction!: " << *user);
    return true;
  }

  // Ok, we finished our worklist without finding a write; this address is not
  // being written to.
  return false;
}

//===----------------------------------------------------------------------===//
// Implementation
//===----------------------------------------------------------------------===//
Expand Down Expand Up @@ -185,6 +309,7 @@ struct SemanticARCOptVisitor
SILFunction &F;
Optional<DeadEndBlocks> TheDeadEndBlocks;
ValueLifetimeAnalysis::Frontier lifetimeFrontier;
IsAddressWrittenToDefUseAnalysis isAddressWrittenToDefUseAnalysis;

explicit SemanticARCOptVisitor(SILFunction &F) : F(F) {}

Expand Down Expand Up @@ -712,12 +837,15 @@ class StorageGuaranteesLoadVisitor
return answer(false);
}

// If we have an inout parameter that isn't ever actually written to, return
// false.
if (arg->getKnownParameterInfo().isIndirectMutating()) {
return answer(ARCOpt.isAddressWrittenToDefUseAnalysis(arg));
}

// TODO: This should be extended:
//
// 1. We should be able to analyze inout arguments and see if the inout
// argument is never actually written to in a flow insensitive way.
//
// 2. We should be able to analyze in arguments and see if they are only
// 1. We should be able to analyze in arguments and see if they are only
// ever destroyed at the end of the function. In such a case, we may be
// able to also to promote load [copy] from such args to load_borrow.
return answer(true);
Expand Down
55 changes: 55 additions & 0 deletions test/SILOptimizer/semantic-arc-opts.sil
Original file line number Diff line number Diff line change
Expand Up @@ -1085,3 +1085,58 @@ bb0(%0 : @owned $Klass):
%9999 = tuple()
return %9999 : $()
}

// Test that a load [copy] of the whole @inout argument is promoted to a
// load_borrow when the function never writes through the argument.
//
// CHECK-LABEL: sil [ossa] @inout_argument_never_written_to_1 : $@convention(thin) (@inout NativeObjectPair) -> () {
// CHECK-NOT: load [copy]
// CHECK: load_borrow
// CHECK-NOT: load [copy]
// CHECK: } // end sil function 'inout_argument_never_written_to_1'
sil [ossa] @inout_argument_never_written_to_1 : $@convention(thin) (@inout NativeObjectPair) -> () {
bb0(%0 : $*NativeObjectPair):
  %2 = load [copy] %0 : $*NativeObjectPair
  (%3, %4) = destructure_struct %2 : $NativeObjectPair

  %5 = function_ref @guaranteed_user : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
  apply %5(%3) : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
  apply %5(%4) : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()

  destroy_value %3 : $Builtin.NativeObject
  destroy_value %4 : $Builtin.NativeObject

  %9999 = tuple()
  return %9999 : $()
}

// Same as test 1, but the load [copy] is of a struct_element_addr projection
// of the @inout argument: the analysis must look through address projections.
//
// CHECK-LABEL: sil [ossa] @inout_argument_never_written_to_2 : $@convention(thin) (@inout NativeObjectPair) -> () {
// CHECK-NOT: load [copy]
// CHECK: load_borrow
// CHECK-NOT: load [copy]
// CHECK: } // end sil function 'inout_argument_never_written_to_2'
sil [ossa] @inout_argument_never_written_to_2 : $@convention(thin) (@inout NativeObjectPair) -> () {
bb0(%0 : $*NativeObjectPair):
  %2 = struct_element_addr %0 : $*NativeObjectPair, #NativeObjectPair.obj1
  %3 = load [copy] %2 : $*Builtin.NativeObject
  %5 = function_ref @guaranteed_user : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
  apply %5(%3) : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
  destroy_value %3 : $Builtin.NativeObject
  %9999 = tuple()
  return %9999 : $()
}

// Negative test: the trailing store makes the @inout argument "written to",
// so the load [copy] must be left alone. We should be able to handle this,
// but we cannot until we teach the ARC optimizer to reason about writes that
// fall outside the load [copy] region (the analysis is flow-insensitive).
//
// CHECK-LABEL: sil [ossa] @inout_argument_never_written_to_3 : $@convention(thin) (@inout NativeObjectPair, @owned Builtin.NativeObject) -> () {
// CHECK: load [copy]
// CHECK: } // end sil function 'inout_argument_never_written_to_3'
sil [ossa] @inout_argument_never_written_to_3 : $@convention(thin) (@inout NativeObjectPair, @owned Builtin.NativeObject) -> () {
bb0(%0 : $*NativeObjectPair, %1 : @owned $Builtin.NativeObject):
  %2 = struct_element_addr %0 : $*NativeObjectPair, #NativeObjectPair.obj1
  %3 = load [copy] %2 : $*Builtin.NativeObject
  %5 = function_ref @guaranteed_user : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
  apply %5(%3) : $@convention(thin) (@guaranteed Builtin.NativeObject) -> ()
  destroy_value %3 : $Builtin.NativeObject
  store %1 to [assign] %2 : $*Builtin.NativeObject
  %9999 = tuple()
  return %9999 : $()
}