diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2542dd5ba6a9c..2ea34536a6f30 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -3,6 +3,31 @@
 > [!NOTE]
 > This is in reverse chronological order, so newer entries are added to the top.
 
+## Swift 6.2
+
+* [SE-0458][]:
+  Introduced an opt-in mode for strict checking of memory safety, which can be
+  enabled with the compiler flag `-strict-memory-safety`. In this mode,
+  the Swift compiler will produce warnings for uses of memory-unsafe constructs
+  and APIs. For example,
+
+  ```swift
+  func evilMalloc(size: Int) -> Int {
+    // warning: call to global function 'malloc' involves unsafe type 'UnsafeMutableRawPointer'
+    return Int(bitPattern: malloc(size))
+  }
+  ```
+
+  These warnings are in their own diagnostic group (`Unsafe`) and can
+  be suppressed by acknowledging the memory-unsafe behavior, for
+  example with an `unsafe` expression:
+
+  ```swift
+  func evilMalloc(size: Int) -> Int {
+    return unsafe Int(bitPattern: malloc(size)) // no warning
+  }
+  ```
+
 ## Swift 6.1
 
 * Previous versions of Swift would incorrectly allow Objective-C `-init...`
@@ -10676,6 +10701,7 @@ using the `.dynamicType` member to retrieve the type of an expression should mig
 [SE-0431]: https://github.com/apple/swift-evolution/blob/main/proposals/0431-isolated-any-functions.md
 [SE-0442]: https://github.com/swiftlang/swift-evolution/blob/main/proposals/0442-allow-taskgroup-childtaskresult-type-to-be-inferred.md
 [SE-0444]: https://github.com/swiftlang/swift-evolution/blob/main/proposals/0444-member-import-visibility.md
+[SE-0458]: https://github.com/swiftlang/swift-evolution/blob/main/proposals/0458-strict-memory-safety.md
 [#64927]:
 [#42697]:
 [#42728]:
diff --git a/Runtimes/Core/CMakeLists.txt b/Runtimes/Core/CMakeLists.txt
index 526edeee98b77..f03ab8d9bef32 100644
--- a/Runtimes/Core/CMakeLists.txt
+++ b/Runtimes/Core/CMakeLists.txt
@@ -182,7 +182,7 @@ add_compile_options(
   "$<$:SHELL:-enable-experimental-feature NoncopyableGenerics2>"
   "$<$:SHELL:-enable-experimental-feature SuppressedAssociatedTypes>"
   "$<$:SHELL:-enable-experimental-feature SE427NoInferenceOnExtension>"
-  "$<$:SHELL:-enable-experimental-feature AllowUnsafeAttribute>"
+  "$<$:SHELL:-strict-memory-safety>"
   "$<$:SHELL:-enable-experimental-feature NonescapableTypes>"
   "$<$:SHELL:-enable-experimental-feature LifetimeDependence>"
   "$<$:SHELL:-enable-experimental-feature MemberImportVisibility>"
diff --git a/include/swift/AST/ConformanceAttributes.h b/include/swift/AST/ConformanceAttributes.h
index 4f9242c6b3902..aa3ec21dff4dc 100644
--- a/include/swift/AST/ConformanceAttributes.h
+++ b/include/swift/AST/ConformanceAttributes.h
@@ -27,6 +27,9 @@ struct ConformanceAttributes {
   /// The location of the "unsafe" attribute if present.
   SourceLoc unsafeLoc;
+
+  /// The location of the "@isolated" attribute if present.
+  SourceLoc isolatedLoc;
 
   /// Merge other conformance attributes into this set.
ConformanceAttributes & @@ -37,6 +40,8 @@ struct ConformanceAttributes { preconcurrencyLoc = other.preconcurrencyLoc; if (other.unsafeLoc.isValid()) unsafeLoc = other.unsafeLoc; + if (other.isolatedLoc.isValid()) + isolatedLoc = other.isolatedLoc; return *this; } }; diff --git a/include/swift/AST/Decl.h b/include/swift/AST/Decl.h index 1de660e903aec..92e7afe8eeb2d 100644 --- a/include/swift/AST/Decl.h +++ b/include/swift/AST/Decl.h @@ -1836,12 +1836,13 @@ struct InheritedEntry : public TypeLoc { bool isPreconcurrency() const { return getOptions().contains(ProtocolConformanceFlags::Preconcurrency); } + bool isIsolated() const { + return getOptions().contains(ProtocolConformanceFlags::Isolated); + } ExplicitSafety getExplicitSafety() const { if (getOptions().contains(ProtocolConformanceFlags::Unsafe)) return ExplicitSafety::Unsafe; - if (getOptions().contains(ProtocolConformanceFlags::Safe)) - return ExplicitSafety::Safe; return ExplicitSafety::Unspecified; } @@ -1852,13 +1853,10 @@ struct InheritedEntry : public TypeLoc { } void setOption(ExplicitSafety safety) { - RawOptions = (getOptions() - ProtocolConformanceFlags::Unsafe - - ProtocolConformanceFlags::Safe).toRaw(); + RawOptions = (getOptions() - ProtocolConformanceFlags::Unsafe).toRaw(); switch (safety) { case ExplicitSafety::Unspecified: - break; case ExplicitSafety::Safe: - RawOptions = (getOptions() | ProtocolConformanceFlags::Safe).toRaw(); break; case ExplicitSafety::Unsafe: RawOptions = (getOptions() | ProtocolConformanceFlags::Unsafe).toRaw(); diff --git a/include/swift/AST/DiagnosticsSema.def b/include/swift/AST/DiagnosticsSema.def index 95ad9917b891f..7f3daf123dbe9 100644 --- a/include/swift/AST/DiagnosticsSema.def +++ b/include/swift/AST/DiagnosticsSema.def @@ -2722,8 +2722,23 @@ WARNING(add_predates_concurrency_import,none, GROUPED_WARNING(remove_predates_concurrency_import,PreconcurrencyImport, DefaultIgnore, "'@preconcurrency' attribute on module %0 has no effect", (Identifier)) +NOTE(add_isolated_to_conformance,none, + "add 'isolated' to the %0 conformance to restrict it to %1 code", + (DeclName, ActorIsolation)) NOTE(add_preconcurrency_to_conformance,none, "add '@preconcurrency' to the %0 conformance to defer isolation checking to run time", (DeclName)) +ERROR(isolated_conformance_not_global_actor_isolated,none, + "isolated conformance is only permitted on global-actor-isolated types", + ()) +ERROR(isolated_conformance_experimental_feature,none, + "isolated conformances require experimental feature " + " 'IsolatedConformances'", ()) +ERROR(nonisolated_conformance_depends_on_isolated_conformance,none, + "conformance of %0 to %1 depends on %2 conformance of %3 to %4; mark it as 'isolated'", + (Type, DeclName, ActorIsolation, Type, DeclName)) +ERROR(isolated_conformance_mismatch_with_associated_isolation,none, + "%0 conformance of %1 to %2 cannot depend on %3 conformance of %4 to %5", + (ActorIsolation, Type, DeclName, ActorIsolation, Type, DeclName)) WARNING(remove_public_import,none, "public import of %0 was not used in public declarations or inlinable code", (Identifier)) @@ -8150,9 +8165,6 @@ NOTE(sending_function_result_with_sending_param_note, none, //------------------------------------------------------------------------------ // MARK: Strict Safety Diagnostics //------------------------------------------------------------------------------ -ERROR(unsafe_attr_disabled,none, - "attribute requires '-enable-experimental-feature AllowUnsafeAttribute'", ()) - NOTE(note_reference_to_unsafe_decl,none, 
"%select{reference|call}0 to unsafe %kind1", (bool, const ValueDecl *)) diff --git a/include/swift/AST/ProtocolConformance.h b/include/swift/AST/ProtocolConformance.h index 8d464ac19f587..3969f60624915 100644 --- a/include/swift/AST/ProtocolConformance.h +++ b/include/swift/AST/ProtocolConformance.h @@ -669,6 +669,11 @@ class NormalProtocolConformance : public RootProtocolConformance, return getOptions().contains(ProtocolConformanceFlags::Preconcurrency); } + /// Whether this is an isolated conformance. + bool isIsolated() const { + return getOptions().contains(ProtocolConformanceFlags::Isolated); + } + /// Retrieve the location of `@preconcurrency`, if there is one and it is /// known. SourceLoc getPreconcurrencyLoc() const { return PreconcurrencyLoc; } @@ -678,8 +683,6 @@ class NormalProtocolConformance : public RootProtocolConformance, ExplicitSafety getExplicitSafety() const { if (getOptions().contains(ProtocolConformanceFlags::Unsafe)) return ExplicitSafety::Unsafe; - if (getOptions().contains(ProtocolConformanceFlags::Safe)) - return ExplicitSafety::Safe; return ExplicitSafety::Unspecified; } diff --git a/include/swift/AST/ProtocolConformanceOptions.h b/include/swift/AST/ProtocolConformanceOptions.h index 0fc91203e88bc..002186bf307d2 100644 --- a/include/swift/AST/ProtocolConformanceOptions.h +++ b/include/swift/AST/ProtocolConformanceOptions.h @@ -34,8 +34,8 @@ enum class ProtocolConformanceFlags { /// @retroactive conformance Retroactive = 0x08, - /// @safe conformance - Safe = 0x10, + /// @isolated conformance + Isolated = 0x10, // Note: whenever you add a bit here, update // NumProtocolConformanceOptions below. diff --git a/include/swift/Basic/Features.def b/include/swift/Basic/Features.def index 773e7e38c2fc9..f7dcd6e9ce287 100644 --- a/include/swift/Basic/Features.def +++ b/include/swift/Basic/Features.def @@ -48,6 +48,12 @@ // for features that can be assumed to be available in any Swift compiler that // will be used to process the textual interface files produced by this // Swift compiler. +// +// OPTIONAL_LANGUAGE_FEATURE is the same as LANGUAGE_FEATURE, but describes +// accepted features that can be enabled independently of language version and +// are not scheduled to be enabled in some specific language version. Examples +// of optional language features include strict memory safety checking (SE-0458) +// and Embedded Swift. //===----------------------------------------------------------------------===// #ifndef LANGUAGE_FEATURE @@ -89,6 +95,11 @@ LANGUAGE_FEATURE(FeatureName, SENumber, Description) #endif +#ifndef OPTIONAL_LANGUAGE_FEATURE +# define OPTIONAL_LANGUAGE_FEATURE(FeatureName, SENumber, Description) \ + LANGUAGE_FEATURE(FeatureName, SENumber, Description) +#endif + // A feature that's both conditionally-suppressible and experimental. // Delegates to whichever the includer defines. 
#ifndef CONDITIONALLY_SUPPRESSIBLE_EXPERIMENTAL_FEATURE @@ -203,6 +214,7 @@ LANGUAGE_FEATURE(IsolatedAny2, 431, "@isolated(any) function types") LANGUAGE_FEATURE(ObjCImplementation, 436, "@objc @implementation extensions") LANGUAGE_FEATURE(NonescapableTypes, 446, "Nonescapable types") LANGUAGE_FEATURE(BuiltinEmplaceTypedThrows, 0, "Builtin.emplace typed throws") +SUPPRESSIBLE_LANGUAGE_FEATURE(MemorySafetyAttributes, 458, "@unsafe attribute") // Swift 6 UPCOMING_FEATURE(ConciseMagicFile, 274, 6) @@ -226,6 +238,14 @@ UPCOMING_FEATURE(ExistentialAny, 335, 7) UPCOMING_FEATURE(InternalImportsByDefault, 409, 7) UPCOMING_FEATURE(MemberImportVisibility, 444, 7) +// Optional language features / modes + +/// Diagnose uses of language constructs and APIs that can violate memory +/// safety. +OPTIONAL_LANGUAGE_FEATURE(StrictMemorySafety, 458, "Strict memory safety") + +// Experimental features + EXPERIMENTAL_FEATURE(StaticAssert, false) EXPERIMENTAL_FEATURE(NamedOpaqueTypes, false) EXPERIMENTAL_FEATURE(FlowSensitiveConcurrencyCaptures, false) @@ -396,12 +416,6 @@ EXPERIMENTAL_FEATURE(Extern, true) // Enable trailing comma for comma-separated lists. EXPERIMENTAL_FEATURE(TrailingComma, false) -/// Allow the @unsafe attribute. -SUPPRESSIBLE_EXPERIMENTAL_FEATURE(AllowUnsafeAttribute, true) - -/// Warn on use of unsafe constructs. -EXPERIMENTAL_FEATURE(WarnUnsafe, true) - // Import bounds safety and lifetime attributes from interop headers to // generate Swift wrappers with safe pointer types. EXPERIMENTAL_FEATURE(SafeInteropWrappers, false) @@ -450,15 +464,20 @@ SUPPRESSIBLE_EXPERIMENTAL_FEATURE(CustomAvailability, true) /// Be strict about the Sendable conformance of metatypes. EXPERIMENTAL_FEATURE(StrictSendableMetatypes, true) + /// Allow public enumerations to be extensible by default /// regardless of whether the module they are declared in /// is resilient or not. EXPERIMENTAL_FEATURE(ExtensibleEnums, true) +/// Allow isolated conformances. 
+EXPERIMENTAL_FEATURE(IsolatedConformances, true) + #undef EXPERIMENTAL_FEATURE_EXCLUDED_FROM_MODULE_INTERFACE #undef EXPERIMENTAL_FEATURE #undef UPCOMING_FEATURE #undef BASELINE_LANGUAGE_FEATURE +#undef OPTIONAL_LANGUAGE_FEATURE #undef CONDITIONALLY_SUPPRESSIBLE_EXPERIMENTAL_FEATURE #undef CONDITIONALLY_SUPPRESSIBLE_LANGUAGE_FEATURE #undef SUPPRESSIBLE_EXPERIMENTAL_FEATURE diff --git a/include/swift/Option/Options.td b/include/swift/Option/Options.td index a4e622a58acc2..74fd5731e7269 100644 --- a/include/swift/Option/Options.td +++ b/include/swift/Option/Options.td @@ -1005,6 +1005,11 @@ def disable_upcoming_feature : Separate<["-"], "disable-upcoming-feature">, HelpText<"Disable a feature that will be introduced in an upcoming language " "version">; +def strict_memory_safety : Flag<["-"], "strict-memory-safety">, + Flags<[FrontendOption, ModuleInterfaceOptionIgnorable, + SwiftAPIDigesterOption, SwiftSynthesizeInterfaceOption]>, + HelpText<"Enable strict memory safety checking">; + def Rpass_EQ : Joined<["-"], "Rpass=">, Flags<[FrontendOption]>, HelpText<"Report performed transformations by optimization passes whose " diff --git a/lib/AST/ASTPrinter.cpp b/lib/AST/ASTPrinter.cpp index 8199a648638b3..87b7927231b6a 100644 --- a/lib/AST/ASTPrinter.cpp +++ b/lib/AST/ASTPrinter.cpp @@ -3214,9 +3214,10 @@ struct ExcludeAttrRAII { } static void -suppressingFeatureAllowUnsafeAttribute(PrintOptions &options, +suppressingFeatureMemorySafetyAttributes(PrintOptions &options, llvm::function_ref action) { ExcludeAttrRAII scope(options.ExcludeAttrList, DeclAttrKind::Unsafe); + ExcludeAttrRAII scope2(options.ExcludeAttrList, DeclAttrKind::Safe); action(); } diff --git a/lib/AST/ConformanceLookupTable.h b/lib/AST/ConformanceLookupTable.h index 92ed8be8c8e06..f43ab190aefe3 100644 --- a/lib/AST/ConformanceLookupTable.h +++ b/lib/AST/ConformanceLookupTable.h @@ -154,6 +154,8 @@ class ConformanceLookupTable : public ASTAllocated { options |= ProtocolConformanceFlags::Preconcurrency; if (getUnsafeLoc().isValid()) options |= ProtocolConformanceFlags::Unsafe; + if (getIsolatedLoc().isValid()) + options |= ProtocolConformanceFlags::Isolated; return options; } @@ -209,6 +211,11 @@ class ConformanceLookupTable : public ASTAllocated { return attributes.unsafeLoc; } + /// The location of the @isolated attribute, if any. + SourceLoc getIsolatedLoc() const { + return attributes.isolatedLoc; + } + /// For an inherited conformance, retrieve the class declaration /// for the inheriting class. 
ClassDecl *getInheritingClass() const { diff --git a/lib/AST/Decl.cpp b/lib/AST/Decl.cpp index 40d78a7c9f07f..dd0dea8bd12cf 100644 --- a/lib/AST/Decl.cpp +++ b/lib/AST/Decl.cpp @@ -1780,6 +1780,8 @@ InheritedEntry::InheritedEntry(const TypeLoc &typeLoc) setOption(ProtocolConformanceFlags::Unsafe); if (typeRepr->findAttrLoc(TypeAttrKind::Preconcurrency).isValid()) setOption(ProtocolConformanceFlags::Preconcurrency); + if (typeRepr->findAttrLoc(TypeAttrKind::Isolated).isValid()) + setOption(ProtocolConformanceFlags::Isolated); } } diff --git a/lib/AST/FeatureSet.cpp b/lib/AST/FeatureSet.cpp index 1eb747ac39d2a..d71487ddcbb92 100644 --- a/lib/AST/FeatureSet.cpp +++ b/lib/AST/FeatureSet.cpp @@ -330,10 +330,6 @@ UNINTERESTING_FEATURE(ReinitializeConsumeInMultiBlockDefer) UNINTERESTING_FEATURE(SE427NoInferenceOnExtension) UNINTERESTING_FEATURE(TrailingComma) -static bool usesFeatureAllowUnsafeAttribute(Decl *decl) { - return decl->getAttrs().hasAttribute(); -} - static ABIAttr *getABIAttr(Decl *decl) { if (auto pbd = dyn_cast(decl)) for (auto i : range(pbd->getNumPatternEntries())) @@ -348,7 +344,17 @@ static bool usesFeatureABIAttribute(Decl *decl) { return getABIAttr(decl) != nullptr; } -UNINTERESTING_FEATURE(WarnUnsafe) +static bool usesFeatureIsolatedConformances(Decl *decl) { + // FIXME: Check conformances associated with this decl? + return false; +} + +static bool usesFeatureMemorySafetyAttributes(Decl *decl) { + return decl->getAttrs().hasAttribute() || + decl->getAttrs().hasAttribute(); +} + +UNINTERESTING_FEATURE(StrictMemorySafety) UNINTERESTING_FEATURE(SafeInteropWrappers) UNINTERESTING_FEATURE(AssumeResilientCxxTypes) UNINTERESTING_FEATURE(CoroutineAccessorsUnwindOnCallerError) diff --git a/lib/AST/NameLookup.cpp b/lib/AST/NameLookup.cpp index cfc1a96373770..8f692d4900071 100644 --- a/lib/AST/NameLookup.cpp +++ b/lib/AST/NameLookup.cpp @@ -3144,6 +3144,12 @@ directReferencesForTypeRepr(Evaluator &evaluator, ASTContext &ctx, attributed->getTypeRepr(), dc, options); } + case TypeReprKind::Isolated: { + auto isolated = cast(typeRepr); + return directReferencesForTypeRepr(evaluator, ctx, + isolated->getBase(), dc, options); + } + case TypeReprKind::Composition: { auto composition = cast(typeRepr); for (auto component : composition->getTypes()) { @@ -3217,7 +3223,6 @@ directReferencesForTypeRepr(Evaluator &evaluator, ASTContext &ctx, case TypeReprKind::Error: case TypeReprKind::Function: case TypeReprKind::Ownership: - case TypeReprKind::Isolated: case TypeReprKind::CompileTimeConst: case TypeReprKind::Metatype: case TypeReprKind::Protocol: @@ -3933,6 +3938,21 @@ CustomAttrNominalRequest::evaluate(Evaluator &evaluator, return nullptr; } +/// Find the location of 'isolated' within this type representation. +static SourceLoc findIsolatedLoc(TypeRepr *typeRepr) { + do { + if (auto isolatedTypeRepr = dyn_cast(typeRepr)) + return isolatedTypeRepr->getLoc(); + + if (auto attrTypeRepr = dyn_cast(typeRepr)) { + typeRepr = attrTypeRepr->getTypeRepr(); + continue; + } + + return SourceLoc(); + } while (true); +} + /// Decompose the ith inheritance clause entry to a list of type declarations, /// inverses, and optional AnyObject member. 
void swift::getDirectlyInheritedNominalTypeDecls( @@ -3971,6 +3991,9 @@ void swift::getDirectlyInheritedNominalTypeDecls( attributes.uncheckedLoc = typeRepr->findAttrLoc(TypeAttrKind::Unchecked); attributes.preconcurrencyLoc = typeRepr->findAttrLoc(TypeAttrKind::Preconcurrency); attributes.unsafeLoc = typeRepr->findAttrLoc(TypeAttrKind::Unsafe); + + // Look for an IsolatedTypeRepr. + attributes.isolatedLoc = findIsolatedLoc(typeRepr); } // Form the result. diff --git a/lib/ASTGen/Sources/ASTGen/SourceFile.swift b/lib/ASTGen/Sources/ASTGen/SourceFile.swift index e9f9a92e45ce2..e74fc16ef7b2e 100644 --- a/lib/ASTGen/Sources/ASTGen/SourceFile.swift +++ b/lib/ASTGen/Sources/ASTGen/SourceFile.swift @@ -77,7 +77,6 @@ extension Parser.ExperimentalFeatures { mapFeature(.CoroutineAccessors, to: .coroutineAccessors) mapFeature(.ValueGenerics, to: .valueGenerics) mapFeature(.ABIAttribute, to: .abiAttribute) - mapFeature(.WarnUnsafe, to: .unsafeExpression) } } diff --git a/lib/Basic/LangOptions.cpp b/lib/Basic/LangOptions.cpp index af79d027df997..2fbc993e0d855 100644 --- a/lib/Basic/LangOptions.cpp +++ b/lib/Basic/LangOptions.cpp @@ -38,6 +38,7 @@ LangOptions::LangOptions() { Features.insert(Feature::FeatureName); #define UPCOMING_FEATURE(FeatureName, SENumber, Version) #define EXPERIMENTAL_FEATURE(FeatureName, AvailableInProd) +#define OPTIONAL_LANGUAGE_FEATURE(FeatureName, SENumber, Description) #include "swift/Basic/Features.def" // Special case: remove macro support if the compiler wasn't built with a @@ -636,6 +637,8 @@ bool swift::isFeatureAvailableInProduction(Feature feature) { return true; #define EXPERIMENTAL_FEATURE(FeatureName, AvailableInProd) \ case Feature::FeatureName: return AvailableInProd; +#define OPTIONAL_LANGUAGE_FEATURE(FeatureName, SENumber, Description) \ + LANGUAGE_FEATURE(FeatureName, SENumber, Description) #include "swift/Basic/Features.def" } llvm_unreachable("covered switch"); @@ -655,6 +658,7 @@ std::optional swift::getExperimentalFeature(llvm::StringRef name) { #define LANGUAGE_FEATURE(FeatureName, SENumber, Description) #define EXPERIMENTAL_FEATURE(FeatureName, AvailableInProd) \ .Case(#FeatureName, Feature::FeatureName) +#define OPTIONAL_LANGUAGE_FEATURE(FeatureName, SENumber, Description) #include "swift/Basic/Features.def" .Default(std::nullopt); } @@ -664,6 +668,7 @@ std::optional swift::getFeatureLanguageVersion(Feature feature) { #define LANGUAGE_FEATURE(FeatureName, SENumber, Description) #define UPCOMING_FEATURE(FeatureName, SENumber, Version) \ case Feature::FeatureName: return Version; +#define OPTIONAL_LANGUAGE_FEATURE(FeatureName, SENumber, Description) #include "swift/Basic/Features.def" default: return std::nullopt; @@ -677,6 +682,8 @@ bool swift::includeInModuleInterface(Feature feature) { return true; #define EXPERIMENTAL_FEATURE_EXCLUDED_FROM_MODULE_INTERFACE(FeatureName, AvailableInProd) \ case Feature::FeatureName: return false; +#define OPTIONAL_LANGUAGE_FEATURE(FeatureName, SENumber, Description) \ + LANGUAGE_FEATURE(FeatureName, SENumber, Description) #include "swift/Basic/Features.def" } llvm_unreachable("covered switch"); diff --git a/lib/ClangImporter/ImportDecl.cpp b/lib/ClangImporter/ImportDecl.cpp index 87b1533c8506e..cce357e46d02c 100644 --- a/lib/ClangImporter/ImportDecl.cpp +++ b/lib/ClangImporter/ImportDecl.cpp @@ -2039,7 +2039,7 @@ namespace { fd->getAttrs().add(new (Impl.SwiftContext) UnsafeNonEscapableResultAttr(/*Implicit=*/true)); if (Impl.SwiftContext.LangOpts.hasFeature( - Feature::AllowUnsafeAttribute)) + 
Feature::StrictMemorySafety)) fd->getAttrs().add(new (Impl.SwiftContext) UnsafeAttr(/*Implicit=*/true)); } @@ -2201,7 +2201,7 @@ namespace { // We have to do this after populating ImportedDecls to avoid importing // the same multiple times. if (Impl.SwiftContext.LangOpts.hasFeature( - Feature::AllowUnsafeAttribute)) { + Feature::StrictMemorySafety)) { if (const auto *ctsd = dyn_cast(decl)) { for (auto arg : ctsd->getTemplateArgs().asArray()) { @@ -4178,13 +4178,13 @@ namespace { LifetimeDependenceInfoRequest{result}, Impl.SwiftContext.AllocateCopy(lifetimeDependencies)); } - if (ASTContext.LangOpts.hasFeature(Feature::AllowUnsafeAttribute)) { + if (ASTContext.LangOpts.hasFeature(Feature::StrictMemorySafety)) { for (auto [idx, param] : llvm::enumerate(decl->parameters())) { if (swiftParams->get(idx)->getInterfaceType()->isEscapable()) continue; if (param->hasAttr() || paramHasAnnotation[idx]) continue; - // We have a nonescapabe parameter that does not have its lifetime + // We have a nonescapable parameter that does not have its lifetime // annotated nor is it marked noescape. auto attr = new (ASTContext) UnsafeAttr(/*implicit=*/true); result->getAttrs().add(attr); @@ -8722,8 +8722,6 @@ ClangImporter::Implementation::importSwiftAttrAttributes(Decl *MappedDecl) { } if (swiftAttr->getAttribute() == "unsafe") { - if (!SwiftContext.LangOpts.hasFeature(Feature::AllowUnsafeAttribute)) - continue; seenUnsafe = true; continue; } diff --git a/lib/Driver/ToolChains.cpp b/lib/Driver/ToolChains.cpp index 2cde7c782a78e..6a7ae73498e00 100644 --- a/lib/Driver/ToolChains.cpp +++ b/lib/Driver/ToolChains.cpp @@ -282,6 +282,7 @@ void ToolChain::addCommonFrontendArgs(const OutputInfo &OI, options::OPT_disable_experimental_feature, options::OPT_enable_upcoming_feature, options::OPT_disable_upcoming_feature}); + inputArgs.AddLastArg(arguments, options::OPT_strict_memory_safety); inputArgs.AddLastArg(arguments, options::OPT_warn_implicit_overrides); inputArgs.AddLastArg(arguments, options::OPT_typo_correction_limit); inputArgs.AddLastArg(arguments, options::OPT_enable_app_extension); diff --git a/lib/Frontend/CompilerInvocation.cpp b/lib/Frontend/CompilerInvocation.cpp index 7a5850f015c0b..332ec422b5a22 100644 --- a/lib/Frontend/CompilerInvocation.cpp +++ b/lib/Frontend/CompilerInvocation.cpp @@ -878,6 +878,9 @@ static bool ParseEnabledFeatureArgs(LangOptions &Opts, ArgList &Args, Opts.enableFeature(Feature::LayoutPrespecialization); + if (Args.hasArg(OPT_strict_memory_safety)) + Opts.enableFeature(Feature::StrictMemorySafety); + return HadError; } @@ -3909,7 +3912,7 @@ bool CompilerInvocation::parseArgs( } } - if (LangOpts.hasFeature(Feature::WarnUnsafe)) { + if (LangOpts.hasFeature(Feature::StrictMemorySafety)) { if (SILOpts.RemoveRuntimeAsserts || SILOpts.AssertConfig == SILOptions::Unchecked) { Diags.diagnose(SourceLoc(), diff --git a/lib/Frontend/Frontend.cpp b/lib/Frontend/Frontend.cpp index 7811395f106d4..1e64de754512d 100644 --- a/lib/Frontend/Frontend.cpp +++ b/lib/Frontend/Frontend.cpp @@ -1455,7 +1455,7 @@ ModuleDecl *CompilerInstance::getMainModule() const { MainModule->setAllowNonResilientAccess(); if (Invocation.getSILOptions().EnableSerializePackage) MainModule->setSerializePackageEnabled(); - if (Invocation.getLangOptions().hasFeature(Feature::WarnUnsafe)) + if (Invocation.getLangOptions().hasFeature(Feature::StrictMemorySafety)) MainModule->setStrictMemorySafety(true); if (Invocation.getLangOptions().hasFeature(Feature::ExtensibleEnums)) MainModule->setSupportsExtensibleEnums(true); diff --git 
a/lib/Parse/ParseExpr.cpp b/lib/Parse/ParseExpr.cpp index 9fb1e94412c0d..acf6dadf587e8 100644 --- a/lib/Parse/ParseExpr.cpp +++ b/lib/Parse/ParseExpr.cpp @@ -436,8 +436,9 @@ ParserResult Parser::parseExprSequenceElement(Diag<> message, return sub; } - if (Context.LangOpts.hasFeature(Feature::WarnUnsafe) && - Tok.isContextualKeyword("unsafe")) { + if (Tok.isContextualKeyword("unsafe") && + !peekToken().isAtStartOfLine() && + !peekToken().is(tok::r_paren)) { Tok.setKind(tok::contextual_keyword); SourceLoc unsafeLoc = consumeToken(); ParserResult sub = diff --git a/lib/Parse/ParseStmt.cpp b/lib/Parse/ParseStmt.cpp index 4d564324b3c5d..f7992bbd6abda 100644 --- a/lib/Parse/ParseStmt.cpp +++ b/lib/Parse/ParseStmt.cpp @@ -2379,8 +2379,8 @@ ParserResult Parser::parseStmtForEach(LabeledStmtInfo LabelInfo) { } } - if (Context.LangOpts.hasFeature(Feature::WarnUnsafe) && - Tok.isContextualKeyword("unsafe")) { + if (Tok.isContextualKeyword("unsafe") && + !peekToken().isAny(tok::colon, tok::kw_in)) { UnsafeLoc = consumeToken(); } diff --git a/lib/Parse/ParseType.cpp b/lib/Parse/ParseType.cpp index 31fd093e3b5a3..212e8cb432c36 100644 --- a/lib/Parse/ParseType.cpp +++ b/lib/Parse/ParseType.cpp @@ -163,7 +163,9 @@ ParserResult Parser::parseTypeSimple( Diag<> MessageID, ParseTypeReason reason) { ParserResult ty; - if (isParameterSpecifier()) { + if (isParameterSpecifier() && + !(!Context.LangOpts.hasFeature(Feature::IsolatedConformances) && + Tok.isContextualKeyword("isolated"))) { // Type specifier should already be parsed before here. This only happens // for construct like 'P1 & inout P2'. diagnose(Tok.getLoc(), diag::attr_only_on_parameters, Tok.getRawText()); diff --git a/lib/Sema/TypeCheckAttr.cpp b/lib/Sema/TypeCheckAttr.cpp index 4218b6880a310..b197f6af0ba80 100644 --- a/lib/Sema/TypeCheckAttr.cpp +++ b/lib/Sema/TypeCheckAttr.cpp @@ -189,6 +189,8 @@ class AttributeChecker : public AttributeVisitor { IGNORED_ATTR(LexicalLifetimes) IGNORED_ATTR(AllowFeatureSuppression) IGNORED_ATTR(PreInverseGenerics) + IGNORED_ATTR(Safe) + IGNORED_ATTR(Unsafe) #undef IGNORED_ATTR private: @@ -564,8 +566,6 @@ class AttributeChecker : public AttributeVisitor { void visitStaticExclusiveOnlyAttr(StaticExclusiveOnlyAttr *attr); void visitWeakLinkedAttr(WeakLinkedAttr *attr); void visitSILGenNameAttr(SILGenNameAttr *attr); - void visitUnsafeAttr(UnsafeAttr *attr); - void visitSafeAttr(SafeAttr *attr); void visitLifetimeAttr(LifetimeAttr *attr); void visitAddressableSelfAttr(AddressableSelfAttr *attr); void visitAddressableForDependenciesAttr(AddressableForDependenciesAttr *attr); @@ -8125,20 +8125,6 @@ void AttributeChecker::visitWeakLinkedAttr(WeakLinkedAttr *attr) { attr->getAttrName(), Ctx.LangOpts.Target.str()); } -void AttributeChecker::visitUnsafeAttr(UnsafeAttr *attr) { - if (Ctx.LangOpts.hasFeature(Feature::AllowUnsafeAttribute)) - return; - - diagnoseAndRemoveAttr(attr, diag::unsafe_attr_disabled); -} - -void AttributeChecker::visitSafeAttr(SafeAttr *attr) { - if (Ctx.LangOpts.hasFeature(Feature::AllowUnsafeAttribute)) - return; - - diagnoseAndRemoveAttr(attr, diag::unsafe_attr_disabled); -} - void AttributeChecker::visitLifetimeAttr(LifetimeAttr *attr) {} void AttributeChecker::visitAddressableSelfAttr(AddressableSelfAttr *attr) { diff --git a/lib/Sema/TypeCheckConcurrency.cpp b/lib/Sema/TypeCheckConcurrency.cpp index b989ce887f1ce..88a16a3a42149 100644 --- a/lib/Sema/TypeCheckConcurrency.cpp +++ b/lib/Sema/TypeCheckConcurrency.cpp @@ -30,6 +30,7 @@ #include "swift/AST/ImportCache.h" #include 
"swift/AST/Initializer.h" #include "swift/AST/NameLookupRequests.h" +#include "swift/AST/PackConformance.h" #include "swift/AST/ParameterList.h" #include "swift/AST/ProtocolConformance.h" #include "swift/AST/TypeCheckRequests.h" @@ -7669,3 +7670,56 @@ bool swift::diagnoseNonSendableFromDeinit( diag::non_sendable_from_deinit, var->getDescriptiveKind(), var->getName()); } + +bool swift::forEachIsolatedConformance( + ProtocolConformanceRef conformance, + llvm::function_ref body +) { + if (conformance.isInvalid() || conformance.isAbstract()) + return false; + + if (conformance.isPack()) { + auto pack = conformance.getPack()->getPatternConformances(); + for (auto conformance : pack) { + if (forEachIsolatedConformance(conformance, body)) + return true; + } + + return false; + } + + // Is this an isolated conformance? + auto concrete = conformance.getConcrete(); + if (auto normal = + dyn_cast(concrete->getRootConformance())) { + if (normal->isIsolated()) { + if (body(concrete)) + return true; + } + } + + // Check conformances that are part of this conformance. + auto subMap = concrete->getSubstitutionMap(); + for (auto conformance : subMap.getConformances()) { + if (forEachIsolatedConformance(conformance, body)) + return true; + } + + return false; +} + +ActorIsolation swift::getConformanceIsolation(ProtocolConformance *conformance) { + auto rootNormal = + dyn_cast(conformance->getRootConformance()); + if (!rootNormal) + return ActorIsolation::forNonisolated(false); + + if (!rootNormal->isIsolated()) + return ActorIsolation::forNonisolated(false); + + auto nominal = rootNormal->getDeclContext()->getSelfNominalTypeDecl(); + if (!nominal) + return ActorIsolation::forNonisolated(false); + + return getActorIsolation(nominal); +} diff --git a/lib/Sema/TypeCheckConcurrency.h b/lib/Sema/TypeCheckConcurrency.h index b61c085b16172..d63eb140b4a28 100644 --- a/lib/Sema/TypeCheckConcurrency.h +++ b/lib/Sema/TypeCheckConcurrency.h @@ -699,6 +699,19 @@ void introduceUnsafeInheritExecutorReplacements( void introduceUnsafeInheritExecutorReplacements( const DeclContext *dc, Type base, SourceLoc loc, LookupResult &result); +/// Enumerate all of the isolated conformances in the given conformance. +/// +/// The given `body` will be called on each isolated conformance. If it ever +/// returns `true`, this function will abort the search and return `true`. +bool forEachIsolatedConformance( + ProtocolConformanceRef conformance, + llvm::function_ref body +); + +/// Determine the isolation of the given conformance. This only applies to +/// the immediate conformance, not any conformances on which it depends. +ActorIsolation getConformanceIsolation(ProtocolConformance *conformance); + } // end namespace swift namespace llvm { diff --git a/lib/Sema/TypeCheckDeclOverride.cpp b/lib/Sema/TypeCheckDeclOverride.cpp index 7b89e8cb4f8ba..36b402d1becbe 100644 --- a/lib/Sema/TypeCheckDeclOverride.cpp +++ b/lib/Sema/TypeCheckDeclOverride.cpp @@ -2259,7 +2259,7 @@ static bool checkSingleOverride(ValueDecl *override, ValueDecl *base) { diagnoseOverrideForAvailability(override, base); } - if (ctx.LangOpts.hasFeature(Feature::WarnUnsafe)) { + if (ctx.LangOpts.hasFeature(Feature::StrictMemorySafety)) { // If the override is unsafe but the base declaration is not, then the // inheritance itself is unsafe. 
auto subs = SubstitutionMap::getOverrideSubstitutions(base, override); diff --git a/lib/Sema/TypeCheckDeclPrimary.cpp b/lib/Sema/TypeCheckDeclPrimary.cpp index a3fc274b3311e..55134261c09d2 100644 --- a/lib/Sema/TypeCheckDeclPrimary.cpp +++ b/lib/Sema/TypeCheckDeclPrimary.cpp @@ -2395,7 +2395,7 @@ class DeclChecker : public DeclVisitor { // If strict memory safety checking is enabled, check the storage // of the nominal type. - if (Ctx.LangOpts.hasFeature(Feature::WarnUnsafe) && + if (Ctx.LangOpts.hasFeature(Feature::StrictMemorySafety) && !isa(nominal)) { checkUnsafeStorage(nominal); } @@ -2468,7 +2468,7 @@ class DeclChecker : public DeclVisitor { // concurrency checking enabled. if (ID->preconcurrency() && Ctx.LangOpts.StrictConcurrencyLevel == StrictConcurrency::Complete && - Ctx.LangOpts.hasFeature(Feature::WarnUnsafe)) { + Ctx.LangOpts.hasFeature(Feature::StrictMemorySafety)) { diagnoseUnsafeUse(UnsafeUse::forPreconcurrencyImport(ID)); } } diff --git a/lib/Sema/TypeCheckEffects.cpp b/lib/Sema/TypeCheckEffects.cpp index e638d5e5b343c..23fe8a7f42988 100644 --- a/lib/Sema/TypeCheckEffects.cpp +++ b/lib/Sema/TypeCheckEffects.cpp @@ -1101,7 +1101,7 @@ class Classification { bool considerAsync = !onlyEffect || *onlyEffect == EffectKind::Async; bool considerThrows = !onlyEffect || *onlyEffect == EffectKind::Throws; bool considerUnsafe = (!onlyEffect || *onlyEffect == EffectKind::Unsafe) && - ctx.LangOpts.hasFeature(Feature::WarnUnsafe); + ctx.LangOpts.hasFeature(Feature::StrictMemorySafety); // If we're tracking "unsafe" effects, compute them here. if (considerUnsafe) { diff --git a/lib/Sema/TypeCheckProtocol.cpp b/lib/Sema/TypeCheckProtocol.cpp index b7b58ee0d2953..435f8c8f1f7c0 100644 --- a/lib/Sema/TypeCheckProtocol.cpp +++ b/lib/Sema/TypeCheckProtocol.cpp @@ -2541,6 +2541,22 @@ checkIndividualConformance(NormalProtocolConformance *conformance) { ComplainLoc, diag::unchecked_conformance_not_special, ProtoType); } + // Complain if the conformance is isolated but the conforming type is + // not global-actor-isolated. + if (conformance->isIsolated()) { + auto enclosingNominal = DC->getSelfNominalTypeDecl(); + if (!enclosingNominal || + !getActorIsolation(enclosingNominal).isGlobalActor()) { + Context.Diags.diagnose( + ComplainLoc, diag::isolated_conformance_not_global_actor_isolated); + } + + if (!Context.LangOpts.hasFeature(Feature::IsolatedConformances)) { + Context.Diags.diagnose( + ComplainLoc, diag::isolated_conformance_experimental_feature); + } + } + bool allowImpliedConditionalConformance = false; if (Proto->isSpecificProtocol(KnownProtocolKind::Sendable)) { // In -swift-version 5 mode, a conditional conformance to a protocol can imply @@ -2625,7 +2641,7 @@ checkIndividualConformance(NormalProtocolConformance *conformance) { // If we're enforcing strict memory safety and this conformance hasn't // opted out, look for safe/unsafe witness mismatches. if (conformance->getExplicitSafety() == ExplicitSafety::Unspecified && - Context.LangOpts.hasFeature(Feature::WarnUnsafe)) { + Context.LangOpts.hasFeature(Feature::StrictMemorySafety)) { // Collect all of the unsafe uses for this conformance. SmallVector unsafeUses; for (auto requirement: Proto->getMembers()) { @@ -3313,6 +3329,14 @@ static bool hasExplicitGlobalActorAttr(ValueDecl *decl) { return !globalActorAttr->first->isImplicit(); } +/// Determine whether the given actor isolation matches that of the enclosing +/// type. 
+static bool isolationMatchesEnclosingType( + ActorIsolation isolation, NominalTypeDecl *nominal) { + auto nominalIsolation = getActorIsolation(nominal); + return isolation == nominalIsolation; +} + std::optional ConformanceChecker::checkActorIsolation(ValueDecl *requirement, ValueDecl *witness, @@ -3342,7 +3366,8 @@ ConformanceChecker::checkActorIsolation(ValueDecl *requirement, Conformance->isPreconcurrency() && !(requirementIsolation.isActorIsolated() || requirement->getAttrs().hasAttribute()); - + bool isIsolatedConformance = false; + switch (refResult) { case ActorReferenceResult::SameConcurrencyDomain: // If the witness has distributed-actor isolation, we have extra @@ -3374,6 +3399,17 @@ ConformanceChecker::checkActorIsolation(ValueDecl *requirement, return std::nullopt; case ActorReferenceResult::EntersActor: + // If the conformance itself is isolated, and the witness isolation + // matches the enclosing type's isolation, treat this as being in the + // same concurrency domain. + if (Conformance->isIsolated() && + refResult.isolation.isGlobalActor() && + isolationMatchesEnclosingType( + refResult.isolation, DC->getSelfNominalTypeDecl())) { + sameConcurrencyDomain = true; + isIsolatedConformance = true; + } + // Handled below. break; } @@ -3446,7 +3482,12 @@ ConformanceChecker::checkActorIsolation(ValueDecl *requirement, // If we aren't missing anything or this is a witness to a `@preconcurrency` // conformance, do a Sendable check and move on. - if (!missingOptions || isPreconcurrency) { + if (!missingOptions || isPreconcurrency || isIsolatedConformance) { + // An isolated conformance won't ever leave the isolation domain in which + // it was created, so there is nothing to check. + if (isIsolatedConformance) + return std::nullopt; + // FIXME: Disable Sendable checking when the witness is an initializer // that is explicitly marked nonisolated. if (isa(witness) && @@ -3546,18 +3587,26 @@ ConformanceChecker::checkActorIsolation(ValueDecl *requirement, witness->diagnose(diag::note_add_nonisolated_to_decl, witness) .fixItInsert(witness->getAttributeInsertionLoc(true), "nonisolated "); } - + // Another way to address the issue is to mark the conformance as - // "preconcurrency". + // "isolated" or "@preconcurrency". 
if (Conformance->getSourceKind() == ConformanceEntryKind::Explicit && - !Conformance->isPreconcurrency() && - !suggestedPreconcurrency && + !Conformance->isIsolated() && !Conformance->isPreconcurrency() && + !suggestedPreconcurrencyOrIsolated && !requirementIsolation.isActorIsolated()) { + if (Context.LangOpts.hasFeature(Feature::IsolatedConformances)) { + Context.Diags.diagnose(Conformance->getProtocolNameLoc(), + diag::add_isolated_to_conformance, + Proto->getName(), refResult.isolation) + .fixItInsert(Conformance->getProtocolNameLoc(), "isolated "); + } + Context.Diags.diagnose(Conformance->getProtocolNameLoc(), diag::add_preconcurrency_to_conformance, Proto->getName()) .fixItInsert(Conformance->getProtocolNameLoc(), "@preconcurrency "); - suggestedPreconcurrency = true; + + suggestedPreconcurrencyOrIsolated = true; } } @@ -5158,6 +5207,8 @@ static void ensureRequirementsAreSatisfied(ASTContext &ctx, if (where.isImplicit()) return; + bool diagnosedIsolatedConformanceIssue = false; + conformance->forEachAssociatedConformance( [&](Type depTy, ProtocolDecl *proto, unsigned index) { auto assocConf = conformance->getAssociatedConformance(depTy, proto); @@ -5181,6 +5232,50 @@ static void ensureRequirementsAreSatisfied(ASTContext &ctx, where.withRefinedAvailability(availability), depTy, replacementTy); } + if (!diagnosedIsolatedConformanceIssue) { + bool foundIssue = forEachIsolatedConformance( + ProtocolConformanceRef(assocConf), + [&](ProtocolConformance *isolatedConformance) { + // If the conformance we're checking isn't isolated at all, it + // needs "isolated". + if (!conformance->isIsolated()) { + ctx.Diags.diagnose( + conformance->getLoc(), + diag::nonisolated_conformance_depends_on_isolated_conformance, + typeInContext, conformance->getProtocol()->getName(), + getConformanceIsolation(isolatedConformance), + isolatedConformance->getType(), + isolatedConformance->getProtocol()->getName() + ).fixItInsert(conformance->getProtocolNameLoc(), "isolated "); + + return true; + } + + // The conformance is isolated, but we need it to have the same + // isolation as the other isolated conformance we found. + auto outerIsolation = getConformanceIsolation(conformance); + auto innerIsolation = getConformanceIsolation(isolatedConformance); + if (outerIsolation != innerIsolation) { + ctx.Diags.diagnose( + conformance->getLoc(), + diag::isolated_conformance_mismatch_with_associated_isolation, + outerIsolation, + typeInContext, conformance->getProtocol()->getName(), + innerIsolation, + isolatedConformance->getType(), + isolatedConformance->getProtocol()->getName() + ); + + return true; + } + + return false; + } + ); + + diagnosedIsolatedConformanceIssue = foundIssue; + } + return false; }); } diff --git a/lib/Sema/TypeCheckProtocol.h b/lib/Sema/TypeCheckProtocol.h index 07786ce68c42a..0e9360b2d3cdf 100644 --- a/lib/Sema/TypeCheckProtocol.h +++ b/lib/Sema/TypeCheckProtocol.h @@ -112,8 +112,8 @@ enum class ResolveWitnessResult { /// This helper class handles most of the details of checking whether a /// given type (\c Adoptee) conforms to a protocol (\c Proto). class ConformanceChecker : public WitnessChecker { - /// Whether we already suggested adding `@preconcurrency`. - bool suggestedPreconcurrency = false; + /// Whether we already suggested adding `@preconcurrency` or 'isolated'. 
+ bool suggestedPreconcurrencyOrIsolated = false; public: NormalProtocolConformance *Conformance; diff --git a/lib/Sema/TypeCheckType.cpp b/lib/Sema/TypeCheckType.cpp index aa39887ce2279..e7041fd822316 100644 --- a/lib/Sema/TypeCheckType.cpp +++ b/lib/Sema/TypeCheckType.cpp @@ -5175,8 +5175,9 @@ NeverNullType TypeResolver::resolveIsolatedTypeRepr(IsolatedTypeRepr *repr, TypeResolutionOptions options) { // isolated is only value for non-EnumCaseDecl parameters. - if (!options.is(TypeResolverContext::FunctionInput) || - options.hasBase(TypeResolverContext::EnumElementDecl)) { + if ((!options.is(TypeResolverContext::FunctionInput) || + options.hasBase(TypeResolverContext::EnumElementDecl)) && + !options.is(TypeResolverContext::Inherited)) { diagnoseInvalid( repr, repr->getSpecifierLoc(), diag::attr_only_on_parameters, "isolated"); @@ -5197,7 +5198,8 @@ TypeResolver::resolveIsolatedTypeRepr(IsolatedTypeRepr *repr, unwrappedType = dynamicSelfType->getSelfType(); } - if (inStage(TypeResolutionStage::Interface)) { + if (inStage(TypeResolutionStage::Interface) && + !options.is(TypeResolverContext::Inherited)) { if (auto *env = resolution.getGenericSignature().getGenericEnvironment()) unwrappedType = env->mapTypeIntoContext(unwrappedType); diff --git a/lib/Sema/TypeCheckUnsafe.cpp b/lib/Sema/TypeCheckUnsafe.cpp index 491458513e3e2..4389638a44c6d 100644 --- a/lib/Sema/TypeCheckUnsafe.cpp +++ b/lib/Sema/TypeCheckUnsafe.cpp @@ -300,7 +300,7 @@ bool swift::enumerateUnsafeUses(ArrayRef conformances, continue; ASTContext &ctx = conformance.getRequirement()->getASTContext(); - if (!ctx.LangOpts.hasFeature(Feature::WarnUnsafe)) + if (!ctx.LangOpts.hasFeature(Feature::StrictMemorySafety)) return false; if (!conformance.hasEffect(EffectKind::Unsafe)) @@ -365,7 +365,7 @@ bool swift::isUnsafeInConformance(const ValueDecl *requirement, void swift::diagnoseUnsafeType(ASTContext &ctx, SourceLoc loc, Type type, llvm::function_ref diagnose) { - if (!ctx.LangOpts.hasFeature(Feature::WarnUnsafe)) + if (!ctx.LangOpts.hasFeature(Feature::StrictMemorySafety)) return; if (!type->isUnsafe() && !type->getCanonicalType()->isUnsafe()) diff --git a/lib/Serialization/Serialization.cpp b/lib/Serialization/Serialization.cpp index 1b6b0a093436c..6cb12f9dfcf80 100644 --- a/lib/Serialization/Serialization.cpp +++ b/lib/Serialization/Serialization.cpp @@ -862,6 +862,7 @@ void Serializer::writeBlockInfoBlock() { BLOCK_RECORD(options_block, HAS_CXX_INTEROPERABILITY_ENABLED); BLOCK_RECORD(options_block, ALLOW_NON_RESILIENT_ACCESS); BLOCK_RECORD(options_block, SERIALIZE_PACKAGE_ENABLED); + BLOCK_RECORD(options_block, STRICT_MEMORY_SAFETY); BLOCK_RECORD(options_block, CXX_STDLIB_KIND); BLOCK_RECORD(options_block, PUBLIC_MODULE_NAME); BLOCK_RECORD(options_block, SWIFT_INTERFACE_COMPILER_VERSION); diff --git a/stdlib/cmake/modules/SwiftSource.cmake b/stdlib/cmake/modules/SwiftSource.cmake index ed57a7eadedfc..49db1a9f34b7b 100644 --- a/stdlib/cmake/modules/SwiftSource.cmake +++ b/stdlib/cmake/modules/SwiftSource.cmake @@ -628,7 +628,7 @@ function(_compile_swift_files list(APPEND swift_flags "-enable-experimental-feature" "NoncopyableGenerics2") list(APPEND swift_flags "-enable-experimental-feature" "SuppressedAssociatedTypes") list(APPEND swift_flags "-enable-experimental-feature" "SE427NoInferenceOnExtension") - list(APPEND swift_flags "-enable-experimental-feature" "AllowUnsafeAttribute") + list(APPEND swift_flags "-enable-experimental-feature" "NonescapableTypes") list(APPEND swift_flags "-enable-experimental-feature" 
"LifetimeDependence") diff --git a/stdlib/public/Concurrency/AsyncStreamBuffer.swift b/stdlib/public/Concurrency/AsyncStreamBuffer.swift index a7b746d6eab0c..62d515ccf52fb 100644 --- a/stdlib/public/Concurrency/AsyncStreamBuffer.swift +++ b/stdlib/public/Concurrency/AsyncStreamBuffer.swift @@ -54,18 +54,19 @@ func _unlock(_ ptr: UnsafeRawPointer) @available(SwiftStdlib 5.1, *) extension AsyncStream { + @safe internal final class _Storage: @unchecked Sendable { typealias TerminationHandler = @Sendable (Continuation.Termination) -> Void - struct State { - var continuations = [UnsafeContinuation]() + @unsafe struct State { + var continuations = unsafe [UnsafeContinuation]() var pending = _Deque() let limit: Continuation.BufferingPolicy var onTermination: TerminationHandler? var terminal: Bool = false init(limit: Continuation.BufferingPolicy) { - self.limit = limit + unsafe self.limit = limit } } // Stored as a singular structured assignment for initialization @@ -76,32 +77,32 @@ extension AsyncStream { } deinit { - state.onTermination?(.cancelled) + unsafe state.onTermination?(.cancelled) } private func lock() { let ptr = - UnsafeRawPointer(Builtin.projectTailElems(self, UnsafeRawPointer.self)) - _lock(ptr) + unsafe UnsafeRawPointer(Builtin.projectTailElems(self, UnsafeRawPointer.self)) + unsafe _lock(ptr) } private func unlock() { let ptr = - UnsafeRawPointer(Builtin.projectTailElems(self, UnsafeRawPointer.self)) - _unlock(ptr) + unsafe UnsafeRawPointer(Builtin.projectTailElems(self, UnsafeRawPointer.self)) + unsafe _unlock(ptr) } func getOnTermination() -> TerminationHandler? { lock() - let handler = state.onTermination + let handler = unsafe state.onTermination unlock() return handler } func setOnTermination(_ newValue: TerminationHandler?) { lock() - withExtendedLifetime(state.onTermination) { - state.onTermination = newValue + unsafe withExtendedLifetime(state.onTermination) { + unsafe state.onTermination = newValue unlock() } } @@ -109,8 +110,8 @@ extension AsyncStream { @Sendable func cancel() { lock() // swap out the handler before we invoke it to prevent double cancel - let handler = state.onTermination - state.onTermination = nil + let handler = unsafe state.onTermination + unsafe state.onTermination = nil unlock() // handler must be invoked before yielding nil for termination @@ -122,31 +123,31 @@ extension AsyncStream { func yield(_ value: __owned Element) -> Continuation.YieldResult { var result: Continuation.YieldResult lock() - let limit = state.limit - let count = state.pending.count + let limit = unsafe state.limit + let count = unsafe state.pending.count - if !state.continuations.isEmpty { - let continuation = state.continuations.removeFirst() + if unsafe !state.continuations.isEmpty { + let continuation = unsafe state.continuations.removeFirst() if count > 0 { - if !state.terminal { + if unsafe !state.terminal { switch limit { case .unbounded: - state.pending.append(value) + unsafe state.pending.append(value) result = .enqueued(remaining: .max) case .bufferingOldest(let limit): if count < limit { - state.pending.append(value) + unsafe state.pending.append(value) result = .enqueued(remaining: limit - (count + 1)) } else { result = .dropped(value) } case .bufferingNewest(let limit): if count < limit { - state.pending.append(value) + unsafe state.pending.append(value) result = .enqueued(remaining: limit - (count + 1)) } else if count > 0 { - result = .dropped(state.pending.removeFirst()) - state.pending.append(value) + result = unsafe .dropped(state.pending.removeFirst()) + 
unsafe state.pending.append(value) } else { result = .dropped(value) } @@ -154,13 +155,13 @@ extension AsyncStream { } else { result = .terminated } - let toSend = state.pending.removeFirst() + let toSend = unsafe state.pending.removeFirst() unlock() - continuation.resume(returning: toSend) - } else if state.terminal { + unsafe continuation.resume(returning: toSend) + } else if unsafe state.terminal { result = .terminated unlock() - continuation.resume(returning: nil) + unsafe continuation.resume(returning: nil) } else { switch limit { case .unbounded: @@ -172,28 +173,28 @@ extension AsyncStream { } unlock() - continuation.resume(returning: value) + unsafe continuation.resume(returning: value) } } else { - if !state.terminal { + if unsafe !state.terminal { switch limit { case .unbounded: result = .enqueued(remaining: .max) - state.pending.append(value) + unsafe state.pending.append(value) case .bufferingOldest(let limit): if count < limit { result = .enqueued(remaining: limit - (count + 1)) - state.pending.append(value) + unsafe state.pending.append(value) } else { result = .dropped(value) } case .bufferingNewest(let limit): if count < limit { - state.pending.append(value) + unsafe state.pending.append(value) result = .enqueued(remaining: limit - (count + 1)) } else if count > 0 { - result = .dropped(state.pending.removeFirst()) - state.pending.append(value) + result = unsafe .dropped(state.pending.removeFirst()) + unsafe state.pending.append(value) } else { result = .dropped(value) } @@ -208,40 +209,40 @@ extension AsyncStream { func finish() { lock() - let handler = state.onTermination - state.onTermination = nil - state.terminal = true + let handler = unsafe state.onTermination + unsafe state.onTermination = nil + unsafe state.terminal = true - guard !state.continuations.isEmpty else { + guard unsafe !state.continuations.isEmpty else { unlock() handler?(.finished) return } // Hold on to the continuations to resume outside the lock. - let continuations = state.continuations - state.continuations.removeAll() + let continuations = unsafe state.continuations + unsafe state.continuations.removeAll() unlock() handler?(.finished) - for continuation in continuations { - continuation.resume(returning: nil) + for unsafe continuation in unsafe continuations { + unsafe continuation.resume(returning: nil) } } func next(_ continuation: UnsafeContinuation) { lock() - state.continuations.append(continuation) - if state.pending.count > 0 { - let cont = state.continuations.removeFirst() - let toSend = state.pending.removeFirst() + unsafe state.continuations.append(continuation) + if unsafe state.pending.count > 0 { + let cont = unsafe state.continuations.removeFirst() + let toSend = unsafe state.pending.removeFirst() unlock() - cont.resume(returning: toSend) - } else if state.terminal { - let cont = state.continuations.removeFirst() + unsafe cont.resume(returning: toSend) + } else if unsafe state.terminal { + let cont = unsafe state.continuations.removeFirst() unlock() - cont.resume(returning: nil) + unsafe cont.resume(returning: nil) } else { unlock() } @@ -250,7 +251,7 @@ extension AsyncStream { func next() async -> Element? 
{ await withTaskCancellationHandler { - await withUnsafeContinuation { + unsafe await withUnsafeContinuation { next($0) } } onCancel: { [cancel] in @@ -260,18 +261,18 @@ extension AsyncStream { static func create(limit: Continuation.BufferingPolicy) -> _Storage { let minimumCapacity = _lockWordCount() - let storage = Builtin.allocWithTailElems_1( + let storage = unsafe Builtin.allocWithTailElems_1( _Storage.self, minimumCapacity._builtinWordValue, UnsafeRawPointer.self ) let state = - UnsafeMutablePointer(Builtin.addressof(&storage.state)) - state.initialize(to: State(limit: limit)) - let ptr = UnsafeRawPointer( + unsafe UnsafeMutablePointer(Builtin.addressof(&storage.state)) + unsafe state.initialize(to: State(limit: limit)) + let ptr = unsafe UnsafeRawPointer( Builtin.projectTailElems(storage, UnsafeRawPointer.self)) - _lockInit(ptr) + unsafe _lockInit(ptr) return storage } } @@ -279,6 +280,7 @@ extension AsyncStream { @available(SwiftStdlib 5.1, *) extension AsyncThrowingStream { + @safe internal final class _Storage: @unchecked Sendable { typealias TerminationHandler = @Sendable (Continuation.Termination) -> Void enum Terminal { @@ -286,7 +288,7 @@ extension AsyncThrowingStream { case failed(Failure) } - struct State { + @unsafe struct State { var continuation: UnsafeContinuation? var pending = _Deque() let limit: Continuation.BufferingPolicy @@ -294,7 +296,7 @@ extension AsyncThrowingStream { var terminal: Terminal? init(limit: Continuation.BufferingPolicy) { - self.limit = limit + unsafe self.limit = limit } } // Stored as a singular structured assignment for initialization @@ -305,32 +307,32 @@ extension AsyncThrowingStream { } deinit { - state.onTermination?(.cancelled) + unsafe state.onTermination?(.cancelled) } private func lock() { let ptr = - UnsafeRawPointer(Builtin.projectTailElems(self, UnsafeRawPointer.self)) - _lock(ptr) + unsafe UnsafeRawPointer(Builtin.projectTailElems(self, UnsafeRawPointer.self)) + unsafe _lock(ptr) } private func unlock() { let ptr = - UnsafeRawPointer(Builtin.projectTailElems(self, UnsafeRawPointer.self)) - _unlock(ptr) + unsafe UnsafeRawPointer(Builtin.projectTailElems(self, UnsafeRawPointer.self)) + unsafe _unlock(ptr) } func getOnTermination() -> TerminationHandler? { lock() - let handler = state.onTermination + let handler = unsafe state.onTermination unlock() return handler } func setOnTermination(_ newValue: TerminationHandler?) 
{ lock() - withExtendedLifetime(state.onTermination) { - state.onTermination = newValue + unsafe withExtendedLifetime(state.onTermination) { + unsafe state.onTermination = newValue unlock() } } @@ -338,8 +340,8 @@ extension AsyncThrowingStream { @Sendable func cancel() { lock() // swap out the handler before we invoke it to prevent double cancel - let handler = state.onTermination - state.onTermination = nil + let handler = unsafe state.onTermination + unsafe state.onTermination = nil unlock() // handler must be invoked before yielding nil for termination @@ -351,29 +353,29 @@ extension AsyncThrowingStream { func yield(_ value: __owned Element) -> Continuation.YieldResult { var result: Continuation.YieldResult lock() - let limit = state.limit - let count = state.pending.count - if let continuation = state.continuation { + let limit = unsafe state.limit + let count = unsafe state.pending.count + if let continuation = unsafe state.continuation { if count > 0 { - if state.terminal == nil { + if unsafe state.terminal == nil { switch limit { case .unbounded: result = .enqueued(remaining: .max) - state.pending.append(value) + unsafe state.pending.append(value) case .bufferingOldest(let limit): if count < limit { result = .enqueued(remaining: limit - (count + 1)) - state.pending.append(value) + unsafe state.pending.append(value) } else { result = .dropped(value) } case .bufferingNewest(let limit): if count < limit { - state.pending.append(value) + unsafe state.pending.append(value) result = .enqueued(remaining: limit - (count + 1)) } else if count > 0 { - result = .dropped(state.pending.removeFirst()) - state.pending.append(value) + result = unsafe .dropped(state.pending.removeFirst()) + unsafe state.pending.append(value) } else { result = .dropped(value) } @@ -381,20 +383,20 @@ extension AsyncThrowingStream { } else { result = .terminated } - state.continuation = nil - let toSend = state.pending.removeFirst() + unsafe state.continuation = nil + let toSend = unsafe state.pending.removeFirst() unlock() - continuation.resume(returning: toSend) - } else if let terminal = state.terminal { + unsafe continuation.resume(returning: toSend) + } else if let terminal = unsafe state.terminal { result = .terminated - state.continuation = nil - state.terminal = .finished + unsafe state.continuation = nil + unsafe state.terminal = .finished unlock() switch terminal { case .finished: - continuation.resume(returning: nil) + unsafe continuation.resume(returning: nil) case .failed(let error): - continuation.resume(throwing: error) + unsafe continuation.resume(throwing: error) } } else { switch limit { @@ -406,30 +408,30 @@ extension AsyncThrowingStream { result = .enqueued(remaining: limit) } - state.continuation = nil + unsafe state.continuation = nil unlock() - continuation.resume(returning: value) + unsafe continuation.resume(returning: value) } } else { - if state.terminal == nil { + if unsafe state.terminal == nil { switch limit { case .unbounded: result = .enqueued(remaining: .max) - state.pending.append(value) + unsafe state.pending.append(value) case .bufferingOldest(let limit): if count < limit { result = .enqueued(remaining: limit - (count + 1)) - state.pending.append(value) + unsafe state.pending.append(value) } else { result = .dropped(value) } case .bufferingNewest(let limit): if count < limit { - state.pending.append(value) + unsafe state.pending.append(value) result = .enqueued(remaining: limit - (count + 1)) } else if count > 0 { - result = .dropped(state.pending.removeFirst()) - 
state.pending.append(value) + result = unsafe .dropped(state.pending.removeFirst()) + unsafe state.pending.append(value) } else { result = .dropped(value) } @@ -444,32 +446,32 @@ extension AsyncThrowingStream { func finish(throwing error: __owned Failure? = nil) { lock() - let handler = state.onTermination - state.onTermination = nil - if state.terminal == nil { + let handler = unsafe state.onTermination + unsafe state.onTermination = nil + if unsafe state.terminal == nil { if let failure = error { - state.terminal = .failed(failure) + unsafe state.terminal = .failed(failure) } else { - state.terminal = .finished + unsafe state.terminal = .finished } } - if let continuation = state.continuation { - if state.pending.count > 0 { - state.continuation = nil - let toSend = state.pending.removeFirst() + if let continuation = unsafe state.continuation { + if unsafe state.pending.count > 0 { + unsafe state.continuation = nil + let toSend = unsafe state.pending.removeFirst() unlock() handler?(.finished(error)) - continuation.resume(returning: toSend) - } else if let terminal = state.terminal { - state.continuation = nil + unsafe continuation.resume(returning: toSend) + } else if let terminal = unsafe state.terminal { + unsafe state.continuation = nil unlock() handler?(.finished(error)) switch terminal { case .finished: - continuation.resume(returning: nil) + unsafe continuation.resume(returning: nil) case .failed(let error): - continuation.resume(throwing: error) + unsafe continuation.resume(throwing: error) } } else { unlock() @@ -483,22 +485,22 @@ extension AsyncThrowingStream { func next(_ continuation: UnsafeContinuation) { lock() - if state.continuation == nil { - if state.pending.count > 0 { - let toSend = state.pending.removeFirst() + if unsafe state.continuation == nil { + if unsafe state.pending.count > 0 { + let toSend = unsafe state.pending.removeFirst() unlock() - continuation.resume(returning: toSend) - } else if let terminal = state.terminal { - state.terminal = .finished + unsafe continuation.resume(returning: toSend) + } else if let terminal = unsafe state.terminal { + unsafe state.terminal = .finished unlock() switch terminal { case .finished: - continuation.resume(returning: nil) + unsafe continuation.resume(returning: nil) case .failed(let error): - continuation.resume(throwing: error) + unsafe continuation.resume(throwing: error) } } else { - state.continuation = continuation + unsafe state.continuation = unsafe continuation unlock() } } else { @@ -509,7 +511,7 @@ extension AsyncThrowingStream { func next() async throws -> Element? 
{ try await withTaskCancellationHandler { - try await withUnsafeThrowingContinuation { + try unsafe await withUnsafeThrowingContinuation { next($0) } } onCancel: { [cancel] in @@ -519,18 +521,18 @@ extension AsyncThrowingStream { static func create(limit: Continuation.BufferingPolicy) -> _Storage { let minimumCapacity = _lockWordCount() - let storage = Builtin.allocWithTailElems_1( + let storage = unsafe Builtin.allocWithTailElems_1( _Storage.self, minimumCapacity._builtinWordValue, UnsafeRawPointer.self ) let state = - UnsafeMutablePointer(Builtin.addressof(&storage.state)) - state.initialize(to: State(limit: limit)) - let ptr = UnsafeRawPointer( + unsafe UnsafeMutablePointer(Builtin.addressof(&storage.state)) + unsafe state.initialize(to: State(limit: limit)) + let ptr = unsafe UnsafeRawPointer( Builtin.projectTailElems(storage, UnsafeRawPointer.self)) - _lockInit(ptr) + unsafe _lockInit(ptr) return storage } } @@ -545,14 +547,14 @@ final class _AsyncStreamCriticalStorage: @unchecked Sendable { private func lock() { let ptr = - UnsafeRawPointer(Builtin.projectTailElems(self, UnsafeRawPointer.self)) - _lock(ptr) + unsafe UnsafeRawPointer(Builtin.projectTailElems(self, UnsafeRawPointer.self)) + unsafe _lock(ptr) } private func unlock() { let ptr = - UnsafeRawPointer(Builtin.projectTailElems(self, UnsafeRawPointer.self)) - _unlock(ptr) + unsafe UnsafeRawPointer(Builtin.projectTailElems(self, UnsafeRawPointer.self)) + unsafe _unlock(ptr) } var value: Contents { @@ -574,18 +576,18 @@ final class _AsyncStreamCriticalStorage: @unchecked Sendable { static func create(_ initial: Contents) -> _AsyncStreamCriticalStorage { let minimumCapacity = _lockWordCount() - let storage = Builtin.allocWithTailElems_1( + let storage = unsafe Builtin.allocWithTailElems_1( _AsyncStreamCriticalStorage.self, minimumCapacity._builtinWordValue, UnsafeRawPointer.self ) let state = - UnsafeMutablePointer(Builtin.addressof(&storage._value)) - state.initialize(to: initial) - let ptr = UnsafeRawPointer( + unsafe UnsafeMutablePointer(Builtin.addressof(&storage._value)) + unsafe state.initialize(to: initial) + let ptr = unsafe UnsafeRawPointer( Builtin.projectTailElems(storage, UnsafeRawPointer.self)) - _lockInit(ptr) + unsafe _lockInit(ptr) return storage } } diff --git a/stdlib/public/Concurrency/CMakeLists.txt b/stdlib/public/Concurrency/CMakeLists.txt index a38f4230192ad..9c06fe28fdfe3 100644 --- a/stdlib/public/Concurrency/CMakeLists.txt +++ b/stdlib/public/Concurrency/CMakeLists.txt @@ -65,6 +65,8 @@ list(APPEND SWIFT_RUNTIME_CONCURRENCY_SWIFT_FLAGS "IsolatedAny" ) +list(APPEND SWIFT_RUNTIME_CONCURRENCY_SWIFT_FLAGS "-strict-memory-safety") + list(APPEND SWIFT_RUNTIME_CONCURRENCY_C_FLAGS "-D__STDC_WANT_LIB_EXT1__=1") diff --git a/stdlib/public/Concurrency/CheckedContinuation.swift b/stdlib/public/Concurrency/CheckedContinuation.swift index 15781e505e1bb..a4f0f1553d050 100644 --- a/stdlib/public/Concurrency/CheckedContinuation.swift +++ b/stdlib/public/Concurrency/CheckedContinuation.swift @@ -27,59 +27,59 @@ internal final class CheckedContinuationCanary: @unchecked Sendable { private static func _create(continuation: UnsafeRawPointer, function: String) -> CheckedContinuationCanary { - let instance = Builtin.allocWithTailElems_1(CheckedContinuationCanary.self, + let instance = unsafe Builtin.allocWithTailElems_1(CheckedContinuationCanary.self, 1._builtinWordValue, (UnsafeRawPointer?, String).self) - instance._continuationPtr.initialize(to: continuation) - instance._functionPtr.initialize(to: function) + unsafe 
instance._continuationPtr.initialize(to: continuation) + unsafe instance._functionPtr.initialize(to: function) return instance } private var _continuationPtr: UnsafeMutablePointer { - return UnsafeMutablePointer( + return unsafe UnsafeMutablePointer( Builtin.projectTailElems(self, (UnsafeRawPointer?, String).self)) } private var _functionPtr: UnsafeMutablePointer { - let tailPtr = UnsafeMutableRawPointer( + let tailPtr = unsafe UnsafeMutableRawPointer( Builtin.projectTailElems(self, (UnsafeRawPointer?, String).self)) - let functionPtr = tailPtr + let functionPtr = unsafe tailPtr + MemoryLayout<(UnsafeRawPointer?, String)>.offset(of: \(UnsafeRawPointer?, String).1)! - return functionPtr.assumingMemoryBound(to: String.self) + return unsafe functionPtr.assumingMemoryBound(to: String.self) } internal static func create(continuation: UnsafeContinuation, function: String) -> CheckedContinuationCanary { - return _create( + return unsafe _create( continuation: unsafeBitCast(continuation, to: UnsafeRawPointer.self), function: function) } internal var function: String { - return _functionPtr.pointee + return unsafe _functionPtr.pointee } // Take the continuation away from the container, or return nil if it's // already been taken. internal func takeContinuation() -> UnsafeContinuation? { // Atomically exchange the current continuation value with a null pointer. - let rawContinuationPtr = unsafeBitCast(_continuationPtr, + let rawContinuationPtr = unsafe unsafeBitCast(_continuationPtr, to: Builtin.RawPointer.self) let rawOld = Builtin.atomicrmw_xchg_seqcst_Word(rawContinuationPtr, 0._builtinWordValue) - return unsafeBitCast(rawOld, to: UnsafeContinuation?.self) + return unsafe unsafeBitCast(rawOld, to: UnsafeContinuation?.self) } deinit { - _functionPtr.deinitialize(count: 1) + unsafe _functionPtr.deinitialize(count: 1) // Log if the continuation was never consumed before the instance was // destructed. - if _continuationPtr.pointee != nil { + if unsafe _continuationPtr.pointee != nil { #if !$Embedded - logFailedCheck("SWIFT TASK CONTINUATION MISUSE: \(function) leaked its continuation without resuming it. This may cause tasks waiting on it to remain suspended forever.\n") + unsafe logFailedCheck("SWIFT TASK CONTINUATION MISUSE: \(function) leaked its continuation without resuming it. This may cause tasks waiting on it to remain suspended forever.\n") #else fatalError("SWIFT TASK CONTINUATION MISUSE") #endif @@ -144,7 +144,7 @@ public struct CheckedContinuation: Sendable { /// source for the continuation, used to identify the continuation in /// runtime diagnostics related to misuse of this continuation. public init(continuation: UnsafeContinuation, function: String = #function) { - canary = CheckedContinuationCanary.create( + canary = unsafe CheckedContinuationCanary.create( continuation: continuation, function: function) } @@ -162,8 +162,8 @@ public struct CheckedContinuation: Sendable { /// the caller. The task continues executing when its executor is /// able to reschedule it. public func resume(returning value: sending T) { - if let c: UnsafeContinuation = canary.takeContinuation() { - c.resume(returning: value) + if let c: UnsafeContinuation = unsafe canary.takeContinuation() { + unsafe c.resume(returning: value) } else { #if !$Embedded fatalError("SWIFT TASK CONTINUATION MISUSE: \(canary.function) tried to resume its continuation more than once, returning \(value)!\n") @@ -186,8 +186,8 @@ public struct CheckedContinuation: Sendable { /// the caller. 
The task continues executing when its executor is /// able to reschedule it. public func resume(throwing error: __owned E) { - if let c: UnsafeContinuation = canary.takeContinuation() { - c.resume(throwing: error) + if let c: UnsafeContinuation = unsafe canary.takeContinuation() { + unsafe c.resume(throwing: error) } else { #if !$Embedded fatalError("SWIFT TASK CONTINUATION MISUSE: \(canary.function) tried to resume its continuation more than once, throwing \(error)!\n") @@ -301,9 +301,9 @@ public func withCheckedContinuation( _ body: (CheckedContinuation) -> Void ) async -> sending T { return await Builtin.withUnsafeContinuation { - let unsafeContinuation = UnsafeContinuation($0) - return body(CheckedContinuation(continuation: unsafeContinuation, - function: function)) + let unsafeContinuation = unsafe UnsafeContinuation($0) + return body(unsafe CheckedContinuation(continuation: unsafeContinuation, + function: function)) } } @@ -320,8 +320,8 @@ public func _unsafeInheritExecutor_withCheckedContinuation( function: String = #function, _ body: (CheckedContinuation) -> Void ) async -> T { - return await withUnsafeContinuation { - body(CheckedContinuation(continuation: $0, function: function)) + return await unsafe withUnsafeContinuation { + body(unsafe CheckedContinuation(continuation: $0, function: function)) } } @@ -365,9 +365,9 @@ public func withCheckedThrowingContinuation( _ body: (CheckedContinuation) -> Void ) async throws -> sending T { return try await Builtin.withUnsafeThrowingContinuation { - let unsafeContinuation = UnsafeContinuation($0) - return body(CheckedContinuation(continuation: unsafeContinuation, - function: function)) + let unsafeContinuation = unsafe UnsafeContinuation($0) + return body(unsafe CheckedContinuation(continuation: unsafeContinuation, + function: function)) } } @@ -384,8 +384,8 @@ public func _unsafeInheritExecutor_withCheckedThrowingContinuation( function: String = #function, _ body: (CheckedContinuation) -> Void ) async throws -> T { - return try await withUnsafeThrowingContinuation { - body(CheckedContinuation(continuation: $0, function: function)) + return try await unsafe withUnsafeThrowingContinuation { + body(unsafe CheckedContinuation(continuation: $0, function: function)) } } @@ -397,7 +397,7 @@ public func _unsafeInheritExecutor_withCheckedThrowingContinuation( internal func _createCheckedContinuation( _ continuation: __owned UnsafeContinuation ) -> CheckedContinuation { - return CheckedContinuation(continuation: continuation) + return unsafe CheckedContinuation(continuation: continuation) } @available(SwiftStdlib 5.1, *) @@ -405,7 +405,7 @@ internal func _createCheckedContinuation( internal func _createCheckedThrowingContinuation( _ continuation: __owned UnsafeContinuation ) -> CheckedContinuation { - return CheckedContinuation(continuation: continuation) + return unsafe CheckedContinuation(continuation: continuation) } @available(SwiftStdlib 5.1, *) diff --git a/stdlib/public/Concurrency/ContinuousClock.swift b/stdlib/public/Concurrency/ContinuousClock.swift index cea25102abea0..604f0ff022430 100644 --- a/stdlib/public/Concurrency/ContinuousClock.swift +++ b/stdlib/public/Concurrency/ContinuousClock.swift @@ -75,7 +75,7 @@ extension ContinuousClock: Clock { public var minimumResolution: Swift.Duration { var seconds = Int64(0) var nanoseconds = Int64(0) - _getClockRes( + unsafe _getClockRes( seconds: &seconds, nanoseconds: &nanoseconds, clock: _ClockID.continuous.rawValue) @@ -86,7 +86,7 @@ extension ContinuousClock: Clock { public static var now: 
ContinuousClock.Instant { var seconds = Int64(0) var nanoseconds = Int64(0) - _getTime( + unsafe _getTime( seconds: &seconds, nanoseconds: &nanoseconds, clock: _ClockID.continuous.rawValue) diff --git a/stdlib/public/Concurrency/Deque/Deque+Collection.swift b/stdlib/public/Concurrency/Deque/Deque+Collection.swift index 48532298afec7..e3c8d9574b090 100644 --- a/stdlib/public/Concurrency/Deque/Deque+Collection.swift +++ b/stdlib/public/Concurrency/Deque/Deque+Collection.swift @@ -35,30 +35,30 @@ extension _Deque: Sequence { } internal init(_base: _Deque) { - self = _base._storage.read { handle in - let start = handle.startSlot - let end = Swift.min(start.advanced(by: handle.count), handle.limSlot) + self = unsafe _base._storage.read { handle in + let start = unsafe handle.startSlot + let end = unsafe Swift.min(start.advanced(by: handle.count), handle.limSlot) return Self(_storage: _base._storage, start: start, end: end) } } internal init(_base: _Deque, from index: Int) { - self = _base._storage.read { handle in - assert(index >= 0 && index <= handle.count) - let start = handle.slot(forOffset: index) - if index == handle.count { + self = unsafe _base._storage.read { handle in + unsafe assert(index >= 0 && index <= handle.count) + let start = unsafe handle.slot(forOffset: index) + if unsafe index == handle.count { return Self(_storage: _base._storage, start: start, end: start) } - var end = handle.endSlot - if start >= end { end = handle.limSlot } + var end = unsafe handle.endSlot + if start >= end { end = unsafe handle.limSlot } return Self(_storage: _base._storage, start: start, end: end) } } internal mutating func _swapSegment() -> Bool { assert(_nextSlot == _endSlot) - return _storage.read { handle in - let end = handle.endSlot + return unsafe _storage.read { handle in + let end = unsafe handle.endSlot if end == .zero || end == _nextSlot { return false } @@ -79,8 +79,8 @@ extension _Deque: Sequence { assert(_nextSlot < _endSlot) let slot = _nextSlot _nextSlot = _nextSlot.advanced(by: 1) - return _storage.read { handle in - return handle.ptr(at: slot).pointee + return unsafe _storage.read { handle in + return unsafe handle.ptr(at: slot).pointee } } } @@ -94,16 +94,16 @@ extension _Deque: Sequence { __consuming func _copyToContiguousArray() -> ContiguousArray { ContiguousArray(unsafeUninitializedCapacity: _storage.count) { target, count in - _storage.read { source in - let segments = source.segments() - let c = segments.first.count - target[.. ) -> (Iterator, UnsafeMutableBufferPointer.Index) { - _storage.read { source in - let segments = source.segments() - let c1 = Swift.min(segments.first.count, target.count) - target[.. c1, let second = segments.second else { + unsafe _storage.read { source in + let segments = unsafe source.segments() + let c1 = unsafe Swift.min(segments.first.count, target.count) + unsafe target[.. c1, let second = unsafe segments.second else { return (Iterator(_base: self, from: c1), c1) } let c2 = Swift.min(second.count, target.count - c1) - target[c1 ..< c1 + c2]._rebased()._initialize(from: second.prefix(c2)._rebased()) + unsafe target[c1 ..< c1 + c2]._rebased()._initialize(from: second.prefix(c2)._rebased()) return (Iterator(_base: self, from: c1 + c2), c1 + c2) } } @@ -143,10 +143,10 @@ extension _Deque: Sequence { func withContiguousStorageIfAvailable( _ body: (UnsafeBufferPointer) throws -> R ) rethrows -> R? 
{ - return try _storage.read { handle in - let endSlot = handle.startSlot.advanced(by: handle.count) - guard endSlot.position <= handle.capacity else { return nil } - return try body(handle.buffer(for: handle.startSlot ..< endSlot)) + return try unsafe _storage.read { handle in + let endSlot = unsafe handle.startSlot.advanced(by: handle.count) + guard unsafe endSlot.position <= handle.capacity else { return nil } + return unsafe try body(handle.buffer(for: handle.startSlot ..< endSlot)) } } } @@ -318,14 +318,14 @@ extension _Deque: RandomAccessCollection { subscript(index: Int) -> Element { get { precondition(index >= 0 && index < count, "Index out of bounds") - return _storage.read { $0.ptr(at: $0.slot(forOffset: index)).pointee } + return unsafe _storage.read { unsafe $0.ptr(at: $0.slot(forOffset: index)).pointee } } set { precondition(index >= 0 && index < count, "Index out of bounds") _storage.ensureUnique() - _storage.update { handle in - let slot = handle.slot(forOffset: index) - handle.ptr(at: slot).pointee = newValue + unsafe _storage.update { handle in + let slot = unsafe handle.slot(forOffset: index) + unsafe handle.ptr(at: slot).pointee = newValue } } _modify { @@ -334,13 +334,13 @@ extension _Deque: RandomAccessCollection { // We technically aren't supposed to escape storage pointers out of a // managed buffer, so we escape a `(slot, value)` pair instead, leaving // the corresponding slot temporarily uninitialized. - var (slot, value) = _storage.update { handle -> (_Slot, Element) in - let slot = handle.slot(forOffset: index) - return (slot, handle.ptr(at: slot).move()) + var (slot, value) = unsafe _storage.update { handle -> (_Slot, Element) in + let slot = unsafe handle.slot(forOffset: index) + return unsafe (slot, handle.ptr(at: slot).move()) } defer { - _storage.update { handle in - handle.ptr(at: slot).initialize(to: value) + unsafe _storage.update { handle in + unsafe handle.ptr(at: slot).initialize(to: value) } } yield &value @@ -385,10 +385,10 @@ extension _Deque: MutableCollection { precondition(i >= 0 && i < count, "Index out of bounds") precondition(j >= 0 && j < count, "Index out of bounds") _storage.ensureUnique() - _storage.update { handle in - let slot1 = handle.slot(forOffset: i) - let slot2 = handle.slot(forOffset: j) - handle.mutableBuffer.swapAt(slot1.position, slot2.position) + unsafe _storage.update { handle in + let slot1 = unsafe handle.slot(forOffset: i) + let slot2 = unsafe handle.slot(forOffset: j) + unsafe handle.mutableBuffer.swapAt(slot1.position, slot2.position) } } @@ -416,26 +416,26 @@ extension _Deque: MutableCollection { _ body: (inout UnsafeMutableBufferPointer) throws -> R ) rethrows -> R? { _storage.ensureUnique() - return try _storage.update { handle in - let endSlot = handle.startSlot.advanced(by: handle.count) - guard endSlot.position <= handle.capacity else { + return try unsafe _storage.update { handle in + let endSlot = unsafe handle.startSlot.advanced(by: handle.count) + guard unsafe endSlot.position <= handle.capacity else { // FIXME: Rotate storage such that it becomes contiguous. 
return nil } - let original = handle.mutableBuffer(for: handle.startSlot ..< endSlot) - var extract = original + let original = unsafe handle.mutableBuffer(for: handle.startSlot ..< endSlot) + var extract = unsafe original defer { - precondition(extract.baseAddress == original.baseAddress && extract.count == original.count, + unsafe precondition(extract.baseAddress == original.baseAddress && extract.count == original.count, "Closure must not replace the provided buffer") } - return try body(&extract) + return unsafe try body(&extract) } } mutating func _withUnsafeMutableBufferPointerIfSupported( _ body: (inout UnsafeMutableBufferPointer) throws -> R ) rethrows -> R? { - return try withContiguousMutableStorageIfAvailable(body) + return try unsafe withContiguousMutableStorageIfAvailable(body) } } @@ -503,16 +503,16 @@ extension _Deque: RangeReplaceableCollection { let targetCut = subrange.lowerBound + replacementCount let sourceCut = newElements.index(newElements.startIndex, offsetBy: replacementCount) - _storage.update { target in - target.uncheckedReplaceInPlace( + unsafe _storage.update { target in + unsafe target.uncheckedReplaceInPlace( inOffsets: subrange.lowerBound ..< targetCut, with: newElements[.. 0 { - target.uncheckedInsert( + unsafe target.uncheckedInsert( contentsOf: newElements[sourceCut...], count: deltaCount, atOffset: targetCut) @@ -532,12 +532,12 @@ extension _Deque: RangeReplaceableCollection { init(repeating repeatedValue: Element, count: Int) { precondition(count >= 0) self.init(minimumCapacity: count) - _storage.update { handle in - assert(handle.startSlot == .zero) + unsafe _storage.update { handle in + unsafe assert(handle.startSlot == .zero) if count > 0 { - handle.ptr(at: .zero).initialize(repeating: repeatedValue, count: count) + unsafe handle.ptr(at: .zero).initialize(repeating: repeatedValue, count: count) } - handle.count = count + unsafe handle.count = count } } @@ -562,16 +562,16 @@ extension _Deque: RangeReplaceableCollection { let c = elements.count guard c > 0 else { _storage = _Storage(); return } self._storage = _Storage(minimumCapacity: c) - _storage.update { handle in - assert(handle.startSlot == .zero) - let target = handle.mutableBuffer(for: .zero ..< _Slot(at: c)) - let done: Void? = elements._withContiguousStorageIfAvailable_SR14663 { source in - target._initialize(from: source) + unsafe _storage.update { handle in + unsafe assert(handle.startSlot == .zero) + let target = unsafe handle.mutableBuffer(for: .zero ..< _Slot(at: c)) + let done: Void? = unsafe elements._withContiguousStorageIfAvailable_SR14663 { source in + unsafe target._initialize(from: source) } if done == nil { - target._initialize(from: elements) + unsafe target._initialize(from: elements) } - handle.count = c + unsafe handle.count = c } } @@ -600,8 +600,8 @@ extension _Deque: RangeReplaceableCollection { /// - SeeAlso: `prepend(_:)` mutating func append(_ newElement: Element) { _storage.ensureUnique(minimumCapacity: count + 1) - _storage.update { - $0.uncheckedAppend(newElement) + unsafe _storage.update { + unsafe $0.uncheckedAppend(newElement) } } @@ -620,9 +620,9 @@ extension _Deque: RangeReplaceableCollection { /// /// - Complexity: Amortized O(`newElements.count`). mutating func append(contentsOf newElements: S) where S.Element == Element { - let done: Void? = newElements._withContiguousStorageIfAvailable_SR14663 { source in + let done: Void? 
= unsafe newElements._withContiguousStorageIfAvailable_SR14663 { source in _storage.ensureUnique(minimumCapacity: count + source.count) - _storage.update { $0.uncheckedAppend(contentsOf: source) } + unsafe _storage.update { unsafe $0.uncheckedAppend(contentsOf: source) } } if done != nil { return @@ -630,18 +630,18 @@ extension _Deque: RangeReplaceableCollection { let underestimatedCount = newElements.underestimatedCount reserveCapacity(count + underestimatedCount) - var it: S.Iterator = _storage.update { target in - let gaps = target.availableSegments() - let (it, copied) = gaps.initialize(fromSequencePrefix: newElements) - target.count += copied + var it: S.Iterator = unsafe _storage.update { target in + let gaps = unsafe target.availableSegments() + let (it, copied) = unsafe gaps.initialize(fromSequencePrefix: newElements) + unsafe target.count += copied return it } while let next = it.next() { _storage.ensureUnique(minimumCapacity: count + 1) - _storage.update { target in - target.uncheckedAppend(next) - let gaps = target.availableSegments() - target.count += gaps.initialize(fromPrefixOf: &it) + unsafe _storage.update { target in + unsafe target.uncheckedAppend(next) + let gaps = unsafe target.availableSegments() + unsafe target.count += gaps.initialize(fromPrefixOf: &it) } } } @@ -661,19 +661,19 @@ extension _Deque: RangeReplaceableCollection { /// /// - Complexity: Amortized O(`newElements.count`). mutating func append(contentsOf newElements: C) where C.Element == Element { - let done: Void? = newElements._withContiguousStorageIfAvailable_SR14663 { source in + let done: Void? = unsafe newElements._withContiguousStorageIfAvailable_SR14663 { source in _storage.ensureUnique(minimumCapacity: count + source.count) - _storage.update { $0.uncheckedAppend(contentsOf: source) } + unsafe _storage.update { unsafe $0.uncheckedAppend(contentsOf: source) } } guard done == nil else { return } let c = newElements.count guard c > 0 else { return } reserveCapacity(count + c) - _storage.update { target in - let gaps = target.availableSegments().prefix(c) - gaps.initialize(from: newElements) - target.count += c + unsafe _storage.update { target in + let gaps = unsafe target.availableSegments().prefix(c) + unsafe gaps.initialize(from: newElements) + unsafe target.count += c } } @@ -696,18 +696,18 @@ extension _Deque: RangeReplaceableCollection { precondition(index >= 0 && index <= count, "Can't insert element at invalid index") _storage.ensureUnique(minimumCapacity: count + 1) - _storage.update { target in + unsafe _storage.update { target in if index == 0 { - target.uncheckedPrepend(newElement) + unsafe target.uncheckedPrepend(newElement) return } if index == count { - target.uncheckedAppend(newElement) + unsafe target.uncheckedAppend(newElement) return } - let gap = target.openGap(ofSize: 1, atOffset: index) - assert(gap.first.count == 1) - gap.first.baseAddress!.initialize(to: newElement) + let gap = unsafe target.openGap(ofSize: 1, atOffset: index) + unsafe assert(gap.first.count == 1) + unsafe gap.first.baseAddress!.initialize(to: newElement) } } @@ -735,8 +735,8 @@ extension _Deque: RangeReplaceableCollection { "Can't insert elements at an invalid index") let newCount = newElements.count _storage.ensureUnique(minimumCapacity: count + newCount) - _storage.update { target in - target.uncheckedInsert(contentsOf: newElements, count: newCount, atOffset: index) + unsafe _storage.update { target in + unsafe target.uncheckedInsert(contentsOf: newElements, count: newCount, atOffset: index) } } @@ -760,10 
+760,10 @@ extension _Deque: RangeReplaceableCollection { precondition(index >= 0 && index < self.count, "Index out of bounds") // FIXME: Implement storage shrinking _storage.ensureUnique() - return _storage.update { target in + return unsafe _storage.update { target in // FIXME: Add direct implementation & see if it makes a difference let result = self[index] - target.uncheckedRemove(offsets: index ..< index + 1) + unsafe target.uncheckedRemove(offsets: index ..< index + 1) return result } } @@ -785,20 +785,20 @@ extension _Deque: RangeReplaceableCollection { precondition(bounds.lowerBound >= 0 && bounds.upperBound <= self.count, "Index range out of bounds") _storage.ensureUnique() - _storage.update { $0.uncheckedRemove(offsets: bounds) } + unsafe _storage.update { unsafe $0.uncheckedRemove(offsets: bounds) } } mutating func _customRemoveLast() -> Element? { precondition(!isEmpty, "Cannot remove last element of an empty Deque") _storage.ensureUnique() - return _storage.update { $0.uncheckedRemoveLast() } + return unsafe _storage.update { unsafe $0.uncheckedRemoveLast() } } mutating func _customRemoveLast(_ n: Int) -> Bool { precondition(n >= 0, "Can't remove a negative number of elements") precondition(n <= count, "Can't remove more elements than there are in the Collection") _storage.ensureUnique() - _storage.update { $0.uncheckedRemoveLast(n) } + unsafe _storage.update { unsafe $0.uncheckedRemoveLast(n) } return true } @@ -814,7 +814,7 @@ extension _Deque: RangeReplaceableCollection { mutating func removeFirst() -> Element { precondition(!isEmpty, "Cannot remove first element of an empty Deque") _storage.ensureUnique() - return _storage.update { $0.uncheckedRemoveFirst() } + return unsafe _storage.update { unsafe $0.uncheckedRemoveFirst() } } /// Removes the specified number of elements from the beginning of the deque. @@ -829,7 +829,7 @@ extension _Deque: RangeReplaceableCollection { precondition(n >= 0, "Can't remove a negative number of elements") precondition(n <= count, "Can't remove more elements than there are in the Collection") _storage.ensureUnique() - return _storage.update { $0.uncheckedRemoveFirst(n) } + return unsafe _storage.update { unsafe $0.uncheckedRemoveFirst(n) } } /// Removes all elements from the deque. 
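All of the hunks in this file follow the same pattern: every closure or call whose operands involve an unsafe pointer type is acknowledged with an `unsafe` expression so the file compiles cleanly under `-strict-memory-safety`. As a standalone illustration only (a hypothetical helper, not part of this patch), the same acknowledgement pattern on a small function might look like this:

```swift
// Illustrative sketch only; a hypothetical helper (not part of this patch)
// compiled with -strict-memory-safety. `@unsafe` marks the declaration as
// memory-unsafe because its signature involves UnsafeBufferPointer.
@unsafe func firstElement(of buffer: UnsafeBufferPointer<Int>) -> Int? {
  guard buffer.count > 0 else { return nil }
  // Reading through the pointer is a memory-unsafe operation, so the
  // expression is acknowledged with `unsafe`, mirroring the storage
  // accesses in the hunks above.
  return unsafe buffer[0]
}

let numbers = [10, 20, 30]
let first = numbers.withUnsafeBufferPointer { buffer in
  // The call involves an unsafe type, so it is acknowledged as well.
  unsafe firstElement(of: buffer)
}
print(first ?? -1) // prints 10
```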
@@ -841,7 +841,7 @@ extension _Deque: RangeReplaceableCollection { mutating func removeAll(keepingCapacity keepCapacity: Bool = false) { if keepCapacity { _storage.ensureUnique() - _storage.update { $0.uncheckedRemoveAll() } + unsafe _storage.update { unsafe $0.uncheckedRemoveAll() } } else { self = _Deque() } diff --git a/stdlib/public/Concurrency/Deque/Deque+Extras.swift b/stdlib/public/Concurrency/Deque/Deque+Extras.swift index 603a6f7d2a933..05ec89cc38581 100644 --- a/stdlib/public/Concurrency/Deque/Deque+Extras.swift +++ b/stdlib/public/Concurrency/Deque/Deque+Extras.swift @@ -46,19 +46,19 @@ extension _Deque { (inout UnsafeMutableBufferPointer, inout Int) throws -> Void ) rethrows { self._storage = .init(minimumCapacity: capacity) - try _storage.update { handle in - handle.startSlot = .zero + try unsafe _storage.update { handle in + unsafe handle.startSlot = .zero var count = 0 - var buffer = handle.mutableBuffer(for: .zero ..< _Slot(at: capacity)) + var buffer = unsafe handle.mutableBuffer(for: .zero ..< _Slot(at: capacity)) defer { precondition(count <= capacity, "Initialized count set to greater than specified capacity") - let b = handle.mutableBuffer(for: .zero ..< _Slot(at: capacity)) - precondition(buffer.baseAddress == b.baseAddress && buffer.count == b.count, + let b = unsafe handle.mutableBuffer(for: .zero ..< _Slot(at: capacity)) + unsafe precondition(buffer.baseAddress == b.baseAddress && buffer.count == b.count, "Initializer relocated Deque storage") - handle.count = count + unsafe handle.count = count } - try initializer(&buffer, &count) + try unsafe initializer(&buffer, &count) } } } @@ -76,8 +76,8 @@ extension _Deque { // where Self == Self.SubSequence guard count > 0 else { return nil } _storage.ensureUnique() - return _storage.update { - $0.uncheckedRemoveFirst() + return unsafe _storage.update { + unsafe $0.uncheckedRemoveFirst() } } @@ -110,8 +110,8 @@ extension _Deque { /// - SeeAlso: `append(_:)` mutating func prepend(_ newElement: Element) { _storage.ensureUnique(minimumCapacity: count + 1) - return _storage.update { - $0.uncheckedPrepend(newElement) + return unsafe _storage.update { + unsafe $0.uncheckedPrepend(newElement) } } @@ -132,20 +132,20 @@ extension _Deque { /// /// - SeeAlso: `append(contentsOf:)` mutating func prepend(contentsOf newElements: C) where C.Element == Element { - let done: Void? = newElements._withContiguousStorageIfAvailable_SR14663 { source in + let done: Void? = unsafe newElements._withContiguousStorageIfAvailable_SR14663 { source in _storage.ensureUnique(minimumCapacity: count + source.count) - _storage.update { $0.uncheckedPrepend(contentsOf: source) } + unsafe _storage.update { unsafe $0.uncheckedPrepend(contentsOf: source) } } guard done == nil else { return } let c = newElements.count guard c > 0 else { return } _storage.ensureUnique(minimumCapacity: count + c) - _storage.update { target in - let gaps = target.availableSegments().suffix(c) - gaps.initialize(from: newElements) - target.count += c - target.startSlot = target.slot(target.startSlot, offsetBy: -c) + unsafe _storage.update { target in + let gaps = unsafe target.availableSegments().suffix(c) + unsafe gaps.initialize(from: newElements) + unsafe target.count += c + unsafe target.startSlot = unsafe target.slot(target.startSlot, offsetBy: -c) } } @@ -166,9 +166,9 @@ extension _Deque { /// /// - SeeAlso: `append(contentsOf:)` mutating func prepend(contentsOf newElements: S) where S.Element == Element { - let done: Void? 
= newElements._withContiguousStorageIfAvailable_SR14663 { source in + let done: Void? = unsafe newElements._withContiguousStorageIfAvailable_SR14663 { source in _storage.ensureUnique(minimumCapacity: count + source.count) - _storage.update { $0.uncheckedPrepend(contentsOf: source) } + unsafe _storage.update { unsafe $0.uncheckedPrepend(contentsOf: source) } } guard done == nil else { return } @@ -176,11 +176,11 @@ extension _Deque { self.append(contentsOf: newElements) let newCount = self.count let c = newCount - originalCount - _storage.update { target in - target.startSlot = target.slot(forOffset: originalCount) - target.count = target.capacity - target.closeGap(offsets: c ..< c + (target.capacity - newCount)) - assert(target.count == newCount) + unsafe _storage.update { target in + unsafe target.startSlot = unsafe target.slot(forOffset: originalCount) + unsafe target.count = unsafe target.capacity + unsafe target.closeGap(offsets: c ..< c + (target.capacity - newCount)) + unsafe assert(target.count == newCount) } } } diff --git a/stdlib/public/Concurrency/Deque/Deque+Storage.swift b/stdlib/public/Concurrency/Deque/Deque+Storage.swift index 6047bdff2dc92..9ccd756794554 100644 --- a/stdlib/public/Concurrency/Deque/Deque+Storage.swift +++ b/stdlib/public/Concurrency/Deque/Deque+Storage.swift @@ -71,15 +71,15 @@ extension _Deque._Storage { internal var capacity: Int { - _buffer.withUnsafeMutablePointerToHeader { $0.pointee.capacity } + unsafe _buffer.withUnsafeMutablePointerToHeader { unsafe $0.pointee.capacity } } internal var count: Int { - _buffer.withUnsafeMutablePointerToHeader { $0.pointee.count } + unsafe _buffer.withUnsafeMutablePointerToHeader { unsafe $0.pointee.count } } internal var startSlot: _DequeSlot { - _buffer.withUnsafeMutablePointerToHeader { $0.pointee.startSlot + unsafe _buffer.withUnsafeMutablePointerToHeader { unsafe $0.pointee.startSlot } } } @@ -90,20 +90,20 @@ extension _Deque._Storage { internal typealias _UnsafeHandle = _Deque._UnsafeHandle internal func read(_ body: (_UnsafeHandle) throws -> R) rethrows -> R { - try _buffer.withUnsafeMutablePointers { header, elements in - let handle = _UnsafeHandle(header: header, + try unsafe _buffer.withUnsafeMutablePointers { header, elements in + let handle = unsafe _UnsafeHandle(header: header, elements: elements, isMutable: false) - return try body(handle) + return try unsafe body(handle) } } internal func update(_ body: (_UnsafeHandle) throws -> R) rethrows -> R { - try _buffer.withUnsafeMutablePointers { header, elements in - let handle = _UnsafeHandle(header: header, + try unsafe _buffer.withUnsafeMutablePointers { header, elements in + let handle = unsafe _UnsafeHandle(header: header, elements: elements, isMutable: true) - return try body(handle) + return try unsafe body(handle) } } } @@ -124,7 +124,7 @@ extension _Deque._Storage { } internal mutating func _makeUniqueCopy() { - self = self.read { $0.copyElements() } + self = unsafe self.read { unsafe $0.copyElements() } } /// The growth factor to use to increase storage size to make place for an @@ -165,16 +165,16 @@ extension _Deque._Storage { ) { if capacity >= minimumCapacity { assert(!self.isUnique()) - self = self.read { $0.copyElements() } + self = unsafe self.read { unsafe $0.copyElements() } } else if isUnique() { let minimumCapacity = _growCapacity(to: minimumCapacity, linearly: linearGrowth) - self = self.update { source in - source.moveElements(minimumCapacity: minimumCapacity) + self = unsafe self.update { source in + unsafe 
source.moveElements(minimumCapacity: minimumCapacity) } } else { let minimumCapacity = _growCapacity(to: minimumCapacity, linearly: linearGrowth) - self = self.read { source in - source.copyElements(minimumCapacity: minimumCapacity) + self = unsafe self.read { source in + unsafe source.copyElements(minimumCapacity: minimumCapacity) } } } diff --git a/stdlib/public/Concurrency/Deque/Deque+Testing.swift b/stdlib/public/Concurrency/Deque/Deque+Testing.swift index cbc514048297d..37dbec4ee5bc8 100644 --- a/stdlib/public/Concurrency/Deque/Deque+Testing.swift +++ b/stdlib/public/Concurrency/Deque/Deque+Testing.swift @@ -60,15 +60,15 @@ extension _Deque { let buffer = _DequeBuffer.create(minimumCapacity: capacity) { _ in _DequeBufferHeader(capacity: capacity, count: contents.count, startSlot: startSlot) } - let storage = _Deque._Storage(unsafeDowncast(buffer, to: _DequeBuffer.self)) + let storage = unsafe _Deque._Storage(unsafeDowncast(buffer, to: _DequeBuffer.self)) if contents.count > 0 { contents.withUnsafeBufferPointer { source in - storage.update { target in - let segments = target.mutableSegments() - let c = segments.first.count - segments.first._initialize(from: source.prefix(c)._rebased()) - if let second = segments.second { - second._initialize(from: source.dropFirst(c)._rebased()) + unsafe storage.update { target in + let segments = unsafe target.mutableSegments() + let c = unsafe segments.first.count + unsafe segments.first._initialize(from: source.prefix(c)._rebased()) + if let second = unsafe segments.second { + unsafe second._initialize(from: source.dropFirst(c)._rebased()) } } } diff --git a/stdlib/public/Concurrency/Deque/Deque+UnsafeHandle.swift b/stdlib/public/Concurrency/Deque/Deque+UnsafeHandle.swift index e91526473119f..683b5a2b264fc 100644 --- a/stdlib/public/Concurrency/Deque/Deque+UnsafeHandle.swift +++ b/stdlib/public/Concurrency/Deque/Deque+UnsafeHandle.swift @@ -15,6 +15,7 @@ import Swift extension _Deque { + @unsafe internal struct _UnsafeHandle { let _header: UnsafeMutablePointer<_DequeBufferHeader> let _elements: UnsafeMutablePointer @@ -27,8 +28,8 @@ extension _Deque { elements: UnsafeMutablePointer, isMutable: Bool ) { - self._header = header - self._elements = elements + unsafe self._header = unsafe header + unsafe self._elements = unsafe elements #if DEBUG self._isMutable = isMutable #endif @@ -48,43 +49,43 @@ extension _Deque._UnsafeHandle { internal typealias Slot = _DequeSlot var header: _DequeBufferHeader { - _header.pointee + unsafe _header.pointee } var capacity: Int { - _header.pointee.capacity + unsafe _header.pointee.capacity } var count: Int { - get { _header.pointee.count } - nonmutating set { _header.pointee.count = newValue } + get { unsafe _header.pointee.count } + nonmutating set { unsafe _header.pointee.count = newValue } } var startSlot: Slot { - get { _header.pointee.startSlot } - nonmutating set { _header.pointee.startSlot = newValue } + get { unsafe _header.pointee.startSlot } + nonmutating set { unsafe _header.pointee.startSlot = unsafe newValue } } func ptr(at slot: Slot) -> UnsafeMutablePointer { - assert(slot.position >= 0 && slot.position <= capacity) - return _elements + slot.position + unsafe assert(slot.position >= 0 && slot.position <= capacity) + return unsafe _elements + slot.position } } extension _Deque._UnsafeHandle { var mutableBuffer: UnsafeMutableBufferPointer { - assertMutable() - return .init(start: _elements, count: _header.pointee.capacity) + unsafe assertMutable() + return unsafe .init(start: _elements, count: 
_header.pointee.capacity) } internal func buffer(for range: Range) -> UnsafeBufferPointer { - assert(range.upperBound.position <= capacity) - return .init(start: _elements + range.lowerBound.position, count: range._count) + unsafe assert(range.upperBound.position <= capacity) + return unsafe .init(start: _elements + range.lowerBound.position, count: range._count) } internal func mutableBuffer(for range: Range) -> UnsafeMutableBufferPointer { - assertMutable() - return .init(mutating: buffer(for: range)) + unsafe assertMutable() + return unsafe .init(mutating: buffer(for: range)) } } @@ -93,98 +94,98 @@ extension _Deque._UnsafeHandle { /// the valid slot corresponding to `endIndex`, which is a different thing /// entirely.) internal var limSlot: Slot { - Slot(at: capacity) + unsafe Slot(at: capacity) } internal func slot(after slot: Slot) -> Slot { - assert(slot.position < capacity) - let position = slot.position + 1 - if position >= capacity { - return Slot(at: 0) + unsafe assert(slot.position < capacity) + let position = unsafe slot.position + 1 + if unsafe position >= capacity { + return unsafe Slot(at: 0) } - return Slot(at: position) + return unsafe Slot(at: position) } internal func slot(before slot: Slot) -> Slot { - assert(slot.position < capacity) - if slot.position == 0 { return Slot(at: capacity - 1) } - return Slot(at: slot.position - 1) + unsafe assert(slot.position < capacity) + if unsafe slot.position == 0 { return unsafe Slot(at: capacity - 1) } + return unsafe Slot(at: slot.position - 1) } internal func slot(_ slot: Slot, offsetBy delta: Int) -> Slot { - assert(slot.position <= capacity) - let position = slot.position + delta + unsafe assert(slot.position <= capacity) + let position = unsafe slot.position + delta if delta >= 0 { - if position >= capacity { return Slot(at: position - capacity) } + if unsafe position >= capacity { return unsafe Slot(at: position - capacity) } } else { - if position < 0 { return Slot(at: position + capacity) } + if position < 0 { return unsafe Slot(at: position + capacity) } } - return Slot(at: position) + return unsafe Slot(at: position) } internal var endSlot: Slot { - slot(startSlot, offsetBy: count) + unsafe slot(startSlot, offsetBy: count) } /// Return the storage slot corresponding to the specified offset, which may /// or may not address an existing element. internal func slot(forOffset offset: Int) -> Slot { assert(offset >= 0) - assert(offset <= capacity) // Not `count`! + unsafe assert(offset <= capacity) // Not `count`! // Note: The use of wrapping addition/subscription is justified here by the // fact that `offset` is guaranteed to fall in the range `0 ..< capacity`. // Eliminating the overflow checks leads to a measurable speedup for // random-access subscript operations. (Up to 2x on some microbenchmarks.) 
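The note above justifies using wrapping arithmetic for the offset-to-slot mapping because the offset is already bounded by the capacity. Purely as a standalone sketch with hypothetical names (not the stdlib's types), the mapping it describes is:

```swift
// Standalone sketch with hypothetical names: the offset-to-slot mapping the
// note above justifies. `position` stays below twice the capacity, so a
// single conditional subtraction brings it back into range and overflow
// checks can safely be elided.
struct RingIndex {
  var capacity: Int
  var startSlot: Int

  func slot(forOffset offset: Int) -> Int {
    precondition(offset >= 0 && offset <= capacity)
    let position = startSlot + offset
    return position < capacity ? position : position - capacity
  }
}

let ring = RingIndex(capacity: 8, startSlot: 6)
print(ring.slot(forOffset: 0)) // 6
print(ring.slot(forOffset: 3)) // 1 (wraps past the end of storage)
```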
- let position = startSlot.position &+ offset - guard position < capacity else { return Slot(at: position &- capacity) } - return Slot(at: position) + let position = unsafe startSlot.position &+ offset + guard unsafe position < capacity else { return unsafe Slot(at: position &- capacity) } + return unsafe Slot(at: position) } } extension _Deque._UnsafeHandle { internal func segments() -> _UnsafeWrappedBuffer { - let wrap = capacity - startSlot.position - if count <= wrap { - return .init(start: ptr(at: startSlot), count: count) + let wrap = unsafe capacity - startSlot.position + if unsafe count <= wrap { + return unsafe .init(start: ptr(at: startSlot), count: count) } - return .init(first: ptr(at: startSlot), count: wrap, + return unsafe .init(first: ptr(at: startSlot), count: wrap, second: ptr(at: .zero), count: count - wrap) } internal func segments( forOffsets offsets: Range ) -> _UnsafeWrappedBuffer { - assert(offsets.lowerBound >= 0 && offsets.upperBound <= count) - let lower = slot(forOffset: offsets.lowerBound) - let upper = slot(forOffset: offsets.upperBound) + unsafe assert(offsets.lowerBound >= 0 && offsets.upperBound <= count) + let lower = unsafe slot(forOffset: offsets.lowerBound) + let upper = unsafe slot(forOffset: offsets.upperBound) if offsets.count == 0 || lower < upper { - return .init(start: ptr(at: lower), count: offsets.count) + return unsafe .init(start: ptr(at: lower), count: offsets.count) } - return .init(first: ptr(at: lower), count: capacity - lower.position, + return unsafe .init(first: ptr(at: lower), count: capacity - lower.position, second: ptr(at: .zero), count: upper.position) } internal func mutableSegments() -> _UnsafeMutableWrappedBuffer { - assertMutable() - return .init(mutating: segments()) + unsafe assertMutable() + return unsafe .init(mutating: segments()) } internal func mutableSegments( forOffsets range: Range ) -> _UnsafeMutableWrappedBuffer { - assertMutable() - return .init(mutating: segments(forOffsets: range)) + unsafe assertMutable() + return unsafe .init(mutating: segments(forOffsets: range)) } } extension _Deque._UnsafeHandle { internal func availableSegments() -> _UnsafeMutableWrappedBuffer { - assertMutable() - let endSlot = self.endSlot - guard count < capacity else { return .init(start: ptr(at: endSlot), count: 0) } - if endSlot < startSlot { return .init(mutableBuffer(for: endSlot ..< startSlot)) } - return .init(mutableBuffer(for: endSlot ..< limSlot), + unsafe assertMutable() + let endSlot = unsafe self.endSlot + guard unsafe count < capacity else { return unsafe .init(start: ptr(at: endSlot), count: 0) } + if unsafe endSlot < startSlot { return unsafe .init(mutableBuffer(for: endSlot ..< startSlot)) } + return unsafe .init(mutableBuffer(for: endSlot ..< limSlot), mutableBuffer(for: .zero ..< startSlot)) } } @@ -197,10 +198,10 @@ extension _Deque._UnsafeHandle { at start: Slot, from source: UnsafeBufferPointer ) -> Slot { - assert(start.position + source.count <= capacity) - guard source.count > 0 else { return start } - ptr(at: start).initialize(from: source.baseAddress!, count: source.count) - return Slot(at: start.position + source.count) + unsafe assert(start.position + source.count <= capacity) + guard source.count > 0 else { return unsafe start } + unsafe ptr(at: start).initialize(from: source.baseAddress!, count: source.count) + return unsafe Slot(at: start.position + source.count) } @discardableResult @@ -208,10 +209,10 @@ extension _Deque._UnsafeHandle { at start: Slot, from source: UnsafeMutableBufferPointer ) -> Slot { - 
assert(start.position + source.count <= capacity) - guard source.count > 0 else { return start } - ptr(at: start).moveInitialize(from: source.baseAddress!, count: source.count) - return Slot(at: start.position + source.count) + unsafe assert(start.position + source.count <= capacity) + guard source.count > 0 else { return unsafe start } + unsafe ptr(at: start).moveInitialize(from: source.baseAddress!, count: source.count) + return unsafe Slot(at: start.position + source.count) } @discardableResult @@ -221,11 +222,11 @@ extension _Deque._UnsafeHandle { count: Int ) -> (source: Slot, target: Slot) { assert(count >= 0) - assert(source.position + count <= self.capacity) - assert(target.position + count <= self.capacity) - guard count > 0 else { return (source, target) } - ptr(at: target).moveInitialize(from: ptr(at: source), count: count) - return (slot(source, offsetBy: count), slot(target, offsetBy: count)) + unsafe assert(source.position + count <= self.capacity) + unsafe assert(target.position + count <= self.capacity) + guard count > 0 else { return unsafe (source, target) } + unsafe ptr(at: target).moveInitialize(from: ptr(at: source), count: count) + return unsafe (slot(source, offsetBy: count), slot(target, offsetBy: count)) } } @@ -235,16 +236,16 @@ extension _Deque._UnsafeHandle { /// Copy elements into a new storage instance without changing capacity or /// layout. internal func copyElements() -> _Deque._Storage { - let object = _DequeBuffer.create( + let object = unsafe _DequeBuffer.create( minimumCapacity: capacity, - makingHeaderWith: { _ in header }) + makingHeaderWith: { _ in unsafe header }) let result = _Deque._Storage(_buffer: ManagedBufferPointer(unsafeBufferObject: object)) - guard self.count > 0 else { return result } - result.update { target in - let source = self.segments() - target.initialize(at: startSlot, from: source.first) - if let second = source.second { - target.initialize(at: .zero, from: second) + guard unsafe self.count > 0 else { return result } + unsafe result.update { target in + let source = unsafe self.segments() + unsafe target.initialize(at: startSlot, from: source.first) + if let second = unsafe source.second { + unsafe target.initialize(at: .zero, from: second) } } return result @@ -253,7 +254,7 @@ extension _Deque._UnsafeHandle { /// Copy elements into a new storage instance with the specified minimum /// capacity. 
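Both `copyElements` variants in this file read the occupied elements as at most two contiguous segments: the run from the start slot up to the end of the allocation, plus an optional wrapped run beginning at slot zero. As a standalone illustration only (a hypothetical helper, not the stdlib's `segments()`), the index arithmetic behind that view is:

```swift
// Illustrative sketch only: which index ranges the "first" and optional
// "second" segment of a wrapped buffer cover, given the start slot, the
// element count, and the storage capacity.
func segmentRanges(
  startSlot: Int, count: Int, capacity: Int
) -> (first: Range<Int>, second: Range<Int>?) {
  let wrap = capacity - startSlot
  if count <= wrap {
    // The elements fit without wrapping: one contiguous range.
    return (startSlot ..< startSlot + count, nil)
  }
  // The elements wrap: a tail range plus a head range starting at zero.
  return (startSlot ..< capacity, 0 ..< count - wrap)
}

print(segmentRanges(startSlot: 2, count: 4, capacity: 8)) // (first: 2..<6, second: nil)
print(segmentRanges(startSlot: 6, count: 4, capacity: 8)) // (first: 6..<8, second: 0..<2)
```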
internal func copyElements(minimumCapacity: Int) -> _Deque._Storage { - assert(minimumCapacity >= count) + unsafe assert(minimumCapacity >= count) let object = _DequeBuffer.create( minimumCapacity: minimumCapacity, makingHeaderWith: { @@ -262,19 +263,19 @@ extension _Deque._UnsafeHandle { #else let capacity = $0.capacity #endif - return _DequeBufferHeader( + return unsafe _DequeBufferHeader( capacity: capacity, count: count, startSlot: .zero) }) let result = _Deque._Storage(_buffer: ManagedBufferPointer(unsafeBufferObject: object)) - guard count > 0 else { return result } - result.update { target in - assert(target.count == count && target.startSlot.position == 0) - let source = self.segments() - let next = target.initialize(at: .zero, from: source.first) - if let second = source.second { - target.initialize(at: next, from: second) + guard unsafe count > 0 else { return result } + unsafe result.update { target in + unsafe assert(target.count == count && target.startSlot.position == 0) + let source = unsafe self.segments() + let next = unsafe target.initialize(at: .zero, from: source.first) + if let second = unsafe source.second { + unsafe target.initialize(at: next, from: second) } } return result @@ -284,8 +285,8 @@ extension _Deque._UnsafeHandle { /// capacity. Existing indices in `self` won't necessarily be valid in the /// result. `self` is left empty. internal func moveElements(minimumCapacity: Int) -> _Deque._Storage { - assertMutable() - let count = self.count + unsafe assertMutable() + let count = unsafe self.count assert(minimumCapacity >= count) let object = _DequeBuffer.create( minimumCapacity: minimumCapacity, @@ -302,14 +303,14 @@ extension _Deque._UnsafeHandle { }) let result = _Deque._Storage(_buffer: ManagedBufferPointer(unsafeBufferObject: object)) guard count > 0 else { return result } - result.update { target in - let source = self.mutableSegments() - let next = target.moveInitialize(at: .zero, from: source.first) - if let second = source.second { - target.moveInitialize(at: next, from: second) + unsafe result.update { target in + let source = unsafe self.mutableSegments() + let next = unsafe target.moveInitialize(at: .zero, from: source.first) + if let second = unsafe source.second { + unsafe target.moveInitialize(at: next, from: second) } } - self.count = 0 + unsafe self.count = 0 return result } } @@ -320,16 +321,16 @@ extension _Deque._UnsafeHandle { maximumCount: Int?, _ body: (UnsafeBufferPointer) throws -> R ) rethrows -> (end: Int, result: R) { - assert(start <= count) - guard start < count else { - return try (count, body(UnsafeBufferPointer(start: nil, count: 0))) + unsafe assert(start <= count) + guard unsafe start < count else { + return unsafe try (count, body(UnsafeBufferPointer(start: nil, count: 0))) } - let endSlot = self.endSlot + let endSlot = unsafe self.endSlot - let segmentStart = self.slot(forOffset: start) - let segmentEnd = segmentStart < endSlot ? endSlot : limSlot + let segmentStart = unsafe self.slot(forOffset: start) + let segmentEnd = unsafe segmentStart < endSlot ? endSlot : limSlot let count = Swift.min(maximumCount ?? 
Int.max, segmentEnd.position - segmentStart.position) - let result = try body(UnsafeBufferPointer(start: ptr(at: segmentStart), count: count)) + let result = try unsafe body(UnsafeBufferPointer(start: ptr(at: segmentStart), count: count)) return (start + count, result) } } @@ -346,12 +347,12 @@ extension _Deque._UnsafeHandle { inOffsets range: Range, with newElements: C ) where C.Element == Element { - assertMutable() - assert(range.upperBound <= count) + unsafe assertMutable() + unsafe assert(range.upperBound <= count) assert(newElements.count == range.count) guard !range.isEmpty else { return } - let target = mutableSegments(forOffsets: range) - target.assign(from: newElements) + let target = unsafe mutableSegments(forOffsets: range) + unsafe target.assign(from: newElements) } } @@ -364,10 +365,10 @@ extension _Deque._UnsafeHandle { /// This function does not validate its input arguments in release builds. Nor /// does it ensure that the storage buffer is uniquely referenced. internal func uncheckedAppend(_ element: Element) { - assertMutable() - assert(count < capacity) - ptr(at: endSlot).initialize(to: element) - count += 1 + unsafe assertMutable() + unsafe assert(count < capacity) + unsafe ptr(at: endSlot).initialize(to: element) + unsafe count += 1 } /// Append the contents of `source` to this buffer. The buffer must have @@ -376,13 +377,13 @@ extension _Deque._UnsafeHandle { /// This function does not validate its input arguments in release builds. Nor /// does it ensure that the storage buffer is uniquely referenced. internal func uncheckedAppend(contentsOf source: UnsafeBufferPointer) { - assertMutable() - assert(count + source.count <= capacity) + unsafe assertMutable() + unsafe assert(count + source.count <= capacity) guard source.count > 0 else { return } - let c = self.count - count += source.count - let gap = mutableSegments(forOffsets: c ..< count) - gap.initialize(from: source) + let c = unsafe self.count + unsafe count += source.count + let gap = unsafe mutableSegments(forOffsets: c ..< count) + unsafe gap.initialize(from: source) } } @@ -390,12 +391,12 @@ extension _Deque._UnsafeHandle { extension _Deque._UnsafeHandle { internal func uncheckedPrepend(_ element: Element) { - assertMutable() - assert(count < capacity) - let slot = self.slot(before: startSlot) - ptr(at: slot).initialize(to: element) - startSlot = slot - count += 1 + unsafe assertMutable() + unsafe assert(count < capacity) + let slot = unsafe self.slot(before: startSlot) + unsafe ptr(at: slot).initialize(to: element) + unsafe startSlot = slot + unsafe count += 1 } /// Prepend the contents of `source` to this buffer. The buffer must have @@ -404,16 +405,16 @@ extension _Deque._UnsafeHandle { /// This function does not validate its input arguments in release builds. Nor /// does it ensure that the storage buffer is uniquely referenced. 
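The `uncheckedPrepend` paths below move the start slot backwards, wrapping around slot zero, and then fill the freed gap with the new elements. A minimal standalone sketch of that backwards wrap (hypothetical names, mirroring `slot(_:offsetBy:)` with a negative delta):

```swift
// Illustrative sketch only: where the start slot lands after reserving
// room for `newCount` prepended elements in a ring buffer of `capacity`.
func prependedStartSlot(startSlot: Int, newCount: Int, capacity: Int) -> Int {
  let position = startSlot - newCount
  // Wrap backwards past slot zero if needed.
  return position >= 0 ? position : position + capacity
}

print(prependedStartSlot(startSlot: 5, newCount: 3, capacity: 8)) // 2
print(prependedStartSlot(startSlot: 1, newCount: 3, capacity: 8)) // 6 (wraps backwards)
```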
internal func uncheckedPrepend(contentsOf source: UnsafeBufferPointer) { - assertMutable() - assert(count + source.count <= capacity) + unsafe assertMutable() + unsafe assert(count + source.count <= capacity) guard source.count > 0 else { return } - let oldStart = startSlot - let newStart = self.slot(startSlot, offsetBy: -source.count) - startSlot = newStart - count += source.count + let oldStart = unsafe startSlot + let newStart = unsafe self.slot(startSlot, offsetBy: -source.count) + unsafe startSlot = newStart + unsafe count += source.count - let gap = mutableWrappedBuffer(between: newStart, and: oldStart) - gap.initialize(from: source) + let gap = unsafe mutableWrappedBuffer(between: newStart, and: oldStart) + unsafe gap.initialize(from: source) } } @@ -436,24 +437,24 @@ extension _Deque._UnsafeHandle { count newCount: Int, atOffset offset: Int ) where C.Element == Element { - assertMutable() - assert(offset <= count) + unsafe assertMutable() + unsafe assert(offset <= count) assert(newElements.count == newCount) guard newCount > 0 else { return } - let gap = openGap(ofSize: newCount, atOffset: offset) - gap.initialize(from: newElements) + let gap = unsafe openGap(ofSize: newCount, atOffset: offset) + unsafe gap.initialize(from: newElements) } internal func mutableWrappedBuffer( between start: Slot, and end: Slot ) -> _UnsafeMutableWrappedBuffer { - assert(start.position <= capacity) - assert(end.position <= capacity) - if start < end { - return .init(start: ptr(at: start), count: end.position - start.position) + unsafe assert(start.position <= capacity) + unsafe assert(end.position <= capacity) + if unsafe start < end { + return unsafe .init(start: ptr(at: start), count: end.position - start.position) } - return .init( + return unsafe .init( first: ptr(at: start), count: capacity - start.position, second: ptr(at: .zero), count: end.position) } @@ -472,23 +473,23 @@ extension _Deque._UnsafeHandle { ofSize gapSize: Int, atOffset offset: Int ) -> _UnsafeMutableWrappedBuffer { - assertMutable() - assert(offset >= 0 && offset <= self.count) - assert(self.count + gapSize <= capacity) + unsafe assertMutable() + unsafe assert(offset >= 0 && offset <= self.count) + unsafe assert(self.count + gapSize <= capacity) assert(gapSize > 0) let headCount = offset - let tailCount = count - offset + let tailCount = unsafe count - offset if tailCount <= headCount { // Open the gap by sliding elements to the right. - let originalEnd = self.slot(startSlot, offsetBy: count) - let newEnd = self.slot(startSlot, offsetBy: count + gapSize) - let gapStart = self.slot(forOffset: offset) - let gapEnd = self.slot(gapStart, offsetBy: gapSize) + let originalEnd = unsafe self.slot(startSlot, offsetBy: count) + let newEnd = unsafe self.slot(startSlot, offsetBy: count + gapSize) + let gapStart = unsafe self.slot(forOffset: offset) + let gapEnd = unsafe self.slot(gapStart, offsetBy: gapSize) - let sourceIsContiguous = gapStart <= originalEnd.orIfZero(capacity) - let targetIsContiguous = gapEnd <= newEnd.orIfZero(capacity) + let sourceIsContiguous = unsafe gapStart <= originalEnd.orIfZero(capacity) + let targetIsContiguous = unsafe gapEnd <= newEnd.orIfZero(capacity) if sourceIsContiguous && targetIsContiguous { // No need to deal with wrapping; we just need to slide @@ -498,7 +499,7 @@ extension _Deque._UnsafeHandle { // // 0) ....ABCDE̲F̲G̲H..... EFG̲H̲.̲........ABCD .̲.......ABCDEFGH̲.̲ // 1) ....ABCD.̲.̲.̲EFGH.. 
EF.̲.̲.̲GH......ABCD .̲H......ABCDEFG.̲.̲ - move(from: gapStart, to: gapEnd, count: tailCount) + unsafe move(from: gapStart, to: gapEnd, count: tailCount) } else if targetIsContiguous { // The gap itself will be wrapped. @@ -507,9 +508,9 @@ extension _Deque._UnsafeHandle { // 0) E̲FGH.........ABC̲D̲ // 1) .̲..EFGH......ABC̲D̲ // 2) .̲CDEFGH......AB.̲.̲ - assert(startSlot > originalEnd.orIfZero(capacity)) - move(from: .zero, to: Slot.zero.advanced(by: gapSize), count: originalEnd.position) - move(from: gapStart, to: gapEnd, count: capacity - gapStart.position) + unsafe assert(startSlot > originalEnd.orIfZero(capacity)) + unsafe move(from: .zero, to: Slot.zero.advanced(by: gapSize), count: originalEnd.position) + unsafe move(from: gapStart, to: gapEnd, count: capacity - gapStart.position) } else if sourceIsContiguous { // Opening the gap pushes subsequent elements across the wrap. @@ -518,8 +519,8 @@ extension _Deque._UnsafeHandle { // 0) ........ABC̲D̲E̲FGH. // 1) GH......ABC̲D̲E̲F... // 2) GH......AB.̲.̲.̲CDEF - move(from: limSlot.advanced(by: -gapSize), to: .zero, count: newEnd.position) - move(from: gapStart, to: gapEnd, count: tailCount - newEnd.position) + unsafe move(from: limSlot.advanced(by: -gapSize), to: .zero, count: newEnd.position) + unsafe move(from: gapStart, to: gapEnd, count: tailCount - newEnd.position) } else { // The rest of the items are wrapped, and will remain so. @@ -529,23 +530,23 @@ extension _Deque._UnsafeHandle { // 1) ...GH......AB̲C̲D̲EF // 2) DEFGH......AB̲C̲.̲.. // 3) DEFGH......A.̲.̲.̲BC - move(from: .zero, to: Slot.zero.advanced(by: gapSize), count: originalEnd.position) - move(from: limSlot.advanced(by: -gapSize), to: .zero, count: gapSize) - move(from: gapStart, to: gapEnd, count: tailCount - gapSize - originalEnd.position) + unsafe move(from: .zero, to: Slot.zero.advanced(by: gapSize), count: originalEnd.position) + unsafe move(from: limSlot.advanced(by: -gapSize), to: .zero, count: gapSize) + unsafe move(from: gapStart, to: gapEnd, count: tailCount - gapSize - originalEnd.position) } - count += gapSize - return mutableWrappedBuffer(between: gapStart, and: gapEnd.orIfZero(capacity)) + unsafe count += gapSize + return unsafe mutableWrappedBuffer(between: gapStart, and: gapEnd.orIfZero(capacity)) } // Open the gap by sliding elements to the left. - let originalStart = self.startSlot - let newStart = self.slot(originalStart, offsetBy: -gapSize) - let gapEnd = self.slot(forOffset: offset) - let gapStart = self.slot(gapEnd, offsetBy: -gapSize) + let originalStart = unsafe self.startSlot + let newStart = unsafe self.slot(originalStart, offsetBy: -gapSize) + let gapEnd = unsafe self.slot(forOffset: offset) + let gapStart = unsafe self.slot(gapEnd, offsetBy: -gapSize) - let sourceIsContiguous = originalStart <= gapEnd.orIfZero(capacity) - let targetIsContiguous = newStart <= gapStart.orIfZero(capacity) + let sourceIsContiguous = unsafe originalStart <= gapEnd.orIfZero(capacity) + let targetIsContiguous = unsafe newStart <= gapStart.orIfZero(capacity) if sourceIsContiguous && targetIsContiguous { // No need to deal with any wrapping. @@ -554,7 +555,7 @@ extension _Deque._UnsafeHandle { // // 0) ....A̲B̲C̲DEFGH... GH.........̲A̲B̲CDEF .̲A̲B̲CDEFGH.......̲.̲ // 1) .ABC.̲.̲.̲DEFGH... GH......AB.̲.̲.̲CDEF .̲.̲.̲CDEFGH....AB.̲.̲ - move(from: originalStart, to: newStart, count: headCount) + unsafe move(from: originalStart, to: newStart, count: headCount) } else if targetIsContiguous { // The gap itself will be wrapped. 
@@ -564,8 +565,8 @@ extension _Deque._UnsafeHandle { // 1) C̲D̲EFGH.....AB...̲.̲ // 2) .̲.̲EFGH.....ABCD.̲.̲ assert(originalStart >= newStart) - move(from: originalStart, to: newStart, count: capacity - originalStart.position) - move(from: .zero, to: limSlot.advanced(by: -gapSize), count: gapEnd.position) + unsafe move(from: originalStart, to: newStart, count: capacity - originalStart.position) + unsafe move(from: .zero, to: limSlot.advanced(by: -gapSize), count: gapEnd.position) } else if sourceIsContiguous { // Opening the gap pushes preceding elements across the wrap. @@ -574,8 +575,8 @@ extension _Deque._UnsafeHandle { // 0) .AB̲C̲D̲EFGH......... // 1) ...̲C̲D̲EFGH.......AB // 2) CD.̲.̲.̲EFGH.......AB - move(from: originalStart, to: newStart, count: capacity - newStart.position) - move(from: Slot.zero.advanced(by: gapSize), to: .zero, count: gapStart.position) + unsafe move(from: originalStart, to: newStart, count: capacity - newStart.position) + unsafe move(from: Slot.zero.advanced(by: gapSize), to: .zero, count: gapStart.position) } else { // The preceding of the items are wrapped, and will remain so. @@ -584,13 +585,13 @@ extension _Deque._UnsafeHandle { // 1) CD̲E̲F̲GHIJKL......AB... // 2) ..̲.̲F̲GHIJKL......ABCDE // 3) F.̲.̲.̲GHIJKL......ABCDE - move(from: originalStart, to: newStart, count: capacity - originalStart.position) - move(from: .zero, to: limSlot.advanced(by: -gapSize), count: gapSize) - move(from: Slot.zero.advanced(by: gapSize), to: .zero, count: gapStart.position) + unsafe move(from: originalStart, to: newStart, count: capacity - originalStart.position) + unsafe move(from: .zero, to: limSlot.advanced(by: -gapSize), count: gapSize) + unsafe move(from: Slot.zero.advanced(by: gapSize), to: .zero, count: gapStart.position) } - startSlot = newStart - count += gapSize - return mutableWrappedBuffer(between: gapStart, and: gapEnd.orIfZero(capacity)) + unsafe startSlot = newStart + unsafe count += gapSize + return unsafe mutableWrappedBuffer(between: gapStart, and: gapEnd.orIfZero(capacity)) } } @@ -598,40 +599,40 @@ extension _Deque._UnsafeHandle { extension _Deque._UnsafeHandle { internal func uncheckedRemoveFirst() -> Element { - assertMutable() - assert(count > 0) - let result = ptr(at: startSlot).move() - startSlot = slot(after: startSlot) - count -= 1 + unsafe assertMutable() + unsafe assert(count > 0) + let result = unsafe ptr(at: startSlot).move() + unsafe startSlot = unsafe slot(after: startSlot) + unsafe count -= 1 return result } internal func uncheckedRemoveLast() -> Element { - assertMutable() - assert(count > 0) - let slot = self.slot(forOffset: count - 1) - let result = ptr(at: slot).move() - count -= 1 + unsafe assertMutable() + unsafe assert(count > 0) + let slot = unsafe self.slot(forOffset: count - 1) + let result = unsafe ptr(at: slot).move() + unsafe count -= 1 return result } internal func uncheckedRemoveFirst(_ n: Int) { - assertMutable() - assert(count >= n) + unsafe assertMutable() + unsafe assert(count >= n) guard n > 0 else { return } - let target = mutableSegments(forOffsets: 0 ..< n) - target.deinitialize() - startSlot = slot(startSlot, offsetBy: n) - count -= n + let target = unsafe mutableSegments(forOffsets: 0 ..< n) + unsafe target.deinitialize() + unsafe startSlot = unsafe slot(startSlot, offsetBy: n) + unsafe count -= n } internal func uncheckedRemoveLast(_ n: Int) { - assertMutable() - assert(count >= n) + unsafe assertMutable() + unsafe assert(count >= n) guard n > 0 else { return } - let target = mutableSegments(forOffsets: count - n ..< 
count) - target.deinitialize() - count -= n + let target = unsafe mutableSegments(forOffsets: count - n ..< count) + unsafe target.deinitialize() + unsafe count -= n } /// Remove all elements stored in this instance, deinitializing their storage. @@ -639,12 +640,12 @@ extension _Deque._UnsafeHandle { /// This method does not ensure that the storage buffer is uniquely /// referenced. internal func uncheckedRemoveAll() { - assertMutable() - guard count > 0 else { return } - let target = mutableSegments() - target.deinitialize() - count = 0 - startSlot = .zero + unsafe assertMutable() + guard unsafe count > 0 else { return } + let target = unsafe mutableSegments() + unsafe target.deinitialize() + unsafe count = 0 + unsafe startSlot = .zero } /// Remove all elements in `bounds`, deinitializing their storage and sliding @@ -653,12 +654,12 @@ extension _Deque._UnsafeHandle { /// This function does not validate its input arguments in release builds. Nor /// does it ensure that the storage buffer is uniquely referenced. internal func uncheckedRemove(offsets bounds: Range) { - assertMutable() - assert(bounds.lowerBound >= 0 && bounds.upperBound <= self.count) + unsafe assertMutable() + unsafe assert(bounds.lowerBound >= 0 && bounds.upperBound <= self.count) // Deinitialize elements in `bounds`. - mutableSegments(forOffsets: bounds).deinitialize() - closeGap(offsets: bounds) + unsafe mutableSegments(forOffsets: bounds).deinitialize() + unsafe closeGap(offsets: bounds) } /// Close the gap of already uninitialized elements in `bounds`, sliding @@ -667,24 +668,24 @@ extension _Deque._UnsafeHandle { /// This function does not validate its input arguments in release builds. Nor /// does it ensure that the storage buffer is uniquely referenced. internal func closeGap(offsets bounds: Range) { - assertMutable() - assert(bounds.lowerBound >= 0 && bounds.upperBound <= self.count) + unsafe assertMutable() + unsafe assert(bounds.lowerBound >= 0 && bounds.upperBound <= self.count) let gapSize = bounds.count guard gapSize > 0 else { return } - let gapStart = self.slot(forOffset: bounds.lowerBound) - let gapEnd = self.slot(forOffset: bounds.upperBound) + let gapStart = unsafe self.slot(forOffset: bounds.lowerBound) + let gapEnd = unsafe self.slot(forOffset: bounds.upperBound) let headCount = bounds.lowerBound - let tailCount = count - bounds.upperBound + let tailCount = unsafe count - bounds.upperBound if headCount >= tailCount { // Close the gap by sliding elements to the left. - let originalEnd = endSlot - let newEnd = self.slot(forOffset: count - gapSize) + let originalEnd = unsafe endSlot + let newEnd = unsafe self.slot(forOffset: count - gapSize) - let sourceIsContiguous = gapEnd < originalEnd.orIfZero(capacity) - let targetIsContiguous = gapStart <= newEnd.orIfZero(capacity) + let sourceIsContiguous = unsafe gapEnd < originalEnd.orIfZero(capacity) + let targetIsContiguous = unsafe gapStart <= newEnd.orIfZero(capacity) if tailCount == 0 { // No need to move any elements. } else if sourceIsContiguous && targetIsContiguous { @@ -692,17 +693,17 @@ extension _Deque._UnsafeHandle { // 0) ....ABCD.̲.̲.̲EFGH.. EF.̲.̲.̲GH........ABCD .̲.̲.̲E..........ABCD.̲.̲ .̲.̲.̲EF........ABCD .̲.̲.̲DE.......ABC // 1) ....ABCDE̲F̲G̲H..... EFG̲H̲.̲..........ABCD .̲.̲.̲...........ABCDE̲.̲ E̲F̲.̲..........ABCD D̲E̲.̲.........ABC - move(from: gapEnd, to: gapStart, count: tailCount) + unsafe move(from: gapEnd, to: gapStart, count: tailCount) } else if sourceIsContiguous { // The gap lies across the wrap from the subsequent elements. 
// 0) .̲.̲.̲EFGH.......ABCD.̲.̲ EFGH.......ABCD.̲.̲.̲ // 1) .̲.̲.̲..GH.......ABCDE̲F̲ ..GH.......ABCDE̲F̲G̲ // 2) G̲H̲.̲...........ABCDE̲F̲ GH.........ABCDE̲F̲G̲ - let c = capacity - gapStart.position + let c = unsafe capacity - gapStart.position assert(tailCount > c) - let next = move(from: gapEnd, to: gapStart, count: c) - move(from: next.source, to: .zero, count: tailCount - c) + let next = unsafe move(from: gapEnd, to: gapStart, count: c) + unsafe move(from: next.source, to: .zero, count: tailCount - c) } else if targetIsContiguous { // We need to move elements across a wrap, but the wrap will // disappear when we're done. @@ -710,8 +711,8 @@ extension _Deque._UnsafeHandle { // 0) HI....ABCDE.̲.̲.̲FG // 1) HI....ABCDEF̲G̲.̲.. // 2) ......ABCDEF̲G̲H̲I. - let next = move(from: gapEnd, to: gapStart, count: capacity - gapEnd.position) - move(from: .zero, to: next.target, count: originalEnd.position) + let next = unsafe move(from: gapEnd, to: gapStart, count: capacity - gapEnd.position) + unsafe move(from: .zero, to: next.target, count: originalEnd.position) } else { // We need to move elements across a wrap that won't go away. @@ -719,18 +720,18 @@ extension _Deque._UnsafeHandle { // 1) HIJKL....ABCDEF̲G̲.̲.. // 2) ...KL....ABCDEF̲G̲H̲IJ // 3) KL.......ABCDEF̲G̲H̲IJ - var next = move(from: gapEnd, to: gapStart, count: capacity - gapEnd.position) - next = move(from: .zero, to: next.target, count: gapSize) - move(from: next.source, to: .zero, count: newEnd.position) + var next = unsafe move(from: gapEnd, to: gapStart, count: capacity - gapEnd.position) + next = unsafe move(from: .zero, to: next.target, count: gapSize) + unsafe move(from: next.source, to: .zero, count: newEnd.position) } - count -= gapSize + unsafe count -= gapSize } else { // Close the gap by sliding elements to the right. - let originalStart = startSlot - let newStart = slot(startSlot, offsetBy: gapSize) + let originalStart = unsafe startSlot + let newStart = unsafe slot(startSlot, offsetBy: gapSize) - let sourceIsContiguous = originalStart < gapStart.orIfZero(capacity) - let targetIsContiguous = newStart <= gapEnd.orIfZero(capacity) + let sourceIsContiguous = unsafe originalStart < gapStart.orIfZero(capacity) + let targetIsContiguous = unsafe newStart <= gapEnd.orIfZero(capacity) if headCount == 0 { // No need to move any elements. @@ -739,15 +740,15 @@ extension _Deque._UnsafeHandle { // 0) ....ABCD.̲.̲.̲EFGH..... EFGH........AB.̲.̲.̲CD .̲.̲.̲CDEFGH.......AB.̲.̲ DEFGH.......ABC.̲.̲ // 1) .......AB̲C̲D̲EFGH..... EFGH...........̲A̲B̲CD .̲A̲B̲CDEFGH..........̲.̲ DEFGH.........AB̲C̲ ABCDEFGH........̲.̲.̲ - move(from: originalStart, to: newStart, count: headCount) + unsafe move(from: originalStart, to: newStart, count: headCount) } else if sourceIsContiguous { // The gap lies across the wrap from the preceding elements. // 0) .̲.̲DEFGH.......ABC.̲.̲ .̲.̲.̲EFGH.......ABCD // 1) B̲C̲DEFGH.......A...̲.̲ B̲C̲D̲DEFGH......A... // 2) B̲C̲DEFGH...........̲A̲ B̲C̲D̲DEFGH.........A - move(from: limSlot.advanced(by: -gapSize), to: .zero, count: gapEnd.position) - move(from: startSlot, to: newStart, count: headCount - gapEnd.position) + unsafe move(from: limSlot.advanced(by: -gapSize), to: .zero, count: gapEnd.position) + unsafe move(from: startSlot, to: newStart, count: headCount - gapEnd.position) } else if targetIsContiguous { // We need to move elements across a wrap, but the wrap will // disappear when we're done. 
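The `closeGap(offsets:)` hunks above perform the inverse operation: once the removed range has been deinitialized, whichever side of the gap is shorter is slid over it. Continuing the hypothetical `RingBuffer` sketch from above (again simplified to always move the tail, leftwards this time):

```swift
extension RingBuffer {
    // Remove the elements at `bounds`, sliding the tail left over the gap.
    // The handle above also covers the mirror case (sliding the head right)
    // and all of the wrapped layouts drawn in its comments.
    mutating func removeSubrange(_ bounds: Range<Int>) {
        precondition(bounds.lowerBound >= 0 && bounds.upperBound <= count)
        let gapSize = bounds.count
        guard gapSize > 0 else { return }
        for i in bounds.upperBound..<count {
            storage[slot(forOffset: i - gapSize)] = storage[slot(forOffset: i)]
        }
        for i in (count - gapSize)..<count {
            storage[slot(forOffset: i)] = nil   // release the vacated slots
        }
        count -= gapSize
    }
}
```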
@@ -755,20 +756,20 @@ extension _Deque._UnsafeHandle { // 0) CD.̲.̲.̲EFGHI.....AB // 1) ...̲C̲D̲EFGHI.....AB // 1) .AB̲C̲D̲EFGHI....... - move(from: .zero, to: gapEnd.advanced(by: -gapStart.position), count: gapStart.position) - move(from: startSlot, to: newStart, count: headCount - gapStart.position) + unsafe move(from: .zero, to: gapEnd.advanced(by: -gapStart.position), count: gapStart.position) + unsafe move(from: startSlot, to: newStart, count: headCount - gapStart.position) } else { // We need to move elements across a wrap that won't go away. // 0) FG.̲.̲.̲HIJKLMNO....ABCDE // 1) ...̲F̲G̲HIJKLMNO....ABCDE // 2) CDE̲F̲G̲HIJKLMNO....AB... // 3) CDE̲F̲G̲HIJKLMNO.......AB - move(from: .zero, to: Slot.zero.advanced(by: gapSize), count: gapStart.position) - move(from: limSlot.advanced(by: -gapSize), to: .zero, count: gapSize) - move(from: startSlot, to: newStart, count: headCount - gapEnd.position) + unsafe move(from: .zero, to: Slot.zero.advanced(by: gapSize), count: gapStart.position) + unsafe move(from: limSlot.advanced(by: -gapSize), to: .zero, count: gapSize) + unsafe move(from: startSlot, to: newStart, count: headCount - gapEnd.position) } - startSlot = newStart - count -= gapSize + unsafe startSlot = newStart + unsafe count -= gapSize } } } diff --git a/stdlib/public/Concurrency/Deque/UnsafeMutableBufferPointer+Utilities.swift b/stdlib/public/Concurrency/Deque/UnsafeMutableBufferPointer+Utilities.swift index fffc9f3f81fac..91e3a0fd4fbcd 100644 --- a/stdlib/public/Concurrency/Deque/UnsafeMutableBufferPointer+Utilities.swift +++ b/stdlib/public/Concurrency/Deque/UnsafeMutableBufferPointer+Utilities.swift @@ -17,14 +17,14 @@ import Swift extension Collection { internal func _rebased() -> UnsafeBufferPointer where Self == UnsafeBufferPointer.SubSequence { - .init(rebasing: self) + unsafe .init(rebasing: self) } } extension Collection { internal func _rebased() -> UnsafeMutableBufferPointer where Self == UnsafeMutableBufferPointer.SubSequence { - .init(rebasing: self) + unsafe .init(rebasing: self) } } @@ -32,28 +32,28 @@ extension UnsafeMutableBufferPointer { internal func _initialize(from source: UnsafeBufferPointer) { assert(source.count == count) guard source.count > 0 else { return } - baseAddress!.initialize(from: source.baseAddress!, count: source.count) + unsafe baseAddress!.initialize(from: source.baseAddress!, count: source.count) } internal func _initialize( from elements: C ) where C.Element == Element { assert(elements.count == count) - var (it, copied) = elements._copyContents(initializing: self) + var (it, copied) = unsafe elements._copyContents(initializing: self) precondition(copied == count) precondition(it.next() == nil) } internal func _deinitializeAll() { guard count > 0 else { return } - baseAddress!.deinitialize(count: count) + unsafe baseAddress!.deinitialize(count: count) } internal func _assign( from replacement: C ) where C.Element == Element { guard self.count > 0 else { return } - self[0 ..< count]._rebased()._deinitializeAll() - _initialize(from: replacement) + unsafe self[0 ..< count]._rebased()._deinitializeAll() + unsafe _initialize(from: replacement) } } diff --git a/stdlib/public/Concurrency/Deque/_DequeBuffer.swift b/stdlib/public/Concurrency/Deque/_DequeBuffer.swift index bb2c98f6edaa0..7edbce2b37ba0 100644 --- a/stdlib/public/Concurrency/Deque/_DequeBuffer.swift +++ b/stdlib/public/Concurrency/Deque/_DequeBuffer.swift @@ -16,19 +16,19 @@ import Swift internal class _DequeBuffer: ManagedBuffer<_DequeBufferHeader, Element> { deinit { - 
self.withUnsafeMutablePointers { header, elements in - header.pointee._checkInvariants() + unsafe self.withUnsafeMutablePointers { header, elements in + unsafe header.pointee._checkInvariants() - let capacity = header.pointee.capacity - let count = header.pointee.count - let startSlot = header.pointee.startSlot + let capacity = unsafe header.pointee.capacity + let count = unsafe header.pointee.count + let startSlot = unsafe header.pointee.startSlot if startSlot.position + count <= capacity { - (elements + startSlot.position).deinitialize(count: count) + unsafe (elements + startSlot.position).deinitialize(count: count) } else { let firstRegion = capacity - startSlot.position - (elements + startSlot.position).deinitialize(count: firstRegion) - elements.deinitialize(count: count - firstRegion) + unsafe (elements + startSlot.position).deinitialize(count: firstRegion) + unsafe elements.deinitialize(count: count - firstRegion) } } } @@ -36,7 +36,7 @@ internal class _DequeBuffer: ManagedBuffer<_DequeBufferHeader, Element> extension _DequeBuffer: CustomStringConvertible { internal var description: String { - withUnsafeMutablePointerToHeader { "_DequeStorage<\(Element.self)>\($0.pointee)" } + unsafe withUnsafeMutablePointerToHeader { "_DequeStorage<\(Element.self)>\(unsafe $0.pointee)" } } } diff --git a/stdlib/public/Concurrency/Deque/_UnsafeWrappedBuffer.swift b/stdlib/public/Concurrency/Deque/_UnsafeWrappedBuffer.swift index ec5eb9f4e44a6..77f610e9c564b 100644 --- a/stdlib/public/Concurrency/Deque/_UnsafeWrappedBuffer.swift +++ b/stdlib/public/Concurrency/Deque/_UnsafeWrappedBuffer.swift @@ -14,6 +14,7 @@ import Swift +@unsafe internal struct _UnsafeWrappedBuffer { internal let first: UnsafeBufferPointer @@ -23,16 +24,16 @@ internal struct _UnsafeWrappedBuffer { _ first: UnsafeBufferPointer, _ second: UnsafeBufferPointer? = nil ) { - self.first = first - self.second = second - assert(first.count > 0 || second == nil) + unsafe self.first = unsafe first + unsafe self.second = unsafe second + unsafe assert(first.count > 0 || second == nil) } internal init( start: UnsafePointer, count: Int ) { - self.init(UnsafeBufferPointer(start: start, count: count)) + unsafe self.init(UnsafeBufferPointer(start: start, count: count)) } internal init( @@ -41,13 +42,14 @@ internal struct _UnsafeWrappedBuffer { second start2: UnsafePointer, count count2: Int ) { - self.init(UnsafeBufferPointer(start: start1, count: count1), + unsafe self.init(UnsafeBufferPointer(start: start1, count: count1), UnsafeBufferPointer(start: start2, count: count2)) } - internal var count: Int { first.count + (second?.count ?? 0) } + internal var count: Int { unsafe first.count + (second?.count ?? 0) } } +@unsafe internal struct _UnsafeMutableWrappedBuffer { internal let first: UnsafeMutableBufferPointer @@ -57,16 +59,16 @@ internal struct _UnsafeMutableWrappedBuffer { _ first: UnsafeMutableBufferPointer, _ second: UnsafeMutableBufferPointer? = nil ) { - self.first = first - self.second = second?.count == 0 ? nil : second - assert(first.count > 0 || second == nil) + unsafe self.first = unsafe first + unsafe self.second = unsafe second?.count == 0 ? 
nil : second + unsafe assert(first.count > 0 || second == nil) } internal init( start: UnsafeMutablePointer, count: Int ) { - self.init(UnsafeMutableBufferPointer(start: start, count: count)) + unsafe self.init(UnsafeMutableBufferPointer(start: start, count: count)) } internal init( @@ -75,49 +77,49 @@ internal struct _UnsafeMutableWrappedBuffer { second start2: UnsafeMutablePointer, count count2: Int ) { - self.init(UnsafeMutableBufferPointer(start: start1, count: count1), + unsafe self.init(UnsafeMutableBufferPointer(start: start1, count: count1), UnsafeMutableBufferPointer(start: start2, count: count2)) } internal init(mutating buffer: _UnsafeWrappedBuffer) { - self.init(.init(mutating: buffer.first), - buffer.second.map { .init(mutating: $0) }) + unsafe self.init(.init(mutating: buffer.first), + buffer.second.map { unsafe .init(mutating: $0) }) } } extension _UnsafeMutableWrappedBuffer { - internal var count: Int { first.count + (second?.count ?? 0) } + internal var count: Int { unsafe first.count + (second?.count ?? 0) } internal func prefix(_ n: Int) -> Self { assert(n >= 0) - if n >= self.count { - return self + if unsafe n >= self.count { + return unsafe self } - if n <= first.count { - return Self(first.prefix(n)._rebased()) + if unsafe n <= first.count { + return unsafe Self(first.prefix(n)._rebased()) } - return Self(first, second!.prefix(n - first.count)._rebased()) + return unsafe Self(first, second!.prefix(n - first.count)._rebased()) } internal func suffix(_ n: Int) -> Self { assert(n >= 0) - if n >= self.count { - return self + if unsafe n >= self.count { + return unsafe self } - guard let second = second else { - return Self(first.suffix(n)._rebased()) + guard let second = unsafe second else { + return unsafe Self(first.suffix(n)._rebased()) } if n <= second.count { - return Self(second.suffix(n)._rebased()) + return unsafe Self(second.suffix(n)._rebased()) } - return Self(first.suffix(n - second.count)._rebased(), second) + return unsafe Self(first.suffix(n - second.count)._rebased(), second) } } extension _UnsafeMutableWrappedBuffer { internal func deinitialize() { - first._deinitializeAll() - second?._deinitializeAll() + unsafe first._deinitializeAll() + unsafe second?._deinitializeAll() } internal func initialize( @@ -125,29 +127,29 @@ extension _UnsafeMutableWrappedBuffer { ) -> Int where I.Element == Element { var copied = 0 - var gap = first + var gap = unsafe first var wrapped = false while true { if copied == gap.count { - guard !wrapped, let second = second, second.count > 0 else { break } - gap = second + guard !wrapped, let second = unsafe second, second.count > 0 else { break } + unsafe gap = unsafe second copied = 0 wrapped = true } guard let next = iterator.next() else { break } - (gap.baseAddress! + copied).initialize(to: next) + unsafe (gap.baseAddress! + copied).initialize(to: next) copied += 1 } - return wrapped ? first.count + copied : copied + return unsafe wrapped ? 
first.count + copied : copied } internal func initialize( fromSequencePrefix elements: __owned S ) -> (iterator: S.Iterator, count: Int) where S.Element == Element { - guard second == nil || first.count >= elements.underestimatedCount else { + guard unsafe second == nil || first.count >= elements.underestimatedCount else { var it = elements.makeIterator() - let copied = initialize(fromPrefixOf: &it) + let copied = unsafe initialize(fromPrefixOf: &it) return (it, copied) } // Note: Array._copyContents traps when not given enough space, so we @@ -155,12 +157,12 @@ extension _UnsafeMutableWrappedBuffer { // // FIXME: Add support for segmented (a.k.a. piecewise contiguous) // collections to the stdlib. - var (it, copied) = elements._copyContents(initializing: first) - if copied == first.count, let second = second { + var (it, copied) = unsafe elements._copyContents(initializing: first) + if unsafe copied == first.count, let second = unsafe second { var i = 0 while i < second.count { guard let next = it.next() else { break } - (second.baseAddress! + i).initialize(to: next) + unsafe (second.baseAddress! + i).initialize(to: next) i += 1 } copied += i @@ -171,21 +173,21 @@ extension _UnsafeMutableWrappedBuffer { internal func initialize( from elements: __owned C ) where C.Element == Element { - assert(self.count == elements.count) - if let second = second { - let wrap = elements.index(elements.startIndex, offsetBy: first.count) - first._initialize(from: elements[..( from elements: C ) where C.Element == Element { - assert(elements.count == self.count) - deinitialize() - initialize(from: elements) + unsafe assert(elements.count == self.count) + unsafe deinitialize() + unsafe initialize(from: elements) } } diff --git a/stdlib/public/Concurrency/DiscardingTaskGroup.swift b/stdlib/public/Concurrency/DiscardingTaskGroup.swift index 940757b014fda..22747c2e75d79 100644 --- a/stdlib/public/Concurrency/DiscardingTaskGroup.swift +++ b/stdlib/public/Concurrency/DiscardingTaskGroup.swift @@ -217,7 +217,7 @@ public struct DiscardingTaskGroup { // Create the task in this group. let builtinSerialExecutor = - Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor + unsafe Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor _ = Builtin.createDiscardingTask(flags: flags, initialSerialExecutor: builtinSerialExecutor, @@ -264,7 +264,7 @@ public struct DiscardingTaskGroup { // Create the task in this group. let builtinSerialExecutor = - Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor + unsafe Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor _ = Builtin.createDiscardingTask(flags: flags, initialSerialExecutor: builtinSerialExecutor, @@ -286,7 +286,7 @@ public struct DiscardingTaskGroup { // Create the task in this group. let builtinSerialExecutor = - Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor + unsafe Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor _ = Builtin.createDiscardingTask(flags: flags, initialSerialExecutor: builtinSerialExecutor, @@ -322,7 +322,7 @@ public struct DiscardingTaskGroup { // Create the task in this group. let builtinSerialExecutor = - Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor + unsafe Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor _ = Builtin.createDiscardingTask(flags: flags, initialSerialExecutor: builtinSerialExecutor, @@ -640,7 +640,7 @@ public struct ThrowingDiscardingTaskGroup { // Create the task in this group. 
let builtinSerialExecutor = - Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor + unsafe Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor _ = Builtin.createDiscardingTask(flags: flags, initialSerialExecutor: builtinSerialExecutor, @@ -671,7 +671,7 @@ public struct ThrowingDiscardingTaskGroup { // Create the task in this group. let builtinSerialExecutor = - Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor + unsafe Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor _ = Builtin.createDiscardingTask(flags: flags, initialSerialExecutor: builtinSerialExecutor, diff --git a/stdlib/public/Concurrency/Executor.swift b/stdlib/public/Concurrency/Executor.swift index be4e46c95d983..8b8229566f0a1 100644 --- a/stdlib/public/Concurrency/Executor.swift +++ b/stdlib/public/Concurrency/Executor.swift @@ -292,7 +292,7 @@ extension Executor { extension SerialExecutor { @available(SwiftStdlib 5.9, *) public func asUnownedSerialExecutor() -> UnownedSerialExecutor { - UnownedSerialExecutor(ordinary: self) + unsafe UnownedSerialExecutor(ordinary: self) } } @@ -328,17 +328,17 @@ public struct UnownedSerialExecutor: Sendable { /// which needs to reach for this from an @_transparent function which prevents @_spi use. @available(SwiftStdlib 5.9, *) public var _executor: Builtin.Executor { - self.executor + unsafe self.executor } @inlinable public init(_ executor: Builtin.Executor) { - self.executor = executor + unsafe self.executor = executor } @inlinable public init(ordinary executor: __shared E) { - self.executor = Builtin.buildOrdinarySerialExecutorRef(executor) + unsafe self.executor = Builtin.buildOrdinarySerialExecutorRef(executor) } /// Opts the executor into complex "same exclusive execution context" equality checks. @@ -354,13 +354,13 @@ public struct UnownedSerialExecutor: Sendable { @available(SwiftStdlib 5.9, *) @inlinable public init(complexEquality executor: __shared E) { - self.executor = Builtin.buildComplexEqualitySerialExecutorRef(executor) + unsafe self.executor = Builtin.buildComplexEqualitySerialExecutorRef(executor) } @_spi(ConcurrencyExecutors) @available(SwiftStdlib 5.9, *) public var _isComplexEquality: Bool { - _executor_isComplexEquality(self) + unsafe _executor_isComplexEquality(self) } } @@ -396,7 +396,7 @@ public struct UnownedTaskExecutor: Sendable { extension UnownedTaskExecutor: Equatable { @inlinable public static func == (_ lhs: UnownedTaskExecutor, _ rhs: UnownedTaskExecutor) -> Bool { - unsafeBitCast(lhs.executor, to: (Int, Int).self) == unsafeBitCast(rhs.executor, to: (Int, Int).self) + unsafe unsafeBitCast(lhs.executor, to: (Int, Int).self) == unsafeBitCast(rhs.executor, to: (Int, Int).self) } } @@ -473,7 +473,7 @@ internal func _task_serialExecutor_checkIsolated(executor: E) @_silgen_name("_task_serialExecutor_getExecutorRef") internal func _task_serialExecutor_getExecutorRef(_ executor: E) -> Builtin.Executor where E: SerialExecutor { - return executor.asUnownedSerialExecutor().executor + return unsafe executor.asUnownedSerialExecutor().executor } /// Obtain the executor ref by calling the executor's `asUnownedTaskExecutor()`. 
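The `Executor.swift` hunks above only annotate the existing `UnownedSerialExecutor` plumbing; adopting a custom executor looks the same from the caller's side. Below is a minimal sketch, not part of this change, of a queue-backed serial executor built on these entry points. It assumes a Swift 5.9+ runtime with Dispatch available; the type names are hypothetical.

```swift
import Dispatch

// Minimal serial executor backed by a dispatch queue. This only shows where
// `UnownedSerialExecutor(ordinary:)` and `runSynchronously(on:)` come into
// play; a real executor would also care about priorities and ownership.
final class QueueExecutor: SerialExecutor {
    private let queue = DispatchQueue(label: "QueueExecutor")

    func enqueue(_ job: consuming ExecutorJob) {
        let unownedJob = UnownedJob(job)        // detach from the consuming parameter
        let executor = asUnownedSerialExecutor()
        queue.async {
            unownedJob.runSynchronously(on: executor)
        }
    }

    func asUnownedSerialExecutor() -> UnownedSerialExecutor {
        UnownedSerialExecutor(ordinary: self)
    }
}

// Usage: pin an actor to the custom executor.
actor Worker {
    private let executor = QueueExecutor()

    nonisolated var unownedExecutor: UnownedSerialExecutor {
        executor.asUnownedSerialExecutor()
    }
}
```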
@@ -537,7 +537,7 @@ internal final class DispatchQueueShim: @unchecked Sendable, SerialExecutor { } func asUnownedSerialExecutor() -> UnownedSerialExecutor { - return UnownedSerialExecutor(ordinary: self) + return unsafe UnownedSerialExecutor(ordinary: self) } } #endif // SWIFT_CONCURRENCY_USES_DISPATCH diff --git a/stdlib/public/Concurrency/ExecutorAssertions.swift b/stdlib/public/Concurrency/ExecutorAssertions.swift index 9ba1411abc49b..79143faad9c62 100644 --- a/stdlib/public/Concurrency/ExecutorAssertions.swift +++ b/stdlib/public/Concurrency/ExecutorAssertions.swift @@ -57,7 +57,7 @@ extension SerialExecutor { return } - let expectationCheck = _taskIsCurrentExecutor(self.asUnownedSerialExecutor().executor) + let expectationCheck = unsafe _taskIsCurrentExecutor(self.asUnownedSerialExecutor().executor) /// TODO: implement the logic in-place perhaps rather than delegating to precondition()? precondition(expectationCheck, @@ -109,10 +109,10 @@ extension Actor { // NOTE: This method will CRASH in new runtime versions, // if it would have previously returned `false`. // It will call through to SerialExecutor.checkIsolated` as a last resort. - let expectationCheck = _taskIsCurrentExecutor(self.unownedExecutor.executor) + let expectationCheck = unsafe _taskIsCurrentExecutor(self.unownedExecutor.executor) precondition(expectationCheck, - "Incorrect actor executor assumption; Expected '\(self.unownedExecutor)' executor. \(message())", + unsafe "Incorrect actor executor assumption; Expected '\(self.unownedExecutor)' executor. \(message())", file: file, line: line) } } @@ -198,7 +198,7 @@ extension SerialExecutor { return } - guard _taskIsCurrentExecutor(self.asUnownedSerialExecutor().executor) else { + guard unsafe _taskIsCurrentExecutor(self.asUnownedSerialExecutor().executor) else { // TODO: offer information which executor we actually got let msg = "Incorrect actor executor assumption; Expected '\(self)' executor. \(message())" /// TODO: implement the logic in-place perhaps rather than delegating to precondition()? @@ -247,8 +247,8 @@ extension Actor { return } - guard _taskIsCurrentExecutor(self.unownedExecutor.executor) else { - let msg = "Incorrect actor executor assumption; Expected '\(self.unownedExecutor)' executor. \(message())" + guard unsafe _taskIsCurrentExecutor(self.unownedExecutor.executor) else { + let msg = unsafe "Incorrect actor executor assumption; Expected '\(self.unownedExecutor)' executor. \(message())" /// TODO: implement the logic in-place perhaps rather than delegating to precondition()? assertionFailure(msg, file: file, line: line) // short-cut so we get the exact same failure reporting semantics return @@ -354,7 +354,7 @@ extension Actor { /// This is guaranteed to be fatal if the check fails, /// as this is our "safe" version of this API. - let executor: Builtin.Executor = self.unownedExecutor.executor + let executor: Builtin.Executor = unsafe self.unownedExecutor.executor guard _taskIsCurrentExecutor(executor) else { // TODO: offer information which executor we actually got fatalError("Incorrect actor executor assumption; Expected same executor as \(self).", file: file, line: line) @@ -363,7 +363,7 @@ extension Actor { // To do the unsafe cast, we have to pretend it's @escaping. 
return try withoutActuallyEscaping(operation) { (_ fn: @escaping YesActor) throws -> T in - let rawFn = unsafeBitCast(fn, to: NoActor.self) + let rawFn = unsafe unsafeBitCast(fn, to: NoActor.self) return try rawFn(self) } } diff --git a/stdlib/public/Concurrency/GlobalActor.swift b/stdlib/public/Concurrency/GlobalActor.swift index 66a9f3b0ae7d9..5570bd016f8a6 100644 --- a/stdlib/public/Concurrency/GlobalActor.swift +++ b/stdlib/public/Concurrency/GlobalActor.swift @@ -77,7 +77,7 @@ public protocol GlobalActor { @available(SwiftStdlib 5.1, *) extension GlobalActor { public static var sharedUnownedExecutor: UnownedSerialExecutor { - shared.unownedExecutor + unsafe shared.unownedExecutor } } diff --git a/stdlib/public/Concurrency/MainActor.swift b/stdlib/public/Concurrency/MainActor.swift index b3142a953ffef..232de7bc17fb7 100644 --- a/stdlib/public/Concurrency/MainActor.swift +++ b/stdlib/public/Concurrency/MainActor.swift @@ -44,12 +44,12 @@ import Swift @inlinable public nonisolated var unownedExecutor: UnownedSerialExecutor { - return UnownedSerialExecutor(Builtin.buildMainActorExecutorRef()) + return unsafe UnownedSerialExecutor(Builtin.buildMainActorExecutorRef()) } @inlinable public static var sharedUnownedExecutor: UnownedSerialExecutor { - return UnownedSerialExecutor(Builtin.buildMainActorExecutorRef()) + return unsafe UnownedSerialExecutor(Builtin.buildMainActorExecutorRef()) } @inlinable @@ -134,7 +134,7 @@ extension MainActor { /// This is guaranteed to be fatal if the check fails, /// as this is our "safe" version of this API. - let executor: Builtin.Executor = Self.shared.unownedExecutor.executor + let executor: Builtin.Executor = unsafe Self.shared.unownedExecutor.executor guard _taskIsCurrentExecutor(executor) else { // TODO: offer information which executor we actually got fatalError("Incorrect actor executor assumption; Expected same executor as \(self).", file: file, line: line) @@ -143,7 +143,7 @@ extension MainActor { // To do the unsafe cast, we have to pretend it's @escaping. return try withoutActuallyEscaping(operation) { (_ fn: @escaping YesActor) throws -> T in - let rawFn = unsafeBitCast(fn, to: NoActor.self) + let rawFn = unsafe unsafeBitCast(fn, to: NoActor.self) return try rawFn() } } diff --git a/stdlib/public/Concurrency/PartialAsyncTask.swift b/stdlib/public/Concurrency/PartialAsyncTask.swift index 1b9404d345f9a..977ebe82f63ef 100644 --- a/stdlib/public/Concurrency/PartialAsyncTask.swift +++ b/stdlib/public/Concurrency/PartialAsyncTask.swift @@ -89,7 +89,7 @@ public struct UnownedJob: Sendable { @inlinable @available(*, deprecated, renamed: "ExecutorJob.runSynchronously(on:)") public func _runSynchronously(on executor: UnownedSerialExecutor) { - _swiftJobRun(self, executor) + unsafe _swiftJobRun(self, executor) } /// Run this job on the passed in executor. @@ -105,7 +105,7 @@ public struct UnownedJob: Sendable { @_alwaysEmitIntoClient @inlinable public func runSynchronously(on executor: UnownedSerialExecutor) { - _swiftJobRun(self, executor) + unsafe _swiftJobRun(self, executor) } /// Run this job isolated to the passed task executor. 
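The hunks above in `ExecutorAssertions.swift` and `MainActor.swift` appear to sit inside the implementations backing the `assertIsolated` / `preconditionIsolated` / `assumeIsolated` family; only their internals gain `unsafe` markers, and the calling convention is unchanged. A small usage sketch with hypothetical types (assuming Swift 5.10 or later for `MainActor.assumeIsolated`):

```swift
@MainActor final class Model {
    var counter = 0
}

// Called from synchronous code that is known to already be running on the
// main actor (for example, a C callback registered from the main thread).
// `assumeIsolated` traps at runtime if that assumption turns out to be wrong.
func handleLegacyCallback(_ model: Model) {
    MainActor.assumeIsolated {
        model.counter += 1
    }
}
```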
@@ -159,7 +159,7 @@ public struct UnownedJob: Sendable { @inlinable public func runSynchronously(isolatedTo serialExecutor: UnownedSerialExecutor, taskExecutor: UnownedTaskExecutor) { - _swiftJobRunOnTaskExecutor(self, serialExecutor, taskExecutor) + unsafe _swiftJobRunOnTaskExecutor(self, serialExecutor, taskExecutor) } } @@ -250,7 +250,7 @@ extension Job { @_alwaysEmitIntoClient @inlinable __consuming public func runSynchronously(on executor: UnownedSerialExecutor) { - _swiftJobRun(UnownedJob(self), executor) + unsafe _swiftJobRun(UnownedJob(self), executor) } } @@ -318,7 +318,7 @@ extension ExecutorJob { @_alwaysEmitIntoClient @inlinable __consuming public func runSynchronously(on executor: UnownedSerialExecutor) { - _swiftJobRun(UnownedJob(self), executor) + unsafe _swiftJobRun(UnownedJob(self), executor) } /// Run this job on the passed in task executor. @@ -367,7 +367,7 @@ extension ExecutorJob { @inlinable __consuming public func runSynchronously(isolatedTo serialExecutor: UnownedSerialExecutor, taskExecutor: UnownedTaskExecutor) { - _swiftJobRunOnTaskExecutor(UnownedJob(self), serialExecutor, taskExecutor) + unsafe _swiftJobRunOnTaskExecutor(UnownedJob(self), serialExecutor, taskExecutor) } } #endif // !SWIFT_STDLIB_TASK_TO_THREAD_MODEL_CONCURRENCY @@ -485,7 +485,7 @@ public struct UnsafeContinuation: Sendable { @_alwaysEmitIntoClient internal init(_ context: Builtin.RawUnsafeContinuation) { - self.context = context + unsafe self.context = context } /// Resume the task that's awaiting the continuation @@ -504,7 +504,7 @@ public struct UnsafeContinuation: Sendable { @_alwaysEmitIntoClient public func resume(returning value: sending T) where E == Never { #if compiler(>=5.5) && $BuiltinContinuation - Builtin.resumeNonThrowingContinuationReturning(context, value) + unsafe Builtin.resumeNonThrowingContinuationReturning(context, value) #else fatalError("Swift compiler is incompatible with this SDK version") #endif @@ -526,7 +526,7 @@ public struct UnsafeContinuation: Sendable { @_alwaysEmitIntoClient public func resume(returning value: sending T) { #if compiler(>=5.5) && $BuiltinContinuation - Builtin.resumeThrowingContinuationReturning(context, value) + unsafe Builtin.resumeThrowingContinuationReturning(context, value) #else fatalError("Swift compiler is incompatible with this SDK version") #endif @@ -548,7 +548,7 @@ public struct UnsafeContinuation: Sendable { @_alwaysEmitIntoClient public func resume(throwing error: consuming E) { #if compiler(>=5.5) && $BuiltinContinuation - Builtin.resumeThrowingContinuationThrowing(context, error) + unsafe Builtin.resumeThrowingContinuationThrowing(context, error) #else fatalError("Swift compiler is incompatible with this SDK version") #endif @@ -577,9 +577,9 @@ extension UnsafeContinuation { public func resume(with result: __shared sending Result) where E == Error { switch result { case .success(let val): - self.resume(returning: val) + unsafe self.resume(returning: val) case .failure(let err): - self.resume(throwing: err) + unsafe self.resume(throwing: err) } } @@ -603,9 +603,9 @@ extension UnsafeContinuation { public func resume(with result: __shared sending Result) { switch result { case .success(let val): - self.resume(returning: val) + unsafe self.resume(returning: val) case .failure(let err): - self.resume(throwing: err) + unsafe self.resume(throwing: err) } } @@ -621,7 +621,7 @@ extension UnsafeContinuation { /// when its executor schedules it. 
@_alwaysEmitIntoClient public func resume() where T == Void { - self.resume(returning: ()) + unsafe self.resume(returning: ()) } } @@ -634,7 +634,7 @@ internal func _resumeUnsafeContinuation( _ continuation: UnsafeContinuation, _ value: sending T ) { - continuation.resume(returning: value) + unsafe continuation.resume(returning: value) } @available(SwiftStdlib 5.1, *) @@ -643,7 +643,7 @@ internal func _resumeUnsafeThrowingContinuation( _ continuation: UnsafeContinuation, _ value: sending T ) { - continuation.resume(returning: value) + unsafe continuation.resume(returning: value) } @available(SwiftStdlib 5.1, *) @@ -652,7 +652,7 @@ internal func _resumeUnsafeThrowingContinuationWithError( _ continuation: UnsafeContinuation, _ error: consuming Error ) { - continuation.resume(throwing: error) + unsafe continuation.resume(throwing: error) } #endif @@ -689,7 +689,7 @@ public func withUnsafeContinuation( _ fn: (UnsafeContinuation) -> Void ) async -> sending T { return await Builtin.withUnsafeContinuation { - fn(UnsafeContinuation($0)) + unsafe fn(UnsafeContinuation($0)) } } @@ -726,7 +726,7 @@ public func withUnsafeThrowingContinuation( _ fn: (UnsafeContinuation) -> Void ) async throws -> sending T { return try await Builtin.withUnsafeThrowingContinuation { - fn(UnsafeContinuation($0)) + unsafe fn(UnsafeContinuation($0)) } } @@ -740,7 +740,7 @@ public func _unsafeInheritExecutor_withUnsafeContinuation( _ fn: (UnsafeContinuation) -> Void ) async -> sending T { return await Builtin.withUnsafeContinuation { - fn(UnsafeContinuation($0)) + unsafe fn(UnsafeContinuation($0)) } } @@ -754,7 +754,7 @@ public func _unsafeInheritExecutor_withUnsafeThrowingContinuation( _ fn: (UnsafeContinuation) -> Void ) async throws -> sending T { return try await Builtin.withUnsafeThrowingContinuation { - fn(UnsafeContinuation($0)) + unsafe fn(UnsafeContinuation($0)) } } diff --git a/stdlib/public/Concurrency/SuspendingClock.swift b/stdlib/public/Concurrency/SuspendingClock.swift index d5bea1f990b9a..c31e043387469 100644 --- a/stdlib/public/Concurrency/SuspendingClock.swift +++ b/stdlib/public/Concurrency/SuspendingClock.swift @@ -59,7 +59,7 @@ extension SuspendingClock: Clock { public static var now: SuspendingClock.Instant { var seconds = Int64(0) var nanoseconds = Int64(0) - _getTime( + unsafe _getTime( seconds: &seconds, nanoseconds: &nanoseconds, clock: _ClockID.suspending.rawValue) @@ -73,7 +73,7 @@ extension SuspendingClock: Clock { public var minimumResolution: Swift.Duration { var seconds = Int64(0) var nanoseconds = Int64(0) - _getClockRes( + unsafe _getClockRes( seconds: &seconds, nanoseconds: &nanoseconds, clock: _ClockID.suspending.rawValue) diff --git a/stdlib/public/Concurrency/Task+PriorityEscalation.swift b/stdlib/public/Concurrency/Task+PriorityEscalation.swift index 32885d69123f4..5fc52b390fbca 100644 --- a/stdlib/public/Concurrency/Task+PriorityEscalation.swift +++ b/stdlib/public/Concurrency/Task+PriorityEscalation.swift @@ -77,7 +77,7 @@ extension UnsafeCurrentTask { /// - newPriority: the new priority the task should continue executing on @available(SwiftStdlib 6.2, *) public static func escalatePriority(_ task: UnsafeCurrentTask, to newPriority: TaskPriority) { - _taskEscalate(task._task, newPriority: newPriority.rawValue) + unsafe _taskEscalate(task._task, newPriority: newPriority.rawValue) } } @@ -116,8 +116,8 @@ public func withTaskPriorityEscalationHandler( let handler0: (UInt8) -> Void = { handler(TaskPriority(rawValue: $0)) } - let record = _taskAddPriorityEscalationHandler(handler: 
handler0) - defer { _taskRemovePriorityEscalationHandler(record: record) } + let record = unsafe _taskAddPriorityEscalationHandler(handler: handler0) + defer { unsafe _taskRemovePriorityEscalationHandler(record: record) } return try await operation() } \ No newline at end of file diff --git a/stdlib/public/Concurrency/Task+TaskExecutor.swift b/stdlib/public/Concurrency/Task+TaskExecutor.swift index f10493e13c1fe..4356e17045403 100644 --- a/stdlib/public/Concurrency/Task+TaskExecutor.swift +++ b/stdlib/public/Concurrency/Task+TaskExecutor.swift @@ -148,9 +148,9 @@ public func withTaskExecutorPreference( let taskExecutorBuiltin: Builtin.Executor = taskExecutor.asUnownedTaskExecutor().executor - let record = _pushTaskExecutorPreference(taskExecutorBuiltin) + let record = unsafe _pushTaskExecutorPreference(taskExecutorBuiltin) defer { - _popTaskExecutorPreference(record: record) + unsafe _popTaskExecutorPreference(record: record) } // No need to manually hop to the target executor, because as we execute @@ -179,9 +179,9 @@ public func _unsafeInheritExecutor_withTaskExecutorPreference( let taskExecutorBuiltin: Builtin.Executor = taskExecutor.asUnownedTaskExecutor().executor - let record = _pushTaskExecutorPreference(taskExecutorBuiltin) + let record = unsafe _pushTaskExecutorPreference(taskExecutorBuiltin) defer { - _popTaskExecutorPreference(record: record) + unsafe _popTaskExecutorPreference(record: record) } return try await operation() @@ -496,7 +496,7 @@ internal func _getUndefinedTaskExecutor() -> Builtin.Executor { // Rather than call into the runtime to return the // `TaskExecutorRef::undefined()`` we this information to bitcast // and return it directly. - unsafeBitCast((UInt(0), UInt(0)), to: Builtin.Executor.self) + unsafe unsafeBitCast((UInt(0), UInt(0)), to: Builtin.Executor.self) } #endif // !SWIFT_STDLIB_TASK_TO_THREAD_MODEL_CONCURRENCY diff --git a/stdlib/public/Concurrency/Task.swift b/stdlib/public/Concurrency/Task.swift index 7ba55ae85d351..74e982e7818af 100644 --- a/stdlib/public/Concurrency/Task.swift +++ b/stdlib/public/Concurrency/Task.swift @@ -261,14 +261,14 @@ extension Task where Failure == Never { @available(SwiftStdlib 5.1, *) extension Task: Hashable { public func hash(into hasher: inout Hasher) { - UnsafeRawPointer(Builtin.bridgeToRawPointer(_task)).hash(into: &hasher) + unsafe UnsafeRawPointer(Builtin.bridgeToRawPointer(_task)).hash(into: &hasher) } } @available(SwiftStdlib 5.1, *) extension Task: Equatable { public static func ==(lhs: Self, rhs: Self) -> Bool { - UnsafeRawPointer(Builtin.bridgeToRawPointer(lhs._task)) == + unsafe UnsafeRawPointer(Builtin.bridgeToRawPointer(lhs._task)) == UnsafeRawPointer(Builtin.bridgeToRawPointer(rhs._task)) } } @@ -443,10 +443,10 @@ extension Task where Success == Never, Failure == Never { /// If the system can't provide a priority, /// this property's value is `Priority.default`. public static var currentPriority: TaskPriority { - withUnsafeCurrentTask { unsafeTask in + unsafe withUnsafeCurrentTask { unsafeTask in // If we are running on behalf of a task, use that task's priority. - if let unsafeTask { - return unsafeTask.priority + if let unsafeTask = unsafe unsafeTask { + return unsafe unsafeTask.priority } // Otherwise, query the system. @@ -459,10 +459,10 @@ extension Task where Success == Never, Failure == Never { /// If you access this property outside of any task, this returns nil @available(SwiftStdlib 5.7, *) public static var basePriority: TaskPriority? 
{ - withUnsafeCurrentTask { task in + unsafe withUnsafeCurrentTask { task in // If we are running on behalf of a task, use that task's priority. - if let unsafeTask = task { - return TaskPriority(rawValue: _taskBasePriority(unsafeTask._task)) + if let unsafeTask = unsafe task { + return unsafe TaskPriority(rawValue: _taskBasePriority(unsafeTask._task)) } return nil } @@ -674,7 +674,7 @@ extension Task where Failure == Never { // Create the asynchronous task. let builtinSerialExecutor = - Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor + unsafe Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor let (task, _) = Builtin.createTask(flags: flags, initialSerialExecutor: @@ -763,7 +763,7 @@ extension Task where Failure == Never { // Create the asynchronous task. let builtinSerialExecutor = - Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor + unsafe Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor var task: Builtin.NativeObject? #if $BuiltinCreateAsyncTaskName @@ -842,7 +842,7 @@ extension Task where Failure == Error { // Create the asynchronous task future. let builtinSerialExecutor = - Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor + unsafe Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor let (task, _) = Builtin.createTask(flags: flags, initialSerialExecutor: @@ -932,7 +932,7 @@ self._task = task // Create the asynchronous task. let builtinSerialExecutor = - Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor + unsafe Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor var task: Builtin.NativeObject? #if $BuiltinCreateAsyncTaskName @@ -1010,7 +1010,7 @@ extension Task where Failure == Never { // Create the asynchronous task future. let builtinSerialExecutor = - Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor + unsafe Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor let (task, _) = Builtin.createTask(flags: flags, initialSerialExecutor: @@ -1073,7 +1073,7 @@ extension Task where Failure == Never { // Create the asynchronous task. let builtinSerialExecutor = - Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor + unsafe Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor var task: Builtin.NativeObject? #if $BuiltinCreateAsyncTaskName @@ -1151,7 +1151,7 @@ extension Task where Failure == Error { // Create the asynchronous task future. let builtinSerialExecutor = - Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor + unsafe Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor let (task, _) = Builtin.createTask(flags: flags, initialSerialExecutor: @@ -1215,7 +1215,7 @@ extension Task where Failure == Error { // Create the asynchronous task future. let builtinSerialExecutor = - Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor + unsafe Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor var task: Builtin.NativeObject? #if $BuiltinCreateAsyncTaskName @@ -1303,7 +1303,7 @@ extension Task where Success == Never, Failure == Never { @available(SwiftStdlib 5.1, *) public func withUnsafeCurrentTask(body: (UnsafeCurrentTask?) 
throws -> T) rethrows -> T { guard let _task = _getCurrentAsyncTask() else { - return try body(nil) + return try unsafe body(nil) } // FIXME: This retain seems pretty wrong, however if we don't we WILL crash @@ -1311,13 +1311,13 @@ public func withUnsafeCurrentTask(body: (UnsafeCurrentTask?) throws -> T) ret // How do we solve this properly? Builtin.retain(_task) - return try body(UnsafeCurrentTask(_task)) + return try unsafe body(UnsafeCurrentTask(_task)) } @available(SwiftStdlib 6.0, *) public func withUnsafeCurrentTask(body: (UnsafeCurrentTask?) async throws -> T) async rethrows -> T { guard let _task = _getCurrentAsyncTask() else { - return try await body(nil) + return try unsafe await body(nil) } // FIXME: This retain seems pretty wrong, however if we don't we WILL crash @@ -1325,7 +1325,7 @@ public func withUnsafeCurrentTask(body: (UnsafeCurrentTask?) async throws -> // How do we solve this properly? Builtin.retain(_task) - return try await body(UnsafeCurrentTask(_task)) + return try unsafe await body(UnsafeCurrentTask(_task)) } /// An unsafe reference to the current task. @@ -1357,7 +1357,7 @@ public struct UnsafeCurrentTask { // May only be created by the standard library. internal init(_ task: Builtin.NativeObject) { - self._task = task + unsafe self._task = task } /// A Boolean value that indicates whether the current task was canceled. @@ -1367,7 +1367,7 @@ public struct UnsafeCurrentTask { /// /// - SeeAlso: `checkCancellation()` public var isCancelled: Bool { - _taskIsCancelled(_task) + unsafe _taskIsCancelled(_task) } /// The current task's priority. @@ -1375,7 +1375,7 @@ public struct UnsafeCurrentTask { /// - SeeAlso: `TaskPriority` /// - SeeAlso: `Task.currentPriority` public var priority: TaskPriority { - TaskPriority(rawValue: _taskCurrentPriority(_task)) + unsafe TaskPriority(rawValue: _taskCurrentPriority(_task)) } /// The current task's base priority. @@ -1384,12 +1384,12 @@ public struct UnsafeCurrentTask { /// - SeeAlso: `Task.basePriority` @available(SwiftStdlib 5.9, *) public var basePriority: TaskPriority { - TaskPriority(rawValue: _taskBasePriority(_task)) + unsafe TaskPriority(rawValue: _taskBasePriority(_task)) } /// Cancel the current task. public func cancel() { - _taskCancel(_task) + unsafe _taskCancel(_task) } } @@ -1398,16 +1398,16 @@ public struct UnsafeCurrentTask { extension UnsafeCurrentTask: Sendable { } @available(SwiftStdlib 5.1, *) -extension UnsafeCurrentTask: Hashable { +extension UnsafeCurrentTask: @unsafe Hashable { public func hash(into hasher: inout Hasher) { - UnsafeRawPointer(Builtin.bridgeToRawPointer(_task)).hash(into: &hasher) + unsafe UnsafeRawPointer(Builtin.bridgeToRawPointer(_task)).hash(into: &hasher) } } @available(SwiftStdlib 5.1, *) extension UnsafeCurrentTask: Equatable { public static func ==(lhs: Self, rhs: Self) -> Bool { - UnsafeRawPointer(Builtin.bridgeToRawPointer(lhs._task)) == + unsafe UnsafeRawPointer(Builtin.bridgeToRawPointer(lhs._task)) == UnsafeRawPointer(Builtin.bridgeToRawPointer(rhs._task)) } } @@ -1478,7 +1478,7 @@ internal func _getGenericSerialExecutor() -> Builtin.Executor { // As the runtime relies on this in multiple places, // so instead of a runtime call to get this executor ref, we bitcast a "zero" // of expected size to the builtin executor type. 
- unsafeBitCast((UInt(0), UInt(0)), to: Builtin.Executor.self) + unsafe unsafeBitCast((UInt(0), UInt(0)), to: Builtin.Executor.self) } #if SWIFT_STDLIB_TASK_TO_THREAD_MODEL_CONCURRENCY @@ -1690,7 +1690,7 @@ public func _taskRunOnMainActor(operation: @escaping @MainActor () -> ()) { if _taskIsOnMainActor() { return withoutActuallyEscaping(operation) { (_ fn: @escaping YesActor) -> () in - let rawFn = unsafeBitCast(fn, to: NoActor.self) + let rawFn = unsafe unsafeBitCast(fn, to: NoActor.self) return rawFn() } } diff --git a/stdlib/public/Concurrency/TaskCancellation.swift b/stdlib/public/Concurrency/TaskCancellation.swift index 8e442227feccd..62ce412e60798 100644 --- a/stdlib/public/Concurrency/TaskCancellation.swift +++ b/stdlib/public/Concurrency/TaskCancellation.swift @@ -77,8 +77,8 @@ public func withTaskCancellationHandler( ) async rethrows -> T { // unconditionally add the cancellation record to the task. // if the task was already cancelled, it will be executed right away. - let record = _taskAddCancellationHandler(handler: handler) - defer { _taskRemoveCancellationHandler(record: record) } + let record = unsafe _taskAddCancellationHandler(handler: handler) + defer { unsafe _taskRemoveCancellationHandler(record: record) } return try await operation() } @@ -98,8 +98,8 @@ public func _unsafeInheritExecutor_withTaskCancellationHandler( ) async rethrows -> T { // unconditionally add the cancellation record to the task. // if the task was already cancelled, it will be executed right away. - let record = _taskAddCancellationHandler(handler: handler) - defer { _taskRemoveCancellationHandler(record: record) } + let record = unsafe _taskAddCancellationHandler(handler: handler) + defer { unsafe _taskRemoveCancellationHandler(record: record) } return try await operation() } @@ -126,8 +126,8 @@ extension Task where Success == Never, Failure == Never { /// /// - SeeAlso: `checkCancellation()` public static var isCancelled: Bool { - withUnsafeCurrentTask { task in - task?.isCancelled ?? false + unsafe withUnsafeCurrentTask { task in + unsafe task?.isCancelled ?? false } } } diff --git a/stdlib/public/Concurrency/TaskGroup+TaskExecutor.swift b/stdlib/public/Concurrency/TaskGroup+TaskExecutor.swift index 531b1ee303bb0..c793f57105e47 100644 --- a/stdlib/public/Concurrency/TaskGroup+TaskExecutor.swift +++ b/stdlib/public/Concurrency/TaskGroup+TaskExecutor.swift @@ -45,7 +45,7 @@ extension TaskGroup { isDiscardingTask: false) let builtinSerialExecutor = - Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor + unsafe Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor #if $BuiltinCreateAsyncTaskOwnedTaskExecutor _ = Builtin.createTask(flags: flags, @@ -102,7 +102,7 @@ extension TaskGroup { // Create the task in this group with an executor preference. let builtinSerialExecutor = - Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor + unsafe Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor #if $BuiltinCreateAsyncTaskOwnedTaskExecutor _ = Builtin.createTask(flags: flags, @@ -159,7 +159,7 @@ extension ThrowingTaskGroup { // Create the task in this group with an executor preference. 
let builtinSerialExecutor = - Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor + unsafe Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor #if $BuiltinCreateAsyncTaskOwnedTaskExecutor _ = Builtin.createTask(flags: flags, @@ -212,7 +212,7 @@ extension ThrowingTaskGroup { // Create the task in this group with an executor preference. let builtinSerialExecutor = - Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor + unsafe Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor #if $BuiltinCreateAsyncTaskOwnedTaskExecutor _ = Builtin.createTask(flags: flags, @@ -269,7 +269,7 @@ extension DiscardingTaskGroup { // Create the task in this group with an executor preference. let builtinSerialExecutor = - Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor + unsafe Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor #if $BuiltinCreateAsyncTaskOwnedTaskExecutor _ = Builtin.createTask(flags: flags, @@ -327,7 +327,7 @@ extension DiscardingTaskGroup { // Create the task in this group with an executor preference. let builtinSerialExecutor = - Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor + unsafe Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor #if $BuiltinCreateAsyncTaskOwnedTaskExecutor _ = Builtin.createTask(flags: flags, @@ -384,7 +384,7 @@ extension ThrowingDiscardingTaskGroup { // Create the task in this group with an executor preference. let builtinSerialExecutor = - Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor + unsafe Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor #if $BuiltinCreateAsyncTaskOwnedTaskExecutor _ = Builtin.createTask(flags: flags, @@ -442,7 +442,7 @@ extension ThrowingDiscardingTaskGroup { // Create the task in this group with an executor preference. let builtinSerialExecutor = - Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor + unsafe Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor #if $BuiltinCreateAsyncTaskOwnedTaskExecutor _ = Builtin.createTask(flags: flags, diff --git a/stdlib/public/Concurrency/TaskGroup.swift b/stdlib/public/Concurrency/TaskGroup.swift index a6fedaa3e2aae..5b381ce40b690 100644 --- a/stdlib/public/Concurrency/TaskGroup.swift +++ b/stdlib/public/Concurrency/TaskGroup.swift @@ -361,7 +361,7 @@ public struct TaskGroup { // Create the task in this group. let builtinSerialExecutor = - Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor + unsafe Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor _ = Builtin.createTask(flags: flags, initialSerialExecutor: builtinSerialExecutor, taskGroup: _group, @@ -404,7 +404,7 @@ public struct TaskGroup { // Create the task in this group. let builtinSerialExecutor = - Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor + unsafe Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor _ = Builtin.createTask(flags: flags, initialSerialExecutor: builtinSerialExecutor, taskGroup: _group, @@ -802,7 +802,7 @@ public struct ThrowingTaskGroup { // Create the task in this group. 
let builtinSerialExecutor = - Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor + unsafe Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor _ = Builtin.createTask(flags: flags, initialSerialExecutor: builtinSerialExecutor, taskGroup: _group, @@ -841,7 +841,7 @@ public struct ThrowingTaskGroup { // Create the task in this group. let builtinSerialExecutor = - Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor + unsafe Builtin.extractFunctionIsolation(operation)?.unownedExecutor.executor _ = Builtin.createTask(flags: flags, initialSerialExecutor: builtinSerialExecutor, taskGroup: _group, diff --git a/stdlib/public/Concurrency/TaskLocal.swift b/stdlib/public/Concurrency/TaskLocal.swift index 93c0ca75f20a2..df1811d14d6e6 100644 --- a/stdlib/public/Concurrency/TaskLocal.swift +++ b/stdlib/public/Concurrency/TaskLocal.swift @@ -169,7 +169,7 @@ public final class TaskLocal: Sendable, CustomStringConvertible @_alwaysEmitIntoClient var key: Builtin.RawPointer { - unsafeBitCast(self, to: Builtin.RawPointer.self) + unsafe unsafeBitCast(self, to: Builtin.RawPointer.self) } /// Gets the value currently bound to this task-local from the current task. @@ -178,14 +178,14 @@ public final class TaskLocal: Sendable, CustomStringConvertible /// or if the task-local has no value bound, this will return the `defaultValue` /// of the task local. public func get() -> Value { - guard let rawValue = _taskLocalValueGet(key: key) else { + guard let rawValue = unsafe _taskLocalValueGet(key: key) else { return self.defaultValue } // Take the value; The type should be correct by construction let storagePtr = - rawValue.bindMemory(to: Value.self, capacity: 1) - return UnsafeMutablePointer(mutating: storagePtr).pointee + unsafe rawValue.bindMemory(to: Value.self, capacity: 1) + return unsafe UnsafeMutablePointer(mutating: storagePtr).pointee } /// Binds the task-local to the specific value for the duration of the asynchronous operation. @@ -376,8 +376,8 @@ func _taskLocalsCopy( @available(*, deprecated, message: "The situation diagnosed by this is not handled gracefully rather than by crashing") func _checkIllegalTaskLocalBindingWithinWithTaskGroup(file: String, line: UInt) { if _taskHasTaskGroupStatusRecord() { - file.withCString { _fileStart in - _reportIllegalTaskLocalBindingWithinWithTaskGroup( + unsafe file.withCString { _fileStart in + unsafe _reportIllegalTaskLocalBindingWithinWithTaskGroup( _fileStart, file.count, true, line) } } diff --git a/stdlib/public/Concurrency/TaskSleep.swift b/stdlib/public/Concurrency/TaskSleep.swift index fcdd2df847d93..5f72166732f54 100644 --- a/stdlib/public/Concurrency/TaskSleep.swift +++ b/stdlib/public/Concurrency/TaskSleep.swift @@ -36,7 +36,7 @@ extension Task where Success == Never, Failure == Never { typealias SleepContinuation = UnsafeContinuation<(), Error> /// Describes the state of a sleep() operation. - enum SleepState { + @unsafe enum SleepState { /// The sleep continuation has not yet begun. 
case notStarted @@ -58,21 +58,21 @@ extension Task where Success == Never, Failure == Never { case 0: let continuationBits = UInt(word) & ~0x03 if continuationBits == 0 { - self = .notStarted + unsafe self = unsafe .notStarted } else { - let continuation = unsafeBitCast( + let continuation = unsafe unsafeBitCast( continuationBits, to: SleepContinuation.self) - self = .activeContinuation(continuation) + unsafe self = unsafe .activeContinuation(continuation) } case 1: - self = .finished + unsafe self = unsafe .finished case 2: - self = .cancelled + unsafe self = unsafe .cancelled case 3: - self = .cancelledBeforeStarted + unsafe self = unsafe .cancelledBeforeStarted default: fatalError("Bitmask failure") @@ -81,17 +81,17 @@ extension Task where Success == Never, Failure == Never { /// Decode sleep state by loading from the given pointer init(loading wordPtr: UnsafeMutablePointer) { - self.init(word: Builtin.atomicload_seqcst_Word(wordPtr._rawValue)) + unsafe self.init(word: Builtin.atomicload_seqcst_Word(wordPtr._rawValue)) } /// Encode sleep state into a word of storage. var word: UInt { - switch self { + switch unsafe self { case .notStarted: return 0 case .activeContinuation(let continuation): - let continuationBits = unsafeBitCast(continuation, to: UInt.self) + let continuationBits = unsafe unsafeBitCast(continuation, to: UInt.self) return continuationBits case .finished: @@ -111,25 +111,25 @@ extension Task where Success == Never, Failure == Never { /// exchange operations on the underlying storage. However, this wrapper is also /// _unsafe_ because the owner must manually deallocate the token once it is no /// longer needed. - struct UnsafeSleepStateToken: @unchecked Sendable { + @unsafe struct UnsafeSleepStateToken: @unchecked Sendable { let wordPtr: UnsafeMutablePointer /// Allocates the underlying storage and sets the value to `.notStarted`. init() { - wordPtr = .allocate(capacity: 1) - Builtin.atomicstore_seqcst_Word( + unsafe wordPtr = .allocate(capacity: 1) + unsafe Builtin.atomicstore_seqcst_Word( wordPtr._rawValue, SleepState.notStarted.word._builtinWordValue) } /// Atomically loads the current state. func load() -> SleepState { - return SleepState(word: Builtin.atomicload_seqcst_Word(wordPtr._rawValue)) + return unsafe SleepState(word: Builtin.atomicload_seqcst_Word(wordPtr._rawValue)) } /// Attempts to atomically set the stored value to `desired` if the current /// value is equal to `expected`. Returns true if the exchange was successful. func exchange(expected: SleepState, desired: SleepState) -> Bool { - let (_, won) = Builtin.cmpxchg_seqcst_seqcst_Word( + let (_, won) = unsafe Builtin.cmpxchg_seqcst_seqcst_Word( wordPtr._rawValue, expected.word._builtinWordValue, desired.word._builtinWordValue) @@ -138,7 +138,7 @@ extension Task where Success == Never, Failure == Never { /// Deallocates the underlying storage. func deallocate() { - wordPtr.deallocate() + unsafe wordPtr.deallocate() } } @@ -146,17 +146,17 @@ extension Task where Success == Never, Failure == Never { /// canceled. static func onSleepWake(_ token: UnsafeSleepStateToken) { while true { - let state = token.load() - switch state { + let state = unsafe token.load() + switch unsafe state { case .notStarted: fatalError("Cannot wake before we even started") case .activeContinuation(let continuation): // We have an active continuation, so try to transition to the // "finished" state. 
- if token.exchange(expected: state, desired: .finished) { + if unsafe token.exchange(expected: state, desired: .finished) { // The sleep finished, so invoke the continuation: we're done. - continuation.resume() + unsafe continuation.resume() return } @@ -170,7 +170,7 @@ extension Task where Success == Never, Failure == Never { // The task was cancelled, which means the continuation was // called by the cancellation handler. We need to deallocate the token // because it was left over for this task to complete. - token.deallocate() + unsafe token.deallocate() return case .cancelledBeforeStarted: @@ -184,12 +184,12 @@ extension Task where Success == Never, Failure == Never { /// the sleep completed. static func onSleepCancel(_ token: UnsafeSleepStateToken) { while true { - let state = token.load() - switch state { + let state = unsafe token.load() + switch unsafe state { case .notStarted: // We haven't started yet, so try to transition to the cancelled-before // started state. - if token.exchange(expected: state, desired: .cancelledBeforeStarted) { + if unsafe token.exchange(expected: state, desired: .cancelledBeforeStarted) { return } @@ -199,10 +199,10 @@ extension Task where Success == Never, Failure == Never { case .activeContinuation(let continuation): // We have an active continuation, so try to transition to the // "cancelled" state. - if token.exchange(expected: state, desired: .cancelled) { + if unsafe token.exchange(expected: state, desired: .cancelled) { // We recorded the task cancellation before the sleep finished, so // invoke the continuation with the cancellation error. - continuation.resume(throwing: _Concurrency.CancellationError()) + unsafe continuation.resume(throwing: _Concurrency.CancellationError()) return } @@ -226,20 +226,20 @@ extension Task where Success == Never, Failure == Never { public static func sleep(nanoseconds duration: UInt64) async throws { // Create a token which will initially have the value "not started", which // means the continuation has neither been created nor completed. - let token = UnsafeSleepStateToken() + let token = unsafe UnsafeSleepStateToken() do { // Install a cancellation handler to resume the continuation by // throwing CancellationError. try await withTaskCancellationHandler { - let _: () = try await withUnsafeThrowingContinuation { continuation in + let _: () = try unsafe await withUnsafeThrowingContinuation { continuation in while true { - let state = token.load() - switch state { + let state = unsafe token.load() + switch unsafe state { case .notStarted: // Try to swap in the continuation state. - let newState = SleepState.activeContinuation(continuation) - if !token.exchange(expected: state, desired: newState) { + let newState = unsafe SleepState.activeContinuation(continuation) + if unsafe !token.exchange(expected: state, desired: newState) { // Keep trying! continue } @@ -253,7 +253,7 @@ extension Task where Success == Never, Failure == Never { addPendingGroupTaskUnconditionally: false, isDiscardingTask: false) let (sleepTask, _) = Builtin.createAsyncTask(sleepTaskFlags) { - onSleepWake(token) + unsafe onSleepWake(token) } _enqueueJobGlobalWithDelay( duration, Builtin.convertTaskToJob(sleepTask)) @@ -268,18 +268,18 @@ extension Task where Success == Never, Failure == Never { case .cancelledBeforeStarted: // Finish the continuation normally. We'll throw later, after // we clean up. 
- continuation.resume() + unsafe continuation.resume() return } } } } onCancel: { - onSleepCancel(token) + unsafe onSleepCancel(token) } // Determine whether we got cancelled before we even started. let cancelledBeforeStarted: Bool - switch token.load() { + switch unsafe token.load() { case .notStarted, .activeContinuation, .cancelled: fatalError("Invalid state for non-cancelled sleep task") @@ -292,7 +292,7 @@ extension Task where Success == Never, Failure == Never { // We got here without being cancelled, so deallocate the storage for // the flag word and continuation. - token.deallocate() + unsafe token.deallocate() // If we got cancelled before we even started, through the cancellation // error now. diff --git a/stdlib/public/Concurrency/TaskSleepDuration.swift b/stdlib/public/Concurrency/TaskSleepDuration.swift index 2caa5bb12e053..ba4110e262358 100644 --- a/stdlib/public/Concurrency/TaskSleepDuration.swift +++ b/stdlib/public/Concurrency/TaskSleepDuration.swift @@ -24,20 +24,20 @@ extension Task where Success == Never, Failure == Never { ) async throws { // Create a token which will initially have the value "not started", which // means the continuation has neither been created nor completed. - let token = UnsafeSleepStateToken() + let token = unsafe UnsafeSleepStateToken() do { // Install a cancellation handler to resume the continuation by // throwing CancellationError. try await withTaskCancellationHandler { - let _: () = try await withUnsafeThrowingContinuation { continuation in + let _: () = try unsafe await withUnsafeThrowingContinuation { continuation in while true { - let state = token.load() - switch state { + let state = unsafe token.load() + switch unsafe state { case .notStarted: // Try to swap in the continuation word. - let newState = SleepState.activeContinuation(continuation) - if !token.exchange(expected: state, desired: newState) { + let newState = unsafe SleepState.activeContinuation(continuation) + if unsafe !token.exchange(expected: state, desired: newState) { // Keep trying! continue } @@ -51,7 +51,7 @@ extension Task where Success == Never, Failure == Never { addPendingGroupTaskUnconditionally: false, isDiscardingTask: false) let (sleepTask, _) = Builtin.createAsyncTask(sleepTaskFlags) { - onSleepWake(token) + unsafe onSleepWake(token) } let toleranceSeconds: Int64 let toleranceNanoseconds: Int64 @@ -78,18 +78,18 @@ extension Task where Success == Never, Failure == Never { case .cancelledBeforeStarted: // Finish the continuation normally. We'll throw later, after // we clean up. - continuation.resume() + unsafe continuation.resume() return } } } } onCancel: { - onSleepCancel(token) + unsafe onSleepCancel(token) } // Determine whether we got cancelled before we even started. let cancelledBeforeStarted: Bool - switch token.load() { + switch unsafe token.load() { case .notStarted, .activeContinuation, .cancelled: fatalError("Invalid state for non-cancelled sleep task") @@ -102,7 +102,7 @@ extension Task where Success == Never, Failure == Never { // We got here without being cancelled, so deallocate the storage for // the flag word and continuation. - token.deallocate() + unsafe token.deallocate() // If we got cancelled before we even started, through the cancellation // error now. 
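
The `TaskSleep` and `TaskSleepDuration` hunks above show the pattern this change applies across the runtime: pointer-backed helper types are annotated `@unsafe`, and each memory-unsafe operation on them is acknowledged with an `unsafe` expression so the module can build under `-strict-memory-safety`. Below is a minimal, self-contained sketch of that pattern for readers who want to try it outside the stdlib. The `FlagToken` type and `demo()` function are hypothetical illustrations (not standard-library code), and the example assumes a toolchain that accepts the `unsafe` expression and `@unsafe` attribute (Swift 6.2 or later):

```swift
// Minimal sketch (hypothetical type, not stdlib code) of the pattern used in
// the TaskSleep changes: an @unsafe wrapper that owns a manual allocation,
// with every memory-unsafe operation acknowledged by an 'unsafe' expression.
@unsafe
struct FlagToken {
  let word: UnsafeMutablePointer<UInt>

  // Allocating and initializing raw storage is memory-unsafe.
  init() {
    unsafe word = .allocate(capacity: 1)
    unsafe word.initialize(to: 0)
  }

  // Reading through the pointer is memory-unsafe.
  func load() -> UInt {
    unsafe word.pointee
  }

  // The owner must free the storage exactly once; forgetting to call this
  // leaks, and calling it twice is undefined behavior, which is why the
  // whole type is marked @unsafe.
  func deallocate() {
    unsafe word.deinitialize(count: 1)
    unsafe word.deallocate()
  }
}

func demo() -> UInt {
  // Using an @unsafe type is itself acknowledged at the use site.
  let token = unsafe FlagToken()
  let value = unsafe token.load()
  unsafe token.deallocate()
  return value
}
```

As with `UnsafeSleepStateToken` above, the `unsafe` expressions only acknowledge the hazard for the strict-memory-safety diagnostics; they do not change runtime behavior, and the caller remains responsible for balancing the allocation with exactly one `deallocate()`.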
diff --git a/stdlib/public/Cxx/CMakeLists.txt b/stdlib/public/Cxx/CMakeLists.txt index a8c3dfc278575..05fee6c62d9c5 100644 --- a/stdlib/public/Cxx/CMakeLists.txt +++ b/stdlib/public/Cxx/CMakeLists.txt @@ -23,6 +23,7 @@ add_swift_target_library(swiftCxx STATIC NO_LINK_NAME IS_STDLIB IS_SWIFT_ONLY -cxx-interoperability-mode=default -enable-experimental-feature Span -enable-experimental-feature BuiltinModule + -strict-memory-safety # This module should not pull in the C++ standard library, so we disable it explicitly. # For functionality that depends on the C++ stdlib, use C++ stdlib overlay (`swiftstd` module). -Xcc -nostdinc++ diff --git a/stdlib/public/Cxx/CxxSpan.swift b/stdlib/public/Cxx/CxxSpan.swift index 7f4f492ecb624..be08e7d9319c9 100644 --- a/stdlib/public/Cxx/CxxSpan.swift +++ b/stdlib/public/Cxx/CxxSpan.swift @@ -63,26 +63,26 @@ extension CxxSpan { /// Creates a C++ span from a Swift UnsafeBufferPointer @inlinable public init(_ unsafeBufferPointer: UnsafeBufferPointer) { - precondition(unsafeBufferPointer.baseAddress != nil, + unsafe precondition(unsafeBufferPointer.baseAddress != nil, "UnsafeBufferPointer should not point to nil") - self.init(unsafeBufferPointer.baseAddress!, Size(unsafeBufferPointer.count)) + unsafe self.init(unsafeBufferPointer.baseAddress!, Size(unsafeBufferPointer.count)) } @inlinable public init(_ unsafeMutableBufferPointer: UnsafeMutableBufferPointer) { - precondition(unsafeMutableBufferPointer.baseAddress != nil, + unsafe precondition(unsafeMutableBufferPointer.baseAddress != nil, "UnsafeMutableBufferPointer should not point to nil") - self.init(unsafeMutableBufferPointer.baseAddress!, Size(unsafeMutableBufferPointer.count)) + unsafe self.init(unsafeMutableBufferPointer.baseAddress!, Size(unsafeMutableBufferPointer.count)) } @available(SwiftStdlib 6.1, *) @inlinable @unsafe public init(_ span: Span) { - let (p, c) = unsafeBitCast(span, to: (UnsafeRawPointer?, Int).self) - precondition(p != nil, "Span should not point to nil") - let binding = p!.bindMemory(to: Element.self, capacity: c) - self.init(binding, Size(c)) + let (p, c) = unsafe unsafeBitCast(span, to: (UnsafeRawPointer?, Int).self) + unsafe precondition(p != nil, "Span should not point to nil") + let binding = unsafe p!.bindMemory(to: Element.self, capacity: c) + unsafe self.init(binding, Size(c)) } } @@ -94,10 +94,10 @@ extension Span { public init>( _unsafeCxxSpan span: borrowing T, ) { - let buffer = UnsafeBufferPointer(start: span.__dataUnsafe(), count: Int(span.size())) + let buffer = unsafe UnsafeBufferPointer(start: span.__dataUnsafe(), count: Int(span.size())) let newSpan = Span(_unsafeElements: buffer) // 'self' is limited to the caller's scope of the variable passed to the 'span' argument. 
- self = _overrideLifetime(newSpan, borrowing: span) + self = unsafe _overrideLifetime(newSpan, borrowing: span) } } @@ -113,8 +113,8 @@ extension CxxMutableSpan { /// Creates a C++ span from a Swift UnsafeMutableBufferPointer @inlinable public init(_ unsafeMutableBufferPointer: UnsafeMutableBufferPointer) { - precondition(unsafeMutableBufferPointer.baseAddress != nil, + unsafe precondition(unsafeMutableBufferPointer.baseAddress != nil, "UnsafeMutableBufferPointer should not point to nil") - self.init(unsafeMutableBufferPointer.baseAddress!, Size(unsafeMutableBufferPointer.count)) + unsafe self.init(unsafeMutableBufferPointer.baseAddress!, Size(unsafeMutableBufferPointer.count)) } } diff --git a/stdlib/public/Cxx/UnsafeCxxIterators.swift b/stdlib/public/Cxx/UnsafeCxxIterators.swift index 958ce54f09838..ccfacc698165f 100644 --- a/stdlib/public/Cxx/UnsafeCxxIterators.swift +++ b/stdlib/public/Cxx/UnsafeCxxIterators.swift @@ -34,9 +34,9 @@ public protocol UnsafeCxxInputIterator: Equatable { func successor() -> Self } -extension UnsafePointer: UnsafeCxxInputIterator {} +extension UnsafePointer: @unsafe UnsafeCxxInputIterator {} -extension UnsafeMutablePointer: UnsafeCxxInputIterator {} +extension UnsafeMutablePointer: @unsafe UnsafeCxxInputIterator {} extension Optional: UnsafeCxxInputIterator where Wrapped: UnsafeCxxInputIterator { public typealias Pointee = Wrapped.Pointee @@ -79,9 +79,9 @@ public protocol UnsafeCxxRandomAccessIterator: UnsafeCxxInputIterator { static func +=(lhs: inout Self, rhs: Distance) } -extension UnsafePointer: UnsafeCxxRandomAccessIterator {} +extension UnsafePointer: @unsafe UnsafeCxxRandomAccessIterator {} -extension UnsafeMutablePointer: UnsafeCxxRandomAccessIterator {} +extension UnsafeMutablePointer: @unsafe UnsafeCxxRandomAccessIterator {} public protocol UnsafeCxxMutableRandomAccessIterator: UnsafeCxxRandomAccessIterator, UnsafeCxxMutableInputIterator {} diff --git a/stdlib/public/Cxx/std/CMakeLists.txt b/stdlib/public/Cxx/std/CMakeLists.txt index d92d3e369b1c9..e2deb144ad723 100644 --- a/stdlib/public/Cxx/std/CMakeLists.txt +++ b/stdlib/public/Cxx/std/CMakeLists.txt @@ -60,6 +60,8 @@ add_swift_target_library(swiftCxxStdlib STATIC NO_LINK_NAME IS_STDLIB IS_SWIFT_O # using C++ symbols in resilient overlays (see f4204568). -enable-experimental-feature AssumeResilientCxxTypes + -strict-memory-safety + # The varying modularization of the C++ standard library on different # platforms makes it difficult to enable MemberImportVisibility for this # module diff --git a/stdlib/public/Cxx/std/String.swift b/stdlib/public/Cxx/std/String.swift index 3d379e81cd17b..408426778102a 100644 --- a/stdlib/public/Cxx/std/String.swift +++ b/stdlib/public/Cxx/std/String.swift @@ -21,31 +21,31 @@ extension std.string { /// Swift string. @_alwaysEmitIntoClient public init(_ string: String) { - self = string.withCString(encodedAs: UTF8.self) { buffer in + unsafe self = unsafe string.withCString(encodedAs: UTF8.self) { buffer in #if os(Windows) // Use the 2 parameter constructor. // The MSVC standard library has a enable_if template guard // on the 3 parameter constructor, and thus it's not imported into Swift. std.string(buffer, string.utf8.count) #else - std.string(buffer, string.utf8.count, .init()) + unsafe std.string(buffer, string.utf8.count, .init()) #endif } } @_alwaysEmitIntoClient public init(_ string: UnsafePointer?) { - if let str = string { + if let str = unsafe string { #if os(Windows) // Use the 2 parameter constructor. 
// The MSVC standard library has a enable_if template guard // on the 3 parameter constructor, and thus it's not imported into Swift. self.init(str, UTF8._nullCodeUnitOffset(in: str)) #else - self.init(str, UTF8._nullCodeUnitOffset(in: str), .init()) + unsafe self.init(str, UTF8._nullCodeUnitOffset(in: str), .init()) #endif } else { - self.init() + unsafe self.init() } } } @@ -58,9 +58,9 @@ extension std.u16string { /// Swift string. @_alwaysEmitIntoClient public init(_ string: String) { - self.init() + unsafe self.init() for char in string.utf16 { - self.push_back(char) + unsafe self.push_back(char) } } } @@ -73,9 +73,9 @@ extension std.u32string { /// Swift string. @_alwaysEmitIntoClient public init(_ string: String) { - self.init() + unsafe self.init() for char in string.unicodeScalars { - self.push_back(char) + unsafe self.push_back(char) } } } @@ -85,21 +85,21 @@ extension std.u32string { extension std.string: ExpressibleByStringLiteral { @_alwaysEmitIntoClient public init(stringLiteral value: String) { - self.init(value) + unsafe self.init(value) } } extension std.u16string: ExpressibleByStringLiteral { @_alwaysEmitIntoClient public init(stringLiteral value: String) { - self.init(value) + unsafe self.init(value) } } extension std.u32string: ExpressibleByStringLiteral { @_alwaysEmitIntoClient public init(stringLiteral value: String) { - self.init(value) + unsafe self.init(value) } } @@ -108,87 +108,87 @@ extension std.u32string: ExpressibleByStringLiteral { extension std.string: Equatable, Comparable { @_alwaysEmitIntoClient public static func ==(lhs: std.string, rhs: std.string) -> Bool { - return lhs.compare(rhs) == 0 + return unsafe lhs.compare(rhs) == 0 } @_alwaysEmitIntoClient public static func <(lhs: std.string, rhs: std.string) -> Bool { - return lhs.compare(rhs) < 0 + return unsafe lhs.compare(rhs) < 0 } @_alwaysEmitIntoClient public static func +=(lhs: inout std.string, rhs: std.string) { - lhs.append(rhs) + unsafe lhs.append(rhs) } @_alwaysEmitIntoClient public mutating func append(_ other: std.string) { - __appendUnsafe(other) // ignore the returned pointer + unsafe __appendUnsafe(other) // ignore the returned pointer } @_alwaysEmitIntoClient public static func +(lhs: std.string, rhs: std.string) -> std.string { - var copy = lhs - copy += rhs - return copy + var copy = unsafe lhs + unsafe copy += rhs + return unsafe copy } } extension std.u16string: Equatable, Comparable { @_alwaysEmitIntoClient public static func ==(lhs: std.u16string, rhs: std.u16string) -> Bool { - return lhs.compare(rhs) == 0 + return unsafe lhs.compare(rhs) == 0 } @_alwaysEmitIntoClient public static func <(lhs: std.u16string, rhs: std.u16string) -> Bool { - return lhs.compare(rhs) < 0 + return unsafe lhs.compare(rhs) < 0 } @_alwaysEmitIntoClient public static func +=(lhs: inout std.u16string, rhs: std.u16string) { - lhs.append(rhs) + unsafe lhs.append(rhs) } @_alwaysEmitIntoClient public mutating func append(_ other: std.u16string) { - __appendUnsafe(other) // ignore the returned pointer + unsafe __appendUnsafe(other) // ignore the returned pointer } @_alwaysEmitIntoClient public static func +(lhs: std.u16string, rhs: std.u16string) -> std.u16string { - var copy = lhs - copy += rhs - return copy + var copy = unsafe lhs + unsafe copy += rhs + return unsafe copy } } extension std.u32string: Equatable, Comparable { @_alwaysEmitIntoClient public static func ==(lhs: std.u32string, rhs: std.u32string) -> Bool { - return lhs.compare(rhs) == 0 + return unsafe lhs.compare(rhs) == 0 } @_alwaysEmitIntoClient 
public static func <(lhs: std.u32string, rhs: std.u32string) -> Bool { - return lhs.compare(rhs) < 0 + return unsafe lhs.compare(rhs) < 0 } @_alwaysEmitIntoClient public static func +=(lhs: inout std.u32string, rhs: std.u32string) { - lhs.append(rhs) + unsafe lhs.append(rhs) } @_alwaysEmitIntoClient public mutating func append(_ other: std.u32string) { - __appendUnsafe(other) // ignore the returned pointer + unsafe __appendUnsafe(other) // ignore the returned pointer } @_alwaysEmitIntoClient public static func +(lhs: std.u32string, rhs: std.u32string) -> std.u32string { - var copy = lhs - copy += rhs - return copy + var copy = unsafe lhs + unsafe copy += rhs + return unsafe copy } } @@ -198,7 +198,7 @@ extension std.string: Hashable { @_alwaysEmitIntoClient public func hash(into hasher: inout Hasher) { // Call std::hash::operator() - let cxxHash = __swift_interopComputeHashOfString(self) + let cxxHash = unsafe __swift_interopComputeHashOfString(self) hasher.combine(cxxHash) } } @@ -207,7 +207,7 @@ extension std.u16string: Hashable { @_alwaysEmitIntoClient public func hash(into hasher: inout Hasher) { // Call std::hash::operator() - let cxxHash = __swift_interopComputeHashOfU16String(self) + let cxxHash = unsafe __swift_interopComputeHashOfU16String(self) hasher.combine(cxxHash) } } @@ -216,7 +216,7 @@ extension std.u32string: Hashable { @_alwaysEmitIntoClient public func hash(into hasher: inout Hasher) { // Call std::hash::operator() - let cxxHash = __swift_interopComputeHashOfU32String(self) + let cxxHash = unsafe __swift_interopComputeHashOfU32String(self) hasher.combine(cxxHash) } } @@ -226,42 +226,42 @@ extension std.u32string: Hashable { extension std.string: CustomDebugStringConvertible { @_alwaysEmitIntoClient public var debugDescription: String { - return "std.string(\(String(self)))" + return "std.string(\(unsafe String(self)))" } } extension std.u16string: CustomDebugStringConvertible { @_alwaysEmitIntoClient public var debugDescription: String { - return "std.u16string(\(String(self)))" + return "std.u16string(\(unsafe String(self)))" } } extension std.u32string: CustomDebugStringConvertible { @_alwaysEmitIntoClient public var debugDescription: String { - return "std.u32string(\(String(self)))" + return "std.u32string(\(unsafe String(self)))" } } extension std.string: CustomStringConvertible { @_alwaysEmitIntoClient public var description: String { - return String(self) + return unsafe String(self) } } extension std.u16string: CustomStringConvertible { @_alwaysEmitIntoClient public var description: String { - return String(self) + return unsafe String(self) } } extension std.u32string: CustomStringConvertible { @_alwaysEmitIntoClient public var description: String { - return String(self) + return unsafe String(self) } } @@ -277,13 +277,13 @@ extension String { /// - Complexity: O(*n*), where *n* is the number of bytes in the C++ string. @_alwaysEmitIntoClient public init(_ cxxString: std.string) { - let buffer = UnsafeBufferPointer( + let buffer = unsafe UnsafeBufferPointer( start: cxxString.__c_strUnsafe(), count: cxxString.size()) - self = buffer.withMemoryRebound(to: UInt8.self) { - String(decoding: $0, as: UTF8.self) + self = unsafe buffer.withMemoryRebound(to: UInt8.self) { + unsafe String(decoding: $0, as: UTF8.self) } - withExtendedLifetime(cxxString) {} + unsafe withExtendedLifetime(cxxString) {} } /// Creates a String having the same content as the given C++ UTF-16 string. @@ -296,11 +296,11 @@ extension String { /// string. 
@_alwaysEmitIntoClient public init(_ cxxU16String: std.u16string) { - let buffer = UnsafeBufferPointer( + let buffer = unsafe UnsafeBufferPointer( start: cxxU16String.__dataUnsafe(), count: cxxU16String.size()) - self = String(decoding: buffer, as: UTF16.self) - withExtendedLifetime(cxxU16String) {} + self = unsafe String(decoding: buffer, as: UTF16.self) + unsafe withExtendedLifetime(cxxU16String) {} } /// Creates a String having the same content as the given C++ UTF-32 string. @@ -313,13 +313,13 @@ extension String { /// string. @_alwaysEmitIntoClient public init(_ cxxU32String: std.u32string) { - let buffer = UnsafeBufferPointer( + let buffer = unsafe UnsafeBufferPointer( start: cxxU32String.__dataUnsafe(), count: cxxU32String.size()) - self = buffer.withMemoryRebound(to: UInt32.self) { - String(decoding: $0, as: UTF32.self) + self = unsafe buffer.withMemoryRebound(to: UInt32.self) { + unsafe String(decoding: $0, as: UTF32.self) } - withExtendedLifetime(cxxU32String) {} + unsafe withExtendedLifetime(cxxU32String) {} } } @@ -336,13 +336,13 @@ extension String { /// view. @_alwaysEmitIntoClient public init(_ cxxStringView: std.string_view) { - let buffer = UnsafeBufferPointer( + let buffer = unsafe UnsafeBufferPointer( start: cxxStringView.__dataUnsafe(), count: cxxStringView.size()) - self = buffer.withMemoryRebound(to: UInt8.self) { - String(decoding: $0, as: UTF8.self) + self = unsafe buffer.withMemoryRebound(to: UInt8.self) { + unsafe String(decoding: $0, as: UTF8.self) } - withExtendedLifetime(cxxStringView) {} + unsafe withExtendedLifetime(cxxStringView) {} } /// Creates a String having the same content as the given C++ UTF-16 string @@ -356,11 +356,11 @@ extension String { /// string view. @_alwaysEmitIntoClient public init(_ cxxU16StringView: std.u16string_view) { - let buffer = UnsafeBufferPointer( + let buffer = unsafe UnsafeBufferPointer( start: cxxU16StringView.__dataUnsafe(), count: cxxU16StringView.size()) - self = String(decoding: buffer, as: UTF16.self) - withExtendedLifetime(cxxU16StringView) {} + self = unsafe String(decoding: buffer, as: UTF16.self) + unsafe withExtendedLifetime(cxxU16StringView) {} } /// Creates a String having the same content as the given C++ UTF-32 string @@ -374,12 +374,12 @@ extension String { /// string view. 
@_alwaysEmitIntoClient public init(_ cxxU32StringView: std.u32string_view) { - let buffer = UnsafeBufferPointer( + let buffer = unsafe UnsafeBufferPointer( start: cxxU32StringView.__dataUnsafe(), count: cxxU32StringView.size()) - self = buffer.withMemoryRebound(to: UInt32.self) { - String(decoding: $0, as: UTF32.self) + self = unsafe buffer.withMemoryRebound(to: UInt32.self) { + unsafe String(decoding: $0, as: UTF32.self) } - withExtendedLifetime(cxxU32StringView) {} + unsafe withExtendedLifetime(cxxU32StringView) {} } } diff --git a/stdlib/public/Distributed/CMakeLists.txt b/stdlib/public/Distributed/CMakeLists.txt index 0a3a821da563e..ae3f0fdad123a 100644 --- a/stdlib/public/Distributed/CMakeLists.txt +++ b/stdlib/public/Distributed/CMakeLists.txt @@ -52,6 +52,7 @@ add_swift_target_library(swiftDistributed ${SWIFT_STDLIB_LIBRARY_BUILD_TYPES} IS SWIFT_COMPILE_FLAGS ${SWIFT_STANDARD_LIBRARY_SWIFT_FLAGS} -parse-stdlib + -strict-memory-safety LINK_FLAGS "${SWIFT_RUNTIME_SWIFT_LINK_FLAGS}" diff --git a/stdlib/public/Distributed/DistributedActor.swift b/stdlib/public/Distributed/DistributedActor.swift index fd53d71e71a28..a2df218e2e45c 100644 --- a/stdlib/public/Distributed/DistributedActor.swift +++ b/stdlib/public/Distributed/DistributedActor.swift @@ -406,10 +406,10 @@ extension DistributedActor { @_implements(Actor, unownedExecutor) public nonisolated var __actorUnownedExecutor: UnownedSerialExecutor { if #available(macOS 14.0, iOS 17.0, watchOS 10.0, tvOS 17.0, *) { - return unownedExecutor + return unsafe unownedExecutor } else { // On older platforms, all distributed actors are default actors. - return UnownedSerialExecutor(Builtin.buildDefaultActorExecutorRef(self)) + return unsafe UnownedSerialExecutor(Builtin.buildDefaultActorExecutorRef(self)) } } diff --git a/stdlib/public/Distributed/DistributedActorSystem.swift b/stdlib/public/Distributed/DistributedActorSystem.swift index df02708b1bef1..f6f30fd50d3dd 100644 --- a/stdlib/public/Distributed/DistributedActorSystem.swift +++ b/stdlib/public/Distributed/DistributedActorSystem.swift @@ -438,8 +438,8 @@ extension DistributedActorSystem { // Gen the generic environment (if any) associated with the target. 
let genericEnv = - targetNameUTF8.withUnsafeBufferPointer { targetNameUTF8 in - _getGenericEnvironmentOfDistributedTarget( + unsafe targetNameUTF8.withUnsafeBufferPointer { targetNameUTF8 in + unsafe _getGenericEnvironmentOfDistributedTarget( targetNameUTF8.baseAddress!, UInt(targetNameUTF8.endIndex)) } @@ -449,11 +449,11 @@ extension DistributedActorSystem { var numWitnessTables: Int = 0 defer { - substitutionsBuffer?.deallocate() - witnessTablesBuffer?.deallocate() + unsafe substitutionsBuffer?.deallocate() + unsafe witnessTablesBuffer?.deallocate() } - if let genericEnv = genericEnv { + if let genericEnv = unsafe genericEnv { let subs = try invocationDecoder.decodeGenericSubstitutions() if subs.isEmpty { throw ExecuteDistributedTargetError( @@ -461,14 +461,14 @@ extension DistributedActorSystem { errorCode: .missingGenericSubstitutions) } - substitutionsBuffer = .allocate(capacity: subs.count) + unsafe substitutionsBuffer = .allocate(capacity: subs.count) for (offset, substitution) in subs.enumerated() { - let element = substitutionsBuffer?.advanced(by: offset) - element?.initialize(to: substitution) + let element = unsafe substitutionsBuffer?.advanced(by: offset) + unsafe element?.initialize(to: substitution) } - (witnessTablesBuffer, numWitnessTables) = _getWitnessTablesFor(environment: genericEnv, + unsafe (witnessTablesBuffer, numWitnessTables) = unsafe _getWitnessTablesFor(environment: genericEnv, genericArguments: substitutionsBuffer!) if numWitnessTables < 0 { throw ExecuteDistributedTargetError( @@ -478,8 +478,8 @@ extension DistributedActorSystem { } let paramCount = - targetNameUTF8.withUnsafeBufferPointer { targetNameUTF8 in - __getParameterCount( + unsafe targetNameUTF8.withUnsafeBufferPointer { targetNameUTF8 in + unsafe __getParameterCount( targetNameUTF8.baseAddress!, UInt(targetNameUTF8.endIndex)) } @@ -497,12 +497,12 @@ extension DistributedActorSystem { // Prepare buffer for the parameter types to be decoded into: let argumentTypesBuffer = UnsafeMutableBufferPointer.allocate(capacity: Int(paramCount)) defer { - argumentTypesBuffer.deallocate() + unsafe argumentTypesBuffer.deallocate() } // Demangle and write all parameter types into the prepared buffer - let decodedNum = targetNameUTF8.withUnsafeBufferPointer { targetNameUTF8 in - __getParameterTypeInfo( + let decodedNum = unsafe targetNameUTF8.withUnsafeBufferPointer { targetNameUTF8 in + unsafe __getParameterTypeInfo( targetNameUTF8.baseAddress!, UInt(targetNameUTF8.endIndex), genericEnv, @@ -525,7 +525,7 @@ extension DistributedActorSystem { var argumentTypes: [Any.Type] = [] do { argumentTypes.reserveCapacity(Int(decodedNum)) - for argumentType in argumentTypesBuffer { + for unsafe argumentType in unsafe argumentTypesBuffer { argumentTypes.append(argumentType) } } @@ -536,8 +536,8 @@ extension DistributedActorSystem { } let maybeReturnTypeFromTypeInfo = - targetNameUTF8.withUnsafeBufferPointer { targetNameUTF8 in - __getReturnTypeInfo( + unsafe targetNameUTF8.withUnsafeBufferPointer { targetNameUTF8 in + unsafe __getReturnTypeInfo( /*targetName:*/targetNameUTF8.baseAddress!, /*targetLength:*/UInt(targetNameUTF8.endIndex), /*genericEnv:*/genericEnv, @@ -549,7 +549,7 @@ extension DistributedActorSystem { errorCode: .typeDeserializationFailure) } - guard let resultBuffer = _openExistential(returnTypeFromTypeInfo, do: doAllocateReturnTypeBuffer) else { + guard let resultBuffer = _openExistential(returnTypeFromTypeInfo, do: unsafe doAllocateReturnTypeBuffer) else { throw ExecuteDistributedTargetError( message: "Failed 
to allocate buffer for distributed target return type", errorCode: .typeDeserializationFailure) @@ -560,16 +560,16 @@ extension DistributedActorSystem { var executeDistributedTargetHasThrown = true func doDestroyReturnTypeBuffer(_: R.Type) { - let buf = resultBuffer.assumingMemoryBound(to: R.self) + let buf = unsafe resultBuffer.assumingMemoryBound(to: R.self) if !executeDistributedTargetHasThrown { // since the _execute function has NOT thrown, // there must be a value in the result buffer that we must deinitialize - buf.deinitialize(count: 1) + unsafe buf.deinitialize(count: 1) } // otherwise, the _execute has thrown and not populated the result buffer // finally, deallocate the buffer - buf.deallocate() + unsafe buf.deallocate() } defer { @@ -581,7 +581,7 @@ extension DistributedActorSystem { // let errorType = try invocationDecoder.decodeErrorType() // TODO(distributed): decide how to use when typed throws are done // Execute the target! - try await _executeDistributedTarget( + try unsafe await _executeDistributedTarget( on: actor, /*targetNameData:*/targetName, /*targetNameLength:*/UInt(targetName.count), @@ -599,7 +599,7 @@ extension DistributedActorSystem { if returnType == Void.self { try await handler.onReturnVoid() } else { - try await self.invokeHandlerOnReturn( + try unsafe await self.invokeHandlerOnReturn( handler: handler, resultBuffer: resultBuffer, metatype: returnType diff --git a/stdlib/public/Distributed/DistributedAssertions.swift b/stdlib/public/Distributed/DistributedAssertions.swift index f4ac7c9055224..881741904c81d 100644 --- a/stdlib/public/Distributed/DistributedAssertions.swift +++ b/stdlib/public/Distributed/DistributedAssertions.swift @@ -51,11 +51,11 @@ extension DistributedActor { return } - let unownedExecutor = self.unownedExecutor - let expectationCheck = _taskIsCurrentExecutor(unownedExecutor._executor) + let unownedExecutor = unsafe self.unownedExecutor + let expectationCheck = unsafe _taskIsCurrentExecutor(unownedExecutor._executor) precondition(expectationCheck, - "Incorrect actor executor assumption; Expected '\(self.unownedExecutor)' executor. \(message())", + unsafe "Incorrect actor executor assumption; Expected '\(unsafe self.unownedExecutor)' executor. \(message())", file: file, line: line) } } @@ -99,9 +99,9 @@ extension DistributedActor { return } - let unownedExecutor = self.unownedExecutor - guard _taskIsCurrentExecutor(unownedExecutor._executor) else { - let msg = "Incorrect actor executor assumption; Expected '\(unownedExecutor)' executor. \(message())" + let unownedExecutor = unsafe self.unownedExecutor + guard unsafe _taskIsCurrentExecutor(unownedExecutor._executor) else { + let msg = unsafe "Incorrect actor executor assumption; Expected '\(unsafe unownedExecutor)' executor. \(message())" /// TODO: implement the logic in-place perhaps rather than delegating to precondition()? 
assertionFailure(msg, file: file, line: line) // short-cut so we get the exact same failure reporting semantics return @@ -165,8 +165,8 @@ extension DistributedActor { fatalError("Cannot assume to be 'isolated \(Self.self)' since distributed actor '\(self)' is a remote actor reference.") } - let unownedExecutor = self.unownedExecutor - guard _taskIsCurrentExecutor(unownedExecutor._executor) else { + let unownedExecutor = unsafe self.unownedExecutor + guard unsafe _taskIsCurrentExecutor(unownedExecutor._executor) else { // TODO: offer information which executor we actually got when fatalError("Incorrect actor executor assumption; Expected same executor as \(self).", file: file, line: line) } @@ -174,7 +174,7 @@ extension DistributedActor { // To do the unsafe cast, we have to pretend it's @escaping. return try withoutActuallyEscaping(operation) { (_ fn: @escaping YesActor) throws -> T in - let rawFn = unsafeBitCast(fn, to: NoActor.self) + let rawFn = unsafe unsafeBitCast(fn, to: NoActor.self) return try rawFn(self) } } diff --git a/stdlib/public/Distributed/DistributedDefaultExecutor.swift b/stdlib/public/Distributed/DistributedDefaultExecutor.swift index abe0aca19c8f3..14e27adfe8bba 100644 --- a/stdlib/public/Distributed/DistributedDefaultExecutor.swift +++ b/stdlib/public/Distributed/DistributedDefaultExecutor.swift @@ -18,7 +18,7 @@ import _Concurrency internal final class DistributedRemoteActorReferenceExecutor: SerialExecutor { static let _shared: DistributedRemoteActorReferenceExecutor = DistributedRemoteActorReferenceExecutor() static var sharedUnownedExecutor: UnownedSerialExecutor { - UnownedSerialExecutor(ordinary: _shared) + unsafe UnownedSerialExecutor(ordinary: _shared) } internal init() {} @@ -38,7 +38,7 @@ internal final class DistributedRemoteActorReferenceExecutor: SerialExecutor { #endif // !SWIFT_STDLIB_TASK_TO_THREAD_MODEL_CONCURRENCY public func asUnownedSerialExecutor() -> UnownedSerialExecutor { - UnownedSerialExecutor(ordinary: self) + unsafe UnownedSerialExecutor(ordinary: self) } } @@ -57,5 +57,5 @@ internal final class DistributedRemoteActorReferenceExecutor: SerialExecutor { public func buildDefaultDistributedRemoteActorExecutor( _ actor: Act ) -> UnownedSerialExecutor where Act: DistributedActor { - return DistributedRemoteActorReferenceExecutor.sharedUnownedExecutor + return unsafe DistributedRemoteActorReferenceExecutor.sharedUnownedExecutor } diff --git a/stdlib/public/Distributed/DistributedMetadata.swift b/stdlib/public/Distributed/DistributedMetadata.swift index b42a4ed4457b6..b3d3db230dd97 100644 --- a/stdlib/public/Distributed/DistributedMetadata.swift +++ b/stdlib/public/Distributed/DistributedMetadata.swift @@ -19,8 +19,8 @@ import Swift public // SPI Distributed func _getParameterCount(mangledMethodName name: String) -> Int32 { let nameUTF8 = Array(name.utf8) - return nameUTF8.withUnsafeBufferPointer { nameUTF8 in - return __getParameterCount( + return unsafe nameUTF8.withUnsafeBufferPointer { nameUTF8 in + return unsafe __getParameterCount( nameUTF8.baseAddress!, UInt(nameUTF8.endIndex)) } } @@ -47,8 +47,8 @@ func _getParameterTypeInfo( into typesBuffer: Builtin.RawPointer, length typesLength: Int ) -> Int32 { let nameUTF8 = Array(name.utf8) - return nameUTF8.withUnsafeBufferPointer { nameUTF8 in - return __getParameterTypeInfo( + return unsafe nameUTF8.withUnsafeBufferPointer { nameUTF8 in + return unsafe __getParameterTypeInfo( nameUTF8.baseAddress!, UInt(nameUTF8.endIndex), genericEnv, genericArguments, typesBuffer, typesLength) } @@ -75,8 +75,8 
@@ func _getReturnTypeInfo( genericArguments: UnsafeRawPointer? ) -> Any.Type? { let nameUTF8 = Array(name.utf8) - return nameUTF8.withUnsafeBufferPointer { nameUTF8 in - return __getReturnTypeInfo(nameUTF8.baseAddress!, UInt(nameUTF8.endIndex), + return unsafe nameUTF8.withUnsafeBufferPointer { nameUTF8 in + return unsafe __getReturnTypeInfo(nameUTF8.baseAddress!, UInt(nameUTF8.endIndex), genericEnv, genericArguments) } } diff --git a/stdlib/public/Distributed/LocalTestingDistributedActorSystem.swift b/stdlib/public/Distributed/LocalTestingDistributedActorSystem.swift index 33c208bff38ea..cab15d5677e29 100644 --- a/stdlib/public/Distributed/LocalTestingDistributedActorSystem.swift +++ b/stdlib/public/Distributed/LocalTestingDistributedActorSystem.swift @@ -236,6 +236,7 @@ public struct LocalTestingDistributedActorSystemError: DistributedActorSystemErr // === lock ---------------------------------------------------------------- @available(SwiftStdlib 5.7, *) +@safe fileprivate class _Lock { #if os(iOS) || os(macOS) || os(tvOS) || os(watchOS) private let underlying: UnsafeMutablePointer @@ -252,7 +253,7 @@ fileprivate class _Lock { init() { #if os(iOS) || os(macOS) || os(tvOS) || os(watchOS) self.underlying = UnsafeMutablePointer.allocate(capacity: 1) - self.underlying.initialize(to: os_unfair_lock()) + unsafe self.underlying.initialize(to: os_unfair_lock()) #elseif os(Windows) self.underlying = UnsafeMutablePointer.allocate(capacity: 1) InitializeSRWLock(self.underlying) @@ -280,8 +281,8 @@ fileprivate class _Lock { #endif #if !os(WASI) - self.underlying.deinitialize(count: 1) - self.underlying.deallocate() + unsafe self.underlying.deinitialize(count: 1) + unsafe self.underlying.deallocate() #endif } @@ -289,7 +290,7 @@ fileprivate class _Lock { @discardableResult func withLock(_ body: () -> T) -> T { #if os(iOS) || os(macOS) || os(tvOS) || os(watchOS) - os_unfair_lock_lock(self.underlying) + unsafe os_unfair_lock_lock(self.underlying) #elseif os(Windows) AcquireSRWLockExclusive(self.underlying) #elseif os(WASI) @@ -302,7 +303,7 @@ fileprivate class _Lock { defer { #if os(iOS) || os(macOS) || os(tvOS) || os(watchOS) - os_unfair_lock_unlock(self.underlying) + unsafe os_unfair_lock_unlock(self.underlying) #elseif os(Windows) ReleaseSRWLockExclusive(self.underlying) #elseif os(WASI) diff --git a/stdlib/public/SwiftOnoneSupport/CMakeLists.txt b/stdlib/public/SwiftOnoneSupport/CMakeLists.txt index 0caff579a9304..a7a105938b15b 100644 --- a/stdlib/public/SwiftOnoneSupport/CMakeLists.txt +++ b/stdlib/public/SwiftOnoneSupport/CMakeLists.txt @@ -7,7 +7,7 @@ set(swiftOnoneSupport_common_options "${SWIFT_SOURCE_DIR}/stdlib/linker-support/magic-symbols-for-install-name.c" - SWIFT_COMPILE_FLAGS "-parse-stdlib" "-Xllvm" "-sil-inline-generics=false" "-Xfrontend" "-validate-tbd-against-ir=none" "-Xfrontend" "-check-onone-completeness" "-Xfrontend" "-disable-access-control" "${SWIFT_RUNTIME_SWIFT_COMPILE_FLAGS}" "${SWIFT_STANDARD_LIBRARY_SWIFT_FLAGS}" + SWIFT_COMPILE_FLAGS "-parse-stdlib" "-Xllvm" "-sil-inline-generics=false" "-Xfrontend" "-validate-tbd-against-ir=none" "-Xfrontend" "-check-onone-completeness" "-Xfrontend" "-disable-access-control" "-strict-memory-safety" "${SWIFT_RUNTIME_SWIFT_COMPILE_FLAGS}" "${SWIFT_STANDARD_LIBRARY_SWIFT_FLAGS}" LINK_FLAGS "${SWIFT_RUNTIME_SWIFT_LINK_FLAGS}") if(CMAKE_BUILD_TYPE STREQUAL "Debug" AND BOOTSTRAPPING_MODE STREQUAL "BOOTSTRAPPING") diff --git a/stdlib/public/SwiftOnoneSupport/SwiftOnoneSupport.swift b/stdlib/public/SwiftOnoneSupport/SwiftOnoneSupport.swift 
index 2c9d255ad5b42..841e65b9876ea 100644 --- a/stdlib/public/SwiftOnoneSupport/SwiftOnoneSupport.swift +++ b/stdlib/public/SwiftOnoneSupport/SwiftOnoneSupport.swift @@ -306,10 +306,10 @@ func _prespecializeIndexingIterator(_ x: IndexingIterator) w func prespecializeCollections(_ element: T) { var umbp = UnsafeMutableBufferPointer.allocate(capacity: 1) let cmp = { (_: T, _: T) in return false } - umbp._prespecializeMutableBirectionalCollection(range: 0..<0) - umbp._prespecializeMutableBirectionalCollection(range: 0..<0, cmp: cmp) - umbp._prespecializeMutableBirectionalCollection(range: 0..<0, end: 0, cmp: cmp) - try! umbp._prespecializeMutableRandomAccessCollection(cmp: cmp) + unsafe umbp._prespecializeMutableBirectionalCollection(range: 0..<0) + unsafe umbp._prespecializeMutableBirectionalCollection(range: 0..<0, cmp: cmp) + unsafe umbp._prespecializeMutableBirectionalCollection(range: 0..<0, end: 0, cmp: cmp) + try! unsafe umbp._prespecializeMutableRandomAccessCollection(cmp: cmp) let _: (Array, Builtin.RawPointer) = _prespecializeArray(0._builtinWordValue) @@ -319,20 +319,20 @@ func prespecializeCollections(_ element: T) { array._prespecializeArray(index: 0, flag: false) array._prespecializeArray(index: 0, flag: false, token: _DependenceToken()) array._prespecializeArray(arrayLiteral: element) - array._prespecializeArray(capacity: 0) { (_: inout UnsafeMutableBufferPointer, _: inout Int) in return } + unsafe array._prespecializeArray(capacity: 0) { (_: inout UnsafeMutableBufferPointer, _: inout Int) in return } array._prespecializeArray(flag: false) array._prespecializeArray(index: 0) array._prespecializeArray(index: 0, element: element) array._prespecializeArray(element: element, index: 0) array._prespecializeArray(range: 0..<0, collection: EmptyCollection()) - array._prespecializeArray(with: { (_: inout UnsafeMutableBufferPointer) -> Optional<()> in return () }) + unsafe array._prespecializeArray(with: { (_: inout UnsafeMutableBufferPointer) -> Optional<()> in return () }) array._prespecializeBidirectionalCollection() array._prespecializeRandomAccessCollection() try! array._prespecializeMutableRandomAccessCollection(cmp: cmp) let cab = _ContiguousArrayBuffer() cab._prespecializeContiguousArrayBuffer() - cab._prespecializeContiguousArrayBuffer(range: (0..<0), pointer: umbp.baseAddress!) + unsafe cab._prespecializeContiguousArrayBuffer(range: (0..<0), pointer: umbp.baseAddress!) cab._prespecializeContiguousArrayBuffer(count: 0, capacity: 0) cab._prespecializeContiguousArrayBuffer(buffer: cab, index: 0) @@ -341,7 +341,7 @@ func prespecializeCollections(_ element: T) { ab._prespecializeArrayBuffer() ab._prespecializeArrayBuffer(index: 0) ab._prespecializeArrayBuffer(range: (0..<0)) - ab._prespecializeArrayBuffer(range: (0..<0), pointer: umbp.baseAddress!) + unsafe ab._prespecializeArrayBuffer(range: (0..<0), pointer: umbp.baseAddress!) 
ab._prespecializeArrayBuffer(index: 0, flag: false) ab._prespecializeArrayBuffer(buffer: cab, index: 0) ab._prespecializeRandomAccessCollection(after: 0) diff --git a/stdlib/public/Synchronization/Atomics/Atomic.swift b/stdlib/public/Synchronization/Atomics/Atomic.swift index 1f67c776fb78f..da599199b6cee 100644 --- a/stdlib/public/Synchronization/Atomics/Atomic.swift +++ b/stdlib/public/Synchronization/Atomics/Atomic.swift @@ -22,7 +22,7 @@ public struct Atomic: ~Copyable { @_alwaysEmitIntoClient @_transparent var _address: UnsafeMutablePointer { - UnsafeMutablePointer(_rawAddress) + unsafe UnsafeMutablePointer(_rawAddress) } @available(SwiftStdlib 6.0, *) @@ -39,7 +39,7 @@ public struct Atomic: ~Copyable { @_alwaysEmitIntoClient @_transparent public init(_ initialValue: consuming Value) { - _address.initialize(to: Value.encodeAtomicRepresentation(initialValue)) + unsafe _address.initialize(to: Value.encodeAtomicRepresentation(initialValue)) } // Deinit's can't be marked @_transparent. Do these things need all of these @@ -48,10 +48,10 @@ public struct Atomic: ~Copyable { @_alwaysEmitIntoClient @inlinable deinit { - let oldValue = Value.decodeAtomicRepresentation(_address.pointee) + let oldValue = unsafe Value.decodeAtomicRepresentation(_address.pointee) _ = consume oldValue - _address.deinitialize(count: 1) + unsafe _address.deinitialize(count: 1) } } diff --git a/stdlib/public/Synchronization/Atomics/AtomicLazyReference.swift b/stdlib/public/Synchronization/Atomics/AtomicLazyReference.swift index 832bb8b3222de..444ca04b74788 100644 --- a/stdlib/public/Synchronization/Atomics/AtomicLazyReference.swift +++ b/stdlib/public/Synchronization/Atomics/AtomicLazyReference.swift @@ -17,6 +17,7 @@ @available(SwiftStdlib 6.0, *) @frozen @_staticExclusiveOnly +@safe public struct AtomicLazyReference: ~Copyable { @usableFromInline let storage: Atomic?> @@ -24,13 +25,13 @@ public struct AtomicLazyReference: ~Copyable { @available(SwiftStdlib 6.0, *) @inlinable public init() { - storage = Atomic?>(nil) + storage = unsafe Atomic?>(nil) } @inlinable deinit { - if let unmanaged = storage.load(ordering: .acquiring) { - unmanaged.release() + if let unmanaged = unsafe storage.load(ordering: .acquiring) { + unsafe unmanaged.release() } } } @@ -68,8 +69,8 @@ extension AtomicLazyReference { /// was passed to this function. @available(SwiftStdlib 6.0, *) public func storeIfNil(_ desired: consuming Instance) -> Instance { - let desiredUnmanaged = Unmanaged.passRetained(desired) - let (exchanged, current) = storage.compareExchange( + let desiredUnmanaged = unsafe Unmanaged.passRetained(desired) + let (exchanged, current) = unsafe storage.compareExchange( expected: nil, desired: desiredUnmanaged, ordering: .acquiringAndReleasing @@ -78,11 +79,11 @@ extension AtomicLazyReference { if !exchanged { // The reference has already been initialized. Balance the retain that we // performed on 'desired'. - desiredUnmanaged.release() - return current!.takeUnretainedValue() + unsafe desiredUnmanaged.release() + return unsafe current!.takeUnretainedValue() } - return desiredUnmanaged.takeUnretainedValue() + return unsafe desiredUnmanaged.takeUnretainedValue() } /// Atomically loads and returns the current value of this reference. @@ -94,8 +95,8 @@ extension AtomicLazyReference { /// `nil` if it has not been written to yet. @available(SwiftStdlib 6.0, *) public func load() -> Instance? 
{ - let value = storage.load(ordering: .acquiring) - return value?.takeUnretainedValue() + let value = unsafe storage.load(ordering: .acquiring) + return unsafe value?.takeUnretainedValue() } } diff --git a/stdlib/public/Synchronization/Atomics/AtomicPointers.swift b/stdlib/public/Synchronization/Atomics/AtomicPointers.swift index 12b4446fda709..1cc77cb062cb0 100644 --- a/stdlib/public/Synchronization/Atomics/AtomicPointers.swift +++ b/stdlib/public/Synchronization/Atomics/AtomicPointers.swift @@ -15,7 +15,7 @@ //===----------------------------------------------------------------------===// @available(SwiftStdlib 6.0, *) -extension UnsafePointer: AtomicRepresentable where Pointee: ~Copyable { +extension UnsafePointer: @unsafe AtomicRepresentable where Pointee: ~Copyable { /// The storage representation type that `Self` encodes to and decodes from /// which is a suitable type when used in atomic operations. @available(SwiftStdlib 6.0, *) @@ -37,7 +37,7 @@ extension UnsafePointer: AtomicRepresentable where Pointee: ~Copyable { public static func encodeAtomicRepresentation( _ value: consuming UnsafePointer ) -> AtomicRepresentation { - Int.encodeAtomicRepresentation( + unsafe Int.encodeAtomicRepresentation( Int(bitPattern: value) ) } @@ -58,14 +58,14 @@ extension UnsafePointer: AtomicRepresentable where Pointee: ~Copyable { public static func decodeAtomicRepresentation( _ representation: consuming AtomicRepresentation ) -> UnsafePointer { - UnsafePointer( + unsafe UnsafePointer( bitPattern: Int.decodeAtomicRepresentation(representation) )! } } @available(SwiftStdlib 6.0, *) -extension UnsafePointer: AtomicOptionalRepresentable where Pointee: ~Copyable { +extension UnsafePointer: @unsafe AtomicOptionalRepresentable where Pointee: ~Copyable { /// The storage representation type that encodes to and decodes from /// `Optional` which is a suitable type when used in atomic operations /// on `Optional`. @@ -88,7 +88,7 @@ extension UnsafePointer: AtomicOptionalRepresentable where Pointee: ~Copyable { public static func encodeAtomicOptionalRepresentation( _ value: consuming UnsafePointer? ) -> AtomicOptionalRepresentation { - Int.encodeAtomicRepresentation( + unsafe Int.encodeAtomicRepresentation( Int(bitPattern: value) ) } @@ -110,7 +110,7 @@ extension UnsafePointer: AtomicOptionalRepresentable where Pointee: ~Copyable { public static func decodeAtomicOptionalRepresentation( _ representation: consuming AtomicOptionalRepresentation ) -> UnsafePointer? { - UnsafePointer( + unsafe UnsafePointer( bitPattern: Int.decodeAtomicRepresentation(representation) ) } @@ -121,7 +121,7 @@ extension UnsafePointer: AtomicOptionalRepresentable where Pointee: ~Copyable { //===----------------------------------------------------------------------===// @available(SwiftStdlib 6.0, *) -extension UnsafeMutablePointer: AtomicRepresentable where Pointee: ~Copyable { +extension UnsafeMutablePointer: @unsafe AtomicRepresentable where Pointee: ~Copyable { /// The storage representation type that `Self` encodes to and decodes from /// which is a suitable type when used in atomic operations. 
@available(SwiftStdlib 6.0, *) @@ -143,7 +143,7 @@ extension UnsafeMutablePointer: AtomicRepresentable where Pointee: ~Copyable { public static func encodeAtomicRepresentation( _ value: consuming UnsafeMutablePointer ) -> AtomicRepresentation { - Int.encodeAtomicRepresentation( + unsafe Int.encodeAtomicRepresentation( Int(bitPattern: value) ) } @@ -164,14 +164,14 @@ extension UnsafeMutablePointer: AtomicRepresentable where Pointee: ~Copyable { public static func decodeAtomicRepresentation( _ representation: consuming AtomicRepresentation ) -> UnsafeMutablePointer { - UnsafeMutablePointer( + unsafe UnsafeMutablePointer( bitPattern: Int.decodeAtomicRepresentation(representation) )! } } @available(SwiftStdlib 6.0, *) -extension UnsafeMutablePointer: AtomicOptionalRepresentable +extension UnsafeMutablePointer: @unsafe AtomicOptionalRepresentable where Pointee: ~Copyable { /// The storage representation type that encodes to and decodes from /// `Optional` which is a suitable type when used in atomic operations @@ -195,7 +195,7 @@ where Pointee: ~Copyable { public static func encodeAtomicOptionalRepresentation( _ value: consuming UnsafeMutablePointer? ) -> AtomicOptionalRepresentation { - Int.encodeAtomicRepresentation( + unsafe Int.encodeAtomicRepresentation( Int(bitPattern: value) ) } @@ -217,7 +217,7 @@ where Pointee: ~Copyable { public static func decodeAtomicOptionalRepresentation( _ representation: consuming AtomicOptionalRepresentation ) -> UnsafeMutablePointer? { - UnsafeMutablePointer( + unsafe UnsafeMutablePointer( bitPattern: Int.decodeAtomicRepresentation(representation) ) } @@ -228,7 +228,7 @@ where Pointee: ~Copyable { //===----------------------------------------------------------------------===// @available(SwiftStdlib 6.0, *) -extension UnsafeRawPointer: AtomicRepresentable { +extension UnsafeRawPointer: @unsafe AtomicRepresentable { /// The storage representation type that `Self` encodes to and decodes from /// which is a suitable type when used in atomic operations. @available(SwiftStdlib 6.0, *) @@ -250,7 +250,7 @@ extension UnsafeRawPointer: AtomicRepresentable { public static func encodeAtomicRepresentation( _ value: consuming UnsafeRawPointer ) -> AtomicRepresentation { - Int.encodeAtomicRepresentation( + unsafe Int.encodeAtomicRepresentation( Int(bitPattern: value) ) } @@ -271,14 +271,14 @@ extension UnsafeRawPointer: AtomicRepresentable { public static func decodeAtomicRepresentation( _ representation: consuming AtomicRepresentation ) -> UnsafeRawPointer { - UnsafeRawPointer( + unsafe UnsafeRawPointer( bitPattern: Int.decodeAtomicRepresentation(representation) )! } } @available(SwiftStdlib 6.0, *) -extension UnsafeRawPointer: AtomicOptionalRepresentable { +extension UnsafeRawPointer: @unsafe AtomicOptionalRepresentable { /// The storage representation type that encodes to and decodes from /// `Optional` which is a suitable type when used in atomic operations /// on `Optional`. @@ -301,7 +301,7 @@ extension UnsafeRawPointer: AtomicOptionalRepresentable { public static func encodeAtomicOptionalRepresentation( _ value: consuming UnsafeRawPointer? ) -> AtomicOptionalRepresentation { - Int.encodeAtomicRepresentation( + unsafe Int.encodeAtomicRepresentation( Int(bitPattern: value) ) } @@ -323,7 +323,7 @@ extension UnsafeRawPointer: AtomicOptionalRepresentable { public static func decodeAtomicOptionalRepresentation( _ representation: consuming AtomicOptionalRepresentation ) -> UnsafeRawPointer? 
{ - UnsafeRawPointer( + unsafe UnsafeRawPointer( bitPattern: Int.decodeAtomicRepresentation(representation) ) } @@ -334,7 +334,7 @@ extension UnsafeRawPointer: AtomicOptionalRepresentable { //===----------------------------------------------------------------------===// @available(SwiftStdlib 6.0, *) -extension UnsafeMutableRawPointer: AtomicRepresentable { +extension UnsafeMutableRawPointer: @unsafe AtomicRepresentable { /// The storage representation type that `Self` encodes to and decodes from /// which is a suitable type when used in atomic operations. @available(SwiftStdlib 6.0, *) @@ -356,7 +356,7 @@ extension UnsafeMutableRawPointer: AtomicRepresentable { public static func encodeAtomicRepresentation( _ value: consuming UnsafeMutableRawPointer ) -> AtomicRepresentation { - Int.encodeAtomicRepresentation( + unsafe Int.encodeAtomicRepresentation( Int(bitPattern: value) ) } @@ -377,14 +377,14 @@ extension UnsafeMutableRawPointer: AtomicRepresentable { public static func decodeAtomicRepresentation( _ representation: consuming AtomicRepresentation ) -> UnsafeMutableRawPointer { - UnsafeMutableRawPointer( + unsafe UnsafeMutableRawPointer( bitPattern: Int.decodeAtomicRepresentation(representation) )! } } @available(SwiftStdlib 6.0, *) -extension UnsafeMutableRawPointer: AtomicOptionalRepresentable { +extension UnsafeMutableRawPointer: @unsafe AtomicOptionalRepresentable { /// The storage representation type that encodes to and decodes from /// `Optional` which is a suitable type when used in atomic operations /// on `Optional`. @@ -407,7 +407,7 @@ extension UnsafeMutableRawPointer: AtomicOptionalRepresentable { public static func encodeAtomicOptionalRepresentation( _ value: consuming UnsafeMutableRawPointer? ) -> AtomicOptionalRepresentation { - Int.encodeAtomicRepresentation( + unsafe Int.encodeAtomicRepresentation( Int(bitPattern: value) ) } @@ -429,7 +429,7 @@ extension UnsafeMutableRawPointer: AtomicOptionalRepresentable { public static func decodeAtomicOptionalRepresentation( _ representation: consuming AtomicOptionalRepresentation ) -> UnsafeMutableRawPointer? { - UnsafeMutableRawPointer( + unsafe UnsafeMutableRawPointer( bitPattern: Int.decodeAtomicRepresentation(representation) ) } @@ -440,7 +440,7 @@ extension UnsafeMutableRawPointer: AtomicOptionalRepresentable { //===----------------------------------------------------------------------===// @available(SwiftStdlib 6.0, *) -extension Unmanaged: AtomicRepresentable { +extension Unmanaged: @unsafe AtomicRepresentable { /// The storage representation type that `Self` encodes to and decodes from /// which is a suitable type when used in atomic operations. 
@available(SwiftStdlib 6.0, *) @@ -462,7 +462,7 @@ extension Unmanaged: AtomicRepresentable { public static func encodeAtomicRepresentation( _ value: consuming Unmanaged ) -> AtomicRepresentation { - Int.encodeAtomicRepresentation( + unsafe Int.encodeAtomicRepresentation( Int(bitPattern: value.toOpaque()) ) } @@ -483,14 +483,14 @@ extension Unmanaged: AtomicRepresentable { public static func decodeAtomicRepresentation( _ representation: consuming AtomicRepresentation ) -> Unmanaged { - Unmanaged.fromOpaque( + unsafe Unmanaged.fromOpaque( UnsafeRawPointer.decodeAtomicRepresentation(representation) ) } } @available(SwiftStdlib 6.0, *) -extension Unmanaged: AtomicOptionalRepresentable { +extension Unmanaged: @unsafe AtomicOptionalRepresentable { /// The storage representation type that encodes to and decodes from /// `Optional` which is a suitable type when used in atomic operations /// on `Optional`. @@ -517,8 +517,8 @@ extension Unmanaged: AtomicOptionalRepresentable { // // Int.AtomicRepresentation(Int(bitPattern: value?.toOpaque())._value) - if let unmanaged = value { - return Int.encodeAtomicRepresentation( + if let unmanaged = unsafe value { + return unsafe Int.encodeAtomicRepresentation( Int(bitPattern: unmanaged.toOpaque()) ) } @@ -543,8 +543,8 @@ extension Unmanaged: AtomicOptionalRepresentable { public static func decodeAtomicOptionalRepresentation( _ representation: consuming AtomicOptionalRepresentation ) -> Unmanaged? { - UnsafeRawPointer.decodeAtomicOptionalRepresentation(representation).map { - Unmanaged.fromOpaque($0) + unsafe UnsafeRawPointer.decodeAtomicOptionalRepresentation(representation).map { + unsafe Unmanaged.fromOpaque($0) } } } @@ -554,7 +554,7 @@ extension Unmanaged: AtomicOptionalRepresentable { //===----------------------------------------------------------------------===// @available(SwiftStdlib 6.0, *) -extension OpaquePointer: AtomicRepresentable { +extension OpaquePointer: @unsafe AtomicRepresentable { /// The storage representation type that `Self` encodes to and decodes from /// which is a suitable type when used in atomic operations. @available(SwiftStdlib 6.0, *) @@ -597,14 +597,14 @@ extension OpaquePointer: AtomicRepresentable { public static func decodeAtomicRepresentation( _ representation: consuming AtomicRepresentation ) -> OpaquePointer { - OpaquePointer( + unsafe OpaquePointer( bitPattern: Int.decodeAtomicRepresentation(representation) )! } } @available(SwiftStdlib 6.0, *) -extension OpaquePointer: AtomicOptionalRepresentable { +extension OpaquePointer: @unsafe AtomicOptionalRepresentable { /// The storage representation type that encodes to and decodes from /// `Optional` which is a suitable type when used in atomic operations /// on `Optional`. @@ -649,7 +649,7 @@ extension OpaquePointer: AtomicOptionalRepresentable { public static func decodeAtomicOptionalRepresentation( _ representation: consuming AtomicOptionalRepresentation ) -> OpaquePointer? { - OpaquePointer( + unsafe OpaquePointer( bitPattern: Int.decodeAtomicRepresentation(representation) ) } @@ -704,7 +704,7 @@ extension ObjectIdentifier: AtomicRepresentable { _ representation: consuming AtomicRepresentation ) -> ObjectIdentifier { // ObjectIdentifier doesn't have a bitPattern init..? - unsafeBitCast( + unsafe unsafeBitCast( Int.decodeAtomicRepresentation(representation), to: ObjectIdentifier.self ) @@ -735,7 +735,7 @@ extension ObjectIdentifier: AtomicOptionalRepresentable { public static func encodeAtomicOptionalRepresentation( _ value: consuming ObjectIdentifier? 
) -> AtomicOptionalRepresentation { - Int.encodeAtomicRepresentation( + unsafe Int.encodeAtomicRepresentation( // {U}Int have bitPattern inits for ObjectIdentifier, but not optional // ObjectIdentifier :sad: unsafeBitCast(value, to: Int.self) @@ -760,7 +760,7 @@ extension ObjectIdentifier: AtomicOptionalRepresentable { _ representation: consuming AtomicOptionalRepresentation ) -> ObjectIdentifier? { // ObjectIdentifier doesn't have a bitPattern init..? - unsafeBitCast( + unsafe unsafeBitCast( Int.decodeAtomicRepresentation(representation), to: ObjectIdentifier?.self ) @@ -774,7 +774,7 @@ extension ObjectIdentifier: AtomicOptionalRepresentable { #if (_pointerBitWidth(_32) && _hasAtomicBitWidth(_64)) || (_pointerBitWidth(_64) && _hasAtomicBitWidth(_128)) @available(SwiftStdlib 6.0, *) -extension UnsafeBufferPointer: AtomicRepresentable where Element: ~Copyable { +extension UnsafeBufferPointer: @unsafe AtomicRepresentable where Element: ~Copyable { /// The storage representation type that `Self` encodes to and decodes from /// which is a suitable type when used in atomic operations. @available(SwiftStdlib 6.0, *) @@ -796,9 +796,9 @@ extension UnsafeBufferPointer: AtomicRepresentable where Element: ~Copyable { public static func encodeAtomicRepresentation( _ value: consuming UnsafeBufferPointer ) -> AtomicRepresentation { - let valueCopy = value + let valueCopy = unsafe value - return WordPair.encodeAtomicRepresentation( + return unsafe WordPair.encodeAtomicRepresentation( WordPair( first: UInt(bitPattern: valueCopy.baseAddress), second: UInt(truncatingIfNeeded: valueCopy.count) @@ -822,9 +822,9 @@ extension UnsafeBufferPointer: AtomicRepresentable where Element: ~Copyable { public static func decodeAtomicRepresentation( _ representation: consuming AtomicRepresentation ) -> UnsafeBufferPointer { - let wp = WordPair.decodeAtomicRepresentation(representation) + let wp = unsafe WordPair.decodeAtomicRepresentation(representation) - return UnsafeBufferPointer( + return unsafe UnsafeBufferPointer( start: UnsafePointer(bitPattern: wp.first), count: Int(truncatingIfNeeded: wp.second) ) @@ -840,7 +840,7 @@ extension UnsafeBufferPointer: AtomicRepresentable where Element: ~Copyable { #if (_pointerBitWidth(_32) && _hasAtomicBitWidth(_64)) || (_pointerBitWidth(_64) && _hasAtomicBitWidth(_128)) @available(SwiftStdlib 6.0, *) -extension UnsafeMutableBufferPointer: AtomicRepresentable +extension UnsafeMutableBufferPointer: @unsafe AtomicRepresentable where Element: ~Copyable { /// The storage representation type that `Self` encodes to and decodes from @@ -864,9 +864,9 @@ where Element: ~Copyable public static func encodeAtomicRepresentation( _ value: consuming UnsafeMutableBufferPointer ) -> AtomicRepresentation { - let valueCopy = value + let valueCopy = unsafe value - return WordPair.encodeAtomicRepresentation( + return unsafe WordPair.encodeAtomicRepresentation( WordPair( first: UInt(bitPattern: valueCopy.baseAddress), second: UInt(truncatingIfNeeded: valueCopy.count) @@ -890,9 +890,9 @@ where Element: ~Copyable public static func decodeAtomicRepresentation( _ representation: consuming AtomicRepresentation ) -> UnsafeMutableBufferPointer { - let wp = WordPair.decodeAtomicRepresentation(representation) + let wp = unsafe WordPair.decodeAtomicRepresentation(representation) - return UnsafeMutableBufferPointer( + return unsafe UnsafeMutableBufferPointer( start: UnsafeMutablePointer(bitPattern: wp.first), count: Int(truncatingIfNeeded: wp.second) ) @@ -908,7 +908,7 @@ where Element: ~Copyable #if 
(_pointerBitWidth(_32) && _hasAtomicBitWidth(_64)) || (_pointerBitWidth(_64) && _hasAtomicBitWidth(_128))
 
 @available(SwiftStdlib 6.0, *)
-extension UnsafeRawBufferPointer: AtomicRepresentable {
+extension UnsafeRawBufferPointer: @unsafe AtomicRepresentable {
   /// The storage representation type that `Self` encodes to and decodes from
   /// which is a suitable type when used in atomic operations.
   @available(SwiftStdlib 6.0, *)
@@ -930,9 +930,9 @@ extension UnsafeRawBufferPointer: AtomicRepresentable {
   public static func encodeAtomicRepresentation(
     _ value: consuming UnsafeRawBufferPointer
   ) -> AtomicRepresentation {
-    let valueCopy = value
+    let valueCopy = unsafe value
 
-    return WordPair.encodeAtomicRepresentation(
+    return unsafe WordPair.encodeAtomicRepresentation(
       WordPair(
         first: UInt(bitPattern: valueCopy.baseAddress),
         second: UInt(truncatingIfNeeded: valueCopy.count)
@@ -956,9 +956,9 @@ extension UnsafeRawBufferPointer: AtomicRepresentable {
   public static func decodeAtomicRepresentation(
     _ representation: consuming AtomicRepresentation
   ) -> UnsafeRawBufferPointer {
-    let wp = WordPair.decodeAtomicRepresentation(representation)
+    let wp = unsafe WordPair.decodeAtomicRepresentation(representation)
 
-    return UnsafeRawBufferPointer(
+    return unsafe UnsafeRawBufferPointer(
       start: UnsafeRawPointer(bitPattern: wp.first),
       count: Int(truncatingIfNeeded: wp.second)
     )
@@ -974,7 +974,7 @@ extension UnsafeRawBufferPointer: AtomicRepresentable {
 #if (_pointerBitWidth(_32) && _hasAtomicBitWidth(_64)) || (_pointerBitWidth(_64) && _hasAtomicBitWidth(_128))
 
 @available(SwiftStdlib 6.0, *)
-extension UnsafeMutableRawBufferPointer: AtomicRepresentable {
+extension UnsafeMutableRawBufferPointer: @unsafe AtomicRepresentable {
   /// The storage representation type that `Self` encodes to and decodes from
   /// which is a suitable type when used in atomic operations.
   @available(SwiftStdlib 6.0, *)
@@ -996,9 +996,9 @@ extension UnsafeMutableRawBufferPointer: AtomicRepresentable {
   public static func encodeAtomicRepresentation(
     _ value: consuming UnsafeMutableRawBufferPointer
   ) -> AtomicRepresentation {
-    let valueCopy = value
+    let valueCopy = unsafe value
 
-    return WordPair.encodeAtomicRepresentation(
+    return unsafe WordPair.encodeAtomicRepresentation(
       WordPair(
         first: UInt(bitPattern: valueCopy.baseAddress),
         second: UInt(truncatingIfNeeded: valueCopy.count)
@@ -1022,9 +1022,9 @@ extension UnsafeMutableRawBufferPointer: AtomicRepresentable {
   public static func decodeAtomicRepresentation(
     _ representation: consuming AtomicRepresentation
   ) -> UnsafeMutableRawBufferPointer {
-    let wp = WordPair.decodeAtomicRepresentation(representation)
+    let wp = unsafe WordPair.decodeAtomicRepresentation(representation)
 
-    return UnsafeMutableRawBufferPointer(
+    return unsafe UnsafeMutableRawBufferPointer(
       start: UnsafeMutableRawPointer(bitPattern: wp.first),
       count: Int(truncatingIfNeeded: wp.second)
     )
diff --git a/stdlib/public/Synchronization/CMakeLists.txt b/stdlib/public/Synchronization/CMakeLists.txt
index 892d105c3f300..f7167514c9789 100644
--- a/stdlib/public/Synchronization/CMakeLists.txt
+++ b/stdlib/public/Synchronization/CMakeLists.txt
@@ -74,6 +74,7 @@ set(SWIFT_SYNCHRNOIZATION_SWIFT_FLAGS
   "-enable-experimental-feature" "RawLayout"
   "-enable-experimental-feature" "StaticExclusiveOnly"
   "-enable-experimental-feature" "Extern"
+  "-strict-memory-safety"
 )
 
 add_swift_target_library(swiftSynchronization ${SWIFT_STDLIB_LIBRARY_BUILD_TYPES} IS_STDLIB IMPORTS_NON_OSSA
diff --git a/stdlib/public/Synchronization/Cell.swift b/stdlib/public/Synchronization/Cell.swift
index 75b8edf1a6a0f..75dde59cdcf85 100644
--- a/stdlib/public/Synchronization/Cell.swift
+++ b/stdlib/public/Synchronization/Cell.swift
@@ -21,7 +21,7 @@ internal struct _Cell<Value: ~Copyable>: ~Copyable {
   @_alwaysEmitIntoClient
   @_transparent
   internal var _address: UnsafeMutablePointer<Value> {
-    UnsafeMutablePointer(_rawAddress)
+    unsafe UnsafeMutablePointer(_rawAddress)
   }
 
   @available(SwiftStdlib 6.0, *)
@@ -35,13 +35,13 @@ internal struct _Cell<Value: ~Copyable>: ~Copyable {
   @_alwaysEmitIntoClient
   @_transparent
   internal init(_ initialValue: consuming Value) {
-    _address.initialize(to: initialValue)
+    unsafe _address.initialize(to: initialValue)
   }
 
   @available(SwiftStdlib 6.0, *)
   @_alwaysEmitIntoClient
   @inlinable
   deinit {
-    _address.deinitialize(count: 1)
+    unsafe _address.deinitialize(count: 1)
   }
 }
diff --git a/stdlib/public/Synchronization/Mutex/DarwinImpl.swift b/stdlib/public/Synchronization/Mutex/DarwinImpl.swift
index 7b0afef515542..8d398b17bc8a0 100644
--- a/stdlib/public/Synchronization/Mutex/DarwinImpl.swift
+++ b/stdlib/public/Synchronization/Mutex/DarwinImpl.swift
@@ -30,20 +30,20 @@ public struct _MutexHandle: ~Copyable {
   @_alwaysEmitIntoClient
   @_transparent
   internal borrowing func _lock() {
-    os_unfair_lock_lock(value._address)
+    unsafe os_unfair_lock_lock(value._address)
   }
 
   @available(SwiftStdlib 6.0, *)
   @_alwaysEmitIntoClient
   @_transparent
   internal borrowing func _tryLock() -> Bool {
-    os_unfair_lock_trylock(value._address)
+    unsafe os_unfair_lock_trylock(value._address)
   }
 
   @available(SwiftStdlib 6.0, *)
   @_alwaysEmitIntoClient
   @_transparent
   internal borrowing func _unlock() {
-    os_unfair_lock_unlock(value._address)
+    unsafe os_unfair_lock_unlock(value._address)
   }
 }
diff --git a/stdlib/public/Synchronization/Mutex/Mutex.swift b/stdlib/public/Synchronization/Mutex/Mutex.swift
index 8b46810cf1fb8..a3b0d0eb1feb8 100644
---
a/stdlib/public/Synchronization/Mutex/Mutex.swift +++ b/stdlib/public/Synchronization/Mutex/Mutex.swift @@ -93,7 +93,7 @@ extension Mutex where Value: ~Copyable { handle._unlock() } - return try body(&value._address.pointee) + return try unsafe body(&value._address.pointee) } /// Attempts to acquire the lock and then calls the given closure if @@ -142,7 +142,7 @@ extension Mutex where Value: ~Copyable { handle._unlock() } - return try body(&value._address.pointee) + return unsafe try body(&value._address.pointee) } } diff --git a/stdlib/public/core/ASCII.swift b/stdlib/public/core/ASCII.swift index 6f629936bb055..47d1e5a100a5d 100644 --- a/stdlib/public/core/ASCII.swift +++ b/stdlib/public/core/ASCII.swift @@ -61,7 +61,7 @@ extension Unicode.ASCII: Unicode.Encoding { } else if _fastPath(FromEncoding.self == UTF8.self) { let c = _identityCast(content, to: UTF8.EncodedScalar.self) - let first = c.first.unsafelyUnwrapped + let first = unsafe c.first.unsafelyUnwrapped guard (first < 0x80) else { return nil } return EncodedScalar(CodeUnit(first)) } diff --git a/stdlib/public/core/AnyHashable.swift b/stdlib/public/core/AnyHashable.swift index 525f791f20e22..51451a92e06f3 100644 --- a/stdlib/public/core/AnyHashable.swift +++ b/stdlib/public/core/AnyHashable.swift @@ -103,7 +103,7 @@ internal struct _ConcreteHashableBox: _AnyHashableBox { internal func _downCastConditional(into result: UnsafeMutablePointer) -> Bool { guard let value = _baseHashable as? T else { return false } - result.initialize(to: value) + unsafe result.initialize(to: value) return true } } @@ -165,8 +165,8 @@ public struct AnyHashable { } self.init(_box: _ConcreteHashableBox(false)) // Dummy value - _withUnprotectedUnsafeMutablePointer(to: &self) { - _makeAnyHashableUpcastingToHashableBaseType( + unsafe _withUnprotectedUnsafeMutablePointer(to: &self) { + unsafe _makeAnyHashableUpcastingToHashableBaseType( base, storingResultInto: $0) } @@ -197,13 +197,13 @@ public struct AnyHashable { internal func _downCastConditional(into result: UnsafeMutablePointer) -> Bool { // Attempt the downcast. - if _box._downCastConditional(into: result) { return true } + if unsafe _box._downCastConditional(into: result) { return true } #if _runtime(_ObjC) // Bridge to Objective-C and then attempt the cast from there. // FIXME: This should also work without the Objective-C runtime. if let value = _bridgeAnythingToObjectiveC(_box._base) as? T { - result.initialize(to: value) + unsafe result.initialize(to: value) return true } #endif @@ -308,7 +308,7 @@ internal func _makeAnyHashableUsingDefaultRepresentation( of value: H, storingResultInto result: UnsafeMutablePointer ) { - result.pointee = AnyHashable(_usingDefaultRepresentationOf: value) + unsafe result.pointee = AnyHashable(_usingDefaultRepresentationOf: value) } /// Provided by AnyHashable.cpp. @@ -333,7 +333,7 @@ internal func _convertToAnyHashableIndirect( _ value: H, _ target: UnsafeMutablePointer ) { - target.initialize(to: AnyHashable(value)) + unsafe target.initialize(to: AnyHashable(value)) } /// Called by the casting machinery. 
@@ -343,5 +343,5 @@ internal func _anyHashableDownCastConditionalIndirect( _ value: UnsafePointer, _ target: UnsafeMutablePointer ) -> Bool { - return value.pointee._downCastConditional(into: target) + return unsafe value.pointee._downCastConditional(into: target) } diff --git a/stdlib/public/core/Array.swift b/stdlib/public/core/Array.swift index 2a03a8d64dc65..d8f1276070a0f 100644 --- a/stdlib/public/core/Array.swift +++ b/stdlib/public/core/Array.swift @@ -441,7 +441,7 @@ extension Array { @inlinable @_semantics("array.get_element_address") internal func _getElementAddress(_ index: Int) -> UnsafeMutablePointer { - return _buffer.firstElementAddress + index + return unsafe _buffer.firstElementAddress + index } } @@ -496,7 +496,7 @@ extension Array: _ArrayProtocol { @inlinable public var _baseAddressIfContiguous: UnsafeMutablePointer? { @inline(__always) // FIXME(TODO: JIRA): Hack around test failure - get { return _buffer.firstElementAddressIfContiguous } + get { return unsafe _buffer.firstElementAddressIfContiguous } } } @@ -755,9 +755,9 @@ extension Array: RandomAccessCollection, MutableCollection { _modify { _makeMutableAndUnique() // makes the array native, too _checkSubscript_mutating(index) - let address = _buffer.mutableFirstElementAddress + index + let address = unsafe _buffer.mutableFirstElementAddress + index defer { _endMutation() } - yield &address.pointee + yield unsafe &address.pointee } } @@ -797,7 +797,7 @@ extension Array: RandomAccessCollection, MutableCollection { _checkIndex(bounds.upperBound) // If the replacement buffer has same identity, and the ranges match, // then this was a pinned in-place modification, nothing further needed. - if self[bounds]._buffer.identity != rhs._buffer.identity + if unsafe self[bounds]._buffer.identity != rhs._buffer.identity || bounds != rhs.startIndex.. - (self, p) = Array._allocateUninitialized(count) + unsafe (self, p) = unsafe Array._allocateUninitialized(count) for _ in 0.. (Array, UnsafeMutablePointer) { let result = Array(_uninitializedCount: count) - return (result, result._buffer.firstElementAddress) + return unsafe (result, result._buffer.firstElementAddress) } @@ -978,7 +978,7 @@ extension Array: RangeReplaceableCollection { count: count, storage: storage) - return ( + return unsafe ( Array( _buffer: _Buffer(_buffer: innerBuffer, shiftedToStartIndex: 0)), innerBuffer.firstElementAddress) @@ -1118,7 +1118,7 @@ extension Array: RangeReplaceableCollection { let newCount = oldCount &+ 1 var newBuffer = _buffer._forceCreateUniqueMutableBuffer( countForNewBuffer: oldCount, minNewCapacity: newCount) - _buffer._arrayOutOfPlaceUpdate(&newBuffer, oldCount, 0) + unsafe _buffer._arrayOutOfPlaceUpdate(&newBuffer, oldCount, 0) } @inlinable @@ -1166,7 +1166,7 @@ extension Array: RangeReplaceableCollection { _internalInvariant(_buffer.mutableCapacity >= _buffer.mutableCount &+ 1) _buffer.mutableCount = oldCount &+ 1 - (_buffer.mutableFirstElementAddress + oldCount).initialize(to: newElement) + unsafe (_buffer.mutableFirstElementAddress + oldCount).initialize(to: newElement) } /// Adds a new element at the end of the array. 
@@ -1234,15 +1234,15 @@ extension Array: RangeReplaceableCollection { growForAppend: true) let oldCount = _buffer.mutableCount - let startNewElements = _buffer.mutableFirstElementAddress + oldCount - let buf = UnsafeMutableBufferPointer( + let startNewElements = unsafe _buffer.mutableFirstElementAddress + oldCount + let buf = unsafe UnsafeMutableBufferPointer( start: startNewElements, count: _buffer.mutableCapacity - oldCount) - var (remainder,writtenUpTo) = buf.initialize(from: newElements) + var (remainder,writtenUpTo) = unsafe buf.initialize(from: newElements) // trap on underflow from the sequence's underestimate: - let writtenCount = buf.distance(from: buf.startIndex, to: writtenUpTo) + let writtenCount = unsafe buf.distance(from: buf.startIndex, to: writtenUpTo) _precondition(newElementsCount <= writtenCount, "newElements.underestimatedCount was an overestimate") // can't check for overflow as sequences can underestimate @@ -1272,11 +1272,11 @@ extension Array: RangeReplaceableCollection { _reserveCapacityAssumingUniqueBuffer(oldCount: newCount) let currentCapacity = _buffer.mutableCapacity - let base = _buffer.mutableFirstElementAddress + let base = unsafe _buffer.mutableFirstElementAddress // fill while there is another item and spare capacity while let next = nextItem, newCount < currentCapacity { - (base + newCount).initialize(to: next) + unsafe (base + newCount).initialize(to: next) newCount += 1 nextItem = remainder.next() } @@ -1304,8 +1304,8 @@ extension Array: RangeReplaceableCollection { _makeMutableAndUnique() let newCount = _buffer.mutableCount - 1 _precondition(newCount >= 0, "Can't removeLast from an empty Array") - let pointer = (_buffer.mutableFirstElementAddress + newCount) - let element = pointer.move() + let pointer = unsafe (_buffer.mutableFirstElementAddress + newCount) + let element = unsafe pointer.move() _buffer.mutableCount = newCount _endMutation() return element @@ -1337,9 +1337,9 @@ extension Array: RangeReplaceableCollection { _precondition(index < currentCount, "Index out of range") _precondition(index >= 0, "Index out of range") let newCount = currentCount - 1 - let pointer = (_buffer.mutableFirstElementAddress + index) - let result = pointer.move() - pointer.moveInitialize(from: pointer + 1, count: newCount - index) + let pointer = unsafe (_buffer.mutableFirstElementAddress + index) + let result = unsafe pointer.move() + unsafe pointer.moveInitialize(from: pointer + 1, count: newCount - index) _buffer.mutableCount = newCount _endMutation() return result @@ -1402,9 +1402,9 @@ extension Array: RangeReplaceableCollection { public mutating func _withUnsafeMutableBufferPointerIfSupported( _ body: (inout UnsafeMutableBufferPointer) throws -> R ) rethrows -> R? { - return try withUnsafeMutableBufferPointer { + return unsafe try withUnsafeMutableBufferPointer { (bufferPointer) -> R in - return try body(&bufferPointer) + return try unsafe body(&bufferPointer) } } @@ -1412,9 +1412,9 @@ extension Array: RangeReplaceableCollection { public mutating func withContiguousMutableStorageIfAvailable( _ body: (inout UnsafeMutableBufferPointer) throws -> R ) rethrows -> R? { - return try withUnsafeMutableBufferPointer { + return unsafe try withUnsafeMutableBufferPointer { (bufferPointer) -> R in - return try body(&bufferPointer) + return try unsafe body(&bufferPointer) } } @@ -1422,9 +1422,9 @@ extension Array: RangeReplaceableCollection { public func withContiguousStorageIfAvailable( _ body: (UnsafeBufferPointer) throws -> R ) rethrows -> R? 
{ - return try withUnsafeBufferPointer { + return unsafe try withUnsafeBufferPointer { (bufferPointer) -> R in - return try body(bufferPointer) + return try unsafe body(bufferPointer) } } @@ -1486,12 +1486,12 @@ extension Array: CustomStringConvertible, CustomDebugStringConvertible { extension Array { @usableFromInline @_transparent internal func _cPointerArgs() -> (AnyObject?, UnsafeRawPointer?) { - let p = _baseAddressIfContiguous - if _fastPath(p != nil || isEmpty) { + let p = unsafe _baseAddressIfContiguous + if unsafe _fastPath(p != nil || isEmpty) { return (_owner, UnsafeRawPointer(p)) } let n = ContiguousArray(self._buffer)._buffer - return (n.owner, UnsafeRawPointer(n.firstElementAddress)) + return unsafe (n.owner, UnsafeRawPointer(n.firstElementAddress)) } } @@ -1506,11 +1506,11 @@ extension Array { _ initializedCount: inout Int) throws -> Void ) rethrows { var firstElementAddress: UnsafeMutablePointer - (self, firstElementAddress) = - Array._allocateUninitialized(_unsafeUninitializedCapacity) + unsafe (self, firstElementAddress) = + unsafe Array._allocateUninitialized(_unsafeUninitializedCapacity) var initializedCount = 0 - var buffer = UnsafeMutableBufferPointer( + var buffer = unsafe UnsafeMutableBufferPointer( start: firstElementAddress, count: _unsafeUninitializedCapacity) defer { // Update self.count even if initializer throws an error. @@ -1518,14 +1518,14 @@ extension Array { initializedCount <= _unsafeUninitializedCapacity, "Initialized count set to greater than specified capacity." ) - _precondition( + unsafe _precondition( buffer.baseAddress == firstElementAddress, "Can't reassign buffer in Array(unsafeUninitializedCapacity:initializingWith:)" ) self._buffer.mutableCount = initializedCount _endMutation() } - try initializer(&buffer, &initializedCount) + try unsafe initializer(&buffer, &initializedCount) } /// Creates an array with the specified capacity, then calls the given @@ -1560,7 +1560,7 @@ extension Array { _ buffer: inout UnsafeMutableBufferPointer, _ initializedCount: inout Int) throws -> Void ) rethrows { - self = try Array( + self = try unsafe Array( _unsafeUninitializedCapacity: unsafeUninitializedCapacity, initializingWith: initializer) } @@ -1572,7 +1572,7 @@ extension Array { func withUnsafeBufferPointer( _ body: (UnsafeBufferPointer) throws -> R ) rethrows -> R { - return try _buffer.withUnsafeBufferPointer(body) + return try unsafe _buffer.withUnsafeBufferPointer(body) } /// Calls a closure with a pointer to the array's contiguous storage. @@ -1608,7 +1608,7 @@ extension Array { public func withUnsafeBufferPointer( _ body: (UnsafeBufferPointer) throws(E) -> R ) throws(E) -> R { - return try _buffer.withUnsafeBufferPointer(body) + return try unsafe _buffer.withUnsafeBufferPointer(body) } // Superseded by the typed-throws version of this function, but retained @@ -1625,12 +1625,12 @@ extension Array { let count = _buffer.mutableCount // Create an UnsafeBufferPointer that we can pass to body - let pointer = _buffer.mutableFirstElementAddress - var inoutBufferPointer = UnsafeMutableBufferPointer( + let pointer = unsafe _buffer.mutableFirstElementAddress + var inoutBufferPointer = unsafe UnsafeMutableBufferPointer( start: pointer, count: count) defer { - _precondition( + unsafe _precondition( inoutBufferPointer.baseAddress == pointer && inoutBufferPointer.count == count, "Array withUnsafeMutableBufferPointer: replacing the buffer is not allowed") @@ -1639,7 +1639,7 @@ extension Array { } // Invoke the body. 
- return try body(&inoutBufferPointer) + return try unsafe body(&inoutBufferPointer) } /// Calls the given closure with a pointer to the array's mutable contiguous @@ -1692,12 +1692,12 @@ extension Array { let count = _buffer.mutableCount // Create an UnsafeBufferPointer that we can pass to body - let pointer = _buffer.mutableFirstElementAddress - var inoutBufferPointer = UnsafeMutableBufferPointer( + let pointer = unsafe _buffer.mutableFirstElementAddress + var inoutBufferPointer = unsafe UnsafeMutableBufferPointer( start: pointer, count: count) defer { - _precondition( + unsafe _precondition( inoutBufferPointer.baseAddress == pointer && inoutBufferPointer.count == count, "Array withUnsafeMutableBufferPointer: replacing the buffer is not allowed") @@ -1706,7 +1706,7 @@ extension Array { } // Invoke the body. - return try body(&inoutBufferPointer) + return try unsafe body(&inoutBufferPointer) } @inlinable @@ -1723,21 +1723,21 @@ extension Array { _precondition(self.count <= buffer.count, "Insufficient space allocated to copy array contents") - if let s = _baseAddressIfContiguous { - p.initialize(from: s, count: self.count) + if let s = unsafe _baseAddressIfContiguous { + unsafe p.initialize(from: s, count: self.count) // Need a _fixLifetime bracketing the _baseAddressIfContiguous getter // and all uses of the pointer it returns: _fixLifetime(self._owner) } else { for x in self { - p.initialize(to: x) - p += 1 + unsafe p.initialize(to: x) + unsafe p += 1 } } var it = IndexingIterator(_elements: self) it._position = endIndex - return (it,buffer.index(buffer.startIndex, offsetBy: self.count)) + return (it,unsafe buffer.index(buffer.startIndex, offsetBy: self.count)) } } @@ -1819,7 +1819,7 @@ extension Array: Equatable where Element: Equatable { } // Test referential equality. - if lhsCount == 0 || lhs._buffer.identity == rhs._buffer.identity { + if unsafe lhsCount == 0 || lhs._buffer.identity == rhs._buffer.identity { return true } @@ -1898,8 +1898,8 @@ extension Array { public mutating func withUnsafeMutableBytes( _ body: (UnsafeMutableRawBufferPointer) throws -> R ) rethrows -> R { - return try self.withUnsafeMutableBufferPointer { - return try body(UnsafeMutableRawBufferPointer($0)) + return try unsafe self.withUnsafeMutableBufferPointer { + return try unsafe body(UnsafeMutableRawBufferPointer($0)) } } @@ -1934,8 +1934,8 @@ extension Array { public func withUnsafeBytes( _ body: (UnsafeRawBufferPointer) throws -> R ) rethrows -> R { - return try self.withUnsafeBufferPointer { - try body(UnsafeRawBufferPointer($0)) + return try unsafe self.withUnsafeBufferPointer { + try unsafe body(UnsafeRawBufferPointer($0)) } } } @@ -1947,8 +1947,8 @@ extension Array { // copy.) @_alwaysEmitIntoClient public func _copyToNewArray() -> [Element] { - Array(unsafeUninitializedCapacity: self.count) { buffer, count in - var (it, c) = self._buffer._copyContents(initializing: buffer) + unsafe Array(unsafeUninitializedCapacity: self.count) { buffer, count in + var (it, c) = unsafe self._buffer._copyContents(initializing: buffer) _precondition(it.next() == nil) count = c } @@ -2075,7 +2075,7 @@ internal struct _ArrayAnyHashableBox into result: UnsafeMutablePointer ) -> Bool { guard let value = _value as? 
T else { return false } - result.initialize(to: value) + unsafe result.initialize(to: value) return true } } diff --git a/stdlib/public/core/ArrayBuffer.swift b/stdlib/public/core/ArrayBuffer.swift index 61a985f815415..229593d1d9717 100644 --- a/stdlib/public/core/ArrayBuffer.swift +++ b/stdlib/public/core/ArrayBuffer.swift @@ -201,12 +201,12 @@ extension _ArrayBuffer { if bufferIsUnique { // As an optimization, if the original buffer is unique, we can just move // the elements instead of copying. - let dest = newBuffer.firstElementAddress - dest.moveInitialize(from: mutableFirstElementAddress, + let dest = unsafe newBuffer.firstElementAddress + unsafe dest.moveInitialize(from: mutableFirstElementAddress, count: c) _native.mutableCount = 0 } else { - _copyContents( + unsafe _copyContents( subRange: 0.. UnsafeMutablePointer { _typeCheck(bounds) if _fastPath(_isNative) { - return _native._copyContents(subRange: bounds, initializing: target) + return unsafe _native._copyContents(subRange: bounds, initializing: target) } - let buffer = UnsafeMutableRawPointer(target) + let buffer = unsafe UnsafeMutableRawPointer(target) .assumingMemoryBound(to: AnyObject.self) - let result = _nonNative._copyContents( + let result = unsafe _nonNative._copyContents( subRange: bounds, initializing: buffer) - return UnsafeMutableRawPointer(result).assumingMemoryBound(to: Element.self) + return unsafe UnsafeMutableRawPointer(result).assumingMemoryBound(to: Element.self) } @inlinable @@ -322,15 +322,15 @@ extension _ArrayBuffer { initializing buffer: UnsafeMutableBufferPointer ) -> (Iterator, UnsafeMutableBufferPointer.Index) { if _fastPath(_isNative) { - let (_, c) = _native._copyContents(initializing: buffer) + let (_, c) = unsafe _native._copyContents(initializing: buffer) return (IndexingIterator(_elements: self, _position: c), c) } guard buffer.count > 0 else { return (makeIterator(), 0) } - let ptr = UnsafeMutableRawPointer(buffer.baseAddress)? + let ptr = unsafe UnsafeMutableRawPointer(buffer.baseAddress)? .assumingMemoryBound(to: AnyObject.self) - let (_, c) = _nonNative._copyContents( + let (_, c) = unsafe _nonNative._copyContents( initializing: UnsafeMutableBufferPointer(start: ptr, count: buffer.count)) - return (IndexingIterator(_elements: self, _position: c), c) + return unsafe (IndexingIterator(_elements: self, _position: c), c) } /// Returns a `_SliceBuffer` containing the given sub-range of elements in @@ -355,7 +355,7 @@ extension _ArrayBuffer { @inlinable internal var firstElementAddress: UnsafeMutablePointer { _internalInvariant(_isNative, "must be a native buffer") - return _native.firstElementAddress + return unsafe _native.firstElementAddress } /// A mutable pointer to the first element. @@ -364,12 +364,12 @@ extension _ArrayBuffer { @_alwaysEmitIntoClient internal var mutableFirstElementAddress: UnsafeMutablePointer { _internalInvariant(_isNative, "must be a native buffer") - return _native.mutableFirstElementAddress + return unsafe _native.mutableFirstElementAddress } @inlinable internal var firstElementAddressIfContiguous: UnsafeMutablePointer? { - return _fastPath(_isNative) ? firstElementAddress : nil + return unsafe _fastPath(_isNative) ? firstElementAddress : nil } /// The number of elements the buffer stores. 
@@ -500,7 +500,7 @@ extension _ArrayBuffer { if _fastPath(wasNativeTypeChecked) { return _nativeTypeChecked[i] } - return unsafeBitCast(_getElementSlowPath(i), to: Element.self) + return unsafe unsafeBitCast(_getElementSlowPath(i), to: Element.self) } @inline(never) @@ -570,24 +570,24 @@ extension _ArrayBuffer { @inlinable @_alwaysEmitIntoClient static var associationKey: UnsafeRawPointer { //We never dereference this, we just need an address to use as a unique key - UnsafeRawPointer(Builtin.addressof(&_swiftEmptyArrayStorage)) + unsafe UnsafeRawPointer(Builtin.addressof(&_swiftEmptyArrayStorage)) } @inlinable @_alwaysEmitIntoClient internal func getAssociatedBuffer() -> _ContiguousArrayBuffer? { - let getter = unsafeBitCast( + let getter = unsafe unsafeBitCast( getGetAssociatedObjectPtr(), to: (@convention(c)( AnyObject, UnsafeRawPointer ) -> UnsafeRawPointer?).self ) - if let assocPtr = getter( + if let assocPtr = unsafe getter( _storage.objCInstance, _ArrayBuffer.associationKey ) { let buffer: _ContiguousArrayStorage - buffer = Unmanaged.fromOpaque(assocPtr).takeUnretainedValue() + buffer = unsafe Unmanaged.fromOpaque(assocPtr).takeUnretainedValue() return _ContiguousArrayBuffer(buffer) } return nil @@ -595,13 +595,13 @@ extension _ArrayBuffer { @inlinable @_alwaysEmitIntoClient internal func setAssociatedBuffer(_ buffer: _ContiguousArrayBuffer) { - let setter = unsafeBitCast(getSetAssociatedObjectPtr(), to: (@convention(c)( + let setter = unsafe unsafeBitCast(getSetAssociatedObjectPtr(), to: (@convention(c)( AnyObject, UnsafeRawPointer, AnyObject?, UInt ) -> Void).self) - setter( + unsafe setter( _storage.objCInstance, _ArrayBuffer.associationKey, buffer._storage, @@ -627,13 +627,13 @@ extension _ArrayBuffer { unwrapped = associatedBuffer } else { associatedBuffer = ContiguousArray(self)._buffer - unwrapped = associatedBuffer.unsafelyUnwrapped + unwrapped = unsafe associatedBuffer.unsafelyUnwrapped setAssociatedBuffer(unwrapped) } defer { _fixLifetime(unwrapped) } objc_sync_exit(lock) } - return try body( + return try unsafe body( UnsafeBufferPointer( start: unwrapped.firstElementAddress, count: unwrapped.count @@ -653,7 +653,7 @@ extension _ArrayBuffer { ) rethrows -> R { if _fastPath(_isNative) { defer { _fixLifetime(self) } - return try body( + return try unsafe body( UnsafeBufferPointer(start: firstElementAddress, count: count)) } return try ContiguousArray(self).withUnsafeBufferPointer(body) @@ -668,10 +668,10 @@ extension _ArrayBuffer { ) throws(E) -> R { if _fastPath(_isNative) { defer { _fixLifetime(self) } - return try body( + return try unsafe body( UnsafeBufferPointer(start: firstElementAddress, count: count)) } - return try withUnsafeBufferPointer_nonNative(body) + return try unsafe withUnsafeBufferPointer_nonNative(body) } // Superseded by the typed-throws version of this function, but retained @@ -681,7 +681,7 @@ extension _ArrayBuffer { internal mutating func __abi_withUnsafeMutableBufferPointer( _ body: (UnsafeMutableBufferPointer) throws -> R ) rethrows -> R { - return try withUnsafeMutableBufferPointer(body) + return try unsafe withUnsafeMutableBufferPointer(body) } /// Call `body(p)`, where `p` is an `UnsafeMutableBufferPointer` @@ -697,7 +697,7 @@ extension _ArrayBuffer { "Array is bridging an opaque NSArray; can't get a pointer to the elements" ) defer { _fixLifetime(self) } - return try body(UnsafeMutableBufferPointer( + return try unsafe body(UnsafeMutableBufferPointer( start: firstElementAddressIfContiguous, count: count)) } @@ -722,10 +722,10 @@ extension 
_ArrayBuffer { @inlinable internal var identity: UnsafeRawPointer { if _isNative { - return _native.identity + return unsafe _native.identity } else { - return UnsafeRawPointer( + return unsafe UnsafeRawPointer( Unmanaged.passUnretained(_nonNative.buffer).toOpaque()) } } diff --git a/stdlib/public/core/ArrayBufferProtocol.swift b/stdlib/public/core/ArrayBufferProtocol.swift index 8b04a7760c680..efc5b93088960 100644 --- a/stdlib/public/core/ArrayBufferProtocol.swift +++ b/stdlib/public/core/ArrayBufferProtocol.swift @@ -140,7 +140,7 @@ where Indices == Range { extension _ArrayBufferProtocol { @inlinable internal var subscriptBaseAddress: UnsafeMutablePointer { - return firstElementAddress + return unsafe firstElementAddress } // Make sure the compiler does not inline _copyBuffer to reduce code size. @@ -150,7 +150,7 @@ extension _ArrayBufferProtocol { internal init(copying buffer: Self) { let newBuffer = _ContiguousArrayBuffer( _uninitializedCount: buffer.count, minimumCapacity: buffer.count) - buffer._copyContents( + unsafe buffer._copyContents( subRange: buffer.indices, initializing: newBuffer.firstElementAddress) self = Self( _buffer: newBuffer, shiftedToStartIndex: buffer.startIndex) @@ -163,20 +163,20 @@ extension _ArrayBufferProtocol { elementsOf newValues: __owned C ) where C: Collection, C.Element == Element { _internalInvariant(startIndex == 0, "_SliceBuffer should override this function.") - let elements = self.firstElementAddress + let elements = unsafe self.firstElementAddress // erase all the elements we're replacing to create a hole - let holeStart = elements + subrange.lowerBound - let holeEnd = holeStart + newCount + let holeStart = unsafe elements + subrange.lowerBound + let holeEnd = unsafe holeStart + newCount let eraseCount = subrange.count - holeStart.deinitialize(count: eraseCount) + unsafe holeStart.deinitialize(count: eraseCount) let growth = newCount - eraseCount if growth != 0 { - let tailStart = elements + subrange.upperBound + let tailStart = unsafe elements + subrange.upperBound let tailCount = self.count - subrange.upperBound - holeEnd.moveInitialize(from: tailStart, count: tailCount) + unsafe holeEnd.moveInitialize(from: tailStart, count: tailCount) self.count += growth } @@ -190,14 +190,14 @@ extension _ArrayBufferProtocol { $0.count == newCount, "invalid Collection: count differed in successive traversals" ) - holeStart.initialize(from: $0.baseAddress!, count: newCount) + unsafe holeStart.initialize(from: $0.baseAddress!, count: newCount) } if done == nil { - var place = holeStart + var place = unsafe holeStart var i = newValues.startIndex - while place < holeEnd { - place.initialize(to: newValues[i]) - place += 1 + while unsafe place < holeEnd { + unsafe place.initialize(to: newValues[i]) + unsafe place += 1 newValues.formIndex(after: &i) } _expectEnd(of: newValues, is: i) diff --git a/stdlib/public/core/ArrayCast.swift b/stdlib/public/core/ArrayCast.swift index b0753ce8e3005..184d616458d59 100644 --- a/stdlib/public/core/ArrayCast.swift +++ b/stdlib/public/core/ArrayCast.swift @@ -21,7 +21,7 @@ internal func _arrayDownCastIndirect( _ source: UnsafePointer>, _ target: UnsafeMutablePointer>) { - target.initialize(to: _arrayForceCast(source.pointee)) + unsafe target.initialize(to: _arrayForceCast(source.pointee)) } /// Implements `source as! [TargetElement]`. 
@@ -58,8 +58,8 @@ internal func _arrayDownCastConditionalIndirect( _ source: UnsafePointer>, _ target: UnsafeMutablePointer> ) -> Bool { - if let result: Array = _arrayConditionalCast(source.pointee) { - target.initialize(to: result) + if let result: Array = unsafe _arrayConditionalCast(source.pointee) { + unsafe target.initialize(to: result) return true } return false diff --git a/stdlib/public/core/ArrayShared.swift b/stdlib/public/core/ArrayShared.swift index 12b33b3ad8c6b..05dcec577ebbd 100644 --- a/stdlib/public/core/ArrayShared.swift +++ b/stdlib/public/core/ArrayShared.swift @@ -49,11 +49,11 @@ func _allocateUninitializedArray(_ builtinCount: Builtin.Word) _ContiguousArrayStorage.self, builtinCount, Element.self) #endif - let (array, ptr) = Array._adoptStorage(bufferObject, count: count) + let (array, ptr) = unsafe Array._adoptStorage(bufferObject, count: count) return (array, ptr._rawValue) } // For an empty array no buffer allocation is needed. - let (array, ptr) = Array._allocateUninitialized(count) + let (array, ptr) = unsafe Array._allocateUninitialized(count) return (array, ptr._rawValue) } @@ -149,15 +149,15 @@ extension _ArrayBufferProtocol { var newBuffer = _forceCreateUniqueMutableBuffer( newCount: newCount, requiredCapacity: newCount) - _arrayOutOfPlaceUpdate( + unsafe _arrayOutOfPlaceUpdate( &newBuffer, bounds.lowerBound - startIndex, insertCount, { rawMemory, count in - var p = rawMemory + var p = unsafe rawMemory var q = newValues.startIndex for _ in 0.. UnsafeMutablePointer { - return _buffer.subscriptBaseAddress + index + return unsafe _buffer.subscriptBaseAddress + index } } @@ -545,9 +545,9 @@ extension ArraySlice: RandomAccessCollection, MutableCollection { _modify { _makeMutableAndUnique() // makes the array native, too _checkSubscript_native(index) - let address = _buffer.subscriptBaseAddress + index + let address = unsafe _buffer.subscriptBaseAddress + index defer { _endMutation() } - yield &address.pointee + yield unsafe &address.pointee } } @@ -587,7 +587,7 @@ extension ArraySlice: RandomAccessCollection, MutableCollection { _checkIndex(bounds.upperBound) // If the replacement buffer has same identity, and the ranges match, // then this was a pinned in-place modification, nothing further needed. - if self[bounds]._buffer.identity != rhs._buffer.identity + if unsafe self[bounds]._buffer.identity != rhs._buffer.identity || bounds != rhs.startIndex..( _uninitializedCount: count, minimumCapacity: minimumCapacity) - _buffer._copyContents( + unsafe _buffer._copyContents( subRange: _buffer.indices, initializing: newBuffer.firstElementAddress) _buffer = _Buffer( @@ -850,7 +850,7 @@ extension ArraySlice: RangeReplaceableCollection { let newCount = oldCount &+ 1 var newBuffer = _buffer._forceCreateUniqueMutableBuffer( countForNewBuffer: oldCount, minNewCapacity: newCount) - _buffer._arrayOutOfPlaceUpdate( + unsafe _buffer._arrayOutOfPlaceUpdate( &newBuffer, oldCount, 0) } @@ -892,7 +892,7 @@ extension ArraySlice: RangeReplaceableCollection { _internalInvariant(_buffer.capacity >= _buffer.count &+ 1) _buffer.count = oldCount &+ 1 - (_buffer.firstElementAddress + oldCount).initialize(to: newElement) + unsafe (_buffer.firstElementAddress + oldCount).initialize(to: newElement) } /// Adds a new element at the end of the array. 
@@ -952,15 +952,15 @@ extension ArraySlice: RangeReplaceableCollection { _ = _buffer.beginCOWMutation() let oldCount = self.count - let startNewElements = _buffer.firstElementAddress + oldCount - let buf = UnsafeMutableBufferPointer( + let startNewElements = unsafe _buffer.firstElementAddress + oldCount + let buf = unsafe UnsafeMutableBufferPointer( start: startNewElements, count: self.capacity - oldCount) - let (remainder,writtenUpTo) = buf.initialize(from: newElements) + let (remainder,writtenUpTo) = unsafe buf.initialize(from: newElements) // trap on underflow from the sequence's underestimate: - let writtenCount = buf.distance(from: buf.startIndex, to: writtenUpTo) + let writtenCount = unsafe buf.distance(from: buf.startIndex, to: writtenUpTo) _precondition(newElementsCount <= writtenCount, "newElements.underestimatedCount was an overestimate") // can't check for overflow as sequences can underestimate @@ -1090,9 +1090,9 @@ extension ArraySlice: RangeReplaceableCollection { public mutating func _withUnsafeMutableBufferPointerIfSupported( _ body: (inout UnsafeMutableBufferPointer) throws -> R ) rethrows -> R? { - return try withUnsafeMutableBufferPointer { + return unsafe try withUnsafeMutableBufferPointer { (bufferPointer) -> R in - return try body(&bufferPointer) + return try unsafe body(&bufferPointer) } } @@ -1100,9 +1100,9 @@ extension ArraySlice: RangeReplaceableCollection { public mutating func withContiguousMutableStorageIfAvailable( _ body: (inout UnsafeMutableBufferPointer) throws -> R ) rethrows -> R? { - return try withUnsafeMutableBufferPointer { + return unsafe try withUnsafeMutableBufferPointer { (bufferPointer) -> R in - return try body(&bufferPointer) + return try unsafe body(&bufferPointer) } } @@ -1110,9 +1110,9 @@ extension ArraySlice: RangeReplaceableCollection { public func withContiguousStorageIfAvailable( _ body: (UnsafeBufferPointer) throws -> R ) rethrows -> R? { - return try withUnsafeBufferPointer { + return unsafe try withUnsafeBufferPointer { (bufferPointer) -> R in - return try body(bufferPointer) + return try unsafe body(bufferPointer) } } @@ -1154,12 +1154,12 @@ extension ArraySlice: CustomStringConvertible, CustomDebugStringConvertible { extension ArraySlice { @usableFromInline @_transparent internal func _cPointerArgs() -> (AnyObject?, UnsafeRawPointer?) 
{ - let p = _baseAddressIfContiguous - if _fastPath(p != nil || isEmpty) { + let p = unsafe _baseAddressIfContiguous + if unsafe _fastPath(p != nil || isEmpty) { return (_owner, UnsafeRawPointer(p)) } let n = ContiguousArray(self._buffer)._buffer - return (n.owner, UnsafeRawPointer(n.firstElementAddress)) + return unsafe (n.owner, UnsafeRawPointer(n.firstElementAddress)) } } @@ -1219,7 +1219,7 @@ extension ArraySlice { mutating func __abi_withUnsafeMutableBufferPointer( _ body: (inout UnsafeMutableBufferPointer) throws -> R ) rethrows -> R { - return try withUnsafeMutableBufferPointer(body) + return try unsafe withUnsafeMutableBufferPointer(body) } /// Calls the given closure with a pointer to the array's mutable contiguous @@ -1273,11 +1273,11 @@ extension ArraySlice { // Create an UnsafeBufferPointer that we can pass to body let pointer = _buffer.firstElementAddress - var inoutBufferPointer = UnsafeMutableBufferPointer( + var inoutBufferPointer = unsafe UnsafeMutableBufferPointer( start: pointer, count: count) defer { - _precondition( + unsafe _precondition( inoutBufferPointer.baseAddress == pointer && inoutBufferPointer.count == count, "ArraySlice withUnsafeMutableBufferPointer: replacing the buffer is not allowed") @@ -1286,7 +1286,7 @@ extension ArraySlice { } // Invoke the body. - return try body(&inoutBufferPointer) + return try unsafe body(&inoutBufferPointer) } @inlinable @@ -1303,21 +1303,21 @@ extension ArraySlice { _precondition(self.count <= buffer.count, "Insufficient space allocated to copy array contents") - if let s = _baseAddressIfContiguous { - p.initialize(from: s, count: self.count) + if let s = unsafe _baseAddressIfContiguous { + unsafe p.initialize(from: s, count: self.count) // Need a _fixLifetime bracketing the _baseAddressIfContiguous getter // and all uses of the pointer it returns: _fixLifetime(self._owner) } else { for x in self { - p.initialize(to: x) - p += 1 + unsafe p.initialize(to: x) + unsafe p += 1 } } var it = IndexingIterator(_elements: self) it._position = endIndex - return (it,buffer.index(buffer.startIndex, offsetBy: self.count)) + return (it,unsafe buffer.index(buffer.startIndex, offsetBy: self.count)) } } @@ -1401,7 +1401,7 @@ extension ArraySlice: Equatable where Element: Equatable { } // Test referential equality. - if lhsCount == 0 || lhs._buffer.identity == rhs._buffer.identity { + if unsafe lhsCount == 0 || lhs._buffer.identity == rhs._buffer.identity { return true } @@ -1483,8 +1483,8 @@ extension ArraySlice { public mutating func withUnsafeMutableBytes( _ body: (UnsafeMutableRawBufferPointer) throws -> R ) rethrows -> R { - return try self.withUnsafeMutableBufferPointer { - return try body(UnsafeMutableRawBufferPointer($0)) + return try unsafe self.withUnsafeMutableBufferPointer { + return try unsafe body(UnsafeMutableRawBufferPointer($0)) } } @@ -1519,8 +1519,8 @@ extension ArraySlice { public func withUnsafeBytes( _ body: (UnsafeRawBufferPointer) throws -> R ) rethrows -> R { - return try self.withUnsafeBufferPointer { - try body(UnsafeRawBufferPointer($0)) + return try unsafe self.withUnsafeBufferPointer { + try unsafe body(UnsafeRawBufferPointer($0)) } } } @@ -1546,7 +1546,7 @@ extension ArraySlice { // copy.) 
@_alwaysEmitIntoClient public func _copyToNewArray() -> [Element] { - Array(unsafeUninitializedCapacity: self.count) { buffer, count in + unsafe Array(unsafeUninitializedCapacity: self.count) { buffer, count in var (it, c) = self._buffer._copyContents(initializing: buffer) _precondition(it.next() == nil) count = c diff --git a/stdlib/public/core/AssertCommon.swift b/stdlib/public/core/AssertCommon.swift index c9eb7540a1df9..76ac14bf46e8b 100644 --- a/stdlib/public/core/AssertCommon.swift +++ b/stdlib/public/core/AssertCommon.swift @@ -106,7 +106,7 @@ internal func _assertionFailure( (message) -> Void in file.withUTF8Buffer { (file) -> Void in - _swift_stdlib_reportFatalErrorInFile( + unsafe _swift_stdlib_reportFatalErrorInFile( prefix.baseAddress!, CInt(prefix.count), message.baseAddress!, CInt(message.count), file.baseAddress!, CInt(file.count), UInt32(line), @@ -150,7 +150,7 @@ internal func _assertionFailure( (messageUTF8) -> Void in file.withUTF8Buffer { (file) -> Void in - _swift_stdlib_reportFatalErrorInFile( + unsafe _swift_stdlib_reportFatalErrorInFile( prefix.baseAddress!, CInt(prefix.count), messageUTF8.baseAddress!, CInt(messageUTF8.count), file.baseAddress!, CInt(file.count), UInt32(line), @@ -185,7 +185,7 @@ internal func _assertionFailure( var message = message message.withUTF8 { (messageUTF8) -> Void in - _swift_stdlib_reportFatalError( + unsafe _swift_stdlib_reportFatalError( prefix.baseAddress!, CInt(prefix.count), messageUTF8.baseAddress!, CInt(messageUTF8.count), flags) @@ -255,7 +255,7 @@ func _unimplementedInitializer(className: StaticString, (initName) in file.withUTF8Buffer { (file) in - _swift_stdlib_reportUnimplementedInitializerInFile( + unsafe _swift_stdlib_reportUnimplementedInitializerInFile( className.baseAddress!, CInt(className.count), initName.baseAddress!, CInt(initName.count), file.baseAddress!, CInt(file.count), @@ -269,7 +269,7 @@ func _unimplementedInitializer(className: StaticString, (className) in initName.withUTF8Buffer { (initName) in - _swift_stdlib_reportUnimplementedInitializer( + unsafe _swift_stdlib_reportUnimplementedInitializer( className.baseAddress!, CInt(className.count), initName.baseAddress!, CInt(initName.count), /*flags:*/ 0) diff --git a/stdlib/public/core/AtomicInt.swift.gyb b/stdlib/public/core/AtomicInt.swift.gyb index 5141cc075ccc4..8662e27c900ad 100644 --- a/stdlib/public/core/AtomicInt.swift.gyb +++ b/stdlib/public/core/AtomicInt.swift.gyb @@ -20,7 +20,7 @@ public final class _stdlib_AtomicInt { internal var _value: Int internal var _valuePtr: UnsafeMutablePointer { - return _getUnsafePointerToStoredProperties(self).assumingMemoryBound( + return unsafe _getUnsafePointerToStoredProperties(self).assumingMemoryBound( to: Int.self) } @@ -29,17 +29,17 @@ public final class _stdlib_AtomicInt { } public func store(_ desired: Int) { - return _swift_stdlib_atomicStoreInt(object: _valuePtr, desired: desired) + return unsafe _swift_stdlib_atomicStoreInt(object: _valuePtr, desired: desired) } public func load() -> Int { - return _swift_stdlib_atomicLoadInt(object: _valuePtr) + return unsafe _swift_stdlib_atomicLoadInt(object: _valuePtr) } % for operation_name, operation in [ ('Add', '+'), ('And', '&'), ('Or', '|'), ('Xor', '^') ]: @discardableResult public func fetchAnd${operation_name}(_ operand: Int) -> Int { - return _swift_stdlib_atomicFetch${operation_name}Int( + return unsafe _swift_stdlib_atomicFetch${operation_name}Int( object: _valuePtr, operand: operand) } @@ -51,7 +51,7 @@ public final class _stdlib_AtomicInt { public func 
compareExchange(expected: inout Int, desired: Int) -> Bool { var expectedVar = expected - let result = _swift_stdlib_atomicCompareExchangeStrongInt( + let result = unsafe _swift_stdlib_atomicCompareExchangeStrongInt( object: _valuePtr, expected: &expectedVar, desired: desired) @@ -70,14 +70,14 @@ internal func _swift_stdlib_atomicCompareExchangeStrongInt( desired: Int) -> Bool { #if _pointerBitWidth(_64) let (oldValue, won) = Builtin.cmpxchg_seqcst_seqcst_Int64( - target._rawValue, expected.pointee._value, desired._value) + target._rawValue, unsafe expected.pointee._value, desired._value) #elseif _pointerBitWidth(_32) let (oldValue, won) = Builtin.cmpxchg_seqcst_seqcst_Int32( - target._rawValue, expected.pointee._value, desired._value) + target._rawValue, unsafe expected.pointee._value, desired._value) #else #error("Unknown platform") #endif - expected.pointee._value = oldValue + unsafe expected.pointee._value = oldValue return Bool(won) } @@ -121,12 +121,12 @@ func _swift_stdlib_atomicFetch${operation}Int( operand: Int) -> Int { let rawTarget = UnsafeMutableRawPointer(target) #if _pointerBitWidth(_64) - let value = _swift_stdlib_atomicFetch${operation}Int64( - object: rawTarget.assumingMemoryBound(to: Int64.self), + let value = unsafe _swift_stdlib_atomicFetch${operation}Int64( + object: unsafe rawTarget.assumingMemoryBound(to: Int64.self), operand: Int64(operand)) #elseif _pointerBitWidth(_32) - let value = _swift_stdlib_atomicFetch${operation}Int32( - object: rawTarget.assumingMemoryBound(to: Int32.self), + let value = unsafe _swift_stdlib_atomicFetch${operation}Int32( + object: unsafe rawTarget.assumingMemoryBound(to: Int32.self), operand: Int32(operand)) #else #error("Unknown platform") diff --git a/stdlib/public/core/Bitset.swift b/stdlib/public/core/Bitset.swift index d6448d8d1c883..ad0e874feb8dd 100644 --- a/stdlib/public/core/Bitset.swift +++ b/stdlib/public/core/Bitset.swift @@ -18,17 +18,19 @@ /// is fixed at its initialization. @frozen @usableFromInline // @testable +@unsafe internal struct _UnsafeBitset { @usableFromInline internal let words: UnsafeMutablePointer @usableFromInline + @safe internal let wordCount: Int @inlinable @inline(__always) internal init(words: UnsafeMutablePointer, wordCount: Int) { - self.words = words + unsafe self.words = unsafe words self.wordCount = wordCount } } @@ -43,7 +45,7 @@ extension _UnsafeBitset { _internalInvariant(element >= 0) // Note: We perform on UInts to get faster unsigned math (shifts). let element = UInt(bitPattern: element) - let capacity = UInt(bitPattern: Word.capacity) + let capacity = unsafe UInt(bitPattern: Word.capacity) return Int(bitPattern: element / capacity) } @@ -53,21 +55,21 @@ extension _UnsafeBitset { _internalInvariant(element >= 0) // Note: We perform on UInts to get faster unsigned math (masking). 
let element = UInt(bitPattern: element) - let capacity = UInt(bitPattern: Word.capacity) + let capacity = unsafe UInt(bitPattern: Word.capacity) return Int(bitPattern: element % capacity) } @inlinable @inline(__always) internal static func split(_ element: Int) -> (word: Int, bit: Int) { - return (word(for: element), bit(for: element)) + return unsafe (word(for: element), bit(for: element)) } @inlinable @inline(__always) internal static func join(word: Int, bit: Int) -> Int { - _internalInvariant(bit >= 0 && bit < Word.capacity) - return word &* Word.capacity &+ bit + unsafe _internalInvariant(bit >= 0 && bit < Word.capacity) + return unsafe word &* Word.capacity &+ bit } } @@ -75,57 +77,57 @@ extension _UnsafeBitset { @inlinable @inline(__always) internal static func wordCount(forCapacity capacity: Int) -> Int { - return word(for: capacity &+ Word.capacity &- 1) + return unsafe word(for: capacity &+ Word.capacity &- 1) } @inlinable internal var capacity: Int { @inline(__always) get { - return wordCount &* Word.capacity + return unsafe wordCount &* Word.capacity } } @inlinable @inline(__always) internal func isValid(_ element: Int) -> Bool { - return element >= 0 && element <= capacity + return unsafe element >= 0 && element <= capacity } @inlinable @inline(__always) internal func uncheckedContains(_ element: Int) -> Bool { - _internalInvariant(isValid(element)) - let (word, bit) = _UnsafeBitset.split(element) - return words[word].uncheckedContains(bit) + unsafe _internalInvariant(isValid(element)) + let (word, bit) = unsafe _UnsafeBitset.split(element) + return unsafe words[word].uncheckedContains(bit) } @inlinable @inline(__always) @discardableResult internal func uncheckedInsert(_ element: Int) -> Bool { - _internalInvariant(isValid(element)) - let (word, bit) = _UnsafeBitset.split(element) - return words[word].uncheckedInsert(bit) + unsafe _internalInvariant(isValid(element)) + let (word, bit) = unsafe _UnsafeBitset.split(element) + return unsafe words[word].uncheckedInsert(bit) } @inlinable @inline(__always) @discardableResult internal func uncheckedRemove(_ element: Int) -> Bool { - _internalInvariant(isValid(element)) - let (word, bit) = _UnsafeBitset.split(element) - return words[word].uncheckedRemove(bit) + unsafe _internalInvariant(isValid(element)) + let (word, bit) = unsafe _UnsafeBitset.split(element) + return unsafe words[word].uncheckedRemove(bit) } @inlinable @inline(__always) internal func clear() { - words.update(repeating: .empty, count: wordCount) + unsafe words.update(repeating: .empty, count: wordCount) } } -extension _UnsafeBitset: Sequence { +extension _UnsafeBitset: @unsafe Sequence { @usableFromInline internal typealias Element = Int @@ -133,19 +135,19 @@ extension _UnsafeBitset: Sequence { internal var count: Int { var count = 0 for w in 0 ..< wordCount { - count += words[w].count + unsafe count += words[w].count } return count } @inlinable internal var underestimatedCount: Int { - return count + return unsafe count } @inlinable func makeIterator() -> Iterator { - return Iterator(self) + return unsafe Iterator(self) } @usableFromInline @@ -160,21 +162,21 @@ extension _UnsafeBitset: Sequence { @inlinable internal init(_ bitset: _UnsafeBitset) { - self.bitset = bitset - self.index = 0 - self.word = bitset.wordCount > 0 ? bitset.words[0] : .empty + unsafe self.bitset = unsafe bitset + unsafe self.index = 0 + unsafe self.word = unsafe bitset.wordCount > 0 ? bitset.words[0] : .empty } @inlinable internal mutating func next() -> Int? 
{ - if let bit = word.next() { - return _UnsafeBitset.join(word: index, bit: bit) + if let bit = unsafe word.next() { + return unsafe _UnsafeBitset.join(word: index, bit: bit) } - while (index + 1) < bitset.wordCount { - index += 1 - word = bitset.words[index] - if let bit = word.next() { - return _UnsafeBitset.join(word: index, bit: bit) + while unsafe (index + 1) < bitset.wordCount { + unsafe index += 1 + unsafe word = unsafe bitset.words[index] + if let bit = unsafe word.next() { + return unsafe _UnsafeBitset.join(word: index, bit: bit) } } return nil @@ -196,7 +198,7 @@ extension _UnsafeBitset { @inlinable internal init(_ value: UInt) { - self.value = value + unsafe self.value = value } } } @@ -214,7 +216,7 @@ extension _UnsafeBitset.Word { @inline(__always) internal func uncheckedContains(_ bit: Int) -> Bool { _internalInvariant(bit >= 0 && bit < UInt.bitWidth) - return value & (1 &<< bit) != 0 + return unsafe value & (1 &<< bit) != 0 } @inlinable @@ -223,8 +225,8 @@ extension _UnsafeBitset.Word { internal mutating func uncheckedInsert(_ bit: Int) -> Bool { _internalInvariant(bit >= 0 && bit < UInt.bitWidth) let mask: UInt = 1 &<< bit - let inserted = value & mask == 0 - value |= mask + let inserted = unsafe value & mask == 0 + unsafe value |= mask return inserted } @@ -234,8 +236,8 @@ extension _UnsafeBitset.Word { internal mutating func uncheckedRemove(_ bit: Int) -> Bool { _internalInvariant(bit >= 0 && bit < UInt.bitWidth) let mask: UInt = 1 &<< bit - let removed = value & mask != 0 - value &= ~mask + let removed = unsafe value & mask != 0 + unsafe value &= ~mask return removed } } @@ -245,8 +247,8 @@ extension _UnsafeBitset.Word { var minimum: Int? { @inline(__always) get { - guard value != 0 else { return nil } - return value.trailingZeroBitCount + guard unsafe value != 0 else { return nil } + return unsafe value.trailingZeroBitCount } } @@ -254,8 +256,8 @@ extension _UnsafeBitset.Word { var maximum: Int? 
{ @inline(__always) get { - guard value != 0 else { return nil } - return _UnsafeBitset.Word.capacity &- 1 &- value.leadingZeroBitCount + guard unsafe value != 0 else { return nil } + return unsafe _UnsafeBitset.Word.capacity &- 1 &- value.leadingZeroBitCount } } @@ -263,32 +265,32 @@ extension _UnsafeBitset.Word { var complement: _UnsafeBitset.Word { @inline(__always) get { - return _UnsafeBitset.Word(~value) + return unsafe _UnsafeBitset.Word(~value) } } @inlinable @inline(__always) internal func subtracting(elementsBelow bit: Int) -> _UnsafeBitset.Word { - _internalInvariant(bit >= 0 && bit < _UnsafeBitset.Word.capacity) + unsafe _internalInvariant(bit >= 0 && bit < _UnsafeBitset.Word.capacity) let mask = UInt.max &<< bit - return _UnsafeBitset.Word(value & mask) + return unsafe _UnsafeBitset.Word(value & mask) } @inlinable @inline(__always) internal func intersecting(elementsBelow bit: Int) -> _UnsafeBitset.Word { - _internalInvariant(bit >= 0 && bit < _UnsafeBitset.Word.capacity) + unsafe _internalInvariant(bit >= 0 && bit < _UnsafeBitset.Word.capacity) let mask: UInt = (1 as UInt &<< bit) &- 1 - return _UnsafeBitset.Word(value & mask) + return unsafe _UnsafeBitset.Word(value & mask) } @inlinable @inline(__always) internal func intersecting(elementsAbove bit: Int) -> _UnsafeBitset.Word { - _internalInvariant(bit >= 0 && bit < _UnsafeBitset.Word.capacity) + unsafe _internalInvariant(bit >= 0 && bit < _UnsafeBitset.Word.capacity) let mask = (UInt.max &<< bit) &<< 1 - return _UnsafeBitset.Word(value & mask) + return unsafe _UnsafeBitset.Word(value & mask) } } @@ -297,7 +299,7 @@ extension _UnsafeBitset.Word { internal static var empty: _UnsafeBitset.Word { @inline(__always) get { - return _UnsafeBitset.Word(0) + return unsafe _UnsafeBitset.Word(0) } } @@ -305,7 +307,7 @@ extension _UnsafeBitset.Word { internal static var allBits: _UnsafeBitset.Word { @inline(__always) get { - return _UnsafeBitset.Word(UInt.max) + return unsafe _UnsafeBitset.Word(UInt.max) } } } @@ -314,26 +316,26 @@ extension _UnsafeBitset.Word { // Iteration with `next()` destroys the word's value; however, this won't cause // problems in normal use, because `next()` is usually called on a separate // iterator, not the original word. -extension _UnsafeBitset.Word: Sequence, IteratorProtocol { +extension _UnsafeBitset.Word: @unsafe Sequence, @unsafe IteratorProtocol { @usableFromInline typealias Element = Int @inlinable internal var count: Int { - return value.nonzeroBitCount + return unsafe value.nonzeroBitCount } @inlinable internal var underestimatedCount: Int { - return count + return unsafe count } @inlinable internal var isEmpty: Bool { @inline(__always) get { - return value == 0 + return unsafe value == 0 } } @@ -341,9 +343,9 @@ extension _UnsafeBitset.Word: Sequence, IteratorProtocol { /// and also destructively clear it. @inlinable internal mutating func next() -> Int? { - guard value != 0 else { return nil } - let bit = value.trailingZeroBitCount - value &= value &- 1 // Clear lowest nonzero bit. + guard unsafe value != 0 else { return nil } + let bit = unsafe value.trailingZeroBitCount + unsafe value &= value &- 1 // Clear lowest nonzero bit. 
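The `_UnsafeBitset` changes above illustrate the core adoption pattern in this diff: the pointer-backed type is marked `@unsafe`, a trivially safe stored property is opted back in with `@safe`, every pointer operation is acknowledged with an `unsafe` expression, and conformances of the type are spelled `@unsafe Sequence`. Below is a minimal sketch of the same pattern on a hypothetical `WordBuffer` type (the name and API are illustrative, not taken from this diff), assuming the module is built with `-strict-memory-safety`:

```swift
// Hypothetical example of the adoption pattern above; not part of the diff.
@unsafe
internal struct WordBuffer {
  internal let words: UnsafeMutablePointer<UInt>

  @safe
  internal let count: Int  // reading the count involves no memory-unsafe operation

  internal init(words: UnsafeMutablePointer<UInt>, count: Int) {
    unsafe self.words = unsafe words
    self.count = count
  }

  internal func word(at index: Int) -> UInt {
    precondition(index >= 0 && index < count)
    return unsafe words[index]  // pointer subscript must be acknowledged
  }
}

// Conformances of a memory-unsafe type are themselves marked @unsafe.
extension WordBuffer: @unsafe Sequence {
  internal func makeIterator() -> UnsafeBufferPointer<UInt>.Iterator {
    return unsafe UnsafeBufferPointer(start: words, count: count).makeIterator()
  }
}
```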
return bit } } @@ -355,12 +357,12 @@ extension _UnsafeBitset { wordCount: Int, body: (_UnsafeBitset) throws -> R ) rethrows -> R { - try withUnsafeTemporaryAllocation( + try unsafe withUnsafeTemporaryAllocation( of: _UnsafeBitset.Word.self, capacity: wordCount ) { buffer in - let bitset = _UnsafeBitset( + let bitset = unsafe _UnsafeBitset( words: buffer.baseAddress!, wordCount: buffer.count) - return try body(bitset) + return try unsafe body(bitset) } } @@ -370,12 +372,12 @@ extension _UnsafeBitset { capacity: Int, body: (_UnsafeBitset) throws -> R ) rethrows -> R { - let wordCount = Swift.max(1, Self.wordCount(forCapacity: capacity)) - return try _withTemporaryUninitializedBitset( + let wordCount = unsafe Swift.max(1, Self.wordCount(forCapacity: capacity)) + return try unsafe _withTemporaryUninitializedBitset( wordCount: wordCount ) { bitset in - bitset.clear() - return try body(bitset) + unsafe bitset.clear() + return try unsafe body(bitset) } } } @@ -387,11 +389,11 @@ extension _UnsafeBitset { of original: _UnsafeBitset, body: (_UnsafeBitset) throws -> R ) rethrows -> R { - try _withTemporaryUninitializedBitset( + try unsafe _withTemporaryUninitializedBitset( wordCount: original.wordCount ) { bitset in - bitset.words.initialize(from: original.words, count: original.wordCount) - return try body(bitset) + unsafe bitset.words.initialize(from: original.words, count: original.wordCount) + return try unsafe body(bitset) } } } diff --git a/stdlib/public/core/BridgeObjectiveC.swift b/stdlib/public/core/BridgeObjectiveC.swift index 5dc4ead62b263..a58c1e3965a38 100644 --- a/stdlib/public/core/BridgeObjectiveC.swift +++ b/stdlib/public/core/BridgeObjectiveC.swift @@ -95,9 +95,9 @@ internal func _SwiftCreateBridgedArray_DoNotCall( values: UnsafePointer, numValues: Int ) -> Unmanaged { - let bufPtr = UnsafeBufferPointer(start: values, count: numValues) - let bridged = Array(bufPtr)._bridgeToObjectiveCImpl() - return Unmanaged.passRetained(bridged) + let bufPtr = unsafe UnsafeBufferPointer(start: values, count: numValues) + let bridged = unsafe Array(bufPtr)._bridgeToObjectiveCImpl() + return unsafe Unmanaged.passRetained(bridged) } // Note: This function is not intended to be called from Swift. The @@ -110,9 +110,9 @@ internal func _SwiftCreateBridgedMutableArray_DoNotCall( values: UnsafePointer, numValues: Int ) -> Unmanaged { - let bufPtr = UnsafeBufferPointer(start: values, count: numValues) - let bridged = _SwiftNSMutableArray(Array(bufPtr)) - return Unmanaged.passRetained(bridged) + let bufPtr = unsafe UnsafeBufferPointer(start: values, count: numValues) + let bridged = unsafe _SwiftNSMutableArray(Array(bufPtr)) + return unsafe Unmanaged.passRetained(bridged) } @_silgen_name("swift_stdlib_connectNSBaseClasses") @@ -207,7 +207,7 @@ extension _BridgeableMetatype: Sendable {} @inlinable public func _bridgeAnythingToObjectiveC(_ x: T) -> AnyObject { if _fastPath(_isClassOrObjCExistential(T.self)) { - return unsafeBitCast(x, to: AnyObject.self) + return unsafe unsafeBitCast(x, to: AnyObject.self) } return _bridgeAnythingNonVerbatimToObjectiveC(x) } @@ -328,7 +328,7 @@ internal func _bridgeNonVerbatimBoxedValue( _ x: UnsafePointer, _ result: inout NativeType? ) { - result = x.pointee + result = unsafe x.pointee } /// Runtime optional to conditionally perform a bridge from an object to a value @@ -407,15 +407,16 @@ public func _getBridgedNonVerbatimObjectiveCType(_: T.Type) -> Any.Type? 
/// because it only needs to reference the results of inout conversions, which /// already have writeback-scoped lifetime. @frozen +@unsafe public struct AutoreleasingUnsafeMutablePointer - : _Pointer { + : @unsafe _Pointer { public let _rawValue: Builtin.RawPointer @_transparent public // COMPILER_INTRINSIC init(_ _rawValue: Builtin.RawPointer) { - self._rawValue = _rawValue + unsafe self._rawValue = _rawValue } /// Retrieve or set the `Pointee` instance referenced by `self`. @@ -440,15 +441,15 @@ public struct AutoreleasingUnsafeMutablePointer // optional type, so we actually need to load it as an optional, and // explicitly handle the nil case. let unmanaged = - UnsafePointer>>(_rawValue).pointee - return _unsafeReferenceCast( + unsafe UnsafePointer>>(_rawValue).pointee + return unsafe _unsafeReferenceCast( unmanaged?.takeUnretainedValue(), to: Pointee.self) } @_transparent nonmutating set { // Autorelease the object reference. - let object = _unsafeReferenceCast(newValue, to: Optional.self) + let object = unsafe _unsafeReferenceCast(newValue, to: Optional.self) Builtin.retain(object) Builtin.autorelease(object) @@ -456,11 +457,11 @@ public struct AutoreleasingUnsafeMutablePointer // memory addressed by this pointer. let unmanaged: Optional> if let object = object { - unmanaged = Unmanaged.passUnretained(object) + unsafe unmanaged = unsafe Unmanaged.passUnretained(object) } else { - unmanaged = nil + unsafe unmanaged = nil } - UnsafeMutablePointer>>(_rawValue).pointee = + unsafe UnsafeMutablePointer>>(_rawValue).pointee = unmanaged } } @@ -473,7 +474,7 @@ public struct AutoreleasingUnsafeMutablePointer public subscript(i: Int) -> Pointee { @_transparent get { - return self.advanced(by: i).pointee + return unsafe self.advanced(by: i).pointee } } @@ -487,7 +488,7 @@ public struct AutoreleasingUnsafeMutablePointer /// the underlying memory's bound type is undefined. @_transparent public init(@_nonEphemeral _ from: UnsafeMutablePointer) { - self._rawValue = from._rawValue + unsafe self._rawValue = from._rawValue } /// Explicit construction from an UnsafeMutablePointer. @@ -502,8 +503,8 @@ public struct AutoreleasingUnsafeMutablePointer /// the underlying memory's bound type is undefined. @_transparent public init?(@_nonEphemeral _ from: UnsafeMutablePointer?) { - guard let unwrapped = from else { return nil } - self.init(unwrapped) + guard let unwrapped = unsafe from else { return nil } + unsafe self.init(unwrapped) } /// Explicit construction from a UnsafePointer. @@ -517,7 +518,7 @@ public struct AutoreleasingUnsafeMutablePointer internal init( @_nonEphemeral _ from: UnsafePointer ) { - self._rawValue = from._rawValue + unsafe self._rawValue = from._rawValue } /// Explicit construction from a UnsafePointer. @@ -533,8 +534,8 @@ public struct AutoreleasingUnsafeMutablePointer internal init?( @_nonEphemeral _ from: UnsafePointer? ) { - guard let unwrapped = from else { return nil } - self.init(unwrapped) + guard let unwrapped = unsafe from else { return nil } + unsafe self.init(unwrapped) } } @@ -547,7 +548,7 @@ extension UnsafeMutableRawPointer { public init( @_nonEphemeral _ other: AutoreleasingUnsafeMutablePointer ) { - _rawValue = other._rawValue + _rawValue = unsafe other._rawValue } /// Creates a new raw pointer from an `AutoreleasingUnsafeMutablePointer` @@ -559,8 +560,8 @@ extension UnsafeMutableRawPointer { public init?( @_nonEphemeral _ other: AutoreleasingUnsafeMutablePointer? 
) { - guard let unwrapped = other else { return nil } - self.init(unwrapped) + guard let unwrapped = unsafe other else { return nil } + unsafe self.init(unwrapped) } } @@ -573,7 +574,7 @@ extension UnsafeRawPointer { public init( @_nonEphemeral _ other: AutoreleasingUnsafeMutablePointer ) { - _rawValue = other._rawValue + _rawValue = unsafe other._rawValue } /// Creates a new raw pointer from an `AutoreleasingUnsafeMutablePointer` @@ -585,14 +586,15 @@ extension UnsafeRawPointer { public init?( @_nonEphemeral _ other: AutoreleasingUnsafeMutablePointer? ) { - guard let unwrapped = other else { return nil } - self.init(unwrapped) + guard let unwrapped = unsafe other else { return nil } + unsafe self.init(unwrapped) } } @available(*, unavailable) extension AutoreleasingUnsafeMutablePointer: Sendable { } +@unsafe internal struct _CocoaFastEnumerationStackBuf { // Clang uses 16 pointers. So do we. internal var _item0: UnsafeRawPointer? @@ -618,24 +620,24 @@ internal struct _CocoaFastEnumerationStackBuf { } internal init() { - _item0 = nil - _item1 = _item0 - _item2 = _item0 - _item3 = _item0 - _item4 = _item0 - _item5 = _item0 - _item6 = _item0 - _item7 = _item0 - _item8 = _item0 - _item9 = _item0 - _item10 = _item0 - _item11 = _item0 - _item12 = _item0 - _item13 = _item0 - _item14 = _item0 - _item15 = _item0 - - _internalInvariant(MemoryLayout.size(ofValue: self) >= + unsafe _item0 = nil + unsafe _item1 = unsafe _item0 + unsafe _item2 = unsafe _item0 + unsafe _item3 = unsafe _item0 + unsafe _item4 = unsafe _item0 + unsafe _item5 = unsafe _item0 + unsafe _item6 = unsafe _item0 + unsafe _item7 = unsafe _item0 + unsafe _item8 = unsafe _item0 + unsafe _item9 = unsafe _item0 + unsafe _item10 = unsafe _item0 + unsafe _item11 = unsafe _item0 + unsafe _item12 = unsafe _item0 + unsafe _item13 = unsafe _item0 + unsafe _item14 = unsafe _item0 + unsafe _item15 = unsafe _item0 + + unsafe _internalInvariant(MemoryLayout.size(ofValue: self) >= MemoryLayout>.size * count) } } @@ -649,7 +651,7 @@ public func _getObjCTypeEncoding(_ type: T.Type) -> UnsafePointer { // This must be `@_transparent` because `Builtin.getObjCTypeEncoding` is // only supported by the compiler for concrete types that are representable // in ObjC. 
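The bridging helpers above apply the same acknowledgment to `Unmanaged` and to buffer construction over caller-supplied memory. A rough caller-side sketch follows (the function name is hypothetical; whether such a signature additionally wants `@unsafe` is not shown in these hunks):

```swift
// Hypothetical sketch, not from the diff: bridging a C buffer of object
// references into a retained array object under strict memory safety.
func retainedArrayObject(
  values: UnsafePointer<AnyObject>, count: Int
) -> Unmanaged<AnyObject> {
  // Forming a buffer over externally supplied memory is memory-unsafe.
  let buffer = unsafe UnsafeBufferPointer(start: values, count: count)
  let array: [AnyObject] = unsafe Array(buffer)
  // Handing out an unbalanced +1 reference is acknowledged as well.
  return unsafe Unmanaged.passRetained(array as AnyObject)
}
```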
- return UnsafePointer(Builtin.getObjCTypeEncoding(type)) + return unsafe UnsafePointer(Builtin.getObjCTypeEncoding(type)) } #endif diff --git a/stdlib/public/core/BridgingBuffer.swift b/stdlib/public/core/BridgingBuffer.swift index db83773ee7d53..a1300c04c053a 100644 --- a/stdlib/public/core/BridgingBuffer.swift +++ b/stdlib/public/core/BridgingBuffer.swift @@ -33,8 +33,8 @@ where Header == _BridgingBufferHeader, Element == AnyObject { self.init( _uncheckedBufferClass: __BridgingBufferStorage.self, minimumCapacity: count) - self.withUnsafeMutablePointerToHeader { - $0.initialize(to: Header(count)) + unsafe self.withUnsafeMutablePointerToHeader { + unsafe $0.initialize(to: Header(count)) } } @@ -52,14 +52,14 @@ where Header == _BridgingBufferHeader, Element == AnyObject { internal subscript(i: Int) -> Element { @inline(__always) get { - return withUnsafeMutablePointerToElements { $0[i] } + return unsafe withUnsafeMutablePointerToElements { unsafe $0[i] } } } internal var baseAddress: UnsafeMutablePointer { @inline(__always) get { - return withUnsafeMutablePointerToElements { $0 } + return unsafe withUnsafeMutablePointerToElements { unsafe $0 } } } diff --git a/stdlib/public/core/Builtin.swift b/stdlib/public/core/Builtin.swift index 547d816b34a6b..6c2f57fb5dc6c 100644 --- a/stdlib/public/core/Builtin.swift +++ b/stdlib/public/core/Builtin.swift @@ -129,14 +129,14 @@ public func _specialize(_ x: T, for: U.Type) -> U? { /// `unsafeBitCast` something to `AnyObject`. @usableFromInline @_transparent internal func _reinterpretCastToAnyObject(_ x: T) -> AnyObject { - return unsafeBitCast(x, to: AnyObject.self) + return unsafe unsafeBitCast(x, to: AnyObject.self) } @usableFromInline @_transparent internal func == ( lhs: Builtin.NativeObject, rhs: Builtin.NativeObject ) -> Bool { - return unsafeBitCast(lhs, to: Int.self) == unsafeBitCast(rhs, to: Int.self) + return unsafe unsafeBitCast(lhs, to: Int.self) == unsafeBitCast(rhs, to: Int.self) } @usableFromInline @_transparent @@ -150,7 +150,7 @@ internal func != ( internal func == ( lhs: Builtin.RawPointer, rhs: Builtin.RawPointer ) -> Bool { - return unsafeBitCast(lhs, to: Int.self) == unsafeBitCast(rhs, to: Int.self) + return unsafe unsafeBitCast(lhs, to: Int.self) == unsafeBitCast(rhs, to: Int.self) } @usableFromInline @_transparent @@ -284,10 +284,10 @@ public func _unsafeUncheckedDowncast(_ x: AnyObject, to type: T.Ty @inline(__always) public func _getUnsafePointerToStoredProperties(_ x: AnyObject) -> UnsafeMutableRawPointer { - let storedPropertyOffset = _roundUp( + let storedPropertyOffset = unsafe _roundUp( MemoryLayout.size, toAlignment: MemoryLayout>.alignment) - return UnsafeMutableRawPointer(Builtin.bridgeToRawPointer(x)) + + return unsafe UnsafeMutableRawPointer(Builtin.bridgeToRawPointer(x)) + storedPropertyOffset } @@ -945,7 +945,7 @@ func _trueAfterDiagnostics() -> Builtin.Int1 { public func type(of value: T) -> Metatype { // This implementation is never used, since calls to `Swift.type(of:)` are // resolved as a special case by the type checker. - Builtin.staticReport(_trueAfterDiagnostics(), true._value, + unsafe Builtin.staticReport(_trueAfterDiagnostics(), true._value, ("internal consistency error: 'type(of:)' operation failed to resolve" as StaticString).utf8Start._rawValue) Builtin.unreachable() @@ -1042,7 +1042,7 @@ public func withoutActuallyEscaping( // This implementation is never used, since calls to // `Swift.withoutActuallyEscaping(_:do:)` are resolved as a special case by // the type checker. 
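As the `Builtin.swift` changes above show, one `unsafe` keyword covers the whole expression that follows it, so a comparison built from two `unsafeBitCast` calls needs only a single acknowledgment. A small sketch of the same idiom (hypothetical helper; in ordinary code `===` is the safe way to compare reference identity):

```swift
// Hypothetical helper, not from the diff: comparing two references by
// address under -strict-memory-safety.
func haveSameAddress(_ a: AnyObject, _ b: AnyObject) -> Bool {
  // One `unsafe` acknowledges both unsafeBitCast calls in this expression.
  return unsafe unsafeBitCast(a, to: Int.self) == unsafeBitCast(b, to: Int.self)
}
```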
- Builtin.staticReport(_trueAfterDiagnostics(), true._value, + unsafe Builtin.staticReport(_trueAfterDiagnostics(), true._value, ("internal consistency error: 'withoutActuallyEscaping(_:do:)' operation failed to resolve" as StaticString).utf8Start._rawValue) Builtin.unreachable() @@ -1057,7 +1057,7 @@ func __abi_withoutActuallyEscaping( // This implementation is never used, since calls to // `Swift.withoutActuallyEscaping(_:do:)` are resolved as a special case by // the type checker. - Builtin.staticReport(_trueAfterDiagnostics(), true._value, + unsafe Builtin.staticReport(_trueAfterDiagnostics(), true._value, ("internal consistency error: 'withoutActuallyEscaping(_:do:)' operation failed to resolve" as StaticString).utf8Start._rawValue) Builtin.unreachable() @@ -1073,7 +1073,7 @@ public func _openExistential( // This implementation is never used, since calls to // `Swift._openExistential(_:do:)` are resolved as a special case by // the type checker. - Builtin.staticReport(_trueAfterDiagnostics(), true._value, + unsafe Builtin.staticReport(_trueAfterDiagnostics(), true._value, ("internal consistency error: '_openExistential(_:do:)' operation failed to resolve" as StaticString).utf8Start._rawValue) Builtin.unreachable() @@ -1103,5 +1103,5 @@ func __abi_openExistential( @_alwaysEmitIntoClient public // @SPI(OSLog) func _getGlobalStringTablePointer(_ constant: String) -> UnsafePointer { - return UnsafePointer(Builtin.globalStringTablePointer(constant)); + return unsafe UnsafePointer(Builtin.globalStringTablePointer(constant)); } diff --git a/stdlib/public/core/CMakeLists.txt b/stdlib/public/core/CMakeLists.txt index c6ec3293caf70..73a98711a642f 100644 --- a/stdlib/public/core/CMakeLists.txt +++ b/stdlib/public/core/CMakeLists.txt @@ -322,6 +322,7 @@ list(APPEND swift_stdlib_compile_flags "-enable-experimental-feature" "BitwiseCo list(APPEND swift_stdlib_compile_flags "-enable-experimental-feature" "ValueGenerics") list(APPEND swift_stdlib_compile_flags "-enable-experimental-feature" "AddressableParameters") list(APPEND swift_stdlib_compile_flags "-enable-experimental-feature" "AddressableTypes") +list(APPEND swift_stdlib_compile_flags "-strict-memory-safety") if("${SWIFT_NATIVE_SWIFT_TOOLS_PATH}" STREQUAL "") set(swift_bin_dir "${CMAKE_BINARY_DIR}/bin") diff --git a/stdlib/public/core/CString.swift b/stdlib/public/core/CString.swift index b26b6600dd220..caa56549530b0 100644 --- a/stdlib/public/core/CString.swift +++ b/stdlib/public/core/CString.swift @@ -45,10 +45,10 @@ extension String { /// - Parameter nullTerminatedUTF8: /// A pointer to a null-terminated sequence of UTF-8 code units. public init(cString nullTerminatedUTF8: UnsafePointer) { - let len = UTF8._nullCodeUnitOffset(in: nullTerminatedUTF8) - let buffer = UnsafeBufferPointer(start: nullTerminatedUTF8, count: len) - self = buffer.withMemoryRebound(to: UInt8.self) { - String._fromUTF8Repairing($0).0 + let len = unsafe UTF8._nullCodeUnitOffset(in: nullTerminatedUTF8) + let buffer = unsafe UnsafeBufferPointer(start: nullTerminatedUTF8, count: len) + self = unsafe buffer.withMemoryRebound(to: UInt8.self) { + unsafe String._fromUTF8Repairing($0).0 } } @@ -71,19 +71,19 @@ extension String { "Use String(decoding: array, as: UTF8.self) instead, after truncating the null termination." 
) public init(cString nullTerminatedUTF8: [CChar]) { - self = nullTerminatedUTF8.withUnsafeBufferPointer { - $0.withMemoryRebound(to: UInt8.self, String.init(_checkingCString:)) + self = unsafe nullTerminatedUTF8.withUnsafeBufferPointer { + unsafe $0.withMemoryRebound(to: UInt8.self, String.init(_checkingCString:)) } } @_alwaysEmitIntoClient internal init(_checkingCString bytes: UnsafeBufferPointer) { - guard let length = bytes.firstIndex(of: 0) else { + guard let length = unsafe bytes.firstIndex(of: 0) else { _preconditionFailure( "input of String.init(cString:) must be null-terminated" ) } - self = String._fromUTF8Repairing( + self = unsafe String._fromUTF8Repairing( UnsafeBufferPointer( start: bytes.baseAddress._unsafelyUnwrappedUnchecked, count: length @@ -112,8 +112,8 @@ extension String { /// - Parameter nullTerminatedUTF8: /// A pointer to a null-terminated sequence of UTF-8 code units. public init(cString nullTerminatedUTF8: UnsafePointer) { - let len = UTF8._nullCodeUnitOffset(in: nullTerminatedUTF8) - self = String._fromUTF8Repairing( + let len = unsafe UTF8._nullCodeUnitOffset(in: nullTerminatedUTF8) + self = unsafe String._fromUTF8Repairing( UnsafeBufferPointer(start: nullTerminatedUTF8, count: len)).0 } @@ -135,8 +135,8 @@ extension String { "Use String(decoding: array, as: UTF8.self) instead, after truncating the null termination." ) public init(cString nullTerminatedUTF8: [UInt8]) { - self = nullTerminatedUTF8.withUnsafeBufferPointer { - String(_checkingCString: $0) + self = unsafe nullTerminatedUTF8.withUnsafeBufferPointer { + unsafe String(_checkingCString: $0) } } @@ -144,7 +144,7 @@ extension String { @_alwaysEmitIntoClient @available(*, deprecated, message: "Use a copy of the String argument") public init(cString nullTerminatedUTF8: String) { - self = nullTerminatedUTF8.withCString(String.init(cString:)) + self = unsafe nullTerminatedUTF8.withCString(String.init(cString:)) } @inlinable @@ -188,11 +188,11 @@ extension String { /// A pointer to a null-terminated sequence of UTF-8 code units. 
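On the caller side of these C-string initializers the same discipline applies: unwrapping an optional unsafe pointer and passing it along both have to be acknowledged. A minimal caller-side sketch (hypothetical function, assuming strict memory safety is enabled):

```swift
// Hypothetical sketch, not from the diff: consuming an optional
// null-terminated C string.
func copyCString(_ cString: UnsafePointer<CChar>?) -> String? {
  // Binding an optional unsafe pointer is itself an acknowledged step,
  // mirroring the `guard let ... = unsafe ...` pattern used throughout the diff.
  guard let cString = unsafe cString else { return nil }
  // String(cString:) traffics in unsafe pointers, so the call is acknowledged.
  return unsafe String(cString: cString)
}
```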
@_silgen_name("$sSS14validatingUTF8SSSgSPys4Int8VG_tcfC") public init?(validatingCString nullTerminatedUTF8: UnsafePointer) { - let len = UTF8._nullCodeUnitOffset(in: nullTerminatedUTF8) - let validated = nullTerminatedUTF8.withMemoryRebound( + let len = unsafe UTF8._nullCodeUnitOffset(in: nullTerminatedUTF8) + let validated = unsafe nullTerminatedUTF8.withMemoryRebound( to: UInt8.self, capacity: len, - { String._tryFromUTF8(UnsafeBufferPointer(start: $0, count: len)) } + { unsafe String._tryFromUTF8(UnsafeBufferPointer(start: $0, count: len)) } ) guard let validated else { return nil } self = validated @@ -233,7 +233,7 @@ extension String { @available(swift, deprecated: 6, renamed: "String.init(validatingCString:)") @_silgen_name("_swift_se0405_String_validatingUTF8") public init?(validatingUTF8 cString: UnsafePointer) { - self.init(validatingCString: cString) + unsafe self.init(validatingCString: cString) } /// Creates a new string by copying and validating the null-terminated UTF-8 @@ -259,8 +259,8 @@ extension String { "input of String.init(validatingCString:) must be null-terminated" ) } - let string = nullTerminatedUTF8.prefix(length).withUnsafeBufferPointer { - $0.withMemoryRebound(to: UInt8.self, String._tryFromUTF8(_:)) + let string = unsafe nullTerminatedUTF8.prefix(length).withUnsafeBufferPointer { + unsafe $0.withMemoryRebound(to: UInt8.self, String._tryFromUTF8(_:)) } guard let string else { return nil } self = string @@ -291,7 +291,7 @@ extension String { @_alwaysEmitIntoClient @available(*, deprecated, message: "Use a copy of the String argument") public init?(validatingCString nullTerminatedUTF8: String) { - self = nullTerminatedUTF8.withCString(String.init(cString:)) + self = unsafe nullTerminatedUTF8.withCString(String.init(cString:)) } @inlinable @@ -370,29 +370,29 @@ extension String { as encoding: Encoding.Type, repairingInvalidCodeUnits isRepairing: Bool = true ) -> (result: String, repairsMade: Bool)? { - guard let cPtr = cString else { return nil } + guard let cPtr = unsafe cString else { return nil } if _fastPath(encoding == Unicode.UTF8.self) { - let len = UTF8._nullCodeUnitOffset( + let len = unsafe UTF8._nullCodeUnitOffset( in: UnsafeRawPointer(cPtr).assumingMemoryBound(to: UInt8.self) ) - let bytes = UnsafeBufferPointer(start: cPtr, count: len) - return bytes.withMemoryRebound(to: UInt8.self) { codeUnits in + let bytes = unsafe UnsafeBufferPointer(start: cPtr, count: len) + return unsafe bytes.withMemoryRebound(to: UInt8.self) { codeUnits in if isRepairing { - return String._fromUTF8Repairing(codeUnits) + return unsafe String._fromUTF8Repairing(codeUnits) } - else if let str = String._tryFromUTF8(codeUnits) { + else if let str = unsafe String._tryFromUTF8(codeUnits) { return (str, false) } return nil } } - var end = cPtr - while end.pointee != 0 { end += 1 } - let len = end - cPtr - let codeUnits = UnsafeBufferPointer(start: cPtr, count: len) - return String._fromCodeUnits( + var end = unsafe cPtr + while unsafe end.pointee != 0 { unsafe end += 1 } + let len = unsafe end - cPtr + let codeUnits = unsafe UnsafeBufferPointer(start: cPtr, count: len) + return unsafe String._fromCodeUnits( codeUnits, encoding: encoding, repair: isRepairing) } @@ -410,13 +410,13 @@ extension String { } if _fastPath(encoding == Unicode.UTF8.self) { - return cString.prefix(length).withUnsafeBufferPointer { + return unsafe cString.prefix(length).withUnsafeBufferPointer { buffer -> (result: String, repairsMade: Bool)? 
in - return buffer.withMemoryRebound(to: UInt8.self) { codeUnits in + return unsafe buffer.withMemoryRebound(to: UInt8.self) { codeUnits in if isRepairing { - return String._fromUTF8Repairing(codeUnits) + return unsafe String._fromUTF8Repairing(codeUnits) } - else if let str = String._tryFromUTF8(codeUnits) { + else if let str = unsafe String._tryFromUTF8(codeUnits) { return (str, false) } return nil @@ -424,9 +424,9 @@ extension String { } } - return cString.prefix(length).withUnsafeBufferPointer { + return unsafe cString.prefix(length).withUnsafeBufferPointer { buf -> (result: String, repairsMade: Bool)? in - String._fromCodeUnits(buf, encoding: encoding, repair: isRepairing) + unsafe String._fromCodeUnits(buf, encoding: encoding, repair: isRepairing) } } @@ -438,8 +438,8 @@ extension String { as encoding: Encoding.Type, repairingInvalidCodeUnits isRepairing: Bool = true ) -> (result: String, repairsMade: Bool)? { - return cString.withCString(encodedAs: encoding) { - String.decodeCString( + return unsafe cString.withCString(encodedAs: encoding) { + unsafe String.decodeCString( $0, as: encoding, repairingInvalidCodeUnits: isRepairing ) } @@ -480,7 +480,7 @@ extension String { decodingCString nullTerminatedCodeUnits: UnsafePointer, as encoding: Encoding.Type ) { - self = String.decodeCString(nullTerminatedCodeUnits, as: encoding)!.0 + self = unsafe String.decodeCString(nullTerminatedCodeUnits, as: encoding)!.0 } /// Creates a new string by copying the null-terminated sequence of code units @@ -518,8 +518,8 @@ extension String { decodingCString nullTerminatedCodeUnits: String, as encoding: Encoding.Type ) { - self = nullTerminatedCodeUnits.withCString(encodedAs: encoding) { - String(decodingCString: $0, as: encoding.self) + self = unsafe nullTerminatedCodeUnits.withCString(encodedAs: encoding) { + unsafe String(decodingCString: $0, as: encoding.self) } } @@ -543,7 +543,7 @@ extension UnsafePointer where Pointee == UInt8 { @inlinable internal var _asCChar: UnsafePointer { @inline(__always) get { - return UnsafeRawPointer(self).assumingMemoryBound(to: CChar.self) + return unsafe UnsafeRawPointer(self).assumingMemoryBound(to: CChar.self) } } } @@ -551,7 +551,7 @@ extension UnsafePointer where Pointee == CChar { @inlinable internal var _asUInt8: UnsafePointer { @inline(__always) get { - return UnsafeRawPointer(self).assumingMemoryBound(to: UInt8.self) + return unsafe UnsafeRawPointer(self).assumingMemoryBound(to: UInt8.self) } } } diff --git a/stdlib/public/core/CTypes.swift b/stdlib/public/core/CTypes.swift index 628ba99a0312e..cee7b70a06ae4 100644 --- a/stdlib/public/core/CTypes.swift +++ b/stdlib/public/core/CTypes.swift @@ -157,12 +157,13 @@ public typealias CBool = Bool @frozen @unsafe public struct OpaquePointer { - @usableFromInline + @unsafe @usableFromInline + @safe internal var _rawValue: Builtin.RawPointer @usableFromInline @_transparent internal init(_ v: Builtin.RawPointer) { - self._rawValue = v + unsafe self._rawValue = v } } @@ -178,7 +179,7 @@ extension OpaquePointer { @_transparent public init?(bitPattern: Int) { if bitPattern == 0 { return nil } - self._rawValue = Builtin.inttoptr_Word(bitPattern._builtinWordValue) + unsafe self._rawValue = Builtin.inttoptr_Word(bitPattern._builtinWordValue) } /// Creates a new `OpaquePointer` from the given address, specified as a bit @@ -189,7 +190,7 @@ extension OpaquePointer { @_transparent public init?(bitPattern: UInt) { if bitPattern == 0 { return nil } - self._rawValue = Builtin.inttoptr_Word(bitPattern._builtinWordValue) + unsafe 
self._rawValue = Builtin.inttoptr_Word(bitPattern._builtinWordValue) } } @@ -197,8 +198,9 @@ extension OpaquePointer { /// Converts a typed `UnsafePointer` to an opaque C pointer. @_transparent @_preInverseGenerics + @safe public init(@_nonEphemeral _ from: UnsafePointer) { - self._rawValue = from._rawValue + unsafe self._rawValue = from._rawValue } /// Converts a typed `UnsafePointer` to an opaque C pointer. @@ -206,8 +208,9 @@ extension OpaquePointer { /// The result is `nil` if `from` is `nil`. @_transparent @_preInverseGenerics + @safe public init?(@_nonEphemeral _ from: UnsafePointer?) { - guard let unwrapped = from else { return nil } + guard let unwrapped = unsafe from else { return nil } self.init(unwrapped) } } @@ -216,8 +219,9 @@ extension OpaquePointer { /// Converts a typed `UnsafeMutablePointer` to an opaque C pointer. @_transparent @_preInverseGenerics + @safe public init(@_nonEphemeral _ from: UnsafeMutablePointer) { - self._rawValue = from._rawValue + unsafe self._rawValue = from._rawValue } /// Converts a typed `UnsafeMutablePointer` to an opaque C pointer. @@ -225,36 +229,40 @@ extension OpaquePointer { /// The result is `nil` if `from` is `nil`. @_transparent @_preInverseGenerics + @safe public init?(@_nonEphemeral _ from: UnsafeMutablePointer?) { - guard let unwrapped = from else { return nil } + guard let unwrapped = unsafe from else { return nil } self.init(unwrapped) } } extension OpaquePointer: Equatable { @inlinable // unsafe-performance + @safe public static func == (lhs: OpaquePointer, rhs: OpaquePointer) -> Bool { - return Bool(Builtin.cmp_eq_RawPointer(lhs._rawValue, rhs._rawValue)) + return unsafe Bool(Builtin.cmp_eq_RawPointer(lhs._rawValue, rhs._rawValue)) } } -extension OpaquePointer: Hashable { +extension OpaquePointer: @unsafe Hashable { /// Hashes the essential components of this value by feeding them into the /// given hasher. /// /// - Parameter hasher: The hasher to use when combining the components /// of this instance. @inlinable + @safe public func hash(into hasher: inout Hasher) { - hasher.combine(Int(Builtin.ptrtoint_Word(_rawValue))) + unsafe hasher.combine(Int(Builtin.ptrtoint_Word(_rawValue))) } } @_unavailableInEmbedded extension OpaquePointer: CustomDebugStringConvertible { /// A textual representation of the pointer, suitable for debugging. + @safe public var debugDescription: String { - return _rawPointerToString(_rawValue) + return unsafe _rawPointerToString(_rawValue) } } @@ -267,8 +275,9 @@ extension Int { /// - Parameter pointer: The pointer to use as the source for the new /// integer. @inlinable // unsafe-performance + @safe public init(bitPattern pointer: OpaquePointer?) { - self.init(bitPattern: UnsafeRawPointer(pointer)) + unsafe self.init(bitPattern: UnsafeRawPointer(pointer)) } } @@ -281,8 +290,9 @@ extension UInt { /// - Parameter pointer: The pointer to use as the source for the new /// integer. @inlinable // unsafe-performance + @safe public init(bitPattern pointer: OpaquePointer?) 
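The `OpaquePointer` changes above show the other direction: members of an `@unsafe` type that never dereference memory (the bit-pattern conversions, `==`, `hash(into:)`, `debugDescription`) are explicitly opted back in with `@safe`. A sketch of the same idea on a hypothetical handle type:

```swift
// Hypothetical sketch, not from the diff: an @unsafe wrapper whose
// identity-only operations are marked @safe.
@unsafe
struct RawHandle {
  @unsafe var bits: UInt

  // Comparing bit patterns never touches memory, so this member can be
  // @safe even though the containing type is @unsafe.
  @safe
  static func == (lhs: RawHandle, rhs: RawHandle) -> Bool {
    return unsafe lhs.bits == rhs.bits
  }

  // Exposing the raw bit pattern as an integer is likewise identity-only.
  @safe
  var bitPattern: UInt {
    return unsafe bits
  }
}
```

The `@safe` annotation asserts that these uses involve no memory-unsafe behavior, so they should not require an `unsafe` acknowledgment at call sites, which is the evident motivation for the `OpaquePointer` and `Int(bitPattern:)` annotations in the hunks above.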
{ - self.init(bitPattern: UnsafeRawPointer(pointer)) + unsafe self.init(bitPattern: UnsafeRawPointer(pointer)) } } @@ -291,7 +301,7 @@ extension UInt { @frozen @unsafe public struct CVaListPointer { - @usableFromInline // unsafe-performance + @unsafe @usableFromInline // unsafe-performance internal var _value: (__stack: UnsafeMutablePointer?, __gr_top: UnsafeMutablePointer?, __vr_top: UnsafeMutablePointer?, @@ -311,6 +321,7 @@ public struct CVaListPointer { @_unavailableInEmbedded extension CVaListPointer: CustomDebugStringConvertible { + @safe public var debugDescription: String { return "(\(_value.__stack.debugDescription), " + "\(_value.__gr_top.debugDescription), " + @@ -325,21 +336,22 @@ extension CVaListPointer: CustomDebugStringConvertible { @frozen @unsafe public struct CVaListPointer { - @usableFromInline // unsafe-performance + @unsafe @usableFromInline // unsafe-performance internal var _value: UnsafeMutableRawPointer @inlinable // unsafe-performance public // @testable init(_fromUnsafeMutablePointer from: UnsafeMutableRawPointer) { - _value = from + unsafe _value = from } } @_unavailableInEmbedded extension CVaListPointer: CustomDebugStringConvertible { /// A textual representation of the pointer, suitable for debugging. + @safe public var debugDescription: String { - return _value.debugDescription + return unsafe _value.debugDescription } } diff --git a/stdlib/public/core/CocoaArray.swift b/stdlib/public/core/CocoaArray.swift index 2bd15571c0b0b..d6d719de87064 100644 --- a/stdlib/public/core/CocoaArray.swift +++ b/stdlib/public/core/CocoaArray.swift @@ -40,7 +40,7 @@ internal struct _CocoaArrayWrapper: RandomAccessCollection { internal var core: _NSArrayCore { @inline(__always) get { - return unsafeBitCast(buffer, to: _NSArrayCore.self) + return unsafe unsafeBitCast(buffer, to: _NSArrayCore.self) } } @@ -73,9 +73,9 @@ internal struct _CocoaArrayWrapper: RandomAccessCollection { } // Look for contiguous storage in the NSArray - let cocoaStorageBaseAddress = self.contiguousStorage(self.indices) + let cocoaStorageBaseAddress = unsafe self.contiguousStorage(self.indices) - if let cocoaStorageBaseAddress = cocoaStorageBaseAddress { + if let cocoaStorageBaseAddress = unsafe cocoaStorageBaseAddress { return _SliceBuffer( owner: self.buffer, subscriptBaseAddress: cocoaStorageBaseAddress, @@ -88,11 +88,11 @@ internal struct _CocoaArrayWrapper: RandomAccessCollection { _uninitializedCount: boundsCount, minimumCapacity: 0) - let base = UnsafeMutableRawPointer(result.firstElementAddress) + let base = unsafe UnsafeMutableRawPointer(result.firstElementAddress) .assumingMemoryBound(to: AnyObject.self) for idx in 0.. UnsafeMutablePointer? { _internalInvariant(!subRange.isEmpty) - var enumerationState = _makeSwiftNSFastEnumerationState() + var enumerationState = unsafe _makeSwiftNSFastEnumerationState() // This function currently returns nil unless the first // subRange.upperBound items are stored contiguously. This is an // acceptable conservative behavior, but could potentially be // optimized for other cases. - let contiguousCount = withUnsafeMutablePointer(to: &enumerationState) { - core.countByEnumerating(with: $0, objects: nil, count: 0) + let contiguousCount = unsafe withUnsafeMutablePointer(to: &enumerationState) { + unsafe core.countByEnumerating(with: $0, objects: nil, count: 0) } - return contiguousCount >= subRange.upperBound + return unsafe contiguousCount >= subRange.upperBound ? UnsafeMutableRawPointer(enumerationState.itemsPtr!) 
.assumingMemoryBound(to: AnyObject.self) + subRange.lowerBound @@ -135,21 +135,21 @@ internal struct _CocoaArrayWrapper: RandomAccessCollection { subRange bounds: Range, initializing target: UnsafeMutablePointer ) -> UnsafeMutablePointer { - return withExtendedLifetime(buffer) { + return unsafe withExtendedLifetime(buffer) { let nsSubRange = SwiftShims._SwiftNSRange( location: bounds.lowerBound, length: bounds.upperBound - bounds.lowerBound) // Copies the references out of the NSArray without retaining them - core.getObjects(target, range: nsSubRange) + unsafe core.getObjects(target, range: nsSubRange) // Make another pass to retain the copied objects - var result = target + var result = unsafe target for _ in bounds { - result.initialize(to: result.pointee) - result += 1 + unsafe result.initialize(to: result.pointee) + unsafe result += 1 } - return result + return unsafe result } } @@ -160,7 +160,7 @@ internal struct _CocoaArrayWrapper: RandomAccessCollection { guard buffer.count > 0 else { return (makeIterator(), 0) } let start = buffer.baseAddress! let c = Swift.min(self.count, buffer.count) - _ = _copyContents(subRange: 0 ..< c, initializing: start) + _ = unsafe _copyContents(subRange: 0 ..< c, initializing: start) return (IndexingIterator(_elements: self, _position: c), c) } } diff --git a/stdlib/public/core/Codable.swift b/stdlib/public/core/Codable.swift index 3e4537c87bde1..3eabc8fb54ece 100644 --- a/stdlib/public/core/Codable.swift +++ b/stdlib/public/core/Codable.swift @@ -4014,105 +4014,105 @@ internal final class _KeyedEncodingContainerBox< override internal func encodeNil(forKey key: K) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encodeNil(forKey: key) } override internal func encode(_ value: Bool, forKey key: K) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encode(value, forKey: key) } override internal func encode(_ value: String, forKey key: K) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encode(value, forKey: key) } override internal func encode(_ value: Double, forKey key: K) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encode(value, forKey: key) } override internal func encode(_ value: Float, forKey key: K) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encode(value, forKey: key) } override internal func encode(_ value: Int, forKey key: K) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encode(value, forKey: key) } override internal func encode(_ value: Int8, forKey key: K) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encode(value, forKey: key) } override internal func encode(_ value: Int16, forKey key: K) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encode(value, forKey: key) } override 
internal func encode(_ value: Int32, forKey key: K) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encode(value, forKey: key) } override internal func encode(_ value: Int64, forKey key: K) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encode(value, forKey: key) } @available(SwiftStdlib 6.0, *) override internal func encode(_ value: Int128, forKey key: K) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encode(value, forKey: key) } override internal func encode(_ value: UInt, forKey key: K) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encode(value, forKey: key) } override internal func encode(_ value: UInt8, forKey key: K) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encode(value, forKey: key) } override internal func encode(_ value: UInt16, forKey key: K) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encode(value, forKey: key) } override internal func encode(_ value: UInt32, forKey key: K) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encode(value, forKey: key) } override internal func encode(_ value: UInt64, forKey key: K) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encode(value, forKey: key) } @available(SwiftStdlib 6.0, *) override internal func encode(_ value: UInt128, forKey key: K) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encode(value, forKey: key) } @@ -4121,7 +4121,7 @@ internal final class _KeyedEncodingContainerBox< forKey key: K ) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encode(value, forKey: key) } @@ -4130,7 +4130,7 @@ internal final class _KeyedEncodingContainerBox< forKey key: K ) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encodeConditional(object, forKey: key) } @@ -4139,7 +4139,7 @@ internal final class _KeyedEncodingContainerBox< forKey key: K ) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encodeIfPresent(value, forKey: key) } @@ -4148,7 +4148,7 @@ internal final class _KeyedEncodingContainerBox< forKey key: K ) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encodeIfPresent(value, forKey: key) } @@ -4157,7 +4157,7 @@ internal final class _KeyedEncodingContainerBox< forKey key: K ) throws { _internalInvariant(K.self == Key.self) - 
let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encodeIfPresent(value, forKey: key) } @@ -4166,7 +4166,7 @@ internal final class _KeyedEncodingContainerBox< forKey key: K ) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encodeIfPresent(value, forKey: key) } @@ -4175,7 +4175,7 @@ internal final class _KeyedEncodingContainerBox< forKey key: K ) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encodeIfPresent(value, forKey: key) } @@ -4184,7 +4184,7 @@ internal final class _KeyedEncodingContainerBox< forKey key: K ) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encodeIfPresent(value, forKey: key) } @@ -4193,7 +4193,7 @@ internal final class _KeyedEncodingContainerBox< forKey key: K ) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encodeIfPresent(value, forKey: key) } @@ -4202,7 +4202,7 @@ internal final class _KeyedEncodingContainerBox< forKey key: K ) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encodeIfPresent(value, forKey: key) } @@ -4211,7 +4211,7 @@ internal final class _KeyedEncodingContainerBox< forKey key: K ) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encodeIfPresent(value, forKey: key) } @@ -4221,7 +4221,7 @@ internal final class _KeyedEncodingContainerBox< forKey key: K ) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encodeIfPresent(value, forKey: key) } @@ -4230,7 +4230,7 @@ internal final class _KeyedEncodingContainerBox< forKey key: K ) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encodeIfPresent(value, forKey: key) } @@ -4239,7 +4239,7 @@ internal final class _KeyedEncodingContainerBox< forKey key: K ) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encodeIfPresent(value, forKey: key) } @@ -4248,7 +4248,7 @@ internal final class _KeyedEncodingContainerBox< forKey key: K ) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encodeIfPresent(value, forKey: key) } @@ -4257,7 +4257,7 @@ internal final class _KeyedEncodingContainerBox< forKey key: K ) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encodeIfPresent(value, forKey: key) } @@ -4266,7 +4266,7 @@ internal final class _KeyedEncodingContainerBox< forKey key: K ) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encodeIfPresent(value, forKey: key) } @@ -4276,7 
+4276,7 @@ internal final class _KeyedEncodingContainerBox< forKey key: K ) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encodeIfPresent(value, forKey: key) } @@ -4285,7 +4285,7 @@ internal final class _KeyedEncodingContainerBox< forKey key: K ) throws { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) try concrete.encodeIfPresent(value, forKey: key) } @@ -4294,7 +4294,7 @@ internal final class _KeyedEncodingContainerBox< forKey key: K ) -> KeyedEncodingContainer { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return concrete.nestedContainer(keyedBy: NestedKey.self, forKey: key) } @@ -4302,7 +4302,7 @@ internal final class _KeyedEncodingContainerBox< forKey key: K ) -> UnkeyedEncodingContainer { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return concrete.nestedUnkeyedContainer(forKey: key) } @@ -4312,7 +4312,7 @@ internal final class _KeyedEncodingContainerBox< override internal func superEncoder(forKey key: K) -> Encoder { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return concrete.superEncoder(forKey: key) } } @@ -4623,13 +4623,13 @@ internal final class _KeyedDecodingContainerBox< override internal func contains(_ key: K) -> Bool { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return concrete.contains(key) } override internal func decodeNil(forKey key: K) throws -> Bool { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decodeNil(forKey: key) } @@ -4638,7 +4638,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> Bool { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decode(Bool.self, forKey: key) } @@ -4647,7 +4647,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> String { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decode(String.self, forKey: key) } @@ -4656,7 +4656,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> Double { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decode(Double.self, forKey: key) } @@ -4665,7 +4665,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> Float { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decode(Float.self, forKey: key) } @@ -4674,7 +4674,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> Int { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decode(Int.self, forKey: key) } @@ -4683,7 +4683,7 @@ internal final 
class _KeyedDecodingContainerBox< forKey key: K ) throws -> Int8 { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decode(Int8.self, forKey: key) } @@ -4692,7 +4692,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> Int16 { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decode(Int16.self, forKey: key) } @@ -4701,7 +4701,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> Int32 { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decode(Int32.self, forKey: key) } @@ -4710,7 +4710,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> Int64 { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decode(Int64.self, forKey: key) } @@ -4720,7 +4720,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> Int128 { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decode(Int128.self, forKey: key) } @@ -4729,7 +4729,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> UInt { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decode(UInt.self, forKey: key) } @@ -4738,7 +4738,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> UInt8 { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decode(UInt8.self, forKey: key) } @@ -4747,7 +4747,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> UInt16 { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decode(UInt16.self, forKey: key) } @@ -4756,7 +4756,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> UInt32 { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decode(UInt32.self, forKey: key) } @@ -4765,7 +4765,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> UInt64 { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decode(UInt64.self, forKey: key) } @@ -4775,7 +4775,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> UInt128 { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decode(UInt128.self, forKey: key) } @@ -4784,7 +4784,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> T { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decode(T.self, forKey: key) } @@ -4793,7 +4793,7 @@ internal final class 
_KeyedDecodingContainerBox< forKey key: K ) throws -> Bool? { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decodeIfPresent(Bool.self, forKey: key) } @@ -4802,7 +4802,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> String? { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decodeIfPresent(String.self, forKey: key) } @@ -4811,7 +4811,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> Double? { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decodeIfPresent(Double.self, forKey: key) } @@ -4820,7 +4820,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> Float? { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decodeIfPresent(Float.self, forKey: key) } @@ -4829,7 +4829,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> Int? { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decodeIfPresent(Int.self, forKey: key) } @@ -4838,7 +4838,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> Int8? { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decodeIfPresent(Int8.self, forKey: key) } @@ -4847,7 +4847,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> Int16? { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decodeIfPresent(Int16.self, forKey: key) } @@ -4856,7 +4856,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> Int32? { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decodeIfPresent(Int32.self, forKey: key) } @@ -4865,7 +4865,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> Int64? { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decodeIfPresent(Int64.self, forKey: key) } @@ -4875,7 +4875,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> Int128? { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decodeIfPresent(Int128.self, forKey: key) } @@ -4884,7 +4884,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> UInt? { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decodeIfPresent(UInt.self, forKey: key) } @@ -4893,7 +4893,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> UInt8? 
{ _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decodeIfPresent(UInt8.self, forKey: key) } @@ -4902,7 +4902,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> UInt16? { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decodeIfPresent(UInt16.self, forKey: key) } @@ -4911,7 +4911,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> UInt32? { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decodeIfPresent(UInt32.self, forKey: key) } @@ -4920,7 +4920,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> UInt64? { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decodeIfPresent(UInt64.self, forKey: key) } @@ -4930,7 +4930,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> UInt128? { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decodeIfPresent(UInt128.self, forKey: key) } @@ -4939,7 +4939,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> T? { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.decodeIfPresent(T.self, forKey: key) } @@ -4948,7 +4948,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> KeyedDecodingContainer { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.nestedContainer(keyedBy: NestedKey.self, forKey: key) } @@ -4956,7 +4956,7 @@ internal final class _KeyedDecodingContainerBox< forKey key: K ) throws -> UnkeyedDecodingContainer { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.nestedUnkeyedContainer(forKey: key) } @@ -4966,7 +4966,7 @@ internal final class _KeyedDecodingContainerBox< override internal func superDecoder(forKey key: K) throws -> Decoder { _internalInvariant(K.self == Key.self) - let key = unsafeBitCast(key, to: Key.self) + let key = unsafe unsafeBitCast(key, to: Key.self) return try concrete.superDecoder(forKey: key) } } diff --git a/stdlib/public/core/CollectionAlgorithms.swift b/stdlib/public/core/CollectionAlgorithms.swift index 0a8cf4a847f97..bdd301b03361e 100644 --- a/stdlib/public/core/CollectionAlgorithms.swift +++ b/stdlib/public/core/CollectionAlgorithms.swift @@ -398,11 +398,11 @@ extension MutableCollection where Self: BidirectionalCollection { public mutating func partition( by belongsInSecondPartition: (Element) throws -> Bool ) rethrows -> Index { - let maybeOffset = try withContiguousMutableStorageIfAvailable { + let maybeOffset = try unsafe withContiguousMutableStorageIfAvailable { (bufferPointer) -> Int in - let unsafeBufferPivot = try bufferPointer._partitionImpl( + let unsafeBufferPivot = try unsafe bufferPointer._partitionImpl( by: belongsInSecondPartition) - return unsafeBufferPivot - bufferPointer.startIndex + return unsafe 
unsafeBufferPivot - bufferPointer.startIndex } if let offset = maybeOffset { return index(startIndex, offsetBy: offset) diff --git a/stdlib/public/core/CommandLine.swift b/stdlib/public/core/CommandLine.swift index c52dbcb3ea141..9bff349f9320d 100644 --- a/stdlib/public/core/CommandLine.swift +++ b/stdlib/public/core/CommandLine.swift @@ -36,7 +36,7 @@ extension CommandLine { @usableFromInline internal static var _unsafeArgv: UnsafeMutablePointer?> - = _swift_stdlib_getUnsafeArgvArgc(&_argc) + = unsafe _swift_stdlib_getUnsafeArgvArgc(&_argc) /// Access to the raw argc value from C. public static var argc: Int32 { @@ -52,7 +52,7 @@ extension CommandLine { // over synchronizing access to argc and argv. var argc: Int32 = 0 - while let _ = _unsafeArgv[Int(argc)] { + while let _ = unsafe _unsafeArgv[Int(argc)] { argc += 1 } @@ -68,14 +68,14 @@ extension CommandLine { /// Where possible, use ``arguments`` instead. public static var unsafeArgv: UnsafeMutablePointer?> { - return _unsafeArgv + return unsafe _unsafeArgv } // This is extremely unsafe and allows for concurrent writes with no // synchronization to the underlying data. In a future version of Swift you // will not be able to write to 'CommandLine.arguments'. static nonisolated(unsafe) var _arguments: [String] = (0 ..< Int(argc)).map { - String(cString: _unsafeArgv[$0]!) + unsafe String(cString: _unsafeArgv[$0]!) } /// An array that provides access to this program's command line arguments. diff --git a/stdlib/public/core/ContiguousArray.swift b/stdlib/public/core/ContiguousArray.swift index 3c7cf01fea388..3f9cca0bd0937 100644 --- a/stdlib/public/core/ContiguousArray.swift +++ b/stdlib/public/core/ContiguousArray.swift @@ -35,6 +35,7 @@ /// which `ContiguousArray` shares most properties and methods. @frozen @_eagerMove +@safe public struct ContiguousArray: _DestructorSafeContainer { @usableFromInline internal typealias _Buffer = _ContiguousArrayBuffer @@ -112,7 +113,7 @@ extension ContiguousArray { @inlinable @_semantics("array.get_element_address") internal func _getElementAddress(_ index: Int) -> UnsafeMutablePointer { - return _buffer.firstElementAddress + index + return unsafe _buffer.firstElementAddress + index } } @@ -163,12 +164,12 @@ extension ContiguousArray: _ArrayProtocol { @inlinable public var _baseAddressIfContiguous: UnsafeMutablePointer? { @inline(__always) // FIXME(TODO: JIRA): Hack around test failure - get { return _buffer.firstElementAddressIfContiguous } + get { return unsafe _buffer.firstElementAddressIfContiguous } } @inlinable internal var _baseAddress: UnsafeMutablePointer { - return _buffer.firstElementAddress + return unsafe _buffer.firstElementAddress } } @@ -415,9 +416,9 @@ extension ContiguousArray: RandomAccessCollection, MutableCollection { _modify { _makeMutableAndUnique() _checkSubscript_mutating(index) - let address = _buffer.mutableFirstElementAddress + index + let address = unsafe _buffer.mutableFirstElementAddress + index defer { _endMutation() } - yield &address.pointee + yield unsafe &address.pointee } } @@ -457,7 +458,7 @@ extension ContiguousArray: RandomAccessCollection, MutableCollection { _checkIndex(bounds.upperBound) // If the replacement buffer has same identity, and the ranges match, // then this was a pinned in-place modification, nothing further needed. - if self[bounds]._buffer.identity != rhs._buffer.identity + if unsafe self[bounds]._buffer.identity != rhs._buffer.identity || bounds != rhs.startIndex.. 
- (self, p) = ContiguousArray._allocateUninitialized(count) + unsafe (self, p) = ContiguousArray._allocateUninitialized(count) for _ in 0.. (ContiguousArray, UnsafeMutablePointer) { let result = ContiguousArray(_uninitializedCount: count) - return (result, result._buffer.firstElementAddress) + return unsafe (result, result._buffer.firstElementAddress) } //===--- basic mutations ------------------------------------------------===// @@ -734,7 +735,7 @@ extension ContiguousArray: RangeReplaceableCollection { let newCount = oldCount &+ 1 var newBuffer = _buffer._forceCreateUniqueMutableBuffer( countForNewBuffer: oldCount, minNewCapacity: newCount) - _buffer._arrayOutOfPlaceUpdate( + unsafe _buffer._arrayOutOfPlaceUpdate( &newBuffer, oldCount, 0) } @@ -780,7 +781,7 @@ extension ContiguousArray: RangeReplaceableCollection { _internalInvariant(_buffer.mutableCapacity >= _buffer.mutableCount &+ 1) _buffer.mutableCount = oldCount &+ 1 - (_buffer.mutableFirstElementAddress + oldCount).initialize(to: newElement) + unsafe (_buffer.mutableFirstElementAddress + oldCount).initialize(to: newElement) } /// Adds a new element at the end of the array. @@ -846,15 +847,15 @@ extension ContiguousArray: RangeReplaceableCollection { growForAppend: true) let oldCount = _buffer.mutableCount - let startNewElements = _buffer.mutableFirstElementAddress + oldCount - let buf = UnsafeMutableBufferPointer( + let startNewElements = unsafe _buffer.mutableFirstElementAddress + oldCount + let buf = unsafe UnsafeMutableBufferPointer( start: startNewElements, count: _buffer.mutableCapacity - oldCount) - var (remainder,writtenUpTo) = buf.initialize(from: newElements) + var (remainder,writtenUpTo) = unsafe buf.initialize(from: newElements) // trap on underflow from the sequence's underestimate: - let writtenCount = buf.distance(from: buf.startIndex, to: writtenUpTo) + let writtenCount = unsafe buf.distance(from: buf.startIndex, to: writtenUpTo) _precondition(newElementsCount <= writtenCount, "newElements.underestimatedCount was an overestimate") // can't check for overflow as sequences can underestimate @@ -873,11 +874,11 @@ extension ContiguousArray: RangeReplaceableCollection { _reserveCapacityAssumingUniqueBuffer(oldCount: newCount) let currentCapacity = _buffer.mutableCapacity - let base = _buffer.mutableFirstElementAddress + let base = unsafe _buffer.mutableFirstElementAddress // fill while there is another item and spare capacity while let next = nextItem, newCount < currentCapacity { - (base + newCount).initialize(to: next) + unsafe (base + newCount).initialize(to: next) newCount += 1 nextItem = remainder.next() } @@ -902,8 +903,8 @@ extension ContiguousArray: RangeReplaceableCollection { _makeMutableAndUnique() let newCount = _buffer.mutableCount - 1 _precondition(newCount >= 0, "Can't removeLast from an empty ContiguousArray") - let pointer = (_buffer.mutableFirstElementAddress + newCount) - let element = pointer.move() + let pointer = unsafe (_buffer.mutableFirstElementAddress + newCount) + let element = unsafe pointer.move() _buffer.mutableCount = newCount _endMutation() return element @@ -933,9 +934,9 @@ extension ContiguousArray: RangeReplaceableCollection { _precondition(index < currentCount, "Index out of range") _precondition(index >= 0, "Index out of range") let newCount = currentCount - 1 - let pointer = (_buffer.mutableFirstElementAddress + index) - let result = pointer.move() - pointer.moveInitialize(from: pointer + 1, count: newCount - index) + let pointer = unsafe (_buffer.mutableFirstElementAddress + index) 
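In the `ContiguousArray` mutations above, every statement that does pointer arithmetic, moves elements, or writes through a raw element address now carries its own `unsafe` marker; the marker covers all unsafe operations inside that one expression. A minimal sketch of the same pattern outside the standard library, where `base` and `count` are assumed to describe manually managed, initialized storage:

```swift
// Remove and return the element at `index`, shifting the tail down by one.
// Illustrative only; the caller owns `base` and keeps `count` accurate.
func removeElement<T>(
  from base: UnsafeMutablePointer<T>, count: inout Int, at index: Int
) -> T {
  precondition(index >= 0 && index < count, "Index out of range")
  let slot = unsafe base + index                 // pointer arithmetic is unsafe
  let removed = unsafe slot.move()               // so is moving the value out
  unsafe slot.moveInitialize(from: slot + 1, count: count - index - 1)
  count -= 1
  return removed
}
```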
+ let result = unsafe pointer.move() + unsafe pointer.moveInitialize(from: pointer + 1, count: newCount - index) _buffer.mutableCount = newCount _endMutation() return result @@ -996,7 +997,7 @@ extension ContiguousArray: RangeReplaceableCollection { ) rethrows -> R? { return try withUnsafeMutableBufferPointer { (bufferPointer) -> R in - return try body(&bufferPointer) + return try unsafe body(&bufferPointer) } } @@ -1006,7 +1007,7 @@ extension ContiguousArray: RangeReplaceableCollection { ) rethrows -> R? { return try withUnsafeMutableBufferPointer { (bufferPointer) -> R in - return try body(&bufferPointer) + return try unsafe body(&bufferPointer) } } @@ -1016,7 +1017,7 @@ extension ContiguousArray: RangeReplaceableCollection { ) rethrows -> R? { return try withUnsafeBufferPointer { (bufferPointer) -> R in - return try body(bufferPointer) + return try unsafe body(bufferPointer) } } @@ -1059,11 +1060,11 @@ extension ContiguousArray { @usableFromInline @_transparent internal func _cPointerArgs() -> (AnyObject?, UnsafeRawPointer?) { let p = _baseAddressIfContiguous - if _fastPath(p != nil || isEmpty) { + if unsafe _fastPath(p != nil || isEmpty) { return (_owner, UnsafeRawPointer(p)) } let n = ContiguousArray(self._buffer)._buffer - return (n.owner, UnsafeRawPointer(n.firstElementAddress)) + return unsafe (n.owner, UnsafeRawPointer(n.firstElementAddress)) } } @@ -1100,7 +1101,7 @@ extension ContiguousArray { _ buffer: inout UnsafeMutableBufferPointer, _ initializedCount: inout Int) throws -> Void ) rethrows { - self = try ContiguousArray(Array( + self = try unsafe ContiguousArray(Array( _unsafeUninitializedCapacity: unsafeUninitializedCapacity, initializingWith: initializer)) } @@ -1112,7 +1113,7 @@ extension ContiguousArray { func withUnsafeBufferPointer( _ body: (UnsafeBufferPointer) throws -> R ) rethrows -> R { - return try _buffer.withUnsafeBufferPointer(body) + return try unsafe _buffer.withUnsafeBufferPointer(body) } /// Calls a closure with a pointer to the array's contiguous storage. @@ -1148,7 +1149,7 @@ extension ContiguousArray { public func withUnsafeBufferPointer( _ body: (UnsafeBufferPointer) throws(E) -> R ) throws(E) -> R { - return try _buffer.withUnsafeBufferPointer(body) + return try unsafe _buffer.withUnsafeBufferPointer(body) } // Superseded by the typed-throws version of this function, but retained @@ -1212,12 +1213,12 @@ extension ContiguousArray { let count = _buffer.mutableCount // Create an UnsafeBufferPointer that we can pass to body - let pointer = _buffer.mutableFirstElementAddress - var inoutBufferPointer = UnsafeMutableBufferPointer( + let pointer = unsafe _buffer.mutableFirstElementAddress + var inoutBufferPointer = unsafe UnsafeMutableBufferPointer( start: pointer, count: count) defer { - _precondition( + unsafe _precondition( inoutBufferPointer.baseAddress == pointer && inoutBufferPointer.count == count, "ContiguousArray withUnsafeMutableBufferPointer: replacing the buffer is not allowed") @@ -1226,7 +1227,7 @@ extension ContiguousArray { } // Invoke the body. 
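The closure-based `withUnsafe*BufferPointer` entry points keep their shape, but both the call itself and each use of the buffer parameter inside the closure are unsafe uses that want an acknowledgement. A minimal caller-side sketch, assuming nothing beyond the public `ContiguousArray` API:

```swift
func sum(of numbers: ContiguousArray<Int>) -> Int {
  unsafe numbers.withUnsafeBufferPointer { buffer -> Int in
    var total = 0
    for i in 0..<numbers.count {
      total += unsafe buffer[i]   // reading through the unsafe buffer pointer
    }
    return total
  }
}
```

Without the markers this still compiles; the strict mode reports the unacknowledged uses as warnings rather than errors.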
- return try body(&inoutBufferPointer) + return try unsafe body(&inoutBufferPointer) } @inlinable @@ -1244,20 +1245,20 @@ extension ContiguousArray { "Insufficient space allocated to copy array contents") if let s = _baseAddressIfContiguous { - p.initialize(from: s, count: self.count) + unsafe p.initialize(from: s, count: self.count) // Need a _fixLifetime bracketing the _baseAddressIfContiguous getter // and all uses of the pointer it returns: _fixLifetime(self._owner) } else { for x in self { - p.initialize(to: x) - p += 1 + unsafe p.initialize(to: x) + unsafe p += 1 } } var it = IndexingIterator(_elements: self) it._position = endIndex - return (it,buffer.index(buffer.startIndex, offsetBy: self.count)) + return (it,unsafe buffer.index(buffer.startIndex, offsetBy: self.count)) } } @@ -1338,7 +1339,7 @@ extension ContiguousArray: Equatable where Element: Equatable { } // Test referential equality. - if lhsCount == 0 || lhs._buffer.identity == rhs._buffer.identity { + if unsafe lhsCount == 0 || lhs._buffer.identity == rhs._buffer.identity { return true } @@ -1418,7 +1419,7 @@ extension ContiguousArray { _ body: (UnsafeMutableRawBufferPointer) throws -> R ) rethrows -> R { return try self.withUnsafeMutableBufferPointer { - return try body(UnsafeMutableRawBufferPointer($0)) + return try unsafe body(UnsafeMutableRawBufferPointer($0)) } } @@ -1454,7 +1455,7 @@ extension ContiguousArray { _ body: (UnsafeRawBufferPointer) throws -> R ) rethrows -> R { return try self.withUnsafeBufferPointer { - try body(UnsafeRawBufferPointer($0)) + try unsafe body(UnsafeRawBufferPointer($0)) } } } diff --git a/stdlib/public/core/ContiguousArrayBuffer.swift b/stdlib/public/core/ContiguousArrayBuffer.swift index 0796d97668071..f49d15fb5afb1 100644 --- a/stdlib/public/core/ContiguousArrayBuffer.swift +++ b/stdlib/public/core/ContiguousArrayBuffer.swift @@ -42,7 +42,7 @@ internal final class __EmptyArrayStorage override internal func _withVerbatimBridgedUnsafeBuffer( _ body: (UnsafeBufferPointer) throws -> R ) rethrows -> R? { - return try body(UnsafeBufferPointer(start: nil, count: 0)) + return unsafe try body(UnsafeBufferPointer(start: nil, count: 0)) } override internal func _getNonVerbatimBridgingBuffer() -> _BridgingBuffer { @@ -70,6 +70,7 @@ internal final class __EmptyArrayStorage // // TODO: We should figure out how to make this a constant so that it's placed in // non-writable memory (can't be a let, Builtin.addressof below requires a var). +@unsafe public var _swiftEmptyArrayStorage: (Int, Int, Int, Int) = (/*isa*/0, /*refcount*/-1, /*count*/0, /*flags*/1) #endif @@ -121,7 +122,7 @@ internal final class __StaticArrayStorage /// `[Native]Array`s. 
@inlinable internal var _emptyArrayStorage: __EmptyArrayStorage { - return Builtin.bridgeFromRawPointer( + return unsafe Builtin.bridgeFromRawPointer( Builtin.addressof(&_swiftEmptyArrayStorage)) } @@ -134,7 +135,7 @@ internal final class _ContiguousArrayStorage< @inlinable deinit { - _elementPointer.deinitialize(count: countAndCapacity.count) + unsafe _elementPointer.deinitialize(count: countAndCapacity.count) _fixLifetime(self) } @@ -145,10 +146,10 @@ internal final class _ContiguousArrayStorage< ) rethrows -> R { _internalInvariant(_isBridgedVerbatimToObjectiveC(Element.self)) let count = countAndCapacity.count - let elements = UnsafeRawPointer(_elementPointer) + let elements = unsafe UnsafeRawPointer(_elementPointer) .assumingMemoryBound(to: AnyObject.self) defer { _fixLifetime(self) } - return try body(UnsafeBufferPointer(start: elements, count: count)) + return try unsafe body(UnsafeBufferPointer(start: elements, count: count)) } @objc(countByEnumeratingWithState:objects:count:) @@ -157,19 +158,19 @@ internal final class _ContiguousArrayStorage< with state: UnsafeMutablePointer<_SwiftNSFastEnumerationState>, objects: UnsafeMutablePointer?, count: Int ) -> Int { - var enumerationState = state.pointee + var enumerationState = unsafe state.pointee - if enumerationState.state != 0 { + if unsafe enumerationState.state != 0 { return 0 } - return withUnsafeBufferOfObjects { + return unsafe withUnsafeBufferOfObjects { objects in - enumerationState.mutationsPtr = _fastEnumerationStorageMutationsPtr - enumerationState.itemsPtr = - AutoreleasingUnsafeMutablePointer(objects.baseAddress) - enumerationState.state = 1 - state.pointee = enumerationState + unsafe enumerationState.mutationsPtr = _fastEnumerationStorageMutationsPtr + unsafe enumerationState.itemsPtr = + unsafe AutoreleasingUnsafeMutablePointer(objects.baseAddress) + unsafe enumerationState.state = 1 + unsafe state.pointee = enumerationState return objects.count } } @@ -177,30 +178,30 @@ internal final class _ContiguousArrayStorage< @inline(__always) @_effects(readonly) @nonobjc private func _objectAt(_ index: Int) -> Unmanaged { - return withUnsafeBufferOfObjects { + return unsafe withUnsafeBufferOfObjects { objects in _precondition( _isValidArraySubscript(index, count: objects.count), "Array index out of range") - return Unmanaged.passUnretained(objects[index]) + return unsafe Unmanaged.passUnretained(objects[index]) } } @objc(objectAtIndexedSubscript:) @_effects(readonly) final override internal func objectAtSubscript(_ index: Int) -> Unmanaged { - return _objectAt(index) + return unsafe _objectAt(index) } @objc(objectAtIndex:) @_effects(readonly) final override internal func objectAt(_ index: Int) -> Unmanaged { - return _objectAt(index) + return unsafe _objectAt(index) } @objc internal override final var count: Int { @_effects(readonly) get { - return withUnsafeBufferOfObjects { $0.count } + return unsafe withUnsafeBufferOfObjects { $0.count } } } @@ -208,7 +209,7 @@ internal final class _ContiguousArrayStorage< @objc internal override final func getObjects( _ aBuffer: UnsafeMutablePointer, range: _SwiftNSRange ) { - return withUnsafeBufferOfObjects { + return unsafe withUnsafeBufferOfObjects { objects in _precondition( _isValidArrayIndex(range.location, count: objects.count), @@ -224,7 +225,7 @@ internal final class _ContiguousArrayStorage< // These objects are "returned" at +0, so treat them as pointer values to // avoid retains. 
Copy bytes via a raw pointer to circumvent reference // counting while correctly aliasing with all other pointer types. - UnsafeMutableRawPointer(aBuffer).copyMemory( + unsafe UnsafeMutableRawPointer(aBuffer).copyMemory( from: objects.baseAddress! + range.location, byteCount: range.length * MemoryLayout.stride) } @@ -237,8 +238,8 @@ internal final class _ContiguousArrayStorage< _ body: (UnsafeBufferPointer) throws -> R ) rethrows -> R? { var result: R? - try self._withVerbatimBridgedUnsafeBufferImpl { - result = try body($0) + try unsafe self._withVerbatimBridgedUnsafeBufferImpl { + result = unsafe try body($0) } return result } @@ -250,10 +251,10 @@ internal final class _ContiguousArrayStorage< ) rethrows { if _isBridgedVerbatimToObjectiveC(Element.self) { let count = countAndCapacity.count - let elements = UnsafeRawPointer(_elementPointer) + let elements = unsafe UnsafeRawPointer(_elementPointer) .assumingMemoryBound(to: AnyObject.self) defer { _fixLifetime(self) } - try body(UnsafeBufferPointer(start: elements, count: count)) + try unsafe body(UnsafeBufferPointer(start: elements, count: count)) } } @@ -266,10 +267,10 @@ internal final class _ContiguousArrayStorage< "Verbatim bridging should be handled separately") let count = countAndCapacity.count let result = _BridgingBuffer(count) - let resultPtr = result.baseAddress - let p = _elementPointer + let resultPtr = unsafe result.baseAddress + let p = unsafe _elementPointer for i in 0.. { - return UnsafeMutablePointer(Builtin.projectTailElems(self, Element.self)) + return unsafe UnsafeMutablePointer(Builtin.projectTailElems(self, Element.self)) } } @@ -362,13 +363,13 @@ internal struct _ContiguousArrayBuffer: _ArrayBufferProtocol { let storageAddr = UnsafeMutableRawPointer(Builtin.bridgeToRawPointer(_storage)) let allocSize: Int? #if !$Embedded - allocSize = _mallocSize(ofAllocation: storageAddr) + allocSize = unsafe _mallocSize(ofAllocation: storageAddr) #else allocSize = nil #endif if let allocSize { - let endAddr = storageAddr + allocSize - let realCapacity = endAddr.assumingMemoryBound(to: Element.self) - firstElementAddress + let endAddr = unsafe storageAddr + allocSize + let realCapacity = unsafe endAddr.assumingMemoryBound(to: Element.self) - firstElementAddress _initStorageHeader( count: uninitializedCount, capacity: realCapacity) } else { @@ -426,7 +427,7 @@ internal struct _ContiguousArrayBuffer: _ArrayBufferProtocol { /// A pointer to the first element. @inlinable internal var firstElementAddress: UnsafeMutablePointer { - return UnsafeMutablePointer(Builtin.projectTailElems(_storage, + return unsafe UnsafeMutablePointer(Builtin.projectTailElems(_storage, Element.self)) } @@ -435,13 +436,13 @@ internal struct _ContiguousArrayBuffer: _ArrayBufferProtocol { /// - Precondition: The buffer must be mutable. @_alwaysEmitIntoClient internal var mutableFirstElementAddress: UnsafeMutablePointer { - return UnsafeMutablePointer(Builtin.projectTailElems(mutableOrEmptyStorage, + return unsafe UnsafeMutablePointer(Builtin.projectTailElems(mutableOrEmptyStorage, Element.self)) } @inlinable internal var firstElementAddressIfContiguous: UnsafeMutablePointer? 
{ - return firstElementAddress + return unsafe firstElementAddress } // Superseded by the typed-throws version of this function, but retained @@ -452,7 +453,7 @@ internal struct _ContiguousArrayBuffer: _ArrayBufferProtocol { _ body: (UnsafeBufferPointer) throws -> R ) rethrows -> R { defer { _fixLifetime(self) } - return try body(UnsafeBufferPointer(start: firstElementAddress, + return try unsafe body(UnsafeBufferPointer(start: firstElementAddress, count: count)) } @@ -463,7 +464,7 @@ internal struct _ContiguousArrayBuffer: _ArrayBufferProtocol { _ body: (UnsafeBufferPointer) throws(E) -> R ) throws(E) -> R { defer { _fixLifetime(self) } - return try body(UnsafeBufferPointer(start: firstElementAddress, + return try unsafe body(UnsafeBufferPointer(start: firstElementAddress, count: count)) } @@ -475,7 +476,7 @@ internal struct _ContiguousArrayBuffer: _ArrayBufferProtocol { _ body: (UnsafeMutableBufferPointer) throws -> R ) rethrows -> R { defer { _fixLifetime(self) } - return try body( + return try unsafe body( UnsafeMutableBufferPointer(start: firstElementAddress, count: count)) } @@ -486,7 +487,7 @@ internal struct _ContiguousArrayBuffer: _ArrayBufferProtocol { _ body: (UnsafeMutableBufferPointer) throws(E) -> R ) throws(E) -> R { defer { _fixLifetime(self) } - return try body( + return try unsafe body( UnsafeMutableBufferPointer(start: firstElementAddress, count: count)) } @@ -530,9 +531,9 @@ internal struct _ContiguousArrayBuffer: _ArrayBufferProtocol { @inline(__always) internal func getElement(_ i: Int) -> Element { _internalInvariant(i >= 0 && i < count, "Array index out of range") - let addr = UnsafePointer( + let addr = unsafe UnsafePointer( Builtin.projectTailElems(immutableStorage, Element.self)) - return addr[i] + return unsafe addr[i] } /// The storage of an immutable buffer. @@ -624,8 +625,8 @@ internal struct _ContiguousArrayBuffer: _ArrayBufferProtocol { // firstElementAddress[i] = newValue var nv = newValue let tmp = nv - nv = firstElementAddress[i] - firstElementAddress[i] = tmp + nv = unsafe firstElementAddress[i] + unsafe firstElementAddress[i] = tmp } } @@ -748,10 +749,10 @@ internal struct _ContiguousArrayBuffer: _ArrayBufferProtocol { _internalInvariant(bounds.upperBound <= count) let initializedCount = bounds.upperBound - bounds.lowerBound - target.initialize( + unsafe target.initialize( from: firstElementAddress + bounds.lowerBound, count: initializedCount) _fixLifetime(owner) - return target + initializedCount + return unsafe target + initializedCount } @inlinable @@ -760,7 +761,7 @@ internal struct _ContiguousArrayBuffer: _ArrayBufferProtocol { ) -> (Iterator, UnsafeMutableBufferPointer.Index) { guard buffer.count > 0 else { return (makeIterator(), 0) } let c = Swift.min(self.count, buffer.count) - buffer.baseAddress!.initialize( + unsafe buffer.baseAddress!.initialize( from: firstElementAddress, count: c) _fixLifetime(owner) @@ -777,7 +778,7 @@ internal struct _ContiguousArrayBuffer: _ArrayBufferProtocol { #else let storage = _storage #endif - return _SliceBuffer( + return unsafe _SliceBuffer( owner: storage, subscriptBaseAddress: firstElementAddress, indices: bounds, @@ -874,12 +875,12 @@ internal struct _ContiguousArrayBuffer: _ArrayBufferProtocol { if bufferIsUnique { // As an optimization, if the original buffer is unique, we can just move // the elements instead of copying. 
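This unique-buffer fast path works because `moveInitialize` transfers values out of the source memory and leaves it uninitialized, whereas `initialize(from:count:)` copies and leaves the source intact; that is why the hunk below resets the old buffer's count to zero right after the move. A standalone sketch of the two operations, using a throwaway allocation rather than anything from this file:

```swift
func copyVersusMove() {
  let source = unsafe UnsafeMutablePointer<String>.allocate(capacity: 2)
  let destination = unsafe UnsafeMutablePointer<String>.allocate(capacity: 2)
  unsafe source.initialize(repeating: "x", count: 2)

  // Copying: `source` stays initialized, so both regions need cleanup later.
  unsafe destination.initialize(from: source, count: 2)
  unsafe destination.deinitialize(count: 2)

  // Moving: ownership transfers, and `source` is uninitialized afterwards.
  unsafe destination.moveInitialize(from: source, count: 2)
  unsafe destination.deinitialize(count: 2)

  unsafe source.deallocate()
  unsafe destination.deallocate()
}
```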
- let dest = newBuffer.mutableFirstElementAddress - dest.moveInitialize(from: firstElementAddress, + let dest = unsafe newBuffer.mutableFirstElementAddress + unsafe dest.moveInitialize(from: firstElementAddress, count: c) mutableCount = 0 } else { - _copyContents( + unsafe _copyContents( subRange: 0..: _ArrayBufferProtocol { /// identity and count. @inlinable internal var identity: UnsafeRawPointer { - return UnsafeRawPointer(firstElementAddress) + return unsafe UnsafeRawPointer(firstElementAddress) } /// Returns `true` if we have storage for elements of the given @@ -1004,7 +1005,7 @@ internal func += ( let buf: UnsafeMutableBufferPointer if _fastPath(newCount <= lhs.capacity) { - buf = UnsafeMutableBufferPointer( + unsafe buf = unsafe UnsafeMutableBufferPointer( start: lhs.firstElementAddress + oldCount, count: rhs.count) lhs.mutableCount = newCount @@ -1014,16 +1015,16 @@ internal func += ( _uninitializedCount: newCount, minimumCapacity: _growArrayCapacity(lhs.capacity)) - newLHS.firstElementAddress.moveInitialize( + unsafe newLHS.firstElementAddress.moveInitialize( from: lhs.firstElementAddress, count: oldCount) lhs.mutableCount = 0 (lhs, newLHS) = (newLHS, lhs) - buf = UnsafeMutableBufferPointer( + unsafe buf = unsafe UnsafeMutableBufferPointer( start: lhs.firstElementAddress + oldCount, count: rhs.count) } - var (remainders,writtenUpTo) = buf.initialize(from: rhs) + var (remainders,writtenUpTo) = unsafe buf.initialize(from: rhs) // ensure that exactly rhs.count elements were written _precondition(remainders.next() == nil, "rhs underreported its count") @@ -1065,7 +1066,7 @@ internal func _copySequenceToContiguousArray< >(_ source: S) -> ContiguousArray { let initialCapacity = source.underestimatedCount var builder = - _UnsafePartiallyInitializedContiguousArrayBuffer( + unsafe _UnsafePartiallyInitializedContiguousArrayBuffer( initialCapacity: initialCapacity) var iterator = source.makeIterator() @@ -1074,15 +1075,15 @@ internal func _copySequenceToContiguousArray< // Add elements up to the initial capacity without checking for regrowth. for _ in 0.. { @usableFromInline internal var result: _ContiguousArrayBuffer @@ -1159,50 +1161,50 @@ internal struct _UnsafePartiallyInitializedContiguousArrayBuffer { @inline(__always) // For performance reasons. internal init(initialCapacity: Int) { if initialCapacity == 0 { - result = _ContiguousArrayBuffer() + unsafe result = _ContiguousArrayBuffer() } else { - result = _ContiguousArrayBuffer( + unsafe result = _ContiguousArrayBuffer( _uninitializedCount: initialCapacity, minimumCapacity: 0) } - p = result.firstElementAddress - remainingCapacity = result.capacity + unsafe p = unsafe result.firstElementAddress + unsafe remainingCapacity = unsafe result.capacity } /// Add an element to the buffer, reallocating if necessary. @inlinable @inline(__always) // For performance reasons. internal mutating func add(_ element: Element) { - if remainingCapacity == 0 { + if unsafe remainingCapacity == 0 { // Reallocate. 
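Helpers like the partially-initialized buffer below keep raw pointers and spare capacity as stored state, so nearly every statement in them is an unsafe use and gets an explicit marker. The declaration-level alternative is to annotate whole declarations: a type meant to be handled with care can be marked unsafe, while a wrapper that hides its pointer work behind a checked interface can be marked safe, as `ContiguousArray` itself is earlier in this patch. A hedged sketch of both annotations with made-up types:

```swift
// Deliberately unsafe: every use of this type must be acknowledged by callers.
@unsafe
struct RawSlot {
  var pointer: UnsafeMutableRawPointer
}

// Owns unsafe storage but only exposes bounds-checked reads, so it can
// advertise itself as safe. (Deallocation is omitted for brevity.)
@safe
struct CheckedBuffer {
  private let storage: UnsafeMutableBufferPointer<Int>
  let count: Int

  init(count: Int) {
    self.count = count
    storage = unsafe UnsafeMutableBufferPointer<Int>.allocate(capacity: count)
    unsafe storage.initialize(repeating: 0)
  }

  subscript(index: Int) -> Int {
    precondition(index >= 0 && index < count, "Index out of range")
    return unsafe storage[index]
  }
}
```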
- let newCapacity = max(_growArrayCapacity(result.capacity), 1) + let newCapacity = unsafe max(_growArrayCapacity(result.capacity), 1) var newResult = _ContiguousArrayBuffer( _uninitializedCount: newCapacity, minimumCapacity: 0) - p = newResult.firstElementAddress + result.capacity - remainingCapacity = newResult.capacity - result.capacity - if !result.isEmpty { + unsafe p = unsafe newResult.firstElementAddress + result.capacity + unsafe remainingCapacity = unsafe newResult.capacity - result.capacity + if unsafe !result.isEmpty { // This check prevents a data race writing to _swiftEmptyArrayStorage // Since count is always 0 there, this code does nothing anyway - newResult.firstElementAddress.moveInitialize( + unsafe newResult.firstElementAddress.moveInitialize( from: result.firstElementAddress, count: result.capacity) - result.mutableCount = 0 + unsafe result.mutableCount = 0 } - (result, newResult) = (newResult, result) + unsafe (result, newResult) = unsafe (newResult, result) } - addWithExistingCapacity(element) + unsafe addWithExistingCapacity(element) } /// Add an element to the buffer, which must have remaining capacity. @inlinable @inline(__always) // For performance reasons. internal mutating func addWithExistingCapacity(_ element: Element) { - _internalInvariant(remainingCapacity > 0, + unsafe _internalInvariant(remainingCapacity > 0, "_UnsafePartiallyInitializedContiguousArrayBuffer has no more capacity") - remainingCapacity -= 1 + unsafe remainingCapacity -= 1 - p.initialize(to: element) - p += 1 + unsafe p.initialize(to: element) + unsafe p += 1 } /// Finish initializing the buffer, adjusting its count to the final @@ -1214,14 +1216,14 @@ internal struct _UnsafePartiallyInitializedContiguousArrayBuffer { @inline(__always) // For performance reasons. internal mutating func finish() -> ContiguousArray { // Adjust the initialized count of the buffer. - if (result.capacity != 0) { - result.mutableCount = result.capacity - remainingCapacity + if unsafe (result.capacity != 0) { + unsafe result.mutableCount = unsafe result.capacity - remainingCapacity } else { - _internalInvariant(remainingCapacity == 0) - _internalInvariant(result.count == 0) + unsafe _internalInvariant(remainingCapacity == 0) + unsafe _internalInvariant(result.count == 0) } - return finishWithOriginalCount() + return unsafe finishWithOriginalCount() } /// Finish initializing the buffer, assuming that the number of elements @@ -1233,11 +1235,11 @@ internal struct _UnsafePartiallyInitializedContiguousArrayBuffer { @inlinable @inline(__always) // For performance reasons. 
internal mutating func finishWithOriginalCount() -> ContiguousArray { - _internalInvariant(remainingCapacity == result.capacity - result.count, + unsafe _internalInvariant(remainingCapacity == result.capacity - result.count, "_UnsafePartiallyInitializedContiguousArrayBuffer has incorrect count") var finalResult = _ContiguousArrayBuffer() - (finalResult, result) = (result, finalResult) - remainingCapacity = 0 + unsafe (finalResult, result) = unsafe (result, finalResult) + unsafe remainingCapacity = 0 finalResult.endCOWMutation() return ContiguousArray(_buffer: finalResult) } diff --git a/stdlib/public/core/ContiguouslyStored.swift b/stdlib/public/core/ContiguouslyStored.swift index b614a1fb13efc..340ebd3313624 100644 --- a/stdlib/public/core/ContiguouslyStored.swift +++ b/stdlib/public/core/ContiguouslyStored.swift @@ -16,6 +16,7 @@ @usableFromInline internal protocol _HasContiguousBytes { + @safe func withUnsafeBytes( _ body: (UnsafeRawBufferPointer) throws -> R ) rethrows -> R @@ -43,38 +44,42 @@ extension Array: _HasContiguousBytes { extension ContiguousArray: _HasContiguousBytes {} extension UnsafeBufferPointer: _HasContiguousBytes { @inlinable @inline(__always) + @safe func withUnsafeBytes( _ body: (UnsafeRawBufferPointer) throws -> R ) rethrows -> R { - let ptr = UnsafeRawPointer(self.baseAddress) + let ptr = unsafe UnsafeRawPointer(self.baseAddress) let len = self.count &* MemoryLayout.stride - return try body(UnsafeRawBufferPointer(start: ptr, count: len)) + return try unsafe body(UnsafeRawBufferPointer(start: ptr, count: len)) } } extension UnsafeMutableBufferPointer: _HasContiguousBytes { @inlinable @inline(__always) + @safe func withUnsafeBytes( _ body: (UnsafeRawBufferPointer) throws -> R ) rethrows -> R { let ptr = UnsafeRawPointer(self.baseAddress) let len = self.count &* MemoryLayout.stride - return try body(UnsafeRawBufferPointer(start: ptr, count: len)) + return try unsafe body(UnsafeRawBufferPointer(start: ptr, count: len)) } } extension UnsafeRawBufferPointer: _HasContiguousBytes { @inlinable @inline(__always) + @safe func withUnsafeBytes( _ body: (UnsafeRawBufferPointer) throws -> R ) rethrows -> R { - return try body(self) + return try unsafe body(self) } } extension UnsafeMutableRawBufferPointer: _HasContiguousBytes { @inlinable @inline(__always) + @safe func withUnsafeBytes( _ body: (UnsafeRawBufferPointer) throws -> R ) rethrows -> R { - return try body(UnsafeRawBufferPointer(self)) + return try unsafe body(UnsafeRawBufferPointer(self)) } } extension String: _HasContiguousBytes { @@ -84,11 +89,12 @@ extension String: _HasContiguousBytes { } @inlinable @inline(__always) + @safe internal func withUnsafeBytes( _ body: (UnsafeRawBufferPointer) throws -> R ) rethrows -> R { var copy = self - return try copy.withUTF8 { return try body(UnsafeRawBufferPointer($0)) } + return try copy.withUTF8 { return try unsafe body(UnsafeRawBufferPointer($0)) } } } extension Substring: _HasContiguousBytes { @@ -98,10 +104,11 @@ extension Substring: _HasContiguousBytes { } @inlinable @inline(__always) + @safe internal func withUnsafeBytes( _ body: (UnsafeRawBufferPointer) throws -> R ) rethrows -> R { var copy = self - return try copy.withUTF8 { return try body(UnsafeRawBufferPointer($0)) } + return try copy.withUTF8 { return try unsafe body(UnsafeRawBufferPointer($0)) } } } diff --git a/stdlib/public/core/DebuggerSupport.swift b/stdlib/public/core/DebuggerSupport.swift index 0910fe2206497..62a3fd5723675 100644 --- a/stdlib/public/core/DebuggerSupport.swift +++ 
b/stdlib/public/core/DebuggerSupport.swift @@ -130,7 +130,7 @@ public enum _DebuggerSupport { private static func asObjectAddress(_ value: Any) -> String { let address = checkValue(value, - ifClass: { return unsafeBitCast($0, to: Int.self) }, + ifClass: { return unsafe unsafeBitCast($0, to: Int.self) }, otherwise: { return 0 }) return String(address, radix: 16, uppercase: false) } @@ -344,8 +344,8 @@ internal func _withHeapObject( _ body: (UnsafeMutableRawPointer) -> R ) -> R { defer { _fixLifetime(object) } - let unmanaged = Unmanaged.passUnretained(object) - return body(unmanaged.toOpaque()) + let unmanaged = unsafe Unmanaged.passUnretained(object) + return unsafe body(unmanaged.toOpaque()) } @_extern(c, "swift_retainCount") @usableFromInline @@ -358,18 +358,18 @@ internal func _swift_weakRetainCount(_: UnsafeMutableRawPointer) -> Int // Utilities to get refcount(s) of class objects. @_alwaysEmitIntoClient public func _getRetainCount(_ object: AnyObject) -> UInt { - let count = _withHeapObject(of: object) { _swift_retainCount($0) } + let count = unsafe _withHeapObject(of: object) { unsafe _swift_retainCount($0) } return UInt(bitPattern: count) } @_alwaysEmitIntoClient public func _getUnownedRetainCount(_ object: AnyObject) -> UInt { - let count = _withHeapObject(of: object) { _swift_unownedRetainCount($0) } + let count = unsafe _withHeapObject(of: object) { unsafe _swift_unownedRetainCount($0) } return UInt(bitPattern: count) } @_alwaysEmitIntoClient public func _getWeakRetainCount(_ object: AnyObject) -> UInt { - let count = _withHeapObject(of: object) { _swift_weakRetainCount($0) } + let count = unsafe _withHeapObject(of: object) { unsafe _swift_weakRetainCount($0) } return UInt(bitPattern: count) } diff --git a/stdlib/public/core/Dictionary.swift b/stdlib/public/core/Dictionary.swift index 3553b28bcdeb7..83d514aa90ac4 100644 --- a/stdlib/public/core/Dictionary.swift +++ b/stdlib/public/core/Dictionary.swift @@ -910,9 +910,9 @@ extension Dictionary { let value = defaultValue() native._insert(at: bucket, key: key, value: value) } - let address = native._values + bucket.offset + let address = unsafe native._values + bucket.offset defer { _fixLifetime(self) } - yield &address.pointee + yield unsafe &address.pointee } } @@ -1388,7 +1388,7 @@ extension Dictionary { return true } #else - if lhs._variant.asNative._storage === rhs._variant.asNative._storage { + if unsafe lhs._variant.asNative._storage === rhs._variant.asNative._storage { return true } #endif @@ -1459,9 +1459,9 @@ extension Dictionary { _modify { let native = _variant.ensureUniqueNative() let bucket = native.validatedBucket(for: position) - let address = native._values + bucket.offset + let address = unsafe native._values + bucket.offset defer { _fixLifetime(self) } - yield &address.pointee + yield unsafe &address.pointee } } @@ -1685,7 +1685,7 @@ internal struct _DictionaryAnyHashableBox into result: UnsafeMutablePointer ) -> Bool { guard let value = _value as? 
T else { return false } - result.initialize(to: value) + unsafe result.initialize(to: value) return true } } @@ -1773,6 +1773,7 @@ extension Dictionary { @frozen @usableFromInline + @safe internal enum _Variant { case native(_HashTable.Index) #if _runtime(_ObjC) @@ -1847,7 +1848,7 @@ extension Dictionary.Index { internal var _asNative: _HashTable.Index { switch _variant { case .native(let nativeIndex): - return nativeIndex + return unsafe nativeIndex #if _runtime(_ObjC) case .cocoa: _preconditionFailure( @@ -1874,7 +1875,7 @@ extension Dictionary.Index { _preconditionFailure( "Attempting to access Dictionary elements using an invalid index") } - let dummy = _HashTable.Index(bucket: _HashTable.Bucket(offset: 0), age: 0) + let dummy = unsafe _HashTable.Index(bucket: _HashTable.Bucket(offset: 0), age: 0) _variant = .native(dummy) defer { _variant = .cocoa(cocoa) } yield &cocoa @@ -1891,7 +1892,7 @@ extension Dictionary.Index: Equatable { ) -> Bool { switch (lhs._variant, rhs._variant) { case (.native(let lhsNative), .native(let rhsNative)): - return lhsNative == rhsNative + return unsafe lhsNative == rhsNative #if _runtime(_ObjC) case (.cocoa(let lhsCocoa), .cocoa(let rhsCocoa)): lhs._cocoaPath() @@ -1911,7 +1912,7 @@ extension Dictionary.Index: Comparable { ) -> Bool { switch (lhs._variant, rhs._variant) { case (.native(let lhsNative), .native(let rhsNative)): - return lhsNative < rhsNative + return unsafe lhsNative < rhsNative #if _runtime(_ObjC) case (.cocoa(let lhsCocoa), .cocoa(let rhsCocoa)): lhs._cocoaPath() @@ -1933,9 +1934,9 @@ extension Dictionary.Index: Hashable { return } hasher.combine(0 as UInt8) - hasher.combine(_asNative.bucket.offset) + unsafe hasher.combine(_asNative.bucket.offset) #else - hasher.combine(_asNative.bucket.offset) + unsafe hasher.combine(_asNative.bucket.offset) #endif } } diff --git a/stdlib/public/core/DictionaryBridging.swift b/stdlib/public/core/DictionaryBridging.swift index 5d58267e4f3e4..8008276d185bb 100644 --- a/stdlib/public/core/DictionaryBridging.swift +++ b/stdlib/public/core/DictionaryBridging.swift @@ -19,10 +19,10 @@ import SwiftShims internal func _stdlib_NSDictionary_allKeys( _ object: AnyObject ) -> _BridgingBuffer { - let nsd = unsafeBitCast(object, to: _NSDictionary.self) + let nsd = unsafe unsafeBitCast(object, to: _NSDictionary.self) let count = nsd.count let storage = _BridgingBuffer(count) - nsd.getObjects(nil, andKeys: storage.baseAddress, count: count) + unsafe nsd.getObjects(nil, andKeys: storage.baseAddress, count: count) return storage } @@ -36,11 +36,11 @@ extension _NativeDictionary { // Bridging // Temporary var for SOME type safety. let nsDictionary: _NSDictionaryCore - if _storage === __RawDictionaryStorage.empty || count == 0 { - nsDictionary = __RawDictionaryStorage.empty + if unsafe _storage === __RawDictionaryStorage.empty || count == 0 { + unsafe nsDictionary = __RawDictionaryStorage.empty } else if _isBridgedVerbatimToObjectiveC(Key.self), _isBridgedVerbatimToObjectiveC(Value.self) { - nsDictionary = unsafeDowncast( + unsafe nsDictionary = unsafeDowncast( _storage, to: _DictionaryStorage.self) } else { @@ -53,6 +53,7 @@ extension _NativeDictionary { // Bridging /// An NSEnumerator that works with any _NativeDictionary of /// verbatim bridgeable elements. Used by the various NSDictionary impls. 
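The bridging code above leans on `unsafeBitCast` and `unsafeDowncast`, both of which skip dynamic checks and therefore now require acknowledgement at each call site. A minimal sketch of the caller-side obligation, with placeholder classes:

```swift
class Base {}
final class Derived: Base {}

func downcastKnownDerived(_ object: Base) -> Derived {
  // `unsafeDowncast` omits the runtime type check, so the strict mode asks
  // the caller to acknowledge it with an `unsafe` expression.
  return unsafe unsafeDowncast(object, to: Derived.self)
}
```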
+@safe final internal class _SwiftDictionaryNSEnumerator : __SwiftNativeNSEnumerator, _NSEnumerator { @@ -71,8 +72,8 @@ final internal class _SwiftDictionaryNSEnumerator _internalInvariant(_orphanedFoundationSubclassesReparented) self.base = base self.bridgedKeys = nil - self.nextBucket = base.hashTable.startBucket - self.endBucket = base.hashTable.endBucket + self.nextBucket = unsafe base.hashTable.startBucket + self.endBucket = unsafe base.hashTable.endBucket super.init() } @@ -81,27 +82,27 @@ final internal class _SwiftDictionaryNSEnumerator _internalInvariant(!_isBridgedVerbatimToObjectiveC(Key.self)) _internalInvariant(_orphanedFoundationSubclassesReparented) self.base = deferred.native - self.bridgedKeys = deferred.bridgeKeys() - self.nextBucket = base.hashTable.startBucket - self.endBucket = base.hashTable.endBucket + self.bridgedKeys = unsafe deferred.bridgeKeys() + self.nextBucket = unsafe base.hashTable.startBucket + self.endBucket = unsafe base.hashTable.endBucket super.init() } private func bridgedKey(at bucket: _HashTable.Bucket) -> AnyObject { - _internalInvariant(base.hashTable.isOccupied(bucket)) + unsafe _internalInvariant(base.hashTable.isOccupied(bucket)) if let bridgedKeys = self.bridgedKeys { - return bridgedKeys[bucket] + return unsafe bridgedKeys[bucket] } return _bridgeAnythingToObjectiveC(base.uncheckedKey(at: bucket)) } @objc internal func nextObject() -> AnyObject? { - if nextBucket == endBucket { + if unsafe nextBucket == endBucket { return nil } let bucket = nextBucket - nextBucket = base.hashTable.occupiedBucket(after: nextBucket) + nextBucket = unsafe base.hashTable.occupiedBucket(after: nextBucket) return self.bridgedKey(at: bucket) } @@ -111,24 +112,24 @@ final internal class _SwiftDictionaryNSEnumerator objects: UnsafeMutablePointer, count: Int ) -> Int { - var theState = state.pointee - if theState.state == 0 { - theState.state = 1 // Arbitrary non-zero value. - theState.itemsPtr = AutoreleasingUnsafeMutablePointer(objects) - theState.mutationsPtr = _fastEnumerationStorageMutationsPtr + var theState = unsafe state.pointee + if unsafe theState.state == 0 { + unsafe theState.state = 1 // Arbitrary non-zero value. + unsafe theState.itemsPtr = AutoreleasingUnsafeMutablePointer(objects) + unsafe theState.mutationsPtr = _fastEnumerationStorageMutationsPtr } - if nextBucket == endBucket { - state.pointee = theState + if unsafe nextBucket == endBucket { + unsafe state.pointee = theState return 0 } // Return only a single element so that code can start iterating via fast // enumeration, terminate it, and continue via NSEnumerator. - let unmanagedObjects = _UnmanagedAnyObjectArray(objects) - unmanagedObjects[0] = self.bridgedKey(at: nextBucket) - nextBucket = base.hashTable.occupiedBucket(after: nextBucket) - state.pointee = theState + let unmanagedObjects = unsafe _UnmanagedAnyObjectArray(objects) + unsafe unmanagedObjects[0] = self.bridgedKey(at: nextBucket) + nextBucket = unsafe base.hashTable.occupiedBucket(after: nextBucket) + unsafe state.pointee = theState return 1 } } @@ -180,82 +181,82 @@ final internal class _SwiftDeferredNSDictionary @nonobjc private var _bridgedKeysPtr: UnsafeMutablePointer { - return _getUnsafePointerToStoredProperties(self) + return unsafe _getUnsafePointerToStoredProperties(self) .assumingMemoryBound(to: Optional.self) } @nonobjc private var _bridgedValuesPtr: UnsafeMutablePointer { - return _bridgedKeysPtr + 1 + return unsafe _bridgedKeysPtr + 1 } /// The buffer for bridged keys, if present. 
@nonobjc private var _bridgedKeys: __BridgingHashBuffer? { - guard let ref = _stdlib_atomicLoadARCRef(object: _bridgedKeysPtr) else { + guard let ref = unsafe _stdlib_atomicLoadARCRef(object: _bridgedKeysPtr) else { return nil } - return unsafeDowncast(ref, to: __BridgingHashBuffer.self) + return unsafe unsafeDowncast(ref, to: __BridgingHashBuffer.self) } /// The buffer for bridged values, if present. @nonobjc private var _bridgedValues: __BridgingHashBuffer? { - guard let ref = _stdlib_atomicLoadARCRef(object: _bridgedValuesPtr) else { + guard let ref = unsafe _stdlib_atomicLoadARCRef(object: _bridgedValuesPtr) else { return nil } - return unsafeDowncast(ref, to: __BridgingHashBuffer.self) + return unsafe unsafeDowncast(ref, to: __BridgingHashBuffer.self) } /// Attach a buffer for bridged Dictionary keys. @nonobjc private func _initializeBridgedKeys(_ storage: __BridgingHashBuffer) { - _stdlib_atomicInitializeARCRef(object: _bridgedKeysPtr, desired: storage) + unsafe _stdlib_atomicInitializeARCRef(object: _bridgedKeysPtr, desired: storage) } /// Attach a buffer for bridged Dictionary values. @nonobjc private func _initializeBridgedValues(_ storage: __BridgingHashBuffer) { - _stdlib_atomicInitializeARCRef(object: _bridgedValuesPtr, desired: storage) + unsafe _stdlib_atomicInitializeARCRef(object: _bridgedValuesPtr, desired: storage) } @nonobjc internal func bridgeKeys() -> __BridgingHashBuffer? { if _isBridgedVerbatimToObjectiveC(Key.self) { return nil } - if let bridgedKeys = _bridgedKeys { return bridgedKeys } + if let bridgedKeys = unsafe _bridgedKeys { return unsafe bridgedKeys } // Allocate and initialize heap storage for bridged keys. - let bridged = __BridgingHashBuffer.allocate( + let bridged = unsafe __BridgingHashBuffer.allocate( owner: native._storage, hashTable: native.hashTable) - for bucket in native.hashTable { + for unsafe bucket in unsafe native.hashTable { let object = _bridgeAnythingToObjectiveC(native.uncheckedKey(at: bucket)) - bridged.initialize(at: bucket, to: object) + unsafe bridged.initialize(at: bucket, to: object) } // Atomically put the bridged keys in place. - _initializeBridgedKeys(bridged) - return _bridgedKeys! + unsafe _initializeBridgedKeys(bridged) + return unsafe _bridgedKeys! } @nonobjc internal func bridgeValues() -> __BridgingHashBuffer? { if _isBridgedVerbatimToObjectiveC(Value.self) { return nil } - if let bridgedValues = _bridgedValues { return bridgedValues } + if let bridgedValues = unsafe _bridgedValues { return unsafe bridgedValues } // Allocate and initialize heap storage for bridged values. - let bridged = __BridgingHashBuffer.allocate( + let bridged = unsafe __BridgingHashBuffer.allocate( owner: native._storage, hashTable: native.hashTable) - for bucket in native.hashTable { + for unsafe bucket in unsafe native.hashTable { let value = native.uncheckedValue(at: bucket) let cocoaValue = _bridgeAnythingToObjectiveC(value) - bridged.initialize(at: bucket, to: cocoaValue) + unsafe bridged.initialize(at: bucket, to: cocoaValue) } // Atomically put the bridged values in place. - _initializeBridgedValues(bridged) - return _bridgedValues! + unsafe _initializeBridgedValues(bridged) + return unsafe _bridgedValues! } @objc(copyWithZone:) @@ -270,8 +271,8 @@ final internal class _SwiftDeferredNSDictionary at bucket: Bucket, bridgedKeys: __BridgingHashBuffer? 
) -> AnyObject { - if let bridgedKeys = bridgedKeys { - return bridgedKeys[bucket] + if let bridgedKeys = unsafe bridgedKeys { + return unsafe bridgedKeys[bucket] } return _bridgeAnythingToObjectiveC(native.uncheckedKey(at: bucket)) } @@ -281,8 +282,8 @@ final internal class _SwiftDeferredNSDictionary at bucket: Bucket, bridgedValues: __BridgingHashBuffer? ) -> AnyObject { - if let bridgedValues = bridgedValues { - return bridgedValues[bucket] + if let bridgedValues = unsafe bridgedValues { + return unsafe bridgedValues[bucket] } return _bridgeAnythingToObjectiveC(native.uncheckedValue(at: bucket)) } @@ -294,7 +295,7 @@ final internal class _SwiftDeferredNSDictionary let (bucket, found) = native.find(nativeKey) guard found else { return nil } - return _value(at: bucket, bridgedValues: bridgeValues()) + return unsafe _value(at: bucket, bridgedValues: bridgeValues()) } @objc @@ -313,29 +314,29 @@ final internal class _SwiftDeferredNSDictionary ) { _precondition(count >= 0, "Invalid count") guard count > 0 else { return } - let bridgedKeys = bridgeKeys() - let bridgedValues = bridgeValues() + let bridgedKeys = unsafe bridgeKeys() + let bridgedValues = unsafe bridgeValues() var i = 0 // Current position in the output buffers defer { _fixLifetime(self) } - switch (_UnmanagedAnyObjectArray(keys), _UnmanagedAnyObjectArray(objects)) { + switch unsafe (_UnmanagedAnyObjectArray(keys), _UnmanagedAnyObjectArray(objects)) { case (let unmanagedKeys?, let unmanagedObjects?): - for bucket in native.hashTable { - unmanagedKeys[i] = _key(at: bucket, bridgedKeys: bridgedKeys) - unmanagedObjects[i] = _value(at: bucket, bridgedValues: bridgedValues) + for unsafe bucket in unsafe native.hashTable { + unsafe unmanagedKeys[i] = unsafe _key(at: bucket, bridgedKeys: bridgedKeys) + unsafe unmanagedObjects[i] = unsafe _value(at: bucket, bridgedValues: bridgedValues) i += 1 guard i < count else { break } } case (let unmanagedKeys?, nil): - for bucket in native.hashTable { - unmanagedKeys[i] = _key(at: bucket, bridgedKeys: bridgedKeys) + for unsafe bucket in unsafe native.hashTable { + unsafe unmanagedKeys[i] = unsafe _key(at: bucket, bridgedKeys: bridgedKeys) i += 1 guard i < count else { break } } case (nil, let unmanagedObjects?): - for bucket in native.hashTable { - unmanagedObjects[i] = _value(at: bucket, bridgedValues: bridgedValues) + for unsafe bucket in unsafe native.hashTable { + unsafe unmanagedObjects[i] = unsafe _value(at: bucket, bridgedValues: bridgedValues) i += 1 guard i < count else { break } } @@ -353,16 +354,16 @@ final internal class _SwiftDeferredNSDictionary Unmanaged, UnsafeMutablePointer ) -> Void) { - let bridgedKeys = bridgeKeys() - let bridgedValues = bridgeValues() + let bridgedKeys = unsafe bridgeKeys() + let bridgedValues = unsafe bridgeValues() defer { _fixLifetime(self) } var stop: UInt8 = 0 - for bucket in native.hashTable { - let key = _key(at: bucket, bridgedKeys: bridgedKeys) - let value = _value(at: bucket, bridgedValues: bridgedValues) - block( + for bucket in unsafe native.hashTable { + let key = unsafe _key(at: bucket, bridgedKeys: bridgedKeys) + let value = unsafe _value(at: bucket, bridgedValues: bridgedValues) + unsafe block( Unmanaged.passUnretained(key), Unmanaged.passUnretained(value), &stop) @@ -384,39 +385,39 @@ final internal class _SwiftDeferredNSDictionary defer { _fixLifetime(self) } let hashTable = native.hashTable - var theState = state.pointee - if theState.state == 0 { - theState.state = 1 // Arbitrary non-zero value. 
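Handing objects across the Objective-C bridge at +0, as the enumeration and bulk-copy entry points here do, goes through `Unmanaged`, and each conversion between a managed reference and a raw pointer is an unsafe use. A self-contained sketch of the round trip, with a placeholder class:

```swift
final class Node {
  var value = 0
}

// Turn an object reference into a raw pointer and back without touching its
// retain count; both directions must be acknowledged under strict safety.
func roundTrip(_ node: Node) -> Node {
  let raw = unsafe Unmanaged.passUnretained(node).toOpaque()
  return unsafe Unmanaged<Node>.fromOpaque(raw).takeUnretainedValue()
}
```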
- theState.itemsPtr = AutoreleasingUnsafeMutablePointer(objects) - theState.mutationsPtr = _fastEnumerationStorageMutationsPtr - theState.extra.0 = CUnsignedLong(hashTable.startBucket.offset) + var theState = unsafe state.pointee + if unsafe theState.state == 0 { + unsafe theState.state = 1 // Arbitrary non-zero value. + unsafe theState.itemsPtr = AutoreleasingUnsafeMutablePointer(objects) + unsafe theState.mutationsPtr = _fastEnumerationStorageMutationsPtr + unsafe theState.extra.0 = CUnsignedLong(hashTable.startBucket.offset) } // Test 'objects' rather than 'count' because (a) this is very rare anyway, // and (b) the optimizer should then be able to optimize away the // unwrapping check below. - if _slowPath(objects == nil) { + if unsafe _slowPath(objects == nil) { return 0 } - let unmanagedObjects = _UnmanagedAnyObjectArray(objects!) - var bucket = _HashTable.Bucket(offset: Int(theState.extra.0)) - let endBucket = hashTable.endBucket - _precondition(bucket == endBucket || hashTable.isOccupied(bucket), + let unmanagedObjects = unsafe _UnmanagedAnyObjectArray(objects!) + var bucket = unsafe _HashTable.Bucket(offset: Int(theState.extra.0)) + let endBucket = unsafe hashTable.endBucket + unsafe _precondition(bucket == endBucket || hashTable.isOccupied(bucket), "Invalid fast enumeration state") var stored = 0 // Only need to bridge once, so we can hoist it out of the loop. - let bridgedKeys = bridgeKeys() + let bridgedKeys = unsafe bridgeKeys() for i in 0.. Bool { - let nsd = unsafeBitCast(object, to: _NSDictionary.self) + let nsd = unsafe unsafeBitCast(object, to: _NSDictionary.self) return nsd.object(forKey: key) != nil } @usableFromInline internal func lookup(_ key: Key) -> Value? { - let nsd = unsafeBitCast(object, to: _NSDictionary.self) + let nsd = unsafe unsafeBitCast(object, to: _NSDictionary.self) return nsd.object(forKey: key) } @@ -536,7 +537,7 @@ extension __CocoaDictionary: _DictionaryBuffer { internal func lookup(_ index: Index) -> (key: Key, value: Value) { _precondition(index.storage.base.object === self.object, "Invalid index") let key: Key = index.storage.allKeys[index._offset] - let value: Value = index.storage.base.object.object(forKey: key)! + let value: Value = unsafe index.storage.base.object.object(forKey: key)! return (key, value) } @@ -552,7 +553,7 @@ extension __CocoaDictionary: _DictionaryBuffer { func value(at index: Index) -> Value { _precondition(index.storage.base.object === self.object, "Invalid index") let key = index.storage.allKeys[index._offset] - return index.storage.base.object.object(forKey: key)! + return unsafe index.storage.base.object.object(forKey: key)! 
} } @@ -582,7 +583,7 @@ extension __CocoaDictionary { @inline(__always) get { let storage = _bridgeObject(toNative: _storage) - return unsafeDowncast(storage, to: Storage.self) + return unsafe unsafeDowncast(storage, to: Storage.self) } } @@ -626,7 +627,7 @@ extension __CocoaDictionary.Index { internal var handleBitPattern: UInt { @_effects(readonly) get { - return unsafeBitCast(storage, to: UInt.self) + return unsafe unsafeBitCast(storage, to: UInt.self) } } @@ -656,7 +657,7 @@ extension __CocoaDictionary.Index { internal var age: Int32 { @_effects(readonly) get { - return _HashTable.age(for: storage.base.object) + return unsafe _HashTable.age(for: storage.base.object) } } } @@ -688,6 +689,7 @@ extension __CocoaDictionary.Index: Comparable { } extension __CocoaDictionary: Sequence { + @safe @usableFromInline final internal class Iterator { // Cocoa Dictionary iterator has to be a class, otherwise we cannot @@ -697,23 +699,23 @@ extension __CocoaDictionary: Sequence { // This stored property should be stored at offset zero. There's code below // relying on this. internal var _fastEnumerationState: _SwiftNSFastEnumerationState = - _makeSwiftNSFastEnumerationState() + unsafe _makeSwiftNSFastEnumerationState() // This stored property should be stored right after // `_fastEnumerationState`. There's code below relying on this. - internal var _fastEnumerationStackBuf = _CocoaFastEnumerationStackBuf() + internal var _fastEnumerationStackBuf = unsafe _CocoaFastEnumerationStackBuf() internal let base: __CocoaDictionary internal var _fastEnumerationStatePtr: UnsafeMutablePointer<_SwiftNSFastEnumerationState> { - return _getUnsafePointerToStoredProperties(self).assumingMemoryBound( + return unsafe _getUnsafePointerToStoredProperties(self).assumingMemoryBound( to: _SwiftNSFastEnumerationState.self) } internal var _fastEnumerationStackBufPtr: UnsafeMutablePointer<_CocoaFastEnumerationStackBuf> { - return UnsafeMutableRawPointer(_fastEnumerationStatePtr + 1) + return unsafe UnsafeMutableRawPointer(_fastEnumerationStatePtr + 1) .assumingMemoryBound(to: _CocoaFastEnumerationStackBuf.self) } @@ -750,12 +752,12 @@ extension __CocoaDictionary.Iterator: IteratorProtocol { } let base = self.base if itemIndex == itemCount { - let stackBufCount = _fastEnumerationStackBuf.count + let stackBufCount = unsafe _fastEnumerationStackBuf.count // We can't use `withUnsafeMutablePointer` here to get pointers to // properties, because doing so might introduce a writeback storage, but // fast enumeration relies on the pointer identity of the enumeration // state struct. - itemCount = base.object.countByEnumerating( + itemCount = unsafe base.object.countByEnumerating( with: _fastEnumerationStatePtr, objects: UnsafeMutableRawPointer(_fastEnumerationStackBufPtr) .assumingMemoryBound(to: AnyObject.self), @@ -767,10 +769,10 @@ extension __CocoaDictionary.Iterator: IteratorProtocol { itemIndex = 0 } let itemsPtrUP = - UnsafeMutableRawPointer(_fastEnumerationState.itemsPtr!) + unsafe UnsafeMutableRawPointer(_fastEnumerationState.itemsPtr!) .assumingMemoryBound(to: AnyObject.self) - let itemsPtr = _UnmanagedAnyObjectArray(itemsPtrUP) - let key: AnyObject = itemsPtr[itemIndex] + let itemsPtr = unsafe _UnmanagedAnyObjectArray(itemsPtrUP) + let key: AnyObject = unsafe itemsPtr[itemIndex] itemIndex += 1 return key } @@ -778,7 +780,7 @@ extension __CocoaDictionary.Iterator: IteratorProtocol { @usableFromInline internal func next() -> Element? 
{ guard let key = nextKey() else { return nil } - let value: AnyObject = base.object.object(forKey: key)! + let value: AnyObject = unsafe base.object.object(forKey: key)! return (key, value) } } @@ -806,11 +808,11 @@ extension Dictionary { return Dictionary(_native: deferred.native) } - if let nativeStorage = s as? _DictionaryStorage { + if let nativeStorage = unsafe s as? _DictionaryStorage { return Dictionary(_native: _NativeDictionary(nativeStorage)) } - if s === __RawDictionaryStorage.empty { + if unsafe s === __RawDictionaryStorage.empty { return Dictionary() } diff --git a/stdlib/public/core/DictionaryBuilder.swift b/stdlib/public/core/DictionaryBuilder.swift index 282c44a0d1d51..bd1238244471e 100644 --- a/stdlib/public/core/DictionaryBuilder.swift +++ b/stdlib/public/core/DictionaryBuilder.swift @@ -106,18 +106,18 @@ extension _NativeDictionary { // If the capacity is 0, then our storage is the empty singleton. Those are // read only, so we shouldn't attempt to write to them. if capacity == 0 { - let c = initializer( + let c = unsafe initializer( UnsafeMutableBufferPointer(start: nil, count: 0), UnsafeMutableBufferPointer(start: nil, count: 0)) _precondition(c == 0) return } - let initializedCount = initializer( + let initializedCount = unsafe initializer( UnsafeMutableBufferPointer(start: _keys, count: capacity), UnsafeMutableBufferPointer(start: _values, count: capacity)) _precondition(initializedCount >= 0 && initializedCount <= capacity) - _storage._count = initializedCount + unsafe _storage._count = initializedCount // Hash initialized elements and move each of them into their correct // buckets. @@ -136,42 +136,42 @@ extension _NativeDictionary { // Each iteration of the loop below processes an unprocessed element, and/or // reduces the size of the unprocessed region, while ensuring the above // invariants. - var bucket = _HashTable.Bucket(offset: initializedCount - 1) - while bucket.offset >= 0 { - if hashTable._isOccupied(bucket) { + var bucket = unsafe _HashTable.Bucket(offset: initializedCount - 1) + while unsafe bucket.offset >= 0 { + if unsafe hashTable._isOccupied(bucket) { // We've moved an element here in a previous iteration. - bucket.offset -= 1 + unsafe bucket.offset -= 1 continue } // Find the target bucket for this entry and mark it as in use. let target: Bucket if _isDebugAssertConfiguration() || allowingDuplicates { - let (b, found) = find(_keys[bucket.offset]) + let (b, found) = unsafe find(_keys[bucket.offset]) if found { - _internalInvariant(b != bucket) + unsafe _internalInvariant(b != bucket) _precondition(allowingDuplicates, "Duplicate keys found") // Discard duplicate entry. uncheckedDestroy(at: bucket) - _storage._count -= 1 - bucket.offset -= 1 + unsafe _storage._count -= 1 + unsafe bucket.offset -= 1 continue } - hashTable.insert(b) - target = b + unsafe hashTable.insert(b) + unsafe target = unsafe b } else { - let hashValue = self.hashValue(for: _keys[bucket.offset]) - target = hashTable.insertNew(hashValue: hashValue) + let hashValue = unsafe self.hashValue(for: _keys[bucket.offset]) + unsafe target = unsafe hashTable.insertNew(hashValue: hashValue) } - if target > bucket { + if unsafe target > bucket { // The target is outside the unprocessed region. We can simply move the // entry, leaving behind an uninitialized bucket. moveEntry(from: bucket, to: target) // Restore invariants by lowering the region boundary. - bucket.offset -= 1 - } else if target == bucket { + unsafe bucket.offset -= 1 + } else if unsafe target == bucket { // Already in place. 
- bucket.offset -= 1 + unsafe bucket.offset -= 1 } else { // The target bucket is also in the unprocessed region. Swap the current // item into place, then try again with the swapped-in value, so that we diff --git a/stdlib/public/core/DictionaryCasting.swift b/stdlib/public/core/DictionaryCasting.swift index 6060cad086e59..349827e90f0bc 100644 --- a/stdlib/public/core/DictionaryCasting.swift +++ b/stdlib/public/core/DictionaryCasting.swift @@ -64,7 +64,7 @@ internal func _dictionaryDownCastIndirect( _ source: UnsafePointer>, _ target: UnsafeMutablePointer>) { - target.initialize(to: _dictionaryDownCast(source.pointee)) + unsafe target.initialize(to: _dictionaryDownCast(source.pointee)) } /// Implements a forced downcast. This operation should have O(1) complexity. @@ -121,8 +121,8 @@ internal func _dictionaryDownCastConditionalIndirect> ) -> Bool { if let result: Dictionary - = _dictionaryDownCastConditional(source.pointee) { - target.initialize(to: result) + = unsafe _dictionaryDownCastConditional(source.pointee) { + unsafe target.initialize(to: result) return true } return false diff --git a/stdlib/public/core/DictionaryStorage.swift b/stdlib/public/core/DictionaryStorage.swift index 3d3cfa3c1f343..ef66c68975166 100644 --- a/stdlib/public/core/DictionaryStorage.swift +++ b/stdlib/public/core/DictionaryStorage.swift @@ -22,6 +22,7 @@ import SwiftShims @_fixed_layout @usableFromInline @_objc_non_lazy_realization +@unsafe internal class __RawDictionaryStorage: __SwiftNativeNSDictionary { // NOTE: The precise layout of this type is relied on in the runtime to // provide a statically allocated empty singleton. See @@ -87,15 +88,15 @@ internal class __RawDictionaryStorage: __SwiftNativeNSDictionary { @inlinable @nonobjc internal final var _bucketCount: Int { - @inline(__always) get { return 1 &<< _scale } + @inline(__always) get { return unsafe 1 &<< _scale } } @inlinable @nonobjc internal final var _metadata: UnsafeMutablePointer<_HashTable.Word> { @inline(__always) get { - let address = Builtin.projectTailElems(self, _HashTable.Word.self) - return UnsafeMutablePointer(address) + let address = unsafe Builtin.projectTailElems(self, _HashTable.Word.self) + return unsafe UnsafeMutablePointer(address) } } @@ -105,7 +106,7 @@ internal class __RawDictionaryStorage: __SwiftNativeNSDictionary { @nonobjc internal final var _hashTable: _HashTable { @inline(__always) get { - return _HashTable(words: _metadata, bucketCount: _bucketCount) + return unsafe _HashTable(words: _metadata, bucketCount: _bucketCount) } } } @@ -115,7 +116,7 @@ internal class __RawDictionaryStorage: __SwiftNativeNSDictionary { // NOTE: older runtimes called this class _EmptyDictionarySingleton. // The two must coexist without a conflicting ObjC class name, so it was // renamed. The old name must not be used in the new runtime. -@_fixed_layout +@unsafe @_fixed_layout @usableFromInline @_objc_non_lazy_realization internal class __EmptyDictionarySingleton: __RawDictionaryStorage { @@ -137,10 +138,10 @@ internal class __EmptyDictionarySingleton: __RawDictionaryStorage { } #if _runtime(_ObjC) -extension __EmptyDictionarySingleton: _NSDictionaryCore { +extension __EmptyDictionarySingleton: @unsafe _NSDictionaryCore { @objc(copyWithZone:) internal func copy(with zone: _SwiftNSZone?) -> AnyObject { - return self + return unsafe self } @objc @@ -156,13 +157,13 @@ extension __EmptyDictionarySingleton: _NSDictionaryCore { // Even though we never do anything in here, we need to update the // state so that callers know we actually ran. 
- var theState = state.pointee - if theState.state == 0 { - theState.state = 1 // Arbitrary non-zero value. - theState.itemsPtr = AutoreleasingUnsafeMutablePointer(objects) - theState.mutationsPtr = _fastEnumerationStorageMutationsPtr + var theState = unsafe state.pointee + if unsafe theState.state == 0 { + unsafe theState.state = 1 // Arbitrary non-zero value. + unsafe theState.itemsPtr = AutoreleasingUnsafeMutablePointer(objects) + unsafe theState.mutationsPtr = _fastEnumerationStorageMutationsPtr } - state.pointee = theState + unsafe state.pointee = theState return 0 } @@ -194,6 +195,7 @@ extension __EmptyDictionarySingleton: _NSDictionaryCore { // // TODO: We should figure out how to make this a constant so that it's placed in // non-writable memory (can't be a let, Builtin.addressof below requires a var). +@unsafe public var _swiftEmptyDictionarySingleton: (Int, Int, Int, Int, UInt8, UInt8, UInt16, UInt32, Int, Int, Int, Int) = ( /*isa*/0, /*refcount*/-1, // HeapObject header @@ -217,43 +219,43 @@ extension __RawDictionaryStorage { @inlinable @nonobjc internal static var empty: __EmptyDictionarySingleton { - return Builtin.bridgeFromRawPointer( + return unsafe Builtin.bridgeFromRawPointer( Builtin.addressof(&_swiftEmptyDictionarySingleton)) } @_alwaysEmitIntoClient @inline(__always) internal final func uncheckedKey(at bucket: _HashTable.Bucket) -> Key { - defer { _fixLifetime(self) } - _internalInvariant(_hashTable.isOccupied(bucket)) - let keys = _rawKeys.assumingMemoryBound(to: Key.self) - return keys[bucket.offset] + defer { unsafe _fixLifetime(self) } + unsafe _internalInvariant(_hashTable.isOccupied(bucket)) + let keys = unsafe _rawKeys.assumingMemoryBound(to: Key.self) + return unsafe keys[bucket.offset] } @_alwaysEmitIntoClient @inline(never) internal final func find(_ key: Key) -> (bucket: _HashTable.Bucket, found: Bool) { - return find(key, hashValue: key._rawHashValue(seed: _seed)) + return unsafe find(key, hashValue: key._rawHashValue(seed: _seed)) } @_alwaysEmitIntoClient @inline(never) internal final func find(_ key: Key, hashValue: Int) -> (bucket: _HashTable.Bucket, found: Bool) { - let hashTable = _hashTable - var bucket = hashTable.idealBucket(forHashValue: hashValue) - while hashTable._isOccupied(bucket) { - if uncheckedKey(at: bucket) == key { - return (bucket, true) + let hashTable = unsafe _hashTable + var bucket = unsafe hashTable.idealBucket(forHashValue: hashValue) + while unsafe hashTable._isOccupied(bucket) { + if unsafe uncheckedKey(at: bucket) == key { + return unsafe (bucket, true) } - bucket = hashTable.bucket(wrappedAfter: bucket) + unsafe bucket = unsafe hashTable.bucket(wrappedAfter: bucket) } - return (bucket, false) + return unsafe (bucket, false) } } -@usableFromInline +@unsafe @usableFromInline final internal class _DictionaryStorage - : __RawDictionaryStorage, _NSDictionaryCore { + : __RawDictionaryStorage, @unsafe _NSDictionaryCore { // This type is made with allocWithTailElems, so no init is ever called. // But we still need to have an init to satisfy the compiler. 
@nonobjc @@ -262,28 +264,28 @@ final internal class _DictionaryStorage } deinit { - guard _count > 0 else { return } + guard unsafe _count > 0 else { return } if !_isPOD(Key.self) { - let keys = self._keys - for bucket in _hashTable { - (keys + bucket.offset).deinitialize(count: 1) + let keys = unsafe self._keys + for unsafe bucket in unsafe _hashTable { + unsafe (keys + bucket.offset).deinitialize(count: 1) } } if !_isPOD(Value.self) { - let values = self._values - for bucket in _hashTable { - (values + bucket.offset).deinitialize(count: 1) + let values = unsafe self._values + for unsafe bucket in unsafe _hashTable { + unsafe (values + bucket.offset).deinitialize(count: 1) } } - _count = 0 - _fixLifetime(self) + unsafe _count = 0 + unsafe _fixLifetime(self) } @inlinable final internal var _keys: UnsafeMutablePointer { @inline(__always) get { - return self._rawKeys.assumingMemoryBound(to: Key.self) + return unsafe self._rawKeys.assumingMemoryBound(to: Key.self) } } @@ -291,7 +293,7 @@ final internal class _DictionaryStorage final internal var _values: UnsafeMutablePointer { @inline(__always) get { - return self._rawValues.assumingMemoryBound(to: Value.self) + return unsafe self._rawValues.assumingMemoryBound(to: Value.self) } } @@ -311,17 +313,17 @@ final internal class _DictionaryStorage @objc(copyWithZone:) internal func copy(with zone: _SwiftNSZone?) -> AnyObject { - return self + return unsafe self } @objc internal var count: Int { - return _count + return unsafe _count } @objc(keyEnumerator) internal func keyEnumerator() -> _NSEnumerator { - return _SwiftDictionaryNSEnumerator(asNative) + return unsafe _SwiftDictionaryNSEnumerator(asNative) } @objc(countByEnumeratingWithState:objects:count:) @@ -329,41 +331,41 @@ final internal class _DictionaryStorage with state: UnsafeMutablePointer<_SwiftNSFastEnumerationState>, objects: UnsafeMutablePointer?, count: Int ) -> Int { - defer { _fixLifetime(self) } - let hashTable = _hashTable - - var theState = state.pointee - if theState.state == 0 { - theState.state = 1 // Arbitrary non-zero value. - theState.itemsPtr = AutoreleasingUnsafeMutablePointer(objects) - theState.mutationsPtr = _fastEnumerationStorageMutationsPtr - theState.extra.0 = CUnsignedLong(hashTable.startBucket.offset) + defer { unsafe _fixLifetime(self) } + let hashTable = unsafe _hashTable + + var theState = unsafe state.pointee + if unsafe theState.state == 0 { + unsafe theState.state = 1 // Arbitrary non-zero value. + unsafe theState.itemsPtr = AutoreleasingUnsafeMutablePointer(objects) + unsafe theState.mutationsPtr = _fastEnumerationStorageMutationsPtr + unsafe theState.extra.0 = CUnsignedLong(hashTable.startBucket.offset) } // Test 'objects' rather than 'count' because (a) this is very rare anyway, // and (b) the optimizer should then be able to optimize away the // unwrapping check below. - if _slowPath(objects == nil) { + if unsafe _slowPath(objects == nil) { return 0 } - let unmanagedObjects = _UnmanagedAnyObjectArray(objects!) - var bucket = _HashTable.Bucket(offset: Int(theState.extra.0)) - let endBucket = hashTable.endBucket - _precondition(bucket == endBucket || hashTable.isOccupied(bucket), + let unmanagedObjects = unsafe _UnmanagedAnyObjectArray(objects!) + var bucket = unsafe _HashTable.Bucket(offset: Int(theState.extra.0)) + let endBucket = unsafe hashTable.endBucket + unsafe _precondition(bucket == endBucket || hashTable.isOccupied(bucket), "Invalid fast enumeration state") var stored = 0 for i in 0.. 
guard let nativeKey = _conditionallyBridgeFromObjectiveC(aKey, Key.self) else { return nil } - let (bucket, found) = asNative.find(nativeKey) + let (bucket, found) = unsafe asNative.find(nativeKey) guard found else { return nil } - let value = asNative.uncheckedValue(at: bucket) + let value = unsafe asNative.uncheckedValue(at: bucket) return _bridgeAnythingToObjectiveC(value) } @@ -386,23 +388,23 @@ final internal class _DictionaryStorage _precondition(count >= 0, "Invalid count") guard count > 0 else { return } var i = 0 // Current position in the output buffers - switch (_UnmanagedAnyObjectArray(keys), _UnmanagedAnyObjectArray(objects)) { + switch unsafe (_UnmanagedAnyObjectArray(keys), _UnmanagedAnyObjectArray(objects)) { case (let unmanagedKeys?, let unmanagedObjects?): - for (key, value) in asNative { - unmanagedObjects[i] = _bridgeAnythingToObjectiveC(value) - unmanagedKeys[i] = _bridgeAnythingToObjectiveC(key) + for (key, value) in unsafe asNative { + unsafe unmanagedObjects[i] = _bridgeAnythingToObjectiveC(value) + unsafe unmanagedKeys[i] = _bridgeAnythingToObjectiveC(key) i += 1 guard i < count else { break } } case (let unmanagedKeys?, nil): - for (key, _) in asNative { - unmanagedKeys[i] = _bridgeAnythingToObjectiveC(key) + for (key, _) in unsafe asNative { + unsafe unmanagedKeys[i] = _bridgeAnythingToObjectiveC(key) i += 1 guard i < count else { break } } case (nil, let unmanagedObjects?): - for (_, value) in asNative { - unmanagedObjects[i] = _bridgeAnythingToObjectiveC(value) + for (_, value) in unsafe asNative { + unsafe unmanagedObjects[i] = _bridgeAnythingToObjectiveC(value) i += 1 guard i < count else { break } } @@ -420,7 +422,7 @@ extension _DictionaryStorage { internal static func copy( original: __RawDictionaryStorage ) -> _DictionaryStorage { - return allocate( + return unsafe allocate( scale: original._scale, age: original._age, seed: original._seed) @@ -433,15 +435,15 @@ extension _DictionaryStorage { capacity: Int, move: Bool ) -> _DictionaryStorage { - let scale = _HashTable.scale(forCapacity: capacity) - return allocate(scale: scale, age: nil, seed: nil) + let scale = unsafe _HashTable.scale(forCapacity: capacity) + return unsafe allocate(scale: scale, age: nil, seed: nil) } @usableFromInline @_effects(releasenone) static internal func allocate(capacity: Int) -> _DictionaryStorage { - let scale = _HashTable.scale(forCapacity: capacity) - return allocate(scale: scale, age: nil, seed: nil) + let scale = unsafe _HashTable.scale(forCapacity: capacity) + return unsafe allocate(scale: scale, age: nil, seed: nil) } #if _runtime(_ObjC) @@ -451,9 +453,9 @@ extension _DictionaryStorage { _ cocoa: __CocoaDictionary, capacity: Int ) -> _DictionaryStorage { - let scale = _HashTable.scale(forCapacity: capacity) - let age = _HashTable.age(for: cocoa.object) - return allocate(scale: scale, age: age, seed: nil) + let scale = unsafe _HashTable.scale(forCapacity: capacity) + let age = unsafe _HashTable.age(for: cocoa.object) + return unsafe allocate(scale: scale, age: age, seed: nil) } #endif @@ -467,41 +469,41 @@ extension _DictionaryStorage { _internalInvariant(scale >= 0 && scale < Int.bitWidth - 1) let bucketCount = (1 as Int) &<< scale - let wordCount = _UnsafeBitset.wordCount(forCapacity: bucketCount) - let storage = Builtin.allocWithTailElems_3( + let wordCount = unsafe _UnsafeBitset.wordCount(forCapacity: bucketCount) + let storage = unsafe Builtin.allocWithTailElems_3( _DictionaryStorage.self, wordCount._builtinWordValue, _HashTable.Word.self, 
bucketCount._builtinWordValue, Key.self, bucketCount._builtinWordValue, Value.self) - let metadataAddr = Builtin.projectTailElems(storage, _HashTable.Word.self) - let keysAddr = Builtin.getTailAddr_Word( + let metadataAddr = unsafe Builtin.projectTailElems(storage, _HashTable.Word.self) + let keysAddr = unsafe Builtin.getTailAddr_Word( metadataAddr, wordCount._builtinWordValue, _HashTable.Word.self, Key.self) let valuesAddr = Builtin.getTailAddr_Word( keysAddr, bucketCount._builtinWordValue, Key.self, Value.self) - storage._count = 0 - storage._capacity = _HashTable.capacity(forScale: scale) - storage._scale = scale - storage._reservedScale = 0 - storage._extra = 0 + unsafe storage._count = 0 + unsafe storage._capacity = unsafe _HashTable.capacity(forScale: scale) + unsafe storage._scale = scale + unsafe storage._reservedScale = 0 + unsafe storage._extra = 0 if let age = age { - storage._age = age + unsafe storage._age = age } else { // The default mutation count is simply a scrambled version of the storage // address. - storage._age = Int32( + unsafe storage._age = Int32( truncatingIfNeeded: ObjectIdentifier(storage).hashValue) } - storage._seed = seed ?? _HashTable.hashSeed(for: Builtin.castToNativeObject(storage), scale: scale) - storage._rawKeys = UnsafeMutableRawPointer(keysAddr) - storage._rawValues = UnsafeMutableRawPointer(valuesAddr) + unsafe storage._seed = unsafe seed ?? _HashTable.hashSeed(for: Builtin.castToNativeObject(storage), scale: scale) + unsafe storage._rawKeys = UnsafeMutableRawPointer(keysAddr) + unsafe storage._rawValues = UnsafeMutableRawPointer(valuesAddr) // Initialize hash table metadata. - storage._hashTable.clear() - return storage + unsafe storage._hashTable.clear() + return unsafe storage } } diff --git a/stdlib/public/core/DictionaryVariant.swift b/stdlib/public/core/DictionaryVariant.swift index 345029a35d870..7fd4bdb4db32f 100644 --- a/stdlib/public/core/DictionaryVariant.swift +++ b/stdlib/public/core/DictionaryVariant.swift @@ -33,6 +33,7 @@ internal protocol _DictionaryBuffer { extension Dictionary { @usableFromInline @frozen + @safe internal struct _Variant { @usableFromInline internal var object: _BridgeStorage<__RawDictionaryStorage> @@ -40,14 +41,14 @@ extension Dictionary { @inlinable @inline(__always) init(native: __owned _NativeDictionary) { - self.object = _BridgeStorage(native: native._storage) + self.object = unsafe _BridgeStorage(native: native._storage) } @inlinable @inline(__always) init(dummy: Void) { #if _pointerBitWidth(_64) && !$Embedded - self.object = _BridgeStorage(taggedPayload: 0) + self.object = unsafe _BridgeStorage(taggedPayload: 0) #elseif _pointerBitWidth(_32) || $Embedded self.init(native: _NativeDictionary()) #else @@ -59,7 +60,7 @@ extension Dictionary { @inlinable @inline(__always) init(cocoa: __owned __CocoaDictionary) { - self.object = _BridgeStorage(objC: cocoa.object) + self.object = unsafe _BridgeStorage(objC: cocoa.object) } #endif } @@ -75,7 +76,7 @@ extension Dictionary._Variant { @inlinable internal mutating func isUniquelyReferenced() -> Bool { - return object.isUniquelyReferencedUnflaggedNative() + return unsafe object.isUniquelyReferencedUnflaggedNative() } #if _runtime(_ObjC) @@ -89,15 +90,15 @@ extension Dictionary._Variant { @usableFromInline @_transparent internal var asNative: _NativeDictionary { get { - return _NativeDictionary(object.unflaggedNativeInstance) + return unsafe _NativeDictionary(object.unflaggedNativeInstance) } set { self = .init(native: newValue) } _modify { - var native = 
_NativeDictionary(object.unflaggedNativeInstance) + var native = unsafe _NativeDictionary(object.unflaggedNativeInstance) self = .init(dummy: ()) - defer { object = .init(native: native._storage) } + defer { object = unsafe .init(native: native._storage) } yield &native } } @@ -105,7 +106,7 @@ extension Dictionary._Variant { #if _runtime(_ObjC) @inlinable internal var asCocoa: __CocoaDictionary { - return __CocoaDictionary(object.objCInstance) + return unsafe __CocoaDictionary(object.objCInstance) } #endif @@ -323,7 +324,7 @@ extension Dictionary._Variant { cocoa, capacity: cocoa.count + 1) let result = native.mutatingFind(key, isUnique: true) self = .init(native: native) - return result + return unsafe result } #endif let isUnique = isUniquelyReferenced() diff --git a/stdlib/public/core/Diffing.swift b/stdlib/public/core/Diffing.swift index ecaa93d22a3a2..ef080c05654f7 100644 --- a/stdlib/public/core/Diffing.swift +++ b/stdlib/public/core/Diffing.swift @@ -275,7 +275,7 @@ private func _myers( y = x &- k while x < n && y < m { - if !cmp(a[x], b[y]) { + if unsafe !cmp(a[x], b[y]) { break; } x &+= 1 @@ -324,9 +324,9 @@ private func _myers( _internalInvariant((x == prev_x && y > prev_y) || (y == prev_y && x > prev_x)) if y != prev_y { - changes.append(.insert(offset: prev_y, element: b[prev_y], associatedWith: nil)) + unsafe changes.append(.insert(offset: prev_y, element: b[prev_y], associatedWith: nil)) } else { - changes.append(.remove(offset: prev_x, element: a[prev_x], associatedWith: nil)) + unsafe changes.append(.remove(offset: prev_x, element: a[prev_x], associatedWith: nil)) } x = prev_x @@ -358,9 +358,9 @@ private func _myers( return try array.withUnsafeBufferPointer(body) } - return _withContiguousStorage(for: old) { a in - return _withContiguousStorage(for: new) { b in - return CollectionDifference(_formChanges(from: a, to: b, using:_descent(from: a, to: b)))! + return unsafe _withContiguousStorage(for: old) { a in + return unsafe _withContiguousStorage(for: new) { b in + return unsafe CollectionDifference(_formChanges(from: a, to: b, using:_descent(from: a, to: b)))! 
} } } diff --git a/stdlib/public/core/EmbeddedPrint.swift b/stdlib/public/core/EmbeddedPrint.swift index d1163d264b94b..071d67a958d47 100644 --- a/stdlib/public/core/EmbeddedPrint.swift +++ b/stdlib/public/core/EmbeddedPrint.swift @@ -21,15 +21,15 @@ import SwiftShims func putchar(_: CInt) -> CInt public func print(_ string: StaticString, terminator: StaticString = "\n") { - var p = string.utf8Start - while p.pointee != 0 { - putchar(CInt(p.pointee)) - p += 1 + var p = unsafe string.utf8Start + while unsafe p.pointee != 0 { + putchar(CInt(unsafe p.pointee)) + unsafe p += 1 } - p = terminator.utf8Start - while p.pointee != 0 { - putchar(CInt(p.pointee)) - p += 1 + unsafe p = terminator.utf8Start + while unsafe p.pointee != 0 { + putchar(CInt(unsafe p.pointee)) + unsafe p += 1 } } @@ -37,14 +37,14 @@ public func print(_ string: StaticString, terminator: StaticString = "\n") { public func print(_ string: String, terminator: StaticString = "\n") { var string = string string.withUTF8 { buf in - for c in buf { + for unsafe c in unsafe buf { putchar(CInt(c)) } } - var p = terminator.utf8Start - while p.pointee != 0 { - putchar(CInt(p.pointee)) - p += 1 + var p = unsafe terminator.utf8Start + while unsafe p.pointee != 0 { + putchar(CInt(unsafe p.pointee)) + unsafe p += 1 } } @@ -52,25 +52,25 @@ public func print(_ string: String, terminator: StaticString = "\n") { public func print(_ object: some CustomStringConvertible, terminator: StaticString = "\n") { var string = object.description string.withUTF8 { buf in - for c in buf { + for unsafe c in unsafe buf { putchar(CInt(c)) } } - var p = terminator.utf8Start - while p.pointee != 0 { - putchar(CInt(p.pointee)) - p += 1 + var p = unsafe terminator.utf8Start + while unsafe p.pointee != 0 { + putchar(CInt(unsafe p.pointee)) + unsafe p += 1 } } func printCharacters(_ buf: UnsafeRawBufferPointer) { - for c in buf { + for unsafe c in unsafe buf { putchar(CInt(c)) } } func printCharacters(_ buf: UnsafeBufferPointer) { - printCharacters(UnsafeRawBufferPointer(buf)) + unsafe printCharacters(UnsafeRawBufferPointer(buf)) } extension BinaryInteger { @@ -81,7 +81,7 @@ extension BinaryInteger { _ uppercase: Bool ) -> Int { if self == (0 as Self) { - buffer[0] = UInt8(("0" as Unicode.Scalar).value) + unsafe buffer[0] = UInt8(("0" as Unicode.Scalar).value) return 1 } @@ -98,21 +98,21 @@ extension BinaryInteger { var index = Int(bufferLength - 1) while value != 0 { let (quotient, remainder) = value.quotientAndRemainder(dividingBy: Magnitude(radix)) - buffer[index] = _ascii(UInt8(truncatingIfNeeded: remainder)) + unsafe buffer[index] = _ascii(UInt8(truncatingIfNeeded: remainder)) index -= 1 value = quotient } if isNegative { - buffer[index] = UInt8(("-" as Unicode.Scalar).value) + unsafe buffer[index] = UInt8(("-" as Unicode.Scalar).value) index -= 1 } let start = index + 1 let end = Int(bufferLength - 1) let count = end - start + 1 - let intermediate = UnsafeBufferPointer(start: buffer.advanced(by: start), count: count) - let destination = UnsafeMutableRawBufferPointer(start: buffer, count: Int(bufferLength)) - destination.copyMemory(from: UnsafeRawBufferPointer(intermediate)) + let intermediate = unsafe UnsafeBufferPointer(start: buffer.advanced(by: start), count: count) + let destination = unsafe UnsafeMutableRawBufferPointer(start: buffer, count: Int(bufferLength)) + unsafe destination.copyMemory(from: UnsafeRawBufferPointer(intermediate)) return count } @@ -122,12 +122,12 @@ extension BinaryInteger { let byteCount = 64 let stackBuffer = 
Builtin.stackAlloc(byteCount._builtinWordValue, 1._builtinWordValue, 1._builtinWordValue) - let buffer = UnsafeMutableRawBufferPointer(start: .init(stackBuffer), + let buffer = unsafe UnsafeMutableRawBufferPointer(start: .init(stackBuffer), count: byteCount).baseAddress!.assumingMemoryBound(to: UInt8.self) - let count = _toStringImpl(buffer, 64, 10, false) + let count = unsafe _toStringImpl(buffer, 64, 10, false) - printCharacters(UnsafeBufferPointer(start: buffer, count: count)) + unsafe printCharacters(UnsafeBufferPointer(start: buffer, count: count)) Builtin.stackDealloc(stackBuffer) } diff --git a/stdlib/public/core/EmbeddedRuntime.swift b/stdlib/public/core/EmbeddedRuntime.swift index d80da94e88ec4..2b61e3aeaf31d 100644 --- a/stdlib/public/core/EmbeddedRuntime.swift +++ b/stdlib/public/core/EmbeddedRuntime.swift @@ -14,6 +14,7 @@ import SwiftShims /// Class object and class metadata structures +@unsafe public struct ClassMetadata { var superclassMetadata: UnsafeMutablePointer? @@ -88,7 +89,7 @@ public struct ClassMetadata { │ ║ ║ or stack-allocated instance during deinit() │ └───────────╨──────────╨─────────────────────────────────────────────────┘ */ - +@unsafe public struct HeapObject { // There is no way to express the custom ptrauth signature on the metadata // field, so let's use UnsafeRawPointer and a helper function in C instead @@ -142,10 +143,10 @@ func free(_ p: UnsafeMutableRawPointer?) /// Allocations func alignedAlloc(size: Int, alignment: Int) -> UnsafeMutableRawPointer? { - let alignment = max(alignment, MemoryLayout.size) + let alignment = max(alignment, unsafe MemoryLayout.size) var r: UnsafeMutableRawPointer? = nil - _ = posix_memalign(&r, alignment, size) - return r + _ = unsafe posix_memalign(&r, alignment, size) + return unsafe r } @_cdecl("swift_slowAlloc") @@ -156,83 +157,83 @@ public func swift_slowAlloc(_ size: Int, _ alignMask: Int) -> UnsafeMutableRawPo } else { alignment = alignMask + 1 } - return alignedAlloc(size: size, alignment: alignment) + return unsafe alignedAlloc(size: size, alignment: alignment) } @_cdecl("swift_slowDealloc") public func swift_slowDealloc(_ ptr: UnsafeMutableRawPointer, _ size: Int, _ alignMask: Int) { - free(ptr) + unsafe free(ptr) } @_cdecl("swift_allocObject") public func swift_allocObject(metadata: Builtin.RawPointer, requiredSize: Int, requiredAlignmentMask: Int) -> Builtin.RawPointer { - return swift_allocObject(metadata: UnsafeMutablePointer(metadata), requiredSize: requiredSize, requiredAlignmentMask: requiredAlignmentMask)._rawValue + return unsafe swift_allocObject(metadata: UnsafeMutablePointer(metadata), requiredSize: requiredSize, requiredAlignmentMask: requiredAlignmentMask)._rawValue } func swift_allocObject(metadata: UnsafeMutablePointer, requiredSize: Int, requiredAlignmentMask: Int) -> UnsafeMutablePointer { - let p = swift_slowAlloc(requiredSize, requiredAlignmentMask)! - let object = p.assumingMemoryBound(to: HeapObject.self) - _swift_embedded_set_heap_object_metadata_pointer(object, metadata) - object.pointee.refcount = 1 - return object + let p = unsafe swift_slowAlloc(requiredSize, requiredAlignmentMask)! 
+ let object = unsafe p.assumingMemoryBound(to: HeapObject.self) + unsafe _swift_embedded_set_heap_object_metadata_pointer(object, metadata) + unsafe object.pointee.refcount = 1 + return unsafe object } @_cdecl("swift_deallocObject") public func swift_deallocObject(object: Builtin.RawPointer, allocatedSize: Int, allocatedAlignMask: Int) { - swift_deallocObject(object: UnsafeMutablePointer(object), allocatedSize: allocatedSize, allocatedAlignMask: allocatedAlignMask) + unsafe swift_deallocObject(object: UnsafeMutablePointer(object), allocatedSize: allocatedSize, allocatedAlignMask: allocatedAlignMask) } func swift_deallocObject(object: UnsafeMutablePointer, allocatedSize: Int, allocatedAlignMask: Int) { - free(UnsafeMutableRawPointer(object)) + unsafe free(UnsafeMutableRawPointer(object)) } @_cdecl("swift_deallocClassInstance") public func swift_deallocClassInstance(object: Builtin.RawPointer, allocatedSize: Int, allocatedAlignMask: Int) { - swift_deallocClassInstance(object: UnsafeMutablePointer(object), allocatedSize: allocatedSize, allocatedAlignMask: allocatedAlignMask) + unsafe swift_deallocClassInstance(object: UnsafeMutablePointer(object), allocatedSize: allocatedSize, allocatedAlignMask: allocatedAlignMask) } func swift_deallocClassInstance(object: UnsafeMutablePointer, allocatedSize: Int, allocatedAlignMask: Int) { - if (object.pointee.refcount & HeapObject.doNotFreeBit) != 0 { + if (unsafe object.pointee.refcount & HeapObject.doNotFreeBit) != 0 { return } - free(UnsafeMutableRawPointer(object)) + unsafe free(UnsafeMutableRawPointer(object)) } @_cdecl("swift_deallocPartialClassInstance") public func swift_deallocPartialClassInstance(object: Builtin.RawPointer, metadata: Builtin.RawPointer, allocatedSize: Int, allocatedAlignMask: Int) { - swift_deallocPartialClassInstance(object: UnsafeMutablePointer(object), metadata: UnsafeMutablePointer(metadata), allocatedSize: allocatedSize, allocatedAlignMask: allocatedAlignMask) + unsafe swift_deallocPartialClassInstance(object: UnsafeMutablePointer(object), metadata: UnsafeMutablePointer(metadata), allocatedSize: allocatedSize, allocatedAlignMask: allocatedAlignMask) } func swift_deallocPartialClassInstance(object: UnsafeMutablePointer, metadata: UnsafeMutablePointer, allocatedSize: Int, allocatedAlignMask: Int) { - var classMetadata = _swift_embedded_get_heap_object_metadata_pointer(object).assumingMemoryBound(to: ClassMetadata.self) - while classMetadata != metadata { - _swift_embedded_invoke_heap_object_optional_ivardestroyer(object, classMetadata) - guard let superclassMetadata = classMetadata.pointee.superclassMetadata else { break } - classMetadata = superclassMetadata + var classMetadata = unsafe _swift_embedded_get_heap_object_metadata_pointer(object).assumingMemoryBound(to: ClassMetadata.self) + while unsafe classMetadata != metadata { + unsafe _swift_embedded_invoke_heap_object_optional_ivardestroyer(object, classMetadata) + guard let superclassMetadata = unsafe classMetadata.pointee.superclassMetadata else { break } + unsafe classMetadata = superclassMetadata } } @_cdecl("swift_initStaticObject") public func swift_initStaticObject(metadata: Builtin.RawPointer, object: Builtin.RawPointer) -> Builtin.RawPointer { - return swift_initStaticObject(metadata: UnsafeMutablePointer(metadata), object: UnsafeMutablePointer(object))._rawValue + return unsafe swift_initStaticObject(metadata: UnsafeMutablePointer(metadata), object: UnsafeMutablePointer(object))._rawValue } func swift_initStaticObject(metadata: UnsafeMutablePointer, object: 
UnsafeMutablePointer) -> UnsafeMutablePointer { - _swift_embedded_set_heap_object_metadata_pointer(object, metadata) - object.pointee.refcount = HeapObject.immortalRefCount | HeapObject.doNotFreeBit - return object + unsafe _swift_embedded_set_heap_object_metadata_pointer(object, metadata) + unsafe object.pointee.refcount = HeapObject.immortalRefCount | HeapObject.doNotFreeBit + return unsafe object } @_cdecl("swift_initStackObject") public func swift_initStackObject(metadata: Builtin.RawPointer, object: Builtin.RawPointer) -> Builtin.RawPointer { - return swift_initStackObject(metadata: UnsafeMutablePointer(metadata), object: UnsafeMutablePointer(object))._rawValue + return unsafe swift_initStackObject(metadata: UnsafeMutablePointer(metadata), object: UnsafeMutablePointer(object))._rawValue } func swift_initStackObject(metadata: UnsafeMutablePointer, object: UnsafeMutablePointer) -> UnsafeMutablePointer { - _swift_embedded_set_heap_object_metadata_pointer(object, metadata) - object.pointee.refcount = 1 | HeapObject.doNotFreeBit - return object + unsafe _swift_embedded_set_heap_object_metadata_pointer(object, metadata) + unsafe object.pointee.refcount = 1 | HeapObject.doNotFreeBit + return unsafe object } @@ -244,7 +245,7 @@ func isValidPointerForNativeRetain(object: Builtin.RawPointer) -> Bool { if objectBits == 0 { return false } #if _pointerBitWidth(_64) - if (objectBits & HeapObject.immortalObjectPointerBit) != 0 { return false } + if unsafe (objectBits & HeapObject.immortalObjectPointerBit) != 0 { return false } #endif return true @@ -256,25 +257,25 @@ public func swift_setDeallocating(object: Builtin.RawPointer) { @_cdecl("swift_dynamicCastClass") public func swift_dynamicCastClass(object: UnsafeMutableRawPointer, targetMetadata: UnsafeRawPointer) -> UnsafeMutableRawPointer? 
{ - let sourceObj = object.assumingMemoryBound(to: HeapObject.self) - var type = _swift_embedded_get_heap_object_metadata_pointer(sourceObj).assumingMemoryBound(to: ClassMetadata.self) - let targetType = targetMetadata.assumingMemoryBound(to: ClassMetadata.self) - while type != targetType { - guard let superType = type.pointee.superclassMetadata else { + let sourceObj = unsafe object.assumingMemoryBound(to: HeapObject.self) + var type = unsafe _swift_embedded_get_heap_object_metadata_pointer(sourceObj).assumingMemoryBound(to: ClassMetadata.self) + let targetType = unsafe targetMetadata.assumingMemoryBound(to: ClassMetadata.self) + while unsafe type != targetType { + guard let superType = unsafe type.pointee.superclassMetadata else { return nil } - type = UnsafeMutablePointer(superType) + unsafe type = UnsafeMutablePointer(superType) } - return object + return unsafe object } @_cdecl("swift_dynamicCastClassUnconditional") public func swift_dynamicCastClassUnconditional(object: UnsafeMutableRawPointer, targetMetadata: UnsafeRawPointer, file: UnsafePointer, line: CUnsignedInt, column: CUnsignedInt) -> UnsafeMutableRawPointer { - guard let result = swift_dynamicCastClass(object: object, targetMetadata: targetMetadata) else { + guard let result = unsafe swift_dynamicCastClass(object: object, targetMetadata: targetMetadata) else { fatalError("failed cast") } - return result + return unsafe result } @_cdecl("swift_isEscapingClosureAtFileLocation") @@ -292,25 +293,25 @@ public func swift_isEscapingClosureAtFileLocation(object: Builtin.RawPointer, fi public func swift_isUniquelyReferenced_native(object: Builtin.RawPointer) -> Bool { if !isValidPointerForNativeRetain(object: object) { return false } - return swift_isUniquelyReferenced_nonNull_native(object: UnsafeMutablePointer(object)) + return unsafe swift_isUniquelyReferenced_nonNull_native(object: UnsafeMutablePointer(object)) } @_cdecl("swift_isUniquelyReferenced_nonNull_native") public func swift_isUniquelyReferenced_nonNull_native(object: Builtin.RawPointer) -> Bool { - return swift_isUniquelyReferenced_nonNull_native(object: UnsafeMutablePointer(object)) + return unsafe swift_isUniquelyReferenced_nonNull_native(object: UnsafeMutablePointer(object)) } func swift_isUniquelyReferenced_nonNull_native(object: UnsafeMutablePointer) -> Bool { - let refcount = refcountPointer(for: object) - return loadAcquire(refcount) == 1 + let refcount = unsafe refcountPointer(for: object) + return unsafe loadAcquire(refcount) == 1 } @_cdecl("swift_retain") public func swift_retain(object: Builtin.RawPointer) -> Builtin.RawPointer { if !isValidPointerForNativeRetain(object: object) { return object } - let o = UnsafeMutablePointer(object) - return swift_retain_n_(object: o, n: 1)._rawValue + let o = unsafe UnsafeMutablePointer(object) + return unsafe swift_retain_n_(object: o, n: 1)._rawValue } // Cannot use UnsafeMutablePointer? 
directly in the function argument or return value as it causes IRGen crashes @@ -318,19 +319,19 @@ public func swift_retain(object: Builtin.RawPointer) -> Builtin.RawPointer { public func swift_retain_n(object: Builtin.RawPointer, n: UInt32) -> Builtin.RawPointer { if !isValidPointerForNativeRetain(object: object) { return object } - let o = UnsafeMutablePointer(object) - return swift_retain_n_(object: o, n: n)._rawValue + let o = unsafe UnsafeMutablePointer(object) + return unsafe swift_retain_n_(object: o, n: n)._rawValue } func swift_retain_n_(object: UnsafeMutablePointer, n: UInt32) -> UnsafeMutablePointer { - let refcount = refcountPointer(for: object) - if loadRelaxed(refcount) & HeapObject.refcountMask == HeapObject.immortalRefCount { - return object + let refcount = unsafe refcountPointer(for: object) + if unsafe loadRelaxed(refcount) & HeapObject.refcountMask == HeapObject.immortalRefCount { + return unsafe object } - addRelaxed(refcount, n: Int(n)) + unsafe addRelaxed(refcount, n: Int(n)) - return object + return unsafe object } @_cdecl("swift_bridgeObjectRetain") @@ -341,7 +342,7 @@ public func swift_bridgeObjectRetain(object: Builtin.RawPointer) -> Builtin.RawP @_cdecl("swift_bridgeObjectRetain_n") public func swift_bridgeObjectRetain_n(object: Builtin.RawPointer, n: UInt32) -> Builtin.RawPointer { let objectBits = UInt(Builtin.ptrtoint_Word(object)) - let untaggedObject = Builtin.inttoptr_Word((objectBits & HeapObject.bridgeObjectToPlainObjectMask)._builtinWordValue) + let untaggedObject = unsafe Builtin.inttoptr_Word((objectBits & HeapObject.bridgeObjectToPlainObjectMask)._builtinWordValue) return swift_retain_n(object: untaggedObject, n: n) } @@ -349,30 +350,30 @@ public func swift_bridgeObjectRetain_n(object: Builtin.RawPointer, n: UInt32) -> public func swift_release(object: Builtin.RawPointer) { if !isValidPointerForNativeRetain(object: object) { return } - let o = UnsafeMutablePointer(object) - swift_release_n_(object: o, n: 1) + let o = unsafe UnsafeMutablePointer(object) + unsafe swift_release_n_(object: o, n: 1) } @_cdecl("swift_release_n") public func swift_release_n(object: Builtin.RawPointer, n: UInt32) { if !isValidPointerForNativeRetain(object: object) { return } - let o = UnsafeMutablePointer(object) - swift_release_n_(object: o, n: n) + let o = unsafe UnsafeMutablePointer(object) + unsafe swift_release_n_(object: o, n: n) } func swift_release_n_(object: UnsafeMutablePointer?, n: UInt32) { - guard let object else { + guard let object = unsafe object else { return } - let refcount = refcountPointer(for: object) - let loadedRefcount = loadRelaxed(refcount) - if loadedRefcount & HeapObject.refcountMask == HeapObject.immortalRefCount { + let refcount = unsafe refcountPointer(for: object) + let loadedRefcount = unsafe loadRelaxed(refcount) + if unsafe loadedRefcount & HeapObject.refcountMask == HeapObject.immortalRefCount { return } - let resultingRefcount = subFetchAcquireRelease(refcount, n: Int(n)) & HeapObject.refcountMask + let resultingRefcount = unsafe subFetchAcquireRelease(refcount, n: Int(n)) & HeapObject.refcountMask if resultingRefcount == 0 { // Set the refcount to immortalRefCount before calling the object destroyer // to prevent future retains/releases from having any effect. Unlike the @@ -383,10 +384,10 @@ func swift_release_n_(object: UnsafeMutablePointer?, n: UInt32) { // There can only be one thread with a reference at this point (because // we're releasing the last existing reference), so a relaxed store is // enough. 
- let doNotFree = (loadedRefcount & HeapObject.doNotFreeBit) != 0 - storeRelaxed(refcount, newValue: HeapObject.immortalRefCount | (doNotFree ? HeapObject.doNotFreeBit : 0)) + let doNotFree = unsafe (loadedRefcount & HeapObject.doNotFreeBit) != 0 + unsafe storeRelaxed(refcount, newValue: HeapObject.immortalRefCount | (doNotFree ? HeapObject.doNotFreeBit : 0)) - _swift_embedded_invoke_heap_object_destroy(object) + unsafe _swift_embedded_invoke_heap_object_destroy(object) } else if resultingRefcount < 0 { fatalError("negative refcount") } @@ -400,23 +401,23 @@ public func swift_bridgeObjectRelease(object: Builtin.RawPointer) { @_cdecl("swift_bridgeObjectRelease_n") public func swift_bridgeObjectRelease_n(object: Builtin.RawPointer, n: UInt32) { let objectBits = UInt(Builtin.ptrtoint_Word(object)) - let untaggedObject = Builtin.inttoptr_Word((objectBits & HeapObject.bridgeObjectToPlainObjectMask)._builtinWordValue) + let untaggedObject = unsafe Builtin.inttoptr_Word((objectBits & HeapObject.bridgeObjectToPlainObjectMask)._builtinWordValue) swift_release_n(object: untaggedObject, n: n) } @_cdecl("swift_retainCount") public func swift_retainCount(object: Builtin.RawPointer) -> Int { if !isValidPointerForNativeRetain(object: object) { return 0 } - let o = UnsafeMutablePointer(object) - let refcount = refcountPointer(for: o) - return loadAcquire(refcount) & HeapObject.refcountMask + let o = unsafe UnsafeMutablePointer(object) + let refcount = unsafe refcountPointer(for: o) + return unsafe loadAcquire(refcount) & HeapObject.refcountMask } /// Refcount helpers fileprivate func refcountPointer(for object: UnsafeMutablePointer) -> UnsafeMutablePointer { // TODO: This should use MemoryLayout.offset(to: \.refcount) but we don't have KeyPaths yet - return UnsafeMutablePointer(UnsafeRawPointer(object).advanced(by: MemoryLayout.size)._rawValue) + return unsafe UnsafeMutablePointer(UnsafeRawPointer(object).advanced(by: MemoryLayout.size)._rawValue) } fileprivate func loadRelaxed(_ atomic: UnsafeMutablePointer) -> Int { @@ -468,22 +469,22 @@ public func swift_endAccess(buffer: UnsafeMutableRawPointer) { @_cdecl("swift_once") public func swift_once(predicate: UnsafeMutablePointer, fn: (@convention(c) (UnsafeMutableRawPointer)->()), context: UnsafeMutableRawPointer) { let checkedLoadAcquire = { predicate in - let value = loadAcquire(predicate) + let value = unsafe loadAcquire(predicate) assert(value == -1 || value == 0 || value == 1) return value } - if checkedLoadAcquire(predicate) < 0 { return } + if unsafe checkedLoadAcquire(predicate) < 0 { return } - let won = compareExchangeRelaxed(predicate, expectedOldValue: 0, desiredNewValue: 1) + let won = unsafe compareExchangeRelaxed(predicate, expectedOldValue: 0, desiredNewValue: 1) if won { - fn(context) - storeRelease(predicate, newValue: -1) + unsafe fn(context) + unsafe storeRelease(predicate, newValue: -1) return } // TODO: This should really use an OS provided lock - while checkedLoadAcquire(predicate) >= 0 { + while unsafe checkedLoadAcquire(predicate) >= 0 { // spin } } @@ -510,7 +511,7 @@ public func _willThrowTyped(_ error: E) { func arc4random_buf(buf: UnsafeMutableRawPointer, nbytes: Int) public func swift_stdlib_random(_ buf: UnsafeMutableRawPointer, _ nbytes: Int) { - arc4random_buf(buf: buf, nbytes: nbytes) + unsafe arc4random_buf(buf: buf, nbytes: nbytes) } @_cdecl("swift_clearSensitive") @@ -519,9 +520,9 @@ public func swift_clearSensitive(buf: UnsafeMutableRawPointer, nbytes: Int) { // TODO: use memset_s if available // Though, it shouldn't 
make too much difference because the `@inline(never)` should prevent // the optimizer from removing the loop below. - let bytePtr = buf.assumingMemoryBound(to: UInt8.self) + let bytePtr = unsafe buf.assumingMemoryBound(to: UInt8.self) for i in 0..( _ lhs: UnsafePointer, _ rhs: UnsafePointer ) -> Bool { - return lhs.pointee == rhs.pointee + return unsafe lhs.pointee == rhs.pointee } diff --git a/stdlib/public/core/ErrorType.swift b/stdlib/public/core/ErrorType.swift index 8f2dde03735f3..c6d76b72b0f07 100644 --- a/stdlib/public/core/ErrorType.swift +++ b/stdlib/public/core/ErrorType.swift @@ -139,25 +139,25 @@ extension Error { @_silgen_name("") internal func _getErrorDomainNSString(_ x: UnsafePointer) -> AnyObject { - return x.pointee._domain._bridgeToObjectiveCImpl() + return unsafe x.pointee._domain._bridgeToObjectiveCImpl() } @_silgen_name("") internal func _getErrorCode(_ x: UnsafePointer) -> Int { - return x.pointee._code + return unsafe x.pointee._code } @_silgen_name("") internal func _getErrorUserInfoNSDictionary(_ x: UnsafePointer) -> AnyObject? { - return x.pointee._userInfo.map { $0 } + return unsafe x.pointee._userInfo.map { $0 } } // Called by the casting machinery to extract an NSError from an Error value. @_silgen_name("") internal func _getErrorEmbeddedNSErrorIndirect( _ x: UnsafePointer) -> AnyObject? { - return x.pointee._getEmbeddedNSError() + return unsafe x.pointee._getEmbeddedNSError() } /// Called by compiler-generated code to extract an NSError from an Error value. diff --git a/stdlib/public/core/ExistentialCollection.swift b/stdlib/public/core/ExistentialCollection.swift index b3c0a8496e5bb..13d412b8704b4 100644 --- a/stdlib/public/core/ExistentialCollection.swift +++ b/stdlib/public/core/ExistentialCollection.swift @@ -566,7 +566,7 @@ internal final class _SequenceBox: _AnySequenceBox { internal override func __copyContents( initializing buf: UnsafeMutableBufferPointer ) -> (AnyIterator,UnsafeMutableBufferPointer.Index) { - let (it,idx) = _base._copyContents(initializing: buf) + let (it,idx) = unsafe _base._copyContents(initializing: buf) return (AnyIterator(it),idx) } @@ -659,7 +659,7 @@ internal final class _CollectionBox: _AnyCollectionBox internal override func __copyContents( initializing buf: UnsafeMutableBufferPointer ) -> (AnyIterator,UnsafeMutableBufferPointer.Index) { - let (it,idx) = _base._copyContents(initializing: buf) + let (it,idx) = unsafe _base._copyContents(initializing: buf) return (AnyIterator(it),idx) } @@ -854,7 +854,7 @@ internal final class _BidirectionalCollectionBox internal override func __copyContents( initializing buf: UnsafeMutableBufferPointer ) -> (AnyIterator,UnsafeMutableBufferPointer.Index) { - let (it,idx) = _base._copyContents(initializing: buf) + let (it,idx) = unsafe _base._copyContents(initializing: buf) return (AnyIterator(it),idx) } @@ -1067,7 +1067,7 @@ internal final class _RandomAccessCollectionBox internal override func __copyContents( initializing buf: UnsafeMutableBufferPointer ) -> (AnyIterator,UnsafeMutableBufferPointer.Index) { - let (it,idx) = _base._copyContents(initializing: buf) + let (it,idx) = unsafe _base._copyContents(initializing: buf) return (AnyIterator(it),idx) } @@ -1393,7 +1393,7 @@ extension AnySequence { public __consuming func _copyContents( initializing buf: UnsafeMutableBufferPointer ) -> (AnyIterator,UnsafeMutableBufferPointer.Index) { - let (it,idx) = _box.__copyContents(initializing: buf) + let (it,idx) = unsafe _box.__copyContents(initializing: buf) return (AnyIterator(it),idx) } } @@ 
-1499,7 +1499,7 @@ extension AnyCollection { public __consuming func _copyContents( initializing buf: UnsafeMutableBufferPointer ) -> (AnyIterator,UnsafeMutableBufferPointer.Index) { - let (it,idx) = _box.__copyContents(initializing: buf) + let (it,idx) = unsafe _box.__copyContents(initializing: buf) return (AnyIterator(it),idx) } } @@ -1611,7 +1611,7 @@ extension AnyBidirectionalCollection { public __consuming func _copyContents( initializing buf: UnsafeMutableBufferPointer ) -> (AnyIterator,UnsafeMutableBufferPointer.Index) { - let (it,idx) = _box.__copyContents(initializing: buf) + let (it,idx) = unsafe _box.__copyContents(initializing: buf) return (AnyIterator(it),idx) } } @@ -1723,7 +1723,7 @@ extension AnyRandomAccessCollection { public __consuming func _copyContents( initializing buf: UnsafeMutableBufferPointer ) -> (AnyIterator,UnsafeMutableBufferPointer.Index) { - let (it,idx) = _box.__copyContents(initializing: buf) + let (it,idx) = unsafe _box.__copyContents(initializing: buf) return (AnyIterator(it),idx) } } @@ -1755,7 +1755,7 @@ internal final class _IndexBox: _AnyIndexBox { @inlinable internal func _unsafeUnbox(_ other: _AnyIndexBox) -> BaseIndex { - return unsafeDowncast(other, to: _IndexBox.self)._base + return unsafe unsafeDowncast(other, to: _IndexBox.self)._base } @inlinable diff --git a/stdlib/public/core/FloatingPointParsing.swift.gyb b/stdlib/public/core/FloatingPointParsing.swift.gyb index 0656976c5acab..a7d5ddef667ce 100644 --- a/stdlib/public/core/FloatingPointParsing.swift.gyb +++ b/stdlib/public/core/FloatingPointParsing.swift.gyb @@ -169,9 +169,9 @@ extension ${Self}: LosslessStringConvertible { self.init(Substring(text)) } else { self = 0.0 - let success = _withUnprotectedUnsafeMutablePointer(to: &self) { p -> Bool in - text.withCString { chars -> Bool in - switch chars[0] { + let success = unsafe _withUnprotectedUnsafeMutablePointer(to: &self) { p -> Bool in + unsafe text.withCString { chars -> Bool in + switch unsafe chars[0] { case 9, 10, 11, 12, 13, 32: return false // Reject leading whitespace case 0: @@ -179,9 +179,9 @@ extension ${Self}: LosslessStringConvertible { default: break } - let endPtr = _swift_stdlib_strto${cFuncSuffix2[bits]}_clocale(chars, p) + let endPtr = unsafe _swift_stdlib_strto${cFuncSuffix2[bits]}_clocale(chars, p) // Succeed only if endPtr points to end of C string - return endPtr != nil && endPtr![0] == 0 + return unsafe endPtr != nil && endPtr![0] == 0 } } if !success { @@ -198,9 +198,9 @@ extension ${Self}: LosslessStringConvertible { @available(SwiftStdlib 5.3, *) public init?(_ text: Substring) { self = 0.0 - let success = _withUnprotectedUnsafeMutablePointer(to: &self) { p -> Bool in - text.withCString { chars -> Bool in - switch chars[0] { + let success = unsafe _withUnprotectedUnsafeMutablePointer(to: &self) { p -> Bool in + unsafe text.withCString { chars -> Bool in + switch unsafe chars[0] { case 9, 10, 11, 12, 13, 32: return false // Reject leading whitespace case 0: @@ -208,9 +208,9 @@ extension ${Self}: LosslessStringConvertible { default: break } - let endPtr = _swift_stdlib_strto${cFuncSuffix2[bits]}_clocale(chars, p) + let endPtr = unsafe _swift_stdlib_strto${cFuncSuffix2[bits]}_clocale(chars, p) // Succeed only if endPtr points to end of C string - return endPtr != nil && endPtr![0] == 0 + return unsafe endPtr != nil && endPtr![0] == 0 } } if !success { diff --git a/stdlib/public/core/FloatingPointTypes.swift.gyb b/stdlib/public/core/FloatingPointTypes.swift.gyb index bede3f8bc7729..607324a6a0dd8 100644 --- 
a/stdlib/public/core/FloatingPointTypes.swift.gyb +++ b/stdlib/public/core/FloatingPointTypes.swift.gyb @@ -105,8 +105,8 @@ extension ${Self}: CustomStringConvertible { return "nan" } else { var (buffer, length) = _float${bits}ToString(self, debug: false) - return buffer.withBytes { (bufferPtr) in - String._fromASCII( + return unsafe buffer.withBytes { (bufferPtr) in + unsafe String._fromASCII( UnsafeBufferPointer(start: bufferPtr, count: length)) } } @@ -121,8 +121,8 @@ extension ${Self}: CustomDebugStringConvertible { /// that NaN values are printed in an extended format. public var debugDescription: String { var (buffer, length) = _float${bits}ToString(self, debug: true) - return buffer.withBytes { (bufferPtr) in - String._fromASCII( + return unsafe buffer.withBytes { (bufferPtr) in + unsafe String._fromASCII( UnsafeBufferPointer(start: bufferPtr, count: length)) } } @@ -132,9 +132,9 @@ ${Availability(bits)} extension ${Self}: TextOutputStreamable { public func write(to target: inout Target) where Target: TextOutputStream { var (buffer, length) = _float${bits}ToString(self, debug: true) - buffer.withBytes { (bufferPtr) in - let bufPtr = UnsafeBufferPointer(start: bufferPtr, count: length) - target._writeASCII(bufPtr) + unsafe buffer.withBytes { (bufferPtr) in + let bufPtr = unsafe UnsafeBufferPointer(start: bufferPtr, count: length) + unsafe target._writeASCII(bufPtr) } } } @@ -317,7 +317,7 @@ extension ${Self}: BinaryFloatingPoint { @inlinable internal var _representation: _Representation { - return unsafeBitCast(self, to: _Representation.self) + return unsafe unsafeBitCast(self, to: _Representation.self) } @inlinable @@ -378,7 +378,7 @@ extension ${Self}: BinaryFloatingPoint { if exponent != 0 { significand |= Float80._explicitBitMask } let rep = _Representation( explicitSignificand: significand, signAndExponent: signBit|exponent) - self = unsafeBitCast(rep, to: Float80.self) + self = unsafe unsafeBitCast(rep, to: Float80.self) } @inlinable @@ -405,7 +405,7 @@ extension ${Self}: BinaryFloatingPoint { explicitSignificand: ${Self}._explicitBitMask, signAndExponent: 0x7fff ) - return unsafeBitCast(rep, to: ${Self}.self) + return unsafe unsafeBitCast(rep, to: ${Self}.self) %else: return ${Self}( sign: .plus, @@ -426,7 +426,7 @@ extension ${Self}: BinaryFloatingPoint { explicitSignificand: ${Self}._explicitBitMask | ${Self}._quietNaNMask, signAndExponent: 0x7fff ) - return unsafeBitCast(rep, to: ${Self}.self) + return unsafe unsafeBitCast(rep, to: ${Self}.self) %else: return ${Self}(nan: 0, signaling: false) %end @@ -1356,7 +1356,7 @@ internal struct _${Self}AnyHashableBox: _AnyHashableBox { into result: UnsafeMutablePointer ) -> Bool { guard let value = _value as? 
T else { return false } - result.initialize(to: value) + unsafe result.initialize(to: value) return true } } diff --git a/stdlib/public/core/HashTable.swift b/stdlib/public/core/HashTable.swift index 93de95b6c1243..be0a7259cfb6a 100644 --- a/stdlib/public/core/HashTable.swift +++ b/stdlib/public/core/HashTable.swift @@ -18,6 +18,7 @@ internal protocol _HashTableDelegate { @usableFromInline @frozen +@unsafe internal struct _HashTable { @usableFromInline internal typealias Word = _UnsafeBitset.Word @@ -33,23 +34,23 @@ internal struct _HashTable { internal init(words: UnsafeMutablePointer, bucketCount: Int) { _internalInvariant(bucketCount > 0 && bucketCount & (bucketCount - 1) == 0, "bucketCount must be a power of two") - self.words = words + unsafe self.words = unsafe words // The bucket count is a power of two, so subtracting 1 will never overflow // and get us a nice mask. - self.bucketMask = bucketCount &- 1 + unsafe self.bucketMask = bucketCount &- 1 } @inlinable internal var bucketCount: Int { @inline(__always) get { - return _assumeNonNegative(bucketMask &+ 1) + return unsafe _assumeNonNegative(bucketMask &+ 1) } } @inlinable internal var wordCount: Int { @inline(__always) get { - return _UnsafeBitset.wordCount(forCapacity: bucketCount) + return unsafe _UnsafeBitset.wordCount(forCapacity: bucketCount) } } @@ -61,7 +62,7 @@ internal struct _HashTable { /// to occupied buckets in the table. @_alwaysEmitIntoClient internal var bitset: _UnsafeBitset { - _UnsafeBitset(words: words, wordCount: wordCount) + unsafe _UnsafeBitset(words: words, wordCount: wordCount) } } @@ -76,7 +77,7 @@ extension _HashTable { internal static func capacity(forScale scale: Int8) -> Int { let bucketCount = (1 as Int) &<< scale - return Int(Double(bucketCount) * maxLoadFactor) + return unsafe Int(Double(bucketCount) * maxLoadFactor) } internal static func scale(forCapacity capacity: Int) -> Int8 { @@ -84,7 +85,7 @@ extension _HashTable { // Calculate the minimum number of entries we need to allocate to satisfy // the maximum load factor. `capacity + 1` below ensures that we always // leave at least one hole. - let minimumEntries = Swift.max( + let minimumEntries = unsafe Swift.max( Int((Double(capacity) / maxLoadFactor).rounded(.up)), capacity + 1) // The actual number of entries we need to allocate is the lowest power of @@ -94,7 +95,7 @@ extension _HashTable { _internalInvariant(exponent >= 0 && exponent < Int.bitWidth) // The scale is the exponent corresponding to the bucket count. let scale = Int8(truncatingIfNeeded: exponent) - _internalInvariant(self.capacity(forScale: scale) >= capacity) + unsafe _internalInvariant(self.capacity(forScale: scale) >= capacity) return scale } @@ -127,7 +128,7 @@ extension _HashTable { // 128-bit execution seed takes care of randomization. We only need to // guarantee that no two tables with the same seed can coexist at the same // time (apart from copy-on-write derivatives of the same table). 
- return unsafeBitCast(object, to: Int.self) + return unsafe unsafeBitCast(object, to: Int.self) } } @@ -141,26 +142,26 @@ extension _HashTable { @inlinable @inline(__always) internal init(offset: Int) { - self.offset = offset + unsafe self.offset = offset } @inlinable @inline(__always) internal init(word: Int, bit: Int) { - self.offset = _UnsafeBitset.join(word: word, bit: bit) + unsafe self.offset = unsafe _UnsafeBitset.join(word: word, bit: bit) } @inlinable internal var word: Int { @inline(__always) get { - return _UnsafeBitset.word(for: offset) + return unsafe _UnsafeBitset.word(for: offset) } } @inlinable internal var bit: Int { @inline(__always) get { - return _UnsafeBitset.bit(for: offset) + return unsafe _UnsafeBitset.bit(for: offset) } } } @@ -171,7 +172,7 @@ extension _HashTable.Bucket: Equatable { @inline(__always) internal static func == (lhs: _HashTable.Bucket, rhs: _HashTable.Bucket) -> Bool { - return lhs.offset == rhs.offset + return unsafe lhs.offset == rhs.offset } } @@ -180,7 +181,7 @@ extension _HashTable.Bucket: Comparable { @inline(__always) internal static func < (lhs: _HashTable.Bucket, rhs: _HashTable.Bucket) -> Bool { - return lhs.offset < rhs.offset + return unsafe lhs.offset < rhs.offset } } @@ -197,8 +198,8 @@ extension _HashTable { @inlinable @inline(__always) internal init(bucket: Bucket, age: Int32) { - self.bucket = bucket - self.age = age + unsafe self.bucket = unsafe bucket + unsafe self.age = age } } } @@ -210,9 +211,9 @@ extension _HashTable.Index: Equatable { lhs: _HashTable.Index, rhs: _HashTable.Index ) -> Bool { - _precondition(lhs.age == rhs.age, + unsafe _precondition(lhs.age == rhs.age, "Can't compare indices belonging to different collections") - return lhs.bucket == rhs.bucket + return unsafe lhs.bucket == rhs.bucket } } @@ -223,16 +224,16 @@ extension _HashTable.Index: Comparable { lhs: _HashTable.Index, rhs: _HashTable.Index ) -> Bool { - _precondition(lhs.age == rhs.age, + unsafe _precondition(lhs.age == rhs.age, "Can't compare indices belonging to different collections") - return lhs.bucket < rhs.bucket + return unsafe lhs.bucket < rhs.bucket } } -extension _HashTable: Sequence { +extension _HashTable: @unsafe Sequence { @usableFromInline @frozen - internal struct Iterator: IteratorProtocol { + internal struct Iterator: @unsafe IteratorProtocol { @usableFromInline let hashTable: _HashTable @usableFromInline @@ -243,25 +244,25 @@ extension _HashTable: Sequence { @inlinable @inline(__always) init(_ hashTable: _HashTable) { - self.hashTable = hashTable - self.wordIndex = 0 - self.word = hashTable.words[0] - if hashTable.bucketCount < Word.capacity { - self.word = self.word.intersecting(elementsBelow: hashTable.bucketCount) + unsafe self.hashTable = unsafe hashTable + unsafe self.wordIndex = 0 + unsafe self.word = unsafe hashTable.words[0] + if unsafe hashTable.bucketCount < Word.capacity { + unsafe self.word = unsafe self.word.intersecting(elementsBelow: hashTable.bucketCount) } } @inlinable @inline(__always) internal mutating func next() -> Bucket? 
{ - if let bit = word.next() { - return Bucket(word: wordIndex, bit: bit) + if let bit = unsafe word.next() { + return unsafe Bucket(word: wordIndex, bit: bit) } - while wordIndex + 1 < hashTable.wordCount { - wordIndex += 1 - word = hashTable.words[wordIndex] - if let bit = word.next() { - return Bucket(word: wordIndex, bit: bit) + while unsafe wordIndex + 1 < hashTable.wordCount { + unsafe wordIndex += 1 + unsafe word = unsafe hashTable.words[wordIndex] + if let bit = unsafe word.next() { + return unsafe Bucket(word: wordIndex, bit: bit) } } return nil @@ -271,7 +272,7 @@ extension _HashTable: Sequence { @inlinable @inline(__always) internal func makeIterator() -> Iterator { - return Iterator(self) + return unsafe Iterator(self) } } @@ -282,63 +283,63 @@ extension _HashTable { @inlinable @inline(__always) internal func isValid(_ bucket: Bucket) -> Bool { - return bucket.offset >= 0 && bucket.offset < bucketCount + return unsafe bucket.offset >= 0 && bucket.offset < bucketCount } @inlinable @inline(__always) internal func _isOccupied(_ bucket: Bucket) -> Bool { - _internalInvariant(isValid(bucket)) - return words[bucket.word].uncheckedContains(bucket.bit) + unsafe _internalInvariant(isValid(bucket)) + return unsafe words[bucket.word].uncheckedContains(bucket.bit) } @inlinable @inline(__always) internal func isOccupied(_ bucket: Bucket) -> Bool { - return isValid(bucket) && _isOccupied(bucket) + return unsafe isValid(bucket) && _isOccupied(bucket) } @inlinable @inline(__always) internal func checkOccupied(_ bucket: Bucket) { - _precondition(isOccupied(bucket), + unsafe _precondition(isOccupied(bucket), "Attempting to access Collection elements using an invalid Index") } @inlinable @inline(__always) internal func _firstOccupiedBucket(fromWord word: Int) -> Bucket { - _internalInvariant(word >= 0 && word <= wordCount) + unsafe _internalInvariant(word >= 0 && word <= wordCount) var word = word - while word < wordCount { - if let bit = words[word].minimum { - return Bucket(word: word, bit: bit) + while unsafe word < wordCount { + if let bit = unsafe words[word].minimum { + return unsafe Bucket(word: word, bit: bit) } word += 1 } - return endBucket + return unsafe endBucket } @inlinable internal func occupiedBucket(after bucket: Bucket) -> Bucket { - _internalInvariant(isValid(bucket)) - let word = bucket.word - if let bit = words[word].intersecting(elementsAbove: bucket.bit).minimum { - return Bucket(word: word, bit: bit) + unsafe _internalInvariant(isValid(bucket)) + let word = unsafe bucket.word + if let bit = unsafe words[word].intersecting(elementsAbove: bucket.bit).minimum { + return unsafe Bucket(word: word, bit: bit) } - return _firstOccupiedBucket(fromWord: word + 1) + return unsafe _firstOccupiedBucket(fromWord: word + 1) } @inlinable internal var startBucket: Bucket { - return _firstOccupiedBucket(fromWord: 0) + return unsafe _firstOccupiedBucket(fromWord: 0) } @inlinable internal var endBucket: Bucket { @inline(__always) get { - return Bucket(offset: bucketCount) + return unsafe Bucket(offset: bucketCount) } } } @@ -347,7 +348,7 @@ extension _HashTable { @inlinable @inline(__always) internal func idealBucket(forHashValue hashValue: Int) -> Bucket { - return Bucket(offset: hashValue & bucketMask) + return unsafe Bucket(offset: hashValue & bucketMask) } /// The next bucket after `bucket`, with wraparound at the end of the table. 
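The hunks above mark `_HashTable`, its `Sequence` conformance, and its `Iterator` with `@unsafe`, so under `-strict-memory-safety` every use of the type has to be acknowledged by the caller. A minimal sketch of the pattern these annotations set up, using made-up names rather than anything in the standard library:

```swift
// Hypothetical example; `RawSlots`, `firstSlot`, and `total` are not stdlib API.
@unsafe
struct RawSlots {
  var base: UnsafeMutablePointer<Int>
  var count: Int
}

// Using a value of an @unsafe type is itself treated as an unsafe operation,
// so the caller acknowledges it with an `unsafe` expression.
func firstSlot(_ slots: RawSlots) -> Int {
  return unsafe slots.base[0]
}

// Iterating a buffer pointer in this mode is spelled `for unsafe ... in
// unsafe ...`, matching the loops introduced elsewhere in this diff.
func total(_ bytes: UnsafeBufferPointer<UInt8>) -> Int {
  var sum = 0
  for unsafe byte in unsafe bytes {
    sum += Int(byte)
  }
  return sum
}
```

Conformances follow the same convention: `extension _HashTable: @unsafe Sequence` above states that the conformance itself, not just the type, is only meant to be used from code that has opted into the unsafety.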
@@ -356,23 +357,23 @@ extension _HashTable { internal func bucket(wrappedAfter bucket: Bucket) -> Bucket { // The bucket is less than bucketCount, which is power of two less than // Int.max. Therefore adding 1 does not overflow. - return Bucket(offset: (bucket.offset &+ 1) & bucketMask) + return unsafe Bucket(offset: (bucket.offset &+ 1) & bucketMask) } } extension _HashTable { @inlinable internal func previousHole(before bucket: Bucket) -> Bucket { - _internalInvariant(isValid(bucket)) + unsafe _internalInvariant(isValid(bucket)) // Note that if we have only a single partial word, its out-of-bounds bits // are guaranteed to be all set, so the formula below gives correct results. - var word = bucket.word + var word = unsafe bucket.word if let bit = - words[word] + unsafe words[word] .complement .intersecting(elementsBelow: bucket.bit) .maximum { - return Bucket(word: word, bit: bit) + return unsafe Bucket(word: word, bit: bit) } var wrap = false while true { @@ -380,37 +381,37 @@ extension _HashTable { if word < 0 { _precondition(!wrap, "Hash table has no holes") wrap = true - word = wordCount - 1 + word = unsafe wordCount - 1 } - if let bit = words[word].complement.maximum { - return Bucket(word: word, bit: bit) + if let bit = unsafe words[word].complement.maximum { + return unsafe Bucket(word: word, bit: bit) } } } @inlinable internal func nextHole(atOrAfter bucket: Bucket) -> Bucket { - _internalInvariant(isValid(bucket)) + unsafe _internalInvariant(isValid(bucket)) // Note that if we have only a single partial word, its out-of-bounds bits // are guaranteed to be all set, so the formula below gives correct results. - var word = bucket.word + var word = unsafe bucket.word if let bit = - words[word] + unsafe words[word] .complement .subtracting(elementsBelow: bucket.bit) .minimum { - return Bucket(word: word, bit: bit) + return unsafe Bucket(word: word, bit: bit) } var wrap = false while true { word &+= 1 - if word == wordCount { + if unsafe word == wordCount { _precondition(!wrap, "Hash table has no holes") wrap = true word = 0 } - if let bit = words[word].complement.minimum { - return Bucket(word: word, bit: bit) + if let bit = unsafe words[word].complement.minimum { + return unsafe Bucket(word: word, bit: bit) } } } @@ -421,8 +422,8 @@ extension _HashTable { @inline(__always) @_effects(releasenone) internal func copyContents(of other: _HashTable) { - _internalInvariant(bucketCount == other.bucketCount) - self.words.update(from: other.words, count: wordCount) + unsafe _internalInvariant(bucketCount == other.bucketCount) + unsafe self.words.update(from: other.words, count: wordCount) } /// Insert a new entry with the specified hash value into the table. @@ -430,29 +431,29 @@ extension _HashTable { @inlinable @inline(__always) internal func insertNew(hashValue: Int) -> Bucket { - let hole = nextHole(atOrAfter: idealBucket(forHashValue: hashValue)) - insert(hole) - return hole + let hole = unsafe nextHole(atOrAfter: idealBucket(forHashValue: hashValue)) + unsafe insert(hole) + return unsafe hole } /// Insert a new entry for an element at `index`. @inlinable @inline(__always) internal func insert(_ bucket: Bucket) { - _internalInvariant(!isOccupied(bucket)) - words[bucket.word].uncheckedInsert(bucket.bit) + unsafe _internalInvariant(!isOccupied(bucket)) + unsafe words[bucket.word].uncheckedInsert(bucket.bit) } @inlinable @inline(__always) internal func clear() { - if bucketCount < Word.capacity { + if unsafe bucketCount < Word.capacity { // We have only a single partial word. 
Set all out of bounds bits, so that // `occupiedBucket(after:)` and `nextHole(atOrAfter:)` works correctly // without a special case. - words[0] = Word.allBits.subtracting(elementsBelow: bucketCount) + unsafe words[0] = Word.allBits.subtracting(elementsBelow: bucketCount) } else { - words.update(repeating: .empty, count: wordCount) + unsafe words.update(repeating: .empty, count: wordCount) } } @@ -462,42 +463,42 @@ extension _HashTable { at bucket: Bucket, with delegate: D ) { - _internalInvariant(isOccupied(bucket)) + unsafe _internalInvariant(isOccupied(bucket)) // If we've put a hole in a chain of contiguous elements, some element after // the hole may belong where the new hole is. - var hole = bucket - var candidate = self.bucket(wrappedAfter: hole) + var hole = unsafe bucket + var candidate = unsafe self.bucket(wrappedAfter: hole) - guard _isOccupied(candidate) else { + guard unsafe _isOccupied(candidate) else { // Fast path: Don't get the first bucket when there's nothing to do. - words[hole.word].uncheckedRemove(hole.bit) + unsafe words[hole.word].uncheckedRemove(hole.bit) return } // Find the first bucket in the contiguous chain that contains the entry // we've just deleted. - let start = self.bucket(wrappedAfter: previousHole(before: bucket)) + let start = unsafe self.bucket(wrappedAfter: previousHole(before: bucket)) // Relocate out-of-place elements in the chain, repeating until we get to // the end of the chain. - while _isOccupied(candidate) { - let candidateHash = delegate.hashValue(at: candidate) - let ideal = idealBucket(forHashValue: candidateHash) + while unsafe _isOccupied(candidate) { + let candidateHash = unsafe delegate.hashValue(at: candidate) + let ideal = unsafe idealBucket(forHashValue: candidateHash) // Does this element belong between start and hole? We need two // separate tests depending on whether [start, hole] wraps around the // end of the storage. - let c0 = ideal >= start - let c1 = ideal <= hole - if start <= hole ? (c0 && c1) : (c0 || c1) { - delegate.moveEntry(from: candidate, to: hole) - hole = candidate + let c0 = unsafe ideal >= start + let c1 = unsafe ideal <= hole + if unsafe start <= hole ? (c0 && c1) : (c0 || c1) { + unsafe delegate.moveEntry(from: candidate, to: hole) + unsafe hole = unsafe candidate } - candidate = self.bucket(wrappedAfter: candidate) + unsafe candidate = unsafe self.bucket(wrappedAfter: candidate) } - words[hole.word].uncheckedRemove(hole.bit) + unsafe words[hole.word].uncheckedRemove(hole.bit) } } diff --git a/stdlib/public/core/Hashable.swift b/stdlib/public/core/Hashable.swift index 2a6617f3a2d89..5dff1f30685e5 100644 --- a/stdlib/public/core/Hashable.swift +++ b/stdlib/public/core/Hashable.swift @@ -158,7 +158,7 @@ internal func Hashable_isEqual_indirect( _ lhs: UnsafePointer, _ rhs: UnsafePointer ) -> Bool { - return lhs.pointee == rhs.pointee + return unsafe lhs.pointee == rhs.pointee } // Called by the SwiftValue implementation. 
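In the `Hashable_isEqual_indirect` hunk just above, a single `unsafe` prefix covers both `.pointee` loads in the comparison; like `try` and `await`, the marker applies to the whole expression that follows it. A small sketch of the same shape (the helper name is invented for illustration and is not the runtime entry point being edited here):

```swift
// Hypothetical helper, for illustration only.
func pointeesEqual<T: Equatable>(
  _ lhs: UnsafePointer<T>,
  _ rhs: UnsafePointer<T>
) -> Bool {
  // One `unsafe` acknowledges both pointer dereferences; without it,
  // -strict-memory-safety would warn about each use of `pointee`.
  return unsafe lhs.pointee == rhs.pointee
}
```

Statement-level uses in this diff, such as `unsafe result.initialize(to: value)`, follow the same single-marker convention.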
@@ -166,5 +166,5 @@ internal func Hashable_isEqual_indirect( internal func Hashable_hashValue_indirect( _ value: UnsafePointer ) -> Int { - return value.pointee.hashValue + return unsafe value.pointee.hashValue } diff --git a/stdlib/public/core/Hasher.swift b/stdlib/public/core/Hasher.swift index 7be0c15f6ed57..2e25df1e71400 100644 --- a/stdlib/public/core/Hasher.swift +++ b/stdlib/public/core/Hasher.swift @@ -24,25 +24,25 @@ internal func _loadPartialUnalignedUInt64LE( var result: UInt64 = 0 switch byteCount { case 7: - result |= UInt64(p.load(fromByteOffset: 6, as: UInt8.self)) &<< 48 + unsafe result |= UInt64(p.load(fromByteOffset: 6, as: UInt8.self)) &<< 48 fallthrough case 6: - result |= UInt64(p.load(fromByteOffset: 5, as: UInt8.self)) &<< 40 + unsafe result |= UInt64(p.load(fromByteOffset: 5, as: UInt8.self)) &<< 40 fallthrough case 5: - result |= UInt64(p.load(fromByteOffset: 4, as: UInt8.self)) &<< 32 + unsafe result |= UInt64(p.load(fromByteOffset: 4, as: UInt8.self)) &<< 32 fallthrough case 4: - result |= UInt64(p.load(fromByteOffset: 3, as: UInt8.self)) &<< 24 + unsafe result |= UInt64(p.load(fromByteOffset: 3, as: UInt8.self)) &<< 24 fallthrough case 3: - result |= UInt64(p.load(fromByteOffset: 2, as: UInt8.self)) &<< 16 + unsafe result |= UInt64(p.load(fromByteOffset: 2, as: UInt8.self)) &<< 16 fallthrough case 2: - result |= UInt64(p.load(fromByteOffset: 1, as: UInt8.self)) &<< 8 + unsafe result |= UInt64(p.load(fromByteOffset: 1, as: UInt8.self)) &<< 8 fallthrough case 1: - result |= UInt64(p.load(fromByteOffset: 0, as: UInt8.self)) + unsafe result |= UInt64(p.load(fromByteOffset: 0, as: UInt8.self)) fallthrough case 0: return result @@ -217,31 +217,31 @@ extension Hasher { // Load first unaligned partial word of data do { - let start = UInt(bitPattern: data) + let start = unsafe UInt(bitPattern: data) let end = _roundUp(start, toAlignment: MemoryLayout.alignment) let c = min(remaining, Int(end - start)) if c > 0 { - let chunk = _loadPartialUnalignedUInt64LE(data, byteCount: c) + let chunk = unsafe _loadPartialUnalignedUInt64LE(data, byteCount: c) combine(bytes: chunk, count: c) - data += c + unsafe data += c remaining -= c } } - _internalInvariant( + unsafe _internalInvariant( remaining == 0 || Int(bitPattern: data) & (MemoryLayout.alignment - 1) == 0) // Load as many aligned words as there are in the input buffer while remaining >= MemoryLayout.size { - combine(UInt64(littleEndian: data.load(as: UInt64.self))) - data += MemoryLayout.size + unsafe combine(UInt64(littleEndian: data.load(as: UInt64.self))) + unsafe data += MemoryLayout.size remaining -= MemoryLayout.size } // Load last partial word of data _internalInvariant(remaining >= 0 && remaining < 8) if remaining > 0 { - let chunk = _loadPartialUnalignedUInt64LE(data, byteCount: remaining) + let chunk = unsafe _loadPartialUnalignedUInt64LE(data, byteCount: remaining) combine(bytes: chunk, count: remaining) } } @@ -257,8 +257,8 @@ extension Hasher { @usableFromInline var _swift_stdlib_Hashing_parameters: _SwiftHashingParameters = { var seed0: UInt64 = 0, seed1: UInt64 = 0 - swift_stdlib_random(&seed0, MemoryLayout.size) - swift_stdlib_random(&seed1, MemoryLayout.size) + unsafe swift_stdlib_random(&seed0, MemoryLayout.size) + unsafe swift_stdlib_random(&seed1, MemoryLayout.size) return .init(seed0: seed0, seed1: seed1, deterministic: false) }() #endif @@ -396,7 +396,7 @@ public struct Hasher { /// - Parameter bytes: A raw memory buffer. 
@_effects(releasenone) public mutating func combine(bytes: UnsafeRawBufferPointer) { - _core.combine(bytes: bytes) + unsafe _core.combine(bytes: bytes) } /// Finalize the hasher state and return the hash value. @@ -470,7 +470,7 @@ public struct Hasher { seed: Int, bytes: UnsafeRawBufferPointer) -> Int { var core = _Core(seed: seed) - core.combine(bytes: bytes) + unsafe core.combine(bytes: bytes) return Int(truncatingIfNeeded: core.finalize()) } } diff --git a/stdlib/public/core/Hashing.swift b/stdlib/public/core/Hashing.swift index 032da1fbfb716..8919a0657a7c1 100644 --- a/stdlib/public/core/Hashing.swift +++ b/stdlib/public/core/Hashing.swift @@ -39,29 +39,30 @@ internal func _stdlib_NSObject_isEqual(_ lhs: AnyObject, _ rhs: AnyObject) -> Bo /// /// Accesses the underlying raw memory as Unmanaged using untyped /// memory accesses. The memory remains bound to managed AnyObjects. +@unsafe internal struct _UnmanagedAnyObjectArray { /// Underlying pointer. internal var value: UnsafeMutableRawPointer internal init(_ up: UnsafeMutablePointer) { - self.value = UnsafeMutableRawPointer(up) + unsafe self.value = UnsafeMutableRawPointer(up) } internal init?(_ up: UnsafeMutablePointer?) { - guard let unwrapped = up else { return nil } - self.init(unwrapped) + guard let unwrapped = unsafe up else { return nil } + unsafe self.init(unwrapped) } internal subscript(i: Int) -> AnyObject { get { - let unmanaged = value.load( + let unmanaged = unsafe value.load( fromByteOffset: i * MemoryLayout.stride, as: Unmanaged.self) - return unmanaged.takeUnretainedValue() + return unsafe unmanaged.takeUnretainedValue() } nonmutating set(newValue) { - let unmanaged = Unmanaged.passUnretained(newValue) - value.storeBytes(of: unmanaged, + let unmanaged = unsafe Unmanaged.passUnretained(newValue) + unsafe value.storeBytes(of: unmanaged, toByteOffset: i * MemoryLayout.stride, as: Unmanaged.self) } @@ -94,13 +95,13 @@ final internal class __SwiftEmptyNSEnumerator ) -> Int { // Even though we never do anything in here, we need to update the // state so that callers know we actually ran. - var theState = state.pointee - if theState.state == 0 { - theState.state = 1 // Arbitrary non-zero value. - theState.itemsPtr = AutoreleasingUnsafeMutablePointer(objects) - theState.mutationsPtr = _fastEnumerationStorageMutationsPtr + var theState = unsafe state.pointee + if unsafe theState.state == 0 { + unsafe theState.state = 1 // Arbitrary non-zero value. + unsafe theState.itemsPtr = AutoreleasingUnsafeMutablePointer(objects) + unsafe theState.mutationsPtr = _fastEnumerationStorageMutationsPtr } - state.pointee = theState + unsafe state.pointee = theState return 0 } } @@ -116,15 +117,17 @@ final internal class __SwiftEmptyNSEnumerator // NOTE: older runtimes called this class _BridgingHashBuffer. // The two must coexist without a conflicting ObjC class name, so it // was renamed. The old name must not be used in the new runtime. 
+@unsafe internal final class __BridgingHashBuffer : ManagedBuffer<__BridgingHashBuffer.Header, AnyObject> { + @unsafe struct Header { internal var owner: AnyObject internal var hashTable: _HashTable init(owner: AnyObject, hashTable: _HashTable) { - self.owner = owner - self.hashTable = hashTable + unsafe self.owner = owner + unsafe self.hashTable = unsafe hashTable } } @@ -132,32 +135,32 @@ internal final class __BridgingHashBuffer owner: AnyObject, hashTable: _HashTable ) -> __BridgingHashBuffer { - let buffer = self.create(minimumCapacity: hashTable.bucketCount) { _ in - Header(owner: owner, hashTable: hashTable) + let buffer = unsafe self.create(minimumCapacity: hashTable.bucketCount) { _ in + unsafe Header(owner: owner, hashTable: hashTable) } - return unsafeDowncast(buffer, to: __BridgingHashBuffer.self) + return unsafe unsafeDowncast(buffer, to: __BridgingHashBuffer.self) } deinit { - for bucket in header.hashTable { - (firstElementAddress + bucket.offset).deinitialize(count: 1) + for bucket in unsafe header.hashTable { + unsafe (firstElementAddress + bucket.offset).deinitialize(count: 1) } - _fixLifetime(self) + unsafe _fixLifetime(self) } internal subscript(bucket: _HashTable.Bucket) -> AnyObject { @inline(__always) get { - _internalInvariant(header.hashTable.isOccupied(bucket)) - defer { _fixLifetime(self) } - return firstElementAddress[bucket.offset] + unsafe _internalInvariant(header.hashTable.isOccupied(bucket)) + defer { unsafe _fixLifetime(self) } + return unsafe firstElementAddress[bucket.offset] } } @inline(__always) internal func initialize(at bucket: _HashTable.Bucket, to object: AnyObject) { - _internalInvariant(header.hashTable.isOccupied(bucket)) - (firstElementAddress + bucket.offset).initialize(to: object) - _fixLifetime(self) + unsafe _internalInvariant(header.hashTable.isOccupied(bucket)) + unsafe (firstElementAddress + bucket.offset).initialize(to: object) + unsafe _fixLifetime(self) } } #endif diff --git a/stdlib/public/core/InlineArray.swift b/stdlib/public/core/InlineArray.swift index 2f53370ffbcba..fdb619018c2fd 100644 --- a/stdlib/public/core/InlineArray.swift +++ b/stdlib/public/core/InlineArray.swift @@ -13,6 +13,7 @@ /// A fixed-size array. @available(SwiftStdlib 6.2, *) @frozen +@safe public struct InlineArray: ~Copyable { @usableFromInline internal let _storage: Builtin.FixedArray @@ -38,7 +39,7 @@ extension InlineArray where Element: ~Copyable { @_alwaysEmitIntoClient @_transparent internal var _address: UnsafePointer { - UnsafePointer(Builtin.unprotectedAddressOfBorrow(self)) + unsafe UnsafePointer(Builtin.unprotectedAddressOfBorrow(self)) } /// Returns a buffer pointer over the entire vector. @@ -46,7 +47,7 @@ extension InlineArray where Element: ~Copyable { @_alwaysEmitIntoClient @_transparent internal var _buffer: UnsafeBufferPointer { - UnsafeBufferPointer(start: _address, count: count) + unsafe UnsafeBufferPointer(start: _address, count: count) } /// Returns a mutable pointer to the first element in the vector. 
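`InlineArray` arrives with an explicit `@safe` annotation even though the accessors above are built on `UnsafePointer` and `UnsafeBufferPointer`: the attribute asserts that the type's interface is memory-safe, while each pointer operation inside the implementation still carries its own `unsafe` marker. A loose sketch of that division of labor, using a hypothetical wrapper rather than `InlineArray` itself (the precise rules for when `@safe` is required are not spelled out in this diff, so treat the annotation below as an assumption):

```swift
// Hypothetical wrapper, for illustration only.
@safe
struct FourInts {
  private var storage: (Int, Int, Int, Int) = (0, 0, 0, 0)

  // The public face is bounds-checked and safe to call...
  subscript(i: Int) -> Int {
    precondition(i >= 0 && i < 4, "Index out of bounds")
    // ...while the implementation acknowledges its raw-memory access.
    return unsafe withUnsafeBytes(of: storage) { raw in
      unsafe raw.load(fromByteOffset: i * MemoryLayout<Int>.stride, as: Int.self)
    }
  }
}
```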
@@ -55,7 +56,7 @@ extension InlineArray where Element: ~Copyable { @_transparent internal var _mutableAddress: UnsafeMutablePointer { mutating get { - UnsafeMutablePointer(Builtin.unprotectedAddressOf(&self)) + unsafe UnsafeMutablePointer(Builtin.unprotectedAddressOf(&self)) } } @@ -65,7 +66,7 @@ extension InlineArray where Element: ~Copyable { @_transparent internal var _mutableBuffer: UnsafeMutableBufferPointer { mutating get { - UnsafeMutableBufferPointer(start: _mutableAddress, count: count) + unsafe UnsafeMutableBufferPointer(start: _mutableAddress, count: count) } } @@ -77,7 +78,7 @@ extension InlineArray where Element: ~Copyable { internal static func _initializationBuffer( start: Builtin.RawPointer ) -> UnsafeMutableBufferPointer { - UnsafeMutableBufferPointer( + unsafe UnsafeMutableBufferPointer( start: UnsafeMutablePointer(start), count: count ) @@ -113,12 +114,12 @@ extension InlineArray where Element: ~Copyable { for i in 0 ..< count { do throws(E) { - try buffer.initializeElement(at: i, to: body(i)) + try unsafe buffer.initializeElement(at: i, to: body(i)) } catch { // The closure threw an error. We need to deinitialize every element // we've initialized up to this point. for j in 0 ..< i { - buffer.deinitializeElement(at: j) + unsafe buffer.deinitializeElement(at: j) } // Throw the error we were given back out to the caller. @@ -165,16 +166,16 @@ extension InlineArray where Element: ~Copyable { start: rawPtr ) - buffer.initializeElement(at: 0, to: o.take()._consumingUncheckedUnwrapped()) + unsafe buffer.initializeElement(at: 0, to: o.take()._consumingUncheckedUnwrapped()) for i in 1 ..< count { do throws(E) { - try buffer.initializeElement(at: i, to: next(buffer[i &- 1])) + try unsafe buffer.initializeElement(at: i, to: next(buffer[i &- 1])) } catch { // The closure threw an error. We need to deinitialize every element // we've initialized up to this point. 
for j in 0 ..< i { - buffer.deinitializeElement(at: j) + unsafe buffer.deinitializeElement(at: j) } throw error @@ -198,7 +199,7 @@ extension InlineArray where Element: Copyable { self = Builtin.emplace { let buffer = InlineArray._initializationBuffer(start: $0) - buffer.initialize(repeating: value) + unsafe buffer.initialize(repeating: value) } } } @@ -346,14 +347,14 @@ extension InlineArray where Element: ~Copyable { unsafeAddress { _precondition(indices.contains(i), "Index out of bounds") - return _address + i + return unsafe _address + i } @_transparent unsafeMutableAddress { _precondition(indices.contains(i), "Index out of bounds") - return _mutableAddress + i + return unsafe _mutableAddress + i } } } @@ -388,10 +389,10 @@ extension InlineArray where Element: ~Copyable { _precondition(indices.contains(i), "Index out of bounds") _precondition(indices.contains(j), "Index out of bounds") - let ithElement = _mutableBuffer.moveElement(from: i) - let jthElement = _mutableBuffer.moveElement(from: j) - _mutableBuffer.initializeElement(at: i, to: jthElement) - _mutableBuffer.initializeElement(at: j, to: ithElement) + let ithElement = unsafe _mutableBuffer.moveElement(from: i) + let jthElement = unsafe _mutableBuffer.moveElement(from: j) + unsafe _mutableBuffer.initializeElement(at: i, to: jthElement) + unsafe _mutableBuffer.initializeElement(at: j, to: ithElement) } } @@ -440,7 +441,7 @@ extension InlineArray where Element: ~Copyable { public borrowing func _withUnsafeBufferPointer( _ body: (UnsafeBufferPointer) throws(E) -> Result ) throws(E) -> Result { - try body(_buffer) + try unsafe body(_buffer) } /// Calls the given closure with a pointer to the vector's mutable contiguous @@ -489,6 +490,6 @@ extension InlineArray where Element: ~Copyable { public mutating func _withUnsafeMutableBufferPointer( _ body: (UnsafeMutableBufferPointer) throws(E) -> Result ) throws(E) -> Result { - try body(_mutableBuffer) + try unsafe body(_mutableBuffer) } } diff --git a/stdlib/public/core/InputStream.swift b/stdlib/public/core/InputStream.swift index b53683efcb496..d16218250c5a7 100644 --- a/stdlib/public/core/InputStream.swift +++ b/stdlib/public/core/InputStream.swift @@ -30,15 +30,15 @@ import SwiftShims /// already been reached when `readLine()` is called, the result is `nil`. public func readLine(strippingNewline: Bool = true) -> String? { var utf8Start: UnsafeMutablePointer? 
- let utf8Count = swift_stdlib_readLine_stdin(&utf8Start) + let utf8Count = unsafe swift_stdlib_readLine_stdin(&utf8Start) defer { - _swift_stdlib_free(utf8Start) + unsafe _swift_stdlib_free(utf8Start) } guard utf8Count > 0 else { return nil } - let utf8Buffer = UnsafeBufferPointer(start: utf8Start, count: utf8Count) - var result = String._fromUTF8Repairing(utf8Buffer).result + let utf8Buffer = unsafe UnsafeBufferPointer(start: utf8Start, count: utf8Count) + var result = unsafe String._fromUTF8Repairing(utf8Buffer).result if strippingNewline, result.last == "\n" || result.last == "\r\n" { _ = result.removeLast() } diff --git a/stdlib/public/core/Int128.swift b/stdlib/public/core/Int128.swift index 3bebe158bd8c7..5713c4e61424b 100644 --- a/stdlib/public/core/Int128.swift +++ b/stdlib/public/core/Int128.swift @@ -45,7 +45,7 @@ public struct Int128: Sendable { @_transparent public init(_low: UInt64, _high: Int64) { #if _endian(little) - self = unsafeBitCast((_low, _high), to: Self.self) + self = unsafe unsafeBitCast((_low, _high), to: Self.self) #else self = unsafeBitCast((_high, _low), to: Self.self) #endif diff --git a/stdlib/public/core/IntegerParsing.swift b/stdlib/public/core/IntegerParsing.swift index 1b946b79c8f81..3869f06f417b1 100644 --- a/stdlib/public/core/IntegerParsing.swift +++ b/stdlib/public/core/IntegerParsing.swift @@ -34,7 +34,7 @@ internal func _parseIntegerDigits( } let multiplicand = Result(truncatingIfNeeded: radix) var result = 0 as Result - for digit in codeUnits { + for unsafe digit in unsafe codeUnits { let digitValue: Result if _fastPath(digit >= _0 && digit < numericalUpperBound) { digitValue = Result(truncatingIfNeeded: digit &- _0) @@ -66,18 +66,18 @@ internal func _parseInteger( // ASCII constants, named for clarity: let _plus = 43 as UInt8, _minus = 45 as UInt8 - let first = codeUnits[0] + let first = unsafe codeUnits[0] if first == _minus { - return _parseIntegerDigits( + return unsafe _parseIntegerDigits( ascii: UnsafeBufferPointer(rebasing: codeUnits[1...]), radix: radix, isNegative: true) } if first == _plus { - return _parseIntegerDigits( + return unsafe _parseIntegerDigits( ascii: UnsafeBufferPointer(rebasing: codeUnits[1...]), radix: radix, isNegative: false) } - return _parseIntegerDigits(ascii: codeUnits, radix: radix, isNegative: false) + return unsafe _parseIntegerDigits(ascii: codeUnits, radix: radix, isNegative: false) } @_alwaysEmitIntoClient @@ -86,7 +86,7 @@ internal func _parseInteger( ascii text: S, radix: Int ) -> Result? { var str = String(text) - return str.withUTF8 { _parseInteger(ascii: $0, radix: radix) } + return str.withUTF8 { unsafe _parseInteger(ascii: $0, radix: radix) } } extension FixedWidthInteger { @@ -129,7 +129,7 @@ extension FixedWidthInteger { guard _fastPath(!text.isEmpty) else { return nil } let result: Self? = text.utf8.withContiguousStorageIfAvailable { - _parseInteger(ascii: $0, radix: radix) + unsafe _parseInteger(ascii: $0, radix: radix) } ?? _parseInteger(ascii: text, radix: radix) guard let result_ = result else { return nil } self = result_ diff --git a/stdlib/public/core/IntegerTypes.swift.gyb b/stdlib/public/core/IntegerTypes.swift.gyb index a07d5a4a192a1..14ab72e56f16f 100644 --- a/stdlib/public/core/IntegerTypes.swift.gyb +++ b/stdlib/public/core/IntegerTypes.swift.gyb @@ -824,7 +824,7 @@ internal struct _IntegerAnyHashableBox< into result: UnsafeMutablePointer ) -> Bool { guard let value = _value as? 
T else { return false } - result.initialize(to: value) + unsafe result.initialize(to: value) return true } } diff --git a/stdlib/public/core/Integers.swift b/stdlib/public/core/Integers.swift index d508c7a5876b2..04924da4011bd 100644 --- a/stdlib/public/core/Integers.swift +++ b/stdlib/public/core/Integers.swift @@ -345,7 +345,7 @@ extension SignedNumeric { @inlinable public func abs(_ x: T) -> T { if T.self == T.Magnitude.self { - return unsafeBitCast(x.magnitude, to: T.self) + return unsafe unsafeBitCast(x.magnitude, to: T.self) } return x < (0 as T) ? -x : x @@ -1433,8 +1433,8 @@ extension BinaryInteger { } result.reverse() - return result.withUnsafeBufferPointer { - return String._fromASCII($0) + return unsafe result.withUnsafeBufferPointer { + return unsafe String._fromASCII($0) } } diff --git a/stdlib/public/core/KeyPath.swift b/stdlib/public/core/KeyPath.swift index 0531dd325de78..09161387845bb 100644 --- a/stdlib/public/core/KeyPath.swift +++ b/stdlib/public/core/KeyPath.swift @@ -34,6 +34,7 @@ internal func _abstract( /// A type-erased key path, from any root type to any resulting value /// type. @_objcRuntimeName(_TtCs11_AnyKeyPath) +@safe public class AnyKeyPath: _AppendKeyPath { /// The root type for this key path. @inlinable @@ -76,7 +77,7 @@ public class AnyKeyPath: _AppendKeyPath { } #if _pointerBitWidth(_64) - _kvcKeyPathStringPtr = UnsafePointer(bitPattern: -offset - 1) + _kvcKeyPathStringPtr = unsafe UnsafePointer(bitPattern: -offset - 1) #elseif _pointerBitWidth(_32) if offset <= maximumOffsetOn32BitArchitecture { _kvcKeyPathStringPtr = UnsafePointer(bitPattern: (offset + 1)) @@ -90,12 +91,12 @@ public class AnyKeyPath: _AppendKeyPath { final func getOffsetFromStorage() -> Int? { let maximumOffsetOn32BitArchitecture = 4094 - guard _kvcKeyPathStringPtr != nil else { + guard unsafe _kvcKeyPathStringPtr != nil else { return nil } #if _pointerBitWidth(_64) - let offset = (0 &- Int(bitPattern: _kvcKeyPathStringPtr)) &- 1 + let offset = unsafe (0 &- Int(bitPattern: _kvcKeyPathStringPtr)) &- 1 guard _fastPath(offset >= 0) else { // This happens to be an actual _kvcKeyPathStringPtr, not an offset, if // we get here. @@ -127,7 +128,7 @@ public class AnyKeyPath: _AppendKeyPath { } guard let ptr = _kvcKeyPathStringPtr else { return nil } - return String(validatingCString: ptr) + return unsafe String(validatingCString: ptr) } } @@ -157,7 +158,7 @@ public class AnyKeyPath: _AppendKeyPath { result._kvcKeyPathStringPtr = nil let base = UnsafeMutableRawPointer(Builtin.projectTailElems(result, Int32.self)) - body(UnsafeMutableRawBufferPointer(start: base, count: bytes)) + unsafe body(UnsafeMutableRawBufferPointer(start: base, count: bytes)) return result } @@ -166,25 +167,25 @@ public class AnyKeyPath: _AppendKeyPath { defer { _fixLifetime(self) } let base = UnsafeRawPointer(Builtin.projectTailElems(self, Int32.self)) - return try f(KeyPathBuffer(base: base)) + return try unsafe f(KeyPathBuffer(base: base)) } @usableFromInline // Exposed as public API by MemoryLayout.offset(of:) internal var _storedInlineOffset: Int? 
{ #if !$Embedded return withBuffer { - var buffer = $0 + var buffer = unsafe $0 // The identity key path is effectively a stored keypath of type Self // at offset zero - if buffer.data.isEmpty { return 0 } + if unsafe buffer.data.isEmpty { return 0 } var offset = 0 while true { - let (rawComponent, optNextType) = buffer.next() - switch rawComponent.header.kind { + let (rawComponent, optNextType) = unsafe buffer.next() + switch unsafe rawComponent.header.kind { case .struct: - offset += rawComponent._structOrClassOffset + unsafe offset += rawComponent._structOrClassOffset case .class, .computed, .optionalChain, .optionalForce, .optionalWrap, .external: return .none @@ -218,13 +219,13 @@ extension AnyKeyPath: Hashable { final public func hash(into hasher: inout Hasher) { ObjectIdentifier(type(of: self)).hash(into: &hasher) return withBuffer { - var buffer = $0 - if buffer.data.isEmpty { return } + var buffer = unsafe $0 + if unsafe buffer.data.isEmpty { return } while true { - let (component, type) = buffer.next() - hasher.combine(component.value) + let (component, type) = unsafe buffer.next() + unsafe hasher.combine(component.value) if let type = type { - hasher.combine(unsafeBitCast(type, to: Int.self)) + unsafe hasher.combine(unsafeBitCast(type, to: Int.self)) } else { break } @@ -242,25 +243,25 @@ extension AnyKeyPath: Hashable { return false } return a.withBuffer { - var aBuffer = $0 + var aBuffer = unsafe $0 return b.withBuffer { - var bBuffer = $0 + var bBuffer = unsafe $0 // Two equivalent key paths should have the same reference prefix - if aBuffer.hasReferencePrefix != bBuffer.hasReferencePrefix { + if unsafe aBuffer.hasReferencePrefix != bBuffer.hasReferencePrefix { return false } // Identity is equal to identity - if aBuffer.data.isEmpty { - return bBuffer.data.isEmpty + if unsafe aBuffer.data.isEmpty { + return unsafe bBuffer.data.isEmpty } while true { - let (aComponent, aType) = aBuffer.next() - let (bComponent, bType) = bBuffer.next() + let (aComponent, aType) = unsafe aBuffer.next() + let (bComponent, bType) = unsafe bBuffer.next() - if aComponent.header.endOfReferencePrefix + if unsafe aComponent.header.endOfReferencePrefix != bComponent.header.endOfReferencePrefix || aComponent.value != bComponent.value || aType != bType { @@ -335,25 +336,25 @@ public class KeyPath: PartialKeyPath { // One performance improvement is to skip right to Value // if this keypath traverses through structs only. 
if let offset = getOffsetFromStorage() { - return _withUnprotectedUnsafeBytes(of: root) { - let pointer = $0.baseAddress._unsafelyUnwrappedUnchecked + offset - return pointer.assumingMemoryBound(to: Value.self).pointee + return unsafe _withUnprotectedUnsafeBytes(of: root) { + let pointer = unsafe $0.baseAddress._unsafelyUnwrappedUnchecked + offset + return unsafe pointer.assumingMemoryBound(to: Value.self).pointee } } return withBuffer { - var buffer = $0 + var buffer = unsafe $0 - if _slowPath(buffer.data.isEmpty) { + if unsafe _slowPath(buffer.data.isEmpty) { return Builtin.reinterpretCast(root) } - if _fastPath(buffer.isSingleComponent) { + if unsafe _fastPath(buffer.isSingleComponent) { var isBreak = false - let (rawComponent, _) = buffer.next() + let (rawComponent, _) = unsafe buffer.next() return Builtin.emplace { - rawComponent._projectReadOnly( + unsafe rawComponent._projectReadOnly( root, to: Value.self, endingWith: Value.self, @@ -363,39 +364,39 @@ public class KeyPath: PartialKeyPath { } } - let maxSize = buffer.maxSize + let maxSize = unsafe buffer.maxSize let roundedMaxSize = 1 &<< (Int.bitWidth &- maxSize.leadingZeroBitCount) // 16 is the max alignment allowed on practically every platform we deploy // to. - return _withUnprotectedUnsafeTemporaryAllocation( + return unsafe _withUnprotectedUnsafeTemporaryAllocation( byteCount: roundedMaxSize, alignment: 16 ) { - let currentValueBuffer = $0 + let currentValueBuffer = unsafe $0 - currentValueBuffer.withMemoryRebound(to: Root.self) { - $0.initializeElement(at: 0, to: root) + unsafe currentValueBuffer.withMemoryRebound(to: Root.self) { + unsafe $0.initializeElement(at: 0, to: root) } var currentType = rootType while true { - let (rawComponent, optNextType) = buffer.next() + let (rawComponent, optNextType) = unsafe buffer.next() let newType = optNextType ?? valueType let isLast = optNextType == nil var isBreak = false func projectCurrent(_: Current.Type) { func projectNew(_: New.Type) { - let base = currentValueBuffer.withMemoryRebound( + let base = unsafe currentValueBuffer.withMemoryRebound( to: Current.self ) { - $0.moveElement(from: 0) + unsafe $0.moveElement(from: 0) } - currentValueBuffer.withMemoryRebound(to: New.self) { - rawComponent._projectReadOnly( + unsafe currentValueBuffer.withMemoryRebound(to: New.self) { + unsafe rawComponent._projectReadOnly( base, to: New.self, endingWith: Value.self, @@ -426,8 +427,8 @@ public class KeyPath: PartialKeyPath { _openExistential(currentType, do: projectCurrent(_:)) if isLast || isBreak { - return currentValueBuffer.withMemoryRebound(to: Value.self) { - $0.moveElement(from: 0) + return unsafe currentValueBuffer.withMemoryRebound(to: Value.self) { + unsafe $0.moveElement(from: 0) } } } @@ -437,7 +438,7 @@ public class KeyPath: PartialKeyPath { deinit { #if !$Embedded - withBuffer { $0.destroy() } + withBuffer { unsafe $0.destroy() } #else fatalError() // unreachable, keypaths in embedded Swift are compile-time #endif @@ -463,34 +464,34 @@ public class WritableKeyPath: KeyPath { // Don't declare "p" above this if-statement; it may slow things down. if let offset = getOffsetFromStorage() { - let p = UnsafeRawPointer(base).advanced(by: offset) - return (pointer: UnsafeMutablePointer( + let p = unsafe UnsafeRawPointer(base).advanced(by: offset) + return unsafe (pointer: UnsafeMutablePointer( mutating: p.assumingMemoryBound(to: Value.self)), owner: nil) } - var p = UnsafeRawPointer(base) + var p = unsafe UnsafeRawPointer(base) var type: Any.Type = Root.self var keepAlive: AnyObject? 
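The stored-offset fast paths above (and the `_storedInlineOffset` property earlier, which the diff notes is surfaced publicly as `MemoryLayout.offset(of:)`) boil down to reading or addressing a value at a fixed byte offset from its root. A rough equivalent in terms of public API, with throwaway names and assuming a trivially stored property:

```swift
// Illustration only; `Point` and `loadY` are made-up names.
struct Point { var x: Double; var y: Double }

func loadY(from point: Point) -> Double {
  // For a stored property reachable through structs only, the key path
  // reduces to a byte offset within the root value.
  let offset = MemoryLayout<Point>.offset(of: \Point.y)!
  return unsafe withUnsafeBytes(of: point) { raw in
    unsafe raw.load(fromByteOffset: offset, as: Double.self)
  }
}
```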
return withBuffer { - var buffer = $0 + var buffer = unsafe $0 - _internalInvariant(!buffer.hasReferencePrefix, + unsafe _internalInvariant(!buffer.hasReferencePrefix, "WritableKeyPath should not have a reference prefix") - if buffer.data.isEmpty { - return ( + if unsafe buffer.data.isEmpty { + return unsafe ( UnsafeMutablePointer( mutating: p.assumingMemoryBound(to: Value.self)), nil) } while true { - let (rawComponent, optNextType) = buffer.next() + let (rawComponent, optNextType) = unsafe buffer.next() let nextType = optNextType ?? Value.self func project(_: CurValue.Type) { func project2(_: NewValue.Type) { - p = rawComponent._projectMutableAddress(p, + unsafe p = unsafe rawComponent._projectMutableAddress(p, from: CurValue.self, to: NewValue.self, isRoot: p == UnsafeRawPointer(base), @@ -506,8 +507,8 @@ public class WritableKeyPath: KeyPath { // TODO: With coroutines, it would be better to yield here, so that // we don't need the hack of the keepAlive reference to manage closing // accesses. - let typedPointer = p.assumingMemoryBound(to: Value.self) - return (pointer: UnsafeMutablePointer(mutating: typedPointer), + let typedPointer = unsafe p.assumingMemoryBound(to: Value.self) + return unsafe (pointer: UnsafeMutablePointer(mutating: typedPointer), owner: keepAlive) } } @@ -528,28 +529,27 @@ public class ReferenceWritableKeyPath< -> (pointer: UnsafeMutablePointer, owner: AnyObject?) { var keepAlive: AnyObject? let address: UnsafeMutablePointer = withBuffer { - var buffer = $0 + var buffer = unsafe $0 // Project out the reference prefix. - - let maxSize = buffer.maxSize + let maxSize = unsafe buffer.maxSize let roundedMaxSize = 1 &<< (Int.bitWidth &- maxSize.leadingZeroBitCount) // 16 is the max alignment allowed on practically every platform we deploy // to. 
- let base: Any = _withUnprotectedUnsafeTemporaryAllocation( + let base: Any = unsafe _withUnprotectedUnsafeTemporaryAllocation( byteCount: roundedMaxSize, alignment: 16 ) { var currentType: Any.Type = Root.self - let currentValueBuffer = $0 + let currentValueBuffer = unsafe $0 - currentValueBuffer.withMemoryRebound(to: Root.self) { - $0.initializeElement(at: 0, to: origBase) + unsafe currentValueBuffer.withMemoryRebound(to: Root.self) { + unsafe $0.initializeElement(at: 0, to: origBase) } - while buffer.hasReferencePrefix { - let (rawComponent, optNextType) = buffer.next() + while unsafe buffer.hasReferencePrefix { + let (rawComponent, optNextType) = unsafe buffer.next() _internalInvariant(optNextType != nil, "reference prefix should not go to end of buffer") let nextType = optNextType._unsafelyUnwrappedUnchecked @@ -558,14 +558,14 @@ public class ReferenceWritableKeyPath< func projectCurrent(_: Current.Type) { var isBreak = false - let base = currentValueBuffer.withMemoryRebound( + let base = unsafe currentValueBuffer.withMemoryRebound( to: Current.self ) { - $0.moveElement(from: 0) + unsafe $0.moveElement(from: 0) } - currentValueBuffer.withMemoryRebound(to: New.self) { - rawComponent._projectReadOnly( + unsafe currentValueBuffer.withMemoryRebound(to: New.self) { + unsafe rawComponent._projectReadOnly( base, to: New.self, endingWith: Value.self, @@ -588,8 +588,8 @@ public class ReferenceWritableKeyPath< } func projectCurrent(_: Current.Type) -> Any { - return currentValueBuffer.withMemoryRebound(to: Current.self) { - $0.moveElement(from: 0) + return unsafe currentValueBuffer.withMemoryRebound(to: Current.self) { + unsafe $0.moveElement(from: 0) } } @@ -601,15 +601,15 @@ public class ReferenceWritableKeyPath< func formalMutation(_ base: MutationRoot) -> UnsafeMutablePointer { var base2 = base - return withUnsafeBytes(of: &base2) { baseBytes in - var p = baseBytes.baseAddress.unsafelyUnwrapped + return unsafe withUnsafeBytes(of: &base2) { baseBytes in + var p = unsafe baseBytes.baseAddress.unsafelyUnwrapped var curType: Any.Type = MutationRoot.self while true { - let (rawComponent, optNextType) = buffer.next() + let (rawComponent, optNextType) = unsafe buffer.next() let nextType = optNextType ?? 
Value.self func project(_: CurValue.Type) { func project2(_: NewValue.Type) { - p = rawComponent._projectMutableAddress(p, + unsafe p = unsafe rawComponent._projectMutableAddress(p, from: CurValue.self, to: NewValue.self, isRoot: p == baseBytes.baseAddress, @@ -622,14 +622,14 @@ public class ReferenceWritableKeyPath< if optNextType == nil { break } curType = nextType } - let typedPointer = p.assumingMemoryBound(to: Value.self) - return UnsafeMutablePointer(mutating: typedPointer) + let typedPointer = unsafe p.assumingMemoryBound(to: Value.self) + return unsafe UnsafeMutablePointer(mutating: typedPointer) } } - return _openExistential(base, do: formalMutation(_:)) + return _openExistential(base, do: unsafe formalMutation(_:)) } - return (address, keepAlive) + return unsafe (address, keepAlive) } } @@ -675,6 +675,7 @@ internal struct ComputedPropertyID: Hashable { } @_unavailableInEmbedded +@unsafe internal struct ComputedAccessorsPtr { #if INTERNAL_CHECKS_ENABLED internal let header: RawKeyPathComponent.Header @@ -683,9 +684,9 @@ internal struct ComputedAccessorsPtr { init(header: RawKeyPathComponent.Header, value: UnsafeRawPointer) { #if INTERNAL_CHECKS_ENABLED - self.header = header + unsafe self.header = unsafe header #endif - self._value = value + unsafe self._value = unsafe value } @_transparent @@ -710,23 +711,23 @@ internal struct ComputedAccessorsPtr { internal var getterPtr: UnsafeRawPointer { #if INTERNAL_CHECKS_ENABLED - _internalInvariant(header.kind == .computed, + unsafe _internalInvariant(header.kind == .computed, "not a computed property") #endif - return _value + return unsafe _value } internal var setterPtr: UnsafeRawPointer { #if INTERNAL_CHECKS_ENABLED - _internalInvariant(header.isComputedSettable, + unsafe _internalInvariant(header.isComputedSettable, "not a settable property") #endif - return _value + MemoryLayout.size + return unsafe _value + MemoryLayout.size } internal func getter() -> Getter { - return getterPtr._loadAddressDiscriminatedFunctionPointer( + return unsafe getterPtr._loadAddressDiscriminatedFunctionPointer( as: Getter.self, discriminator: ComputedAccessorsPtr.getterPtrAuthKey) } @@ -734,11 +735,11 @@ internal struct ComputedAccessorsPtr { internal func nonmutatingSetter() -> NonmutatingSetter { #if INTERNAL_CHECKS_ENABLED - _internalInvariant(header.isComputedSettable && !header.isComputedMutating, + unsafe _internalInvariant(header.isComputedSettable && !header.isComputedMutating, "not a nonmutating settable property") #endif - return setterPtr._loadAddressDiscriminatedFunctionPointer( + return unsafe setterPtr._loadAddressDiscriminatedFunctionPointer( as: NonmutatingSetter.self, discriminator: ComputedAccessorsPtr.nonmutatingSetterPtrAuthKey) } @@ -746,22 +747,23 @@ internal struct ComputedAccessorsPtr { internal func mutatingSetter() -> MutatingSetter { #if INTERNAL_CHECKS_ENABLED - _internalInvariant(header.isComputedSettable && header.isComputedMutating, + unsafe _internalInvariant(header.isComputedSettable && header.isComputedMutating, "not a mutating settable property") #endif - return setterPtr._loadAddressDiscriminatedFunctionPointer( + return unsafe setterPtr._loadAddressDiscriminatedFunctionPointer( as: MutatingSetter.self, discriminator: ComputedAccessorsPtr.mutatingSetterPtrAuthKey) } } @_unavailableInEmbedded +@unsafe internal struct ComputedArgumentWitnessesPtr { internal let _value: UnsafeRawPointer init(_ value: UnsafeRawPointer) { - self._value = value + unsafe self._value = unsafe value } @_transparent @@ -808,24 +810,24 @@ internal 
struct ComputedArgumentWitnessesPtr { // pointers. internal var destroy: Destroy? { - return _value._loadAddressDiscriminatedFunctionPointer( + return unsafe _value._loadAddressDiscriminatedFunctionPointer( as: Optional.self, discriminator: ComputedArgumentWitnessesPtr.destroyPtrAuthKey) } internal var copy: Copy { - return _value._loadAddressDiscriminatedFunctionPointer( + return unsafe _value._loadAddressDiscriminatedFunctionPointer( fromByteOffset: MemoryLayout.size, as: Copy.self, discriminator: ComputedArgumentWitnessesPtr.copyPtrAuthKey) } internal var equals: Equals { - return _value._loadAddressDiscriminatedFunctionPointer( + return unsafe _value._loadAddressDiscriminatedFunctionPointer( fromByteOffset: 2*MemoryLayout.size, as: Equals.self, discriminator: ComputedArgumentWitnessesPtr.equalsPtrAuthKey) } internal var hash: Hash { - return _value._loadAddressDiscriminatedFunctionPointer( + return unsafe _value._loadAddressDiscriminatedFunctionPointer( fromByteOffset: 3*MemoryLayout.size, as: Hash.self, discriminator: ComputedArgumentWitnessesPtr.hashPtrAuthKey) @@ -833,7 +835,9 @@ internal struct ComputedArgumentWitnessesPtr { } @_unavailableInEmbedded +@unsafe internal enum KeyPathComponent { + @unsafe internal struct ArgumentRef { internal var data: UnsafeRawBufferPointer internal var witnesses: ComputedArgumentWitnessesPtr @@ -844,9 +848,9 @@ internal enum KeyPathComponent { witnesses: ComputedArgumentWitnessesPtr, witnessSizeAdjustment: Int ) { - self.data = data - self.witnesses = witnesses - self.witnessSizeAdjustment = witnessSizeAdjustment + unsafe self.data = unsafe data + unsafe self.witnesses = unsafe witnesses + unsafe self.witnessSizeAdjustment = witnessSizeAdjustment } } @@ -881,9 +885,9 @@ internal enum KeyPathComponent { } @_unavailableInEmbedded -extension KeyPathComponent: Hashable { +extension KeyPathComponent: @unsafe Hashable { internal static func ==(a: KeyPathComponent, b: KeyPathComponent) -> Bool { - switch (a, b) { + switch unsafe (a, b) { case (.struct(offset: let a), .struct(offset: let b)), (.class (offset: let a), .class (offset: let b)): return a == b @@ -902,8 +906,8 @@ extension KeyPathComponent: Hashable { if id1 != id2 { return false } - if let arg1 = argument1, let arg2 = argument2 { - return arg1.witnesses.equals( + if let arg1 = unsafe argument1, let arg2 = unsafe argument2 { + return unsafe arg1.witnesses.equals( arg1.data.baseAddress.unsafelyUnwrapped, arg2.data.baseAddress.unsafelyUnwrapped, arg1.data.count - arg1.witnessSizeAdjustment) @@ -929,8 +933,8 @@ extension KeyPathComponent: Hashable { func appendHashFromArgument( _ argument: KeyPathComponent.ArgumentRef? 
) { - if let argument = argument { - let hash = argument.witnesses.hash( + if let argument = unsafe argument { + let hash = unsafe argument.witnesses.hash( argument.data.baseAddress.unsafelyUnwrapped, argument.data.count - argument.witnessSizeAdjustment) // Returning 0 indicates that the arguments should not impact the @@ -941,7 +945,7 @@ extension KeyPathComponent: Hashable { } } } - switch self { + switch unsafe self { case .struct(offset: let a): hasher.combine(0) hasher.combine(a) @@ -957,15 +961,15 @@ extension KeyPathComponent: Hashable { case .get(id: let id, accessors: _, argument: let argument): hasher.combine(5) hasher.combine(id) - appendHashFromArgument(argument) + unsafe appendHashFromArgument(argument) case .mutatingGetSet(id: let id, accessors: _, argument: let argument): hasher.combine(6) hasher.combine(id) - appendHashFromArgument(argument) + unsafe appendHashFromArgument(argument) case .nonmutatingGetSet(id: let id, accessors: _, argument: let argument): hasher.combine(7) hasher.combine(id) - appendHashFromArgument(argument) + unsafe appendHashFromArgument(argument) } } } @@ -1006,12 +1010,12 @@ internal final class ClassHolder { // withUnsafeMutablePointer(to:) because the instance was just allocated with // allocWithTailElems_1 and so we need to make sure to use an initialization // rather than an assignment. - withUnsafeMutablePointer(to: &holder.previous) { - $0.initialize(to: previous) + unsafe withUnsafeMutablePointer(to: &holder.previous) { + unsafe $0.initialize(to: previous) } - withUnsafeMutablePointer(to: &holder.instance) { - $0.initialize(to: instance) + unsafe withUnsafeMutablePointer(to: &holder.instance) { + unsafe $0.initialize(to: instance) } let accessRecordPtr = Builtin.projectTailElems(holder, AccessRecord.self) @@ -1033,6 +1037,7 @@ internal final class ClassHolder { // A class that triggers writeback to a pointer when destroyed. @_unavailableInEmbedded +@unsafe internal final class MutatingWritebackBuffer { internal let previous: AnyObject? internal let base: UnsafeMutablePointer @@ -1042,7 +1047,7 @@ internal final class MutatingWritebackBuffer { internal var value: NewValue deinit { - set(value, &base.pointee, argument, argumentSize) + unsafe set(value, &base.pointee, argument, argumentSize) } internal init(previous: AnyObject?, @@ -1051,17 +1056,18 @@ internal final class MutatingWritebackBuffer { argument: UnsafeRawPointer, argumentSize: Int, value: NewValue) { - self.previous = previous - self.base = base - self.set = set - self.argument = argument - self.argumentSize = argumentSize - self.value = value + unsafe self.previous = previous + unsafe self.base = unsafe base + unsafe self.set = unsafe set + unsafe self.argument = unsafe argument + unsafe self.argumentSize = argumentSize + unsafe self.value = value } } // A class that triggers writeback to a non-mutated value when destroyed. @_unavailableInEmbedded +@unsafe internal final class NonmutatingWritebackBuffer { internal let previous: AnyObject? 
internal let base: CurValue @@ -1071,7 +1077,7 @@ internal final class NonmutatingWritebackBuffer { internal var value: NewValue deinit { - set(value, base, argument, argumentSize) + unsafe set(value, base, argument, argumentSize) } internal @@ -1081,12 +1087,12 @@ internal final class NonmutatingWritebackBuffer { argument: UnsafeRawPointer, argumentSize: Int, value: NewValue) { - self.previous = previous - self.base = base - self.set = set - self.argument = argument - self.argumentSize = argumentSize - self.value = value + unsafe self.previous = previous + unsafe self.base = base + unsafe self.set = unsafe set + unsafe self.argument = unsafe argument + unsafe self.argumentSize = argumentSize + unsafe self.value = value } } @@ -1110,13 +1116,14 @@ internal enum KeyPathComputedIDResolution { } @_unavailableInEmbedded +@unsafe internal struct RawKeyPathComponent { internal var header: Header internal var body: UnsafeRawBufferPointer internal init(header: Header, body: UnsafeRawBufferPointer) { - self.header = header - self.body = body + unsafe self.header = unsafe header + unsafe self.body = unsafe body } @_transparent @@ -1128,74 +1135,74 @@ internal struct RawKeyPathComponent { internal var _value: UInt32 init(discriminator: UInt32, payload: UInt32) { - _value = 0 - self.discriminator = discriminator - self.payload = payload + unsafe _value = 0 + unsafe self.discriminator = discriminator + unsafe self.payload = payload } internal var discriminator: UInt32 { get { - return (_value & Header.discriminatorMask) &>> Header.discriminatorShift + return unsafe (_value & Header.discriminatorMask) &>> Header.discriminatorShift } set { - let shifted = newValue &<< Header.discriminatorShift - _internalInvariant(shifted & Header.discriminatorMask == shifted, + let shifted = unsafe newValue &<< Header.discriminatorShift + unsafe _internalInvariant(shifted & Header.discriminatorMask == shifted, "discriminator doesn't fit") - _value = _value & ~Header.discriminatorMask | shifted + unsafe _value = unsafe _value & ~Header.discriminatorMask | shifted } } internal var payload: UInt32 { get { - return _value & Header.payloadMask + return unsafe _value & Header.payloadMask } set { - _internalInvariant(newValue & Header.payloadMask == newValue, + unsafe _internalInvariant(newValue & Header.payloadMask == newValue, "payload too big") - _value = _value & ~Header.payloadMask | newValue + unsafe _value = unsafe _value & ~Header.payloadMask | newValue } } internal var storedOffsetPayload: UInt32 { get { - _internalInvariant(kind == .struct || kind == .class, + unsafe _internalInvariant(kind == .struct || kind == .class, "not a stored component") - return _value & Header.storedOffsetPayloadMask + return unsafe _value & Header.storedOffsetPayloadMask } set { - _internalInvariant(kind == .struct || kind == .class, + unsafe _internalInvariant(kind == .struct || kind == .class, "not a stored component") - _internalInvariant(newValue & Header.storedOffsetPayloadMask == newValue, + unsafe _internalInvariant(newValue & Header.storedOffsetPayloadMask == newValue, "payload too big") - _value = _value & ~Header.storedOffsetPayloadMask | newValue + unsafe _value = unsafe _value & ~Header.storedOffsetPayloadMask | newValue } } internal var endOfReferencePrefix: Bool { get { - return _value & Header.endOfReferencePrefixFlag != 0 + return unsafe _value & Header.endOfReferencePrefixFlag != 0 } set { if newValue { - _value |= Header.endOfReferencePrefixFlag + unsafe _value |= Header.endOfReferencePrefixFlag } else { - _value &= 
~Header.endOfReferencePrefixFlag + unsafe _value &= ~Header.endOfReferencePrefixFlag } } } internal var kind: KeyPathComponentKind { - switch (discriminator, payload) { - case (Header.externalTag, _): + switch unsafe (discriminator, payload) { + case (unsafe Header.externalTag, _): return .external - case (Header.structTag, _): + case (unsafe Header.structTag, _): return .struct - case (Header.classTag, _): + case (unsafe Header.classTag, _): return .class - case (Header.computedTag, _): + case (unsafe Header.computedTag, _): return .computed - case (Header.optionalTag, Header.optionalChainPayload): + case (unsafe Header.optionalTag, unsafe Header.optionalChainPayload): return .optionalChain - case (Header.optionalTag, Header.optionalWrapPayload): + case (unsafe Header.optionalTag, unsafe Header.optionalWrapPayload): return .optionalWrap - case (Header.optionalTag, Header.optionalForcePayload): + case (unsafe Header.optionalTag, unsafe Header.optionalForcePayload): return .optionalForce default: _internalInvariantFailure("invalid header") @@ -1259,24 +1266,24 @@ internal struct RawKeyPathComponent { } internal var isStoredMutable: Bool { - _internalInvariant(kind == .struct || kind == .class) - return _value & Header.storedMutableFlag != 0 + unsafe _internalInvariant(kind == .struct || kind == .class) + return unsafe _value & Header.storedMutableFlag != 0 } internal static var computedMutatingFlag: UInt32 { return _SwiftKeyPathComponentHeader_ComputedMutatingFlag } internal var isComputedMutating: Bool { - _internalInvariant(kind == .computed) - return _value & Header.computedMutatingFlag != 0 + unsafe _internalInvariant(kind == .computed) + return unsafe _value & Header.computedMutatingFlag != 0 } internal static var computedSettableFlag: UInt32 { return _SwiftKeyPathComponentHeader_ComputedSettableFlag } internal var isComputedSettable: Bool { - _internalInvariant(kind == .computed) - return _value & Header.computedSettableFlag != 0 + unsafe _internalInvariant(kind == .computed) + return unsafe _value & Header.computedSettableFlag != 0 } internal static var computedIDByStoredPropertyFlag: UInt32 { @@ -1286,8 +1293,8 @@ internal struct RawKeyPathComponent { return _SwiftKeyPathComponentHeader_ComputedIDByVTableOffsetFlag } internal var computedIDKind: KeyPathComputedIDKind { - let storedProperty = _value & Header.computedIDByStoredPropertyFlag != 0 - let vtableOffset = _value & Header.computedIDByVTableOffsetFlag != 0 + let storedProperty = unsafe _value & Header.computedIDByStoredPropertyFlag != 0 + let vtableOffset = unsafe _value & Header.computedIDByVTableOffsetFlag != 0 switch (storedProperty, vtableOffset) { case (true, true): @@ -1305,8 +1312,8 @@ internal struct RawKeyPathComponent { return _SwiftKeyPathComponentHeader_ComputedHasArgumentsFlag } internal var hasComputedArguments: Bool { - _internalInvariant(kind == .computed) - return _value & Header.computedHasArgumentsFlag != 0 + unsafe _internalInvariant(kind == .computed) + return unsafe _value & Header.computedHasArgumentsFlag != 0 } // If a computed component is instantiated from an external property @@ -1318,14 +1325,14 @@ internal struct RawKeyPathComponent { } internal var isComputedInstantiatedFromExternalWithArguments: Bool { get { - _internalInvariant(kind == .computed) + unsafe _internalInvariant(kind == .computed) return - _value & Header.computedInstantiatedFromExternalWithArgumentsFlag != 0 + unsafe _value & Header.computedInstantiatedFromExternalWithArgumentsFlag != 0 } set { - _internalInvariant(kind == 
.computed) - _value = - _value & ~Header.computedInstantiatedFromExternalWithArgumentsFlag + unsafe _internalInvariant(kind == .computed) + unsafe _value = + unsafe _value & ~Header.computedInstantiatedFromExternalWithArgumentsFlag | (newValue ? Header.computedInstantiatedFromExternalWithArgumentsFlag : 0) } @@ -1350,14 +1357,14 @@ internal struct RawKeyPathComponent { return _SwiftKeyPathComponentHeader_ComputedIDUnresolvedFunctionCall } internal var computedIDResolution: KeyPathComputedIDResolution { - switch payload & Header.computedIDResolutionMask { - case Header.computedIDResolved: + switch unsafe payload & Header.computedIDResolutionMask { + case unsafe Header.computedIDResolved: return .resolved - case Header.computedIDResolvedAbsolute: + case unsafe Header.computedIDResolvedAbsolute: return .resolvedAbsolute - case Header.computedIDUnresolvedIndirectPointer: + case unsafe Header.computedIDUnresolvedIndirectPointer: return .indirectPointer - case Header.computedIDUnresolvedFunctionCall: + case unsafe Header.computedIDUnresolvedFunctionCall: return .functionCall default: _internalInvariantFailure("invalid key path resolution") @@ -1371,27 +1378,27 @@ internal struct RawKeyPathComponent { } internal var isTrivialPropertyDescriptor: Bool { - return _value == + return unsafe _value == _SwiftKeyPathComponentHeader_TrivialPropertyDescriptorMarker } /// If this is the header for a component in a key path pattern, return /// the size of the body of the component. internal var patternComponentBodySize: Int { - return _componentBodySize(forPropertyDescriptor: false) + return unsafe _componentBodySize(forPropertyDescriptor: false) } /// If this is the header for a property descriptor, return /// the size of the body of the component. internal var propertyDescriptorBodySize: Int { - if isTrivialPropertyDescriptor { return 0 } - return _componentBodySize(forPropertyDescriptor: true) + if unsafe isTrivialPropertyDescriptor { return 0 } + return unsafe _componentBodySize(forPropertyDescriptor: true) } internal func _componentBodySize(forPropertyDescriptor: Bool) -> Int { - switch kind { + switch unsafe kind { case .struct, .class: - if storedOffsetPayload == Header.unresolvedFieldOffsetPayload + if unsafe storedOffsetPayload == Header.unresolvedFieldOffsetPayload || storedOffsetPayload == Header.outOfLineOffsetPayload || storedOffsetPayload == Header.unresolvedIndirectOffsetPayload { // A 32-bit offset is stored in the body. @@ -1404,19 +1411,19 @@ internal struct RawKeyPathComponent { // The body holds a pointer to the external property descriptor, // and some number of substitution arguments, the count of which is // in the payload. - return 4 &* (1 &+ Int(payload)) + return unsafe 4 &* (1 &+ Int(payload)) case .computed: // The body holds at minimum the id and getter. var size = 8 // If settable, it also holds the setter. - if isComputedSettable { + if unsafe isComputedSettable { size &+= 4 } // If there are arguments, there's also a layout function, // witness table, and initializer function. // Property descriptors never carry argument information, though. 
- if !forPropertyDescriptor && hasComputedArguments { + if unsafe !forPropertyDescriptor && hasComputedArguments { size &+= 12 } @@ -1429,17 +1436,17 @@ internal struct RawKeyPathComponent { } init(optionalForce: ()) { - self.init(discriminator: Header.optionalTag, + unsafe self.init(discriminator: Header.optionalTag, payload: Header.optionalForcePayload) } init(optionalWrap: ()) { - self.init(discriminator: Header.optionalTag, + unsafe self.init(discriminator: Header.optionalTag, payload: Header.optionalWrapPayload) } init(optionalChain: ()) { - self.init(discriminator: Header.optionalTag, + unsafe self.init(discriminator: Header.optionalTag, payload: Header.optionalChainPayload) } @@ -1448,14 +1455,14 @@ internal struct RawKeyPathComponent { inlineOffset: UInt32) { let discriminator: UInt32 switch kind { - case .struct: discriminator = Header.structTag - case .class: discriminator = Header.classTag + case .struct: discriminator = unsafe Header.structTag + case .class: discriminator = unsafe Header.classTag } - _internalInvariant(inlineOffset <= Header.maximumOffsetPayload) - let payload = inlineOffset + unsafe _internalInvariant(inlineOffset <= Header.maximumOffsetPayload) + let payload = unsafe inlineOffset | (mutable ? Header.storedMutableFlag : 0) - self.init(discriminator: discriminator, + unsafe self.init(discriminator: discriminator, payload: payload) } @@ -1463,14 +1470,14 @@ internal struct RawKeyPathComponent { mutable: Bool) { let discriminator: UInt32 switch kind { - case .struct: discriminator = Header.structTag - case .class: discriminator = Header.classTag + case .struct: discriminator = unsafe Header.structTag + case .class: discriminator = unsafe Header.classTag } - let payload = Header.outOfLineOffsetPayload + let payload = unsafe Header.outOfLineOffsetPayload | (mutable ? Header.storedMutableFlag : 0) - self.init(discriminator: discriminator, + unsafe self.init(discriminator: discriminator, payload: payload) } @@ -1479,9 +1486,9 @@ internal struct RawKeyPathComponent { settable: Bool, hasArguments: Bool, instantiatedFromExternalWithArguments: Bool) { - let discriminator = Header.computedTag + let discriminator = unsafe Header.computedTag var payload = - (mutating ? Header.computedMutatingFlag : 0) + unsafe (mutating ? Header.computedMutatingFlag : 0) | (settable ? Header.computedSettableFlag : 0) | (hasArguments ? 
Header.computedHasArgumentsFlag : 0) | (instantiatedFromExternalWithArguments @@ -1490,20 +1497,20 @@ internal struct RawKeyPathComponent { case .pointer: break case .storedPropertyIndex: - payload |= Header.computedIDByStoredPropertyFlag + unsafe payload |= Header.computedIDByStoredPropertyFlag case .vtableOffset: - payload |= Header.computedIDByVTableOffsetFlag + unsafe payload |= Header.computedIDByVTableOffsetFlag } - self.init(discriminator: discriminator, + unsafe self.init(discriminator: discriminator, payload: payload) } } internal var bodySize: Int { let ptrSize = MemoryLayout.size - switch header.kind { + switch unsafe header.kind { case .struct, .class: - if header.storedOffsetPayload == Header.outOfLineOffsetPayload { + if unsafe header.storedOffsetPayload == Header.outOfLineOffsetPayload { return 4 // overflowed } return 0 @@ -1513,19 +1520,19 @@ internal struct RawKeyPathComponent { return 0 case .computed: // align to pointer, minimum two pointers for id and get - var total = Header.pointerAlignmentSkew &+ ptrSize &* 2 + var total = unsafe Header.pointerAlignmentSkew &+ ptrSize &* 2 // additional word for a setter - if header.isComputedSettable { + if unsafe header.isComputedSettable { total &+= ptrSize } // include the argument size - if header.hasComputedArguments { + if unsafe header.hasComputedArguments { // two words for argument header: size, witnesses total &+= ptrSize &* 2 // size of argument area - total &+= _computedArgumentSize - if header.isComputedInstantiatedFromExternalWithArguments { - total &+= Header.externalWithArgumentsExtraSize + unsafe total &+= _computedArgumentSize + if unsafe header.isComputedInstantiatedFromExternalWithArguments { + unsafe total &+= Header.externalWithArgumentsExtraSize } } return total @@ -1533,81 +1540,81 @@ internal struct RawKeyPathComponent { } internal var _structOrClassOffset: Int { - _internalInvariant(header.kind == .struct || header.kind == .class, + unsafe _internalInvariant(header.kind == .struct || header.kind == .class, "no offset for this kind") // An offset too large to fit inline is represented by a signal and stored // in the body. 
- if header.storedOffsetPayload == Header.outOfLineOffsetPayload { + if unsafe header.storedOffsetPayload == Header.outOfLineOffsetPayload { // Offset overflowed into body - _internalInvariant(body.count >= MemoryLayout.size, + unsafe _internalInvariant(body.count >= MemoryLayout.size, "component not big enough") - return Int(truncatingIfNeeded: body.load(as: UInt32.self)) + return Int(truncatingIfNeeded: unsafe body.load(as: UInt32.self)) } - return Int(truncatingIfNeeded: header.storedOffsetPayload) + return unsafe Int(truncatingIfNeeded: header.storedOffsetPayload) } internal var _computedIDValue: Int { - _internalInvariant(header.kind == .computed, + unsafe _internalInvariant(header.kind == .computed, "not a computed property") - return body.load(fromByteOffset: Header.pointerAlignmentSkew, + return unsafe body.load(fromByteOffset: Header.pointerAlignmentSkew, as: Int.self) } internal var _computedID: ComputedPropertyID { - _internalInvariant(header.kind == .computed, + unsafe _internalInvariant(header.kind == .computed, "not a computed property") - return ComputedPropertyID( + return unsafe ComputedPropertyID( value: _computedIDValue, kind: header.computedIDKind) } internal var _computedAccessors: ComputedAccessorsPtr { - _internalInvariant(header.kind == .computed, + unsafe _internalInvariant(header.kind == .computed, "not a computed property") - return ComputedAccessorsPtr( + return unsafe ComputedAccessorsPtr( header: header, value: body.baseAddress._unsafelyUnwrappedUnchecked + Header.pointerAlignmentSkew + MemoryLayout.size) } internal var _computedArgumentHeaderPointer: UnsafeRawPointer { - _internalInvariant(header.hasComputedArguments, "no arguments") + unsafe _internalInvariant(header.hasComputedArguments, "no arguments") - return body.baseAddress._unsafelyUnwrappedUnchecked + return unsafe body.baseAddress._unsafelyUnwrappedUnchecked + Header.pointerAlignmentSkew + MemoryLayout.size &* (header.isComputedSettable ? 3 : 2) } internal var _computedArgumentSize: Int { - return _computedArgumentHeaderPointer.load(as: Int.self) + return unsafe _computedArgumentHeaderPointer.load(as: Int.self) } internal var _computedArgumentWitnesses: ComputedArgumentWitnessesPtr { - return _computedArgumentHeaderPointer.load( + return unsafe _computedArgumentHeaderPointer.load( fromByteOffset: MemoryLayout.size, as: ComputedArgumentWitnessesPtr.self) } internal var _computedArguments: UnsafeRawPointer { - var base = _computedArgumentHeaderPointer + MemoryLayout.size &* 2 + var base = unsafe _computedArgumentHeaderPointer + MemoryLayout.size &* 2 // If the component was instantiated from an external property descriptor // with its own arguments, we include some additional capture info to // be able to map to the original argument context by adjusting the size // passed to the witness operations. 
- if header.isComputedInstantiatedFromExternalWithArguments { - base += Header.externalWithArgumentsExtraSize + if unsafe header.isComputedInstantiatedFromExternalWithArguments { + unsafe base += Header.externalWithArgumentsExtraSize } - return base + return unsafe base } internal var _computedMutableArguments: UnsafeMutableRawPointer { - return UnsafeMutableRawPointer(mutating: _computedArguments) + return unsafe UnsafeMutableRawPointer(mutating: _computedArguments) } internal var _computedArgumentWitnessSizeAdjustment: Int { - if header.isComputedInstantiatedFromExternalWithArguments { - return _computedArguments.load( + if unsafe header.isComputedInstantiatedFromExternalWithArguments { + return unsafe _computedArguments.load( fromByteOffset: 0 &- Header.externalWithArgumentsExtraSize, as: Int.self) } @@ -1615,44 +1622,44 @@ internal struct RawKeyPathComponent { } internal var value: KeyPathComponent { - switch header.kind { + switch unsafe header.kind { case .struct: - return .struct(offset: _structOrClassOffset) + return unsafe .struct(offset: _structOrClassOffset) case .class: - return .class(offset: _structOrClassOffset) + return unsafe .class(offset: _structOrClassOffset) case .optionalChain: - return .optionalChain + return unsafe .optionalChain case .optionalForce: - return .optionalForce + return unsafe .optionalForce case .optionalWrap: - return .optionalWrap + return unsafe .optionalWrap case .computed: - let isSettable = header.isComputedSettable - let isMutating = header.isComputedMutating + let isSettable = unsafe header.isComputedSettable + let isMutating = unsafe header.isComputedMutating - let id = _computedID - let accessors = _computedAccessors + let id = unsafe _computedID + let accessors = unsafe _computedAccessors // Argument value is unused if there are no arguments. let argument: KeyPathComponent.ArgumentRef? 
- if header.hasComputedArguments { - argument = KeyPathComponent.ArgumentRef( + if unsafe header.hasComputedArguments { + unsafe argument = unsafe KeyPathComponent.ArgumentRef( data: UnsafeRawBufferPointer(start: _computedArguments, count: _computedArgumentSize), witnesses: _computedArgumentWitnesses, witnessSizeAdjustment: _computedArgumentWitnessSizeAdjustment) } else { - argument = nil + unsafe argument = nil } switch (isSettable, isMutating) { case (false, false): - return .get(id: id, accessors: accessors, argument: argument) + return unsafe .get(id: id, accessors: accessors, argument: argument) case (true, false): - return .nonmutatingGetSet(id: id, + return unsafe .nonmutatingGetSet(id: id, accessors: accessors, argument: argument) case (true, true): - return .mutatingGetSet(id: id, + return unsafe .mutatingGetSet(id: id, accessors: accessors, argument: argument) case (false, true): @@ -1664,7 +1671,7 @@ internal struct RawKeyPathComponent { } internal func destroy() { - switch header.kind { + switch unsafe header.kind { case .struct, .class, .optionalChain, @@ -1674,9 +1681,9 @@ internal struct RawKeyPathComponent { break case .computed: // Run destructor, if any - if header.hasComputedArguments, - let destructor = _computedArgumentWitnesses.destroy { - destructor(_computedMutableArguments, + if unsafe header.hasComputedArguments, + let destructor = unsafe _computedArgumentWitnesses.destroy { + unsafe destructor(_computedMutableArguments, _computedArgumentSize &- _computedArgumentWitnessSizeAdjustment) } case .external: @@ -1686,17 +1693,17 @@ internal struct RawKeyPathComponent { internal func clone(into buffer: inout UnsafeMutableRawBufferPointer, endOfReferencePrefix: Bool) { - var newHeader = header - newHeader.endOfReferencePrefix = endOfReferencePrefix + var newHeader = unsafe header + unsafe newHeader.endOfReferencePrefix = endOfReferencePrefix - var componentSize = MemoryLayout
<Header>.size - buffer.storeBytes(of: newHeader, as: Header.self) - switch header.kind { + var componentSize = unsafe MemoryLayout<Header>
.size + unsafe buffer.storeBytes(of: newHeader, as: Header.self) + switch unsafe header.kind { case .struct, .class: - if header.storedOffsetPayload == Header.outOfLineOffsetPayload { - let overflowOffset = body.load(as: UInt32.self) - buffer.storeBytes(of: overflowOffset, toByteOffset: 4, + if unsafe header.storedOffsetPayload == Header.outOfLineOffsetPayload { + let overflowOffset = unsafe body.load(as: UInt32.self) + unsafe buffer.storeBytes(of: overflowOffset, toByteOffset: 4, as: UInt32.self) componentSize += 4 } @@ -1706,22 +1713,22 @@ internal struct RawKeyPathComponent { break case .computed: // Fields are pointer-aligned after the header - componentSize += Header.pointerAlignmentSkew - buffer.storeBytes(of: _computedIDValue, + unsafe componentSize += Header.pointerAlignmentSkew + unsafe buffer.storeBytes(of: _computedIDValue, toByteOffset: componentSize, as: Int.self) componentSize += MemoryLayout.size - let accessors = _computedAccessors + let accessors = unsafe _computedAccessors - (buffer.baseAddress.unsafelyUnwrapped + MemoryLayout.size * 2) + unsafe (buffer.baseAddress.unsafelyUnwrapped + MemoryLayout.size * 2) ._copyAddressDiscriminatedFunctionPointer( from: accessors.getterPtr, discriminator: ComputedAccessorsPtr.getterPtrAuthKey) componentSize += MemoryLayout.size - if header.isComputedSettable { - (buffer.baseAddress.unsafelyUnwrapped + MemoryLayout.size * 3) + if unsafe header.isComputedSettable { + unsafe (buffer.baseAddress.unsafelyUnwrapped + MemoryLayout.size * 3) ._copyAddressDiscriminatedFunctionPointer( from: accessors.setterPtr, discriminator: header.isComputedMutating @@ -1730,37 +1737,37 @@ internal struct RawKeyPathComponent { componentSize += MemoryLayout.size } - if header.hasComputedArguments { - let arguments = _computedArguments - let argumentSize = _computedArgumentSize - buffer.storeBytes(of: argumentSize, + if unsafe header.hasComputedArguments { + let arguments = unsafe _computedArguments + let argumentSize = unsafe _computedArgumentSize + unsafe buffer.storeBytes(of: argumentSize, toByteOffset: componentSize, as: Int.self) componentSize += MemoryLayout.size - buffer.storeBytes(of: _computedArgumentWitnesses, + unsafe buffer.storeBytes(of: _computedArgumentWitnesses, toByteOffset: componentSize, as: ComputedArgumentWitnessesPtr.self) componentSize += MemoryLayout.size - if header.isComputedInstantiatedFromExternalWithArguments { + if unsafe header.isComputedInstantiatedFromExternalWithArguments { // Include the extra matter for components instantiated from // external property descriptors with arguments. - buffer.storeBytes(of: _computedArgumentWitnessSizeAdjustment, + unsafe buffer.storeBytes(of: _computedArgumentWitnessSizeAdjustment, toByteOffset: componentSize, as: Int.self) componentSize += MemoryLayout.size } - let adjustedSize = argumentSize - _computedArgumentWitnessSizeAdjustment + let adjustedSize = unsafe argumentSize - _computedArgumentWitnessSizeAdjustment let argumentDest = - buffer.baseAddress.unsafelyUnwrapped + componentSize - _computedArgumentWitnesses.copy( + unsafe buffer.baseAddress.unsafelyUnwrapped + componentSize + unsafe _computedArgumentWitnesses.copy( arguments, argumentDest, adjustedSize) - if header.isComputedInstantiatedFromExternalWithArguments { + if unsafe header.isComputedInstantiatedFromExternalWithArguments { // The extra information for external property descriptor arguments // can always be memcpy'd. 
- _memcpy(dest: argumentDest + adjustedSize, + unsafe _memcpy(dest: argumentDest + adjustedSize, src: arguments + adjustedSize, size: UInt(_computedArgumentWitnessSizeAdjustment)) } @@ -1771,7 +1778,7 @@ internal struct RawKeyPathComponent { case .external: _internalInvariantFailure("should have been instantiated away") } - buffer = UnsafeMutableRawBufferPointer( + unsafe buffer = unsafe UnsafeMutableRawBufferPointer( start: buffer.baseAddress.unsafelyUnwrapped + componentSize, count: buffer.count - componentSize) } @@ -1783,14 +1790,14 @@ internal struct RawKeyPathComponent { _ isBreak: inout Bool, pointer: UnsafeMutablePointer ) { - switch value { + switch unsafe value { case .struct(let offset): - _withUnprotectedUnsafeBytes(of: base) { - let p = $0.baseAddress._unsafelyUnwrappedUnchecked + offset + unsafe _withUnprotectedUnsafeBytes(of: base) { + let p = unsafe $0.baseAddress._unsafelyUnwrappedUnchecked + offset // The contents of the struct should be well-typed, so we can assume // typed memory here. - pointer.initialize(to: p.assumingMemoryBound(to: NewValue.self).pointee) + unsafe pointer.initialize(to: p.assumingMemoryBound(to: NewValue.self).pointee) } case .class(let offset): @@ -1800,7 +1807,7 @@ internal struct RawKeyPathComponent { let basePtr = UnsafeRawPointer(Builtin.bridgeToRawPointer(baseObj)) defer { _fixLifetime(baseObj) } - let offsetAddress = basePtr.advanced(by: offset) + let offsetAddress = unsafe basePtr.advanced(by: offset) // Perform an instantaneous record access on the address in order to // ensure that the read will not conflict with an already in-progress @@ -1808,16 +1815,16 @@ internal struct RawKeyPathComponent { Builtin.performInstantaneousReadAccess(offsetAddress._rawValue, NewValue.self) - pointer.initialize( + unsafe pointer.initialize( to: offsetAddress.assumingMemoryBound(to: NewValue.self).pointee ) case .get(id: _, accessors: let accessors, argument: let argument), .mutatingGetSet(id: _, accessors: let accessors, argument: let argument), .nonmutatingGetSet(id: _, accessors: let accessors, argument: let argument): - let getter: ComputedAccessorsPtr.Getter = accessors.getter() + let getter: ComputedAccessorsPtr.Getter = unsafe accessors.getter() - pointer.initialize( + unsafe pointer.initialize( to: getter( base, argument?.data.baseAddress ?? accessors._value, @@ -1837,7 +1844,7 @@ internal struct RawKeyPathComponent { if _fastPath(tag == 0) { // Optional "shares" a layout with its Wrapped type meaning we can // reinterpret the base address as an address to its Wrapped value. - pointer.initialize(to: Builtin.reinterpretCast(base)) + unsafe pointer.initialize(to: Builtin.reinterpretCast(base)) return } @@ -1846,8 +1853,8 @@ internal struct RawKeyPathComponent { // Initialize the leaf optional value by simply injecting the tag (which // we've found to be 1) directly. - pointer.withMemoryRebound(to: LeafValue.self, capacity: 1) { - Builtin.injectEnumTag( + unsafe pointer.withMemoryRebound(to: LeafValue.self, capacity: 1) { + unsafe Builtin.injectEnumTag( &$0.pointee, tag._value ) @@ -1863,7 +1870,7 @@ internal struct RawKeyPathComponent { if _fastPath(tag == 0) { // Optional "shares" a layout with its Wrapped type meaning we can // reinterpret the base address as an address to its Wrapped value. 
- pointer.initialize(to: Builtin.reinterpretCast(base)) + unsafe pointer.initialize(to: Builtin.reinterpretCast(base)) return } @@ -1878,7 +1885,7 @@ internal struct RawKeyPathComponent { let tag: UInt32 = 0 Builtin.injectEnumTag(&new, tag._value) - pointer.initialize(to: new) + unsafe pointer.initialize(to: new) } } @@ -1889,9 +1896,9 @@ internal struct RawKeyPathComponent { isRoot: Bool, keepAlive: inout AnyObject? ) -> UnsafeRawPointer { - switch value { + switch unsafe value { case .struct(let offset): - return base.advanced(by: offset) + return unsafe base.advanced(by: offset) case .class(let offset): // A class dereference should only occur at the root of a mutation, // since otherwise it would be part of the reference prefix. @@ -1899,36 +1906,36 @@ internal struct RawKeyPathComponent { "class component should not appear in the middle of mutation") // AnyObject memory can alias any class reference memory, so we can // assume type here - let object = base.assumingMemoryBound(to: AnyObject.self).pointee - let offsetAddress = UnsafeRawPointer(Builtin.bridgeToRawPointer(object)) + let object = unsafe base.assumingMemoryBound(to: AnyObject.self).pointee + let offsetAddress = unsafe UnsafeRawPointer(Builtin.bridgeToRawPointer(object)) .advanced(by: offset) // Keep the base alive for the duration of the derived access and also // enforce exclusive access to the address. - keepAlive = ClassHolder._create(previous: keepAlive, instance: object, + keepAlive = unsafe ClassHolder._create(previous: keepAlive, instance: object, accessingAddress: offsetAddress, type: NewValue.self) - return offsetAddress + return unsafe offsetAddress case .mutatingGetSet(id: _, accessors: let accessors, argument: let argument): - let baseTyped = UnsafeMutablePointer( + let baseTyped = unsafe UnsafeMutablePointer( mutating: base.assumingMemoryBound(to: CurValue.self)) - let argValue = argument?.data.baseAddress ?? accessors._value - let argSize = argument?.data.count ?? 0 - let writeback = MutatingWritebackBuffer( + let argValue = unsafe argument?.data.baseAddress ?? accessors._value + let argSize = unsafe argument?.data.count ?? 0 + let writeback = unsafe MutatingWritebackBuffer( previous: keepAlive, base: baseTyped, set: accessors.mutatingSetter(), argument: argValue, argumentSize: argSize, value: accessors.getter()(baseTyped.pointee, argValue, argSize)) - keepAlive = writeback + keepAlive = unsafe writeback // A maximally-abstracted, final, stored class property should have // a stable address. - return UnsafeRawPointer(Builtin.addressof(&writeback.value)) + return unsafe UnsafeRawPointer(Builtin.addressof(&writeback.value)) case .nonmutatingGetSet(id: _, accessors: let accessors, argument: let argument): @@ -1937,20 +1944,20 @@ internal struct RawKeyPathComponent { _internalInvariant(isRoot, "nonmutating component should not appear in the middle of mutation") - let baseValue = base.assumingMemoryBound(to: CurValue.self).pointee - let argValue = argument?.data.baseAddress ?? accessors._value - let argSize = argument?.data.count ?? 0 - let writeback = NonmutatingWritebackBuffer( + let baseValue = unsafe base.assumingMemoryBound(to: CurValue.self).pointee + let argValue = unsafe argument?.data.baseAddress ?? accessors._value + let argSize = unsafe argument?.data.count ?? 
0 + let writeback = unsafe NonmutatingWritebackBuffer( previous: keepAlive, base: baseValue, set: accessors.nonmutatingSetter(), argument: argValue, argumentSize: argSize, value: accessors.getter()(baseValue, argValue, argSize)) - keepAlive = writeback + keepAlive = unsafe writeback // A maximally-abstracted, final, stored class property should have // a stable address. - return UnsafeRawPointer(Builtin.addressof(&writeback.value)) + return unsafe UnsafeRawPointer(Builtin.addressof(&writeback.value)) case .optionalForce: _internalInvariant(CurValue.self == Optional.self, @@ -1958,10 +1965,10 @@ internal struct RawKeyPathComponent { // Optional's layout happens to always put the payload at the start // address of the Optional value itself, if a value is present at all. let baseOptionalPointer - = base.assumingMemoryBound(to: Optional.self) + = unsafe base.assumingMemoryBound(to: Optional.self) // Assert that a value exists - _ = baseOptionalPointer.pointee! - return base + _ = unsafe baseOptionalPointer.pointee! + return unsafe base case .optionalChain, .optionalWrap, .get: _internalInvariantFailure("not a mutable key path component") @@ -1971,25 +1978,26 @@ internal struct RawKeyPathComponent { internal func _pop(from: inout UnsafeRawBufferPointer, as type: T.Type) -> T { - let buffer = _pop(from: &from, as: type, count: 1) - return buffer.baseAddress._unsafelyUnwrappedUnchecked.pointee + let buffer = unsafe _pop(from: &from, as: type, count: 1) + return unsafe buffer.baseAddress._unsafelyUnwrappedUnchecked.pointee } internal func _pop(from: inout UnsafeRawBufferPointer, as: T.Type, count: Int) -> UnsafeBufferPointer { - from = MemoryLayout._roundingUpBaseToAlignment(from) + unsafe from = unsafe MemoryLayout._roundingUpBaseToAlignment(from) let byteCount = MemoryLayout.stride * count - let result = UnsafeBufferPointer( + let result = unsafe UnsafeBufferPointer( start: from.baseAddress._unsafelyUnwrappedUnchecked.assumingMemoryBound(to: T.self), count: count) - from = UnsafeRawBufferPointer( + unsafe from = unsafe UnsafeRawBufferPointer( start: from.baseAddress._unsafelyUnwrappedUnchecked + byteCount, count: from.count - byteCount) - return result + return unsafe result } @_unavailableInEmbedded +@unsafe internal struct KeyPathBuffer { internal var data: UnsafeRawBufferPointer internal var trivial: Bool @@ -1997,68 +2005,69 @@ internal struct KeyPathBuffer { internal var isSingleComponent: Bool internal init(base: UnsafeRawPointer) { - let header = base.load(as: Header.self) - data = UnsafeRawBufferPointer( + let header = unsafe base.load(as: Header.self) + unsafe data = unsafe UnsafeRawBufferPointer( start: base + MemoryLayout.size, count: header.size) - trivial = header.trivial - hasReferencePrefix = header.hasReferencePrefix - isSingleComponent = header.isSingleComponent + unsafe trivial = unsafe header.trivial + unsafe hasReferencePrefix = unsafe header.hasReferencePrefix + unsafe isSingleComponent = unsafe header.isSingleComponent } internal init(partialData: UnsafeRawBufferPointer, trivial: Bool = false, hasReferencePrefix: Bool = false, isSingleComponent: Bool = false) { - self.data = partialData - self.trivial = trivial - self.hasReferencePrefix = hasReferencePrefix - self.isSingleComponent = isSingleComponent + unsafe self.data = unsafe partialData + unsafe self.trivial = trivial + unsafe self.hasReferencePrefix = hasReferencePrefix + unsafe self.isSingleComponent = isSingleComponent } internal var mutableData: UnsafeMutableRawBufferPointer { - return 
UnsafeMutableRawBufferPointer(mutating: data) + return unsafe UnsafeMutableRawBufferPointer(mutating: data) } internal var maxSize: Int { - let bufferPtr = data.baseAddress._unsafelyUnwrappedUnchecked - let endOfBuffer = MemoryLayout._roundingUpToAlignment( + let bufferPtr = unsafe data.baseAddress._unsafelyUnwrappedUnchecked + let endOfBuffer = unsafe MemoryLayout._roundingUpToAlignment( bufferPtr + data.count ) - return endOfBuffer.load(as: Int.self) + return unsafe endOfBuffer.load(as: Int.self) } + @unsafe internal struct Builder { internal var buffer: UnsafeMutableRawBufferPointer internal init(_ buffer: UnsafeMutableRawBufferPointer) { - self.buffer = buffer + unsafe self.buffer = unsafe buffer } internal mutating func pushRaw(size: Int, alignment: Int) -> UnsafeMutableRawBufferPointer { - var baseAddress = buffer.baseAddress._unsafelyUnwrappedUnchecked - var misalign = Int(bitPattern: baseAddress) & (alignment - 1) + var baseAddress = unsafe buffer.baseAddress._unsafelyUnwrappedUnchecked + var misalign = unsafe Int(bitPattern: baseAddress) & (alignment - 1) if misalign != 0 { misalign = alignment - misalign - baseAddress = baseAddress.advanced(by: misalign) + unsafe baseAddress = unsafe baseAddress.advanced(by: misalign) } - let result = UnsafeMutableRawBufferPointer( + let result = unsafe UnsafeMutableRawBufferPointer( start: baseAddress, count: size) - buffer = UnsafeMutableRawBufferPointer( + unsafe buffer = unsafe UnsafeMutableRawBufferPointer( start: baseAddress + size, count: buffer.count - size - misalign) - return result + return unsafe result } internal mutating func push(_ value: T) { - let buf = pushRaw(size: MemoryLayout.size, + let buf = unsafe pushRaw(size: MemoryLayout.size, alignment: MemoryLayout.alignment) - buf.storeBytes(of: value, as: T.self) + unsafe buf.storeBytes(of: value, as: T.self) } internal mutating func pushHeader(_ header: Header) { - push(header) + unsafe push(header) // Start the components at pointer alignment - _ = pushRaw(size: RawKeyPathComponent.Header.pointerAlignmentSkew, + _ = unsafe pushRaw(size: RawKeyPathComponent.Header.pointerAlignmentSkew, alignment: 4) } } @@ -2072,8 +2081,8 @@ internal struct KeyPathBuffer { hasReferencePrefix: Bool, isSingleComponent: Bool ) { - _internalInvariant(size <= Int(Header.sizeMask), "key path too big") - _value = UInt32(size) + unsafe _internalInvariant(size <= Int(Header.sizeMask), "key path too big") + unsafe _value = unsafe UInt32(size) | (trivial ? Header.trivialFlag : 0) | (hasReferencePrefix ? Header.hasReferencePrefixFlag : 0) | (isSingleComponent ? 
Header.isSingleComponentFlag : 0) @@ -2095,30 +2104,30 @@ internal struct KeyPathBuffer { return _SwiftKeyPathBufferHeader_IsSingleComponentFlag } - internal var size: Int { return Int(_value & Header.sizeMask) } - internal var trivial: Bool { return _value & Header.trivialFlag != 0 } + internal var size: Int { return unsafe Int(_value & Header.sizeMask) } + internal var trivial: Bool { return unsafe _value & Header.trivialFlag != 0 } internal var hasReferencePrefix: Bool { get { - return _value & Header.hasReferencePrefixFlag != 0 + return unsafe _value & Header.hasReferencePrefixFlag != 0 } set { if newValue { - _value |= Header.hasReferencePrefixFlag + unsafe _value |= Header.hasReferencePrefixFlag } else { - _value &= ~Header.hasReferencePrefixFlag + unsafe _value &= ~Header.hasReferencePrefixFlag } } } internal var isSingleComponent: Bool { get { - return _value & Header.isSingleComponentFlag != 0 + return unsafe _value & Header.isSingleComponentFlag != 0 } set { if newValue { - _value |= Header.isSingleComponentFlag + unsafe _value |= Header.isSingleComponentFlag } else { - _value &= ~Header.isSingleComponentFlag + unsafe _value &= ~Header.isSingleComponentFlag } } } @@ -2126,51 +2135,51 @@ internal struct KeyPathBuffer { // In a key path pattern, the "trivial" flag is used to indicate // "instantiable in-line" internal var instantiableInLine: Bool { - return trivial + return unsafe trivial } internal func validateReservedBits() { - _precondition(_value & Header.reservedMask == 0, + unsafe _precondition(_value & Header.reservedMask == 0, "Reserved bits set to an unexpected bit pattern") } } internal func destroy() { // Short-circuit if nothing in the object requires destruction. - if trivial { return } + if unsafe trivial { return } - var bufferToDestroy = self + var bufferToDestroy = unsafe self while true { - let (component, type) = bufferToDestroy.next() - component.destroy() + let (component, type) = unsafe bufferToDestroy.next() + unsafe component.destroy() guard let _ = type else { break } } } internal mutating func next() -> (RawKeyPathComponent, Any.Type?) { - let header = _pop(from: &data, as: RawKeyPathComponent.Header.self) + let header = unsafe _pop(from: &data, as: RawKeyPathComponent.Header.self) // Track if this is the last component of the reference prefix. - if header.endOfReferencePrefix { - _internalInvariant(self.hasReferencePrefix, + if unsafe header.endOfReferencePrefix { + unsafe _internalInvariant(self.hasReferencePrefix, "beginMutation marker in non-reference-writable key path?") - self.hasReferencePrefix = false + unsafe self.hasReferencePrefix = false } - var component = RawKeyPathComponent(header: header, body: data) + var component = unsafe RawKeyPathComponent(header: header, body: data) // Shrinkwrap the component buffer size. - let size = component.bodySize - component.body = UnsafeRawBufferPointer(start: component.body.baseAddress, + let size = unsafe component.bodySize + unsafe component.body = unsafe UnsafeRawBufferPointer(start: component.body.baseAddress, count: size) - _ = _pop(from: &data, as: Int8.self, count: size) + _ = unsafe _pop(from: &data, as: Int8.self, count: size) // fetch type, which is in the buffer unless it's the final component let nextType: Any.Type? 
- if data.isEmpty { + if unsafe data.isEmpty { nextType = nil } else { - nextType = _pop(from: &data, as: Any.Type.self) + nextType = unsafe _pop(from: &data, as: Any.Type.self) } - return (component, nextType) + return unsafe (component, nextType) } } @@ -2184,7 +2193,7 @@ func _getAtPartialKeyPath( keyPath: PartialKeyPath ) -> Any { func open(_: Value.Type) -> Any { - return _getAtKeyPath(root: root, + return unsafe _getAtKeyPath(root: root, keyPath: unsafeDowncast(keyPath, to: KeyPath.self)) } return _openExistential(type(of: keyPath).valueType, do: open) @@ -2203,7 +2212,7 @@ func _getAtAnyKeyPath( return nil } func openValue(_: Value.Type) -> Any { - return _getAtKeyPath(root: rootForKeyPath, + return unsafe _getAtKeyPath(root: rootForKeyPath, keyPath: unsafeDowncast(keyPath, to: KeyPath.self)) } return _openExistential(keyPathValue, do: openValue) @@ -2232,12 +2241,12 @@ func _modifyAtWritableKeyPath_impl( keyPath: WritableKeyPath ) -> (UnsafeMutablePointer, AnyObject?) { if type(of: keyPath).kind == .reference { - return _modifyAtReferenceWritableKeyPath_impl(root: root, + return unsafe _modifyAtReferenceWritableKeyPath_impl(root: root, keyPath: _unsafeUncheckedDowncast(keyPath, to: ReferenceWritableKeyPath.self)) } - return _withUnprotectedUnsafePointer(to: &root) { - keyPath._projectMutableAddress(from: $0) + return unsafe _withUnprotectedUnsafePointer(to: &root) { + unsafe keyPath._projectMutableAddress(from: $0) } } @@ -2251,7 +2260,7 @@ func _modifyAtReferenceWritableKeyPath_impl( root: Root, keyPath: ReferenceWritableKeyPath ) -> (UnsafeMutablePointer, AnyObject?) { - return keyPath._projectMutableAddress(from: root) + return unsafe keyPath._projectMutableAddress(from: root) } @_silgen_name("swift_setAtWritableKeyPath") @@ -2263,16 +2272,16 @@ func _setAtWritableKeyPath( value: __owned Value ) { if type(of: keyPath).kind == .reference { - return _setAtReferenceWritableKeyPath(root: root, + return unsafe _setAtReferenceWritableKeyPath(root: root, keyPath: _unsafeUncheckedDowncast(keyPath, to: ReferenceWritableKeyPath.self), value: value) } // TODO: we should be able to do this more efficiently than projecting. - let (addr, owner) = _withUnprotectedUnsafePointer(to: &root) { - keyPath._projectMutableAddress(from: $0) + let (addr, owner) = unsafe _withUnprotectedUnsafePointer(to: &root) { + unsafe keyPath._projectMutableAddress(from: $0) } - addr.pointee = value + unsafe addr.pointee = value _fixLifetime(owner) // FIXME: this needs a deallocation barrier to ensure that the // release isn't extended, along with the access scope. @@ -2287,8 +2296,8 @@ func _setAtReferenceWritableKeyPath( value: __owned Value ) { // TODO: we should be able to do this more efficiently than projecting. - let (addr, owner) = keyPath._projectMutableAddress(from: root) - addr.pointee = value + let (addr, owner) = unsafe keyPath._projectMutableAddress(from: root) + unsafe addr.pointee = value _fixLifetime(owner) // FIXME: this needs a deallocation barrier to ensure that the // release isn't extended, along with the access scope. 
@@ -2581,14 +2590,14 @@ internal func _tryToAppendKeyPaths( func open(_: Root.Type) -> Result { func open2(_: Value.Type) -> Result { func open3(_: AppendedValue.Type) -> Result { - let typedRoot = unsafeDowncast(root, to: KeyPath.self) - let typedLeaf = unsafeDowncast(leaf, + let typedRoot = unsafe unsafeDowncast(root, to: KeyPath.self) + let typedLeaf = unsafe unsafeDowncast(leaf, to: KeyPath.self) var result:AnyKeyPath = _appendingKeyPaths(root: typedRoot, leaf: typedLeaf) _processOffsetForAppendedKeyPath(appendedKeyPath: &result, root: root, leaf: leaf) - return unsafeDowncast(result, to: Result.self) + return unsafe unsafeDowncast(result, to: Result.self) } return _openExistential(leafValue, do: open3) } @@ -2608,17 +2617,17 @@ internal func _appendingKeyPaths< ) -> Result { let resultTy = type(of: root).appendedType(with: type(of: leaf)) var returnValue: AnyKeyPath = root.withBuffer { - var rootBuffer = $0 + var rootBuffer = unsafe $0 return leaf.withBuffer { - var leafBuffer = $0 + var leafBuffer = unsafe $0 // If either operand is the identity key path, then we should return // the other operand back untouched. - if leafBuffer.data.isEmpty { - return unsafeDowncast(root, to: Result.self) + if unsafe leafBuffer.data.isEmpty { + return unsafe unsafeDowncast(root, to: Result.self) } - if rootBuffer.data.isEmpty { - return unsafeDowncast(leaf, to: Result.self) + if unsafe rootBuffer.data.isEmpty { + return unsafe unsafeDowncast(leaf, to: Result.self) } // Reserve room for the appended KVC string, if both key paths are @@ -2628,8 +2637,8 @@ internal func _appendingKeyPaths< if root.getOffsetFromStorage() == nil, leaf.getOffsetFromStorage() == nil, let rootPtr = root._kvcKeyPathStringPtr, let leafPtr = leaf._kvcKeyPathStringPtr { - rootKVCLength = Int(_swift_stdlib_strlen(rootPtr)) - leafKVCLength = Int(_swift_stdlib_strlen(leafPtr)) + rootKVCLength = unsafe Int(_swift_stdlib_strlen(rootPtr)) + leafKVCLength = unsafe Int(_swift_stdlib_strlen(leafPtr)) // root + "." + leaf appendedKVCLength = rootKVCLength + 1 + leafKVCLength + 1 } else { @@ -2641,8 +2650,8 @@ internal func _appendingKeyPaths< // Result buffer has room for both key paths' components, plus the // header, plus space for the middle type. // Align up the root so that we can put the component type after it. - let rootSize = MemoryLayout._roundingUpToAlignment(rootBuffer.data.count) - var resultSize = rootSize + // Root component size + let rootSize = unsafe MemoryLayout._roundingUpToAlignment(rootBuffer.data.count) + var resultSize = unsafe rootSize + // Root component size leafBuffer.data.count + // Leaf component size MemoryLayout.size // Middle type @@ -2662,22 +2671,22 @@ internal func _appendingKeyPaths< var kvcStringBuffer: UnsafeMutableRawPointer? = nil let result = resultTy._create(capacityInBytes: totalResultSize) { - var destBuffer = $0 + var destBuffer = unsafe $0 // Remember where the tail-allocated KVC string buffer begins. if appendedKVCLength > 0 { - kvcStringBuffer = destBuffer.baseAddress._unsafelyUnwrappedUnchecked + unsafe kvcStringBuffer = unsafe destBuffer.baseAddress._unsafelyUnwrappedUnchecked .advanced(by: resultSize) - destBuffer = .init(start: destBuffer.baseAddress, + unsafe destBuffer = unsafe .init(start: destBuffer.baseAddress, count: resultSize) } - var destBuilder = KeyPathBuffer.Builder(destBuffer) + var destBuilder = unsafe KeyPathBuffer.Builder(destBuffer) // Save space for the header. 
let leafIsReferenceWritable = type(of: leaf).kind == .reference - destBuilder.pushHeader(KeyPathBuffer.Header( + unsafe destBuilder.pushHeader(KeyPathBuffer.Header( size: componentSize, trivial: rootBuffer.trivial && leafBuffer.trivial, hasReferencePrefix: rootBuffer.hasReferencePrefix @@ -2688,13 +2697,13 @@ internal func _appendingKeyPaths< isSingleComponent: false )) - let leafHasReferencePrefix = leafBuffer.hasReferencePrefix + let leafHasReferencePrefix = unsafe leafBuffer.hasReferencePrefix - let rootMaxSize = rootBuffer.maxSize + let rootMaxSize = unsafe rootBuffer.maxSize // Clone the root components into the buffer. while true { - let (component, type) = rootBuffer.next() + let (component, type) = unsafe rootBuffer.next() let isLast = type == nil // If the leaf appended path has a reference prefix, then the // entire root is part of the reference prefix. @@ -2704,68 +2713,68 @@ internal func _appendingKeyPaths< } else if isLast && leafIsReferenceWritable { endOfReferencePrefix = true } else { - endOfReferencePrefix = component.header.endOfReferencePrefix + endOfReferencePrefix = unsafe component.header.endOfReferencePrefix } - component.clone( + unsafe component.clone( into: &destBuilder.buffer, endOfReferencePrefix: endOfReferencePrefix) // Insert our endpoint type between the root and leaf components. if let type = type { - destBuilder.push(type) + unsafe destBuilder.push(type) } else { - destBuilder.push(Value.self as Any.Type) + unsafe destBuilder.push(Value.self as Any.Type) break } } - let leafMaxSize = leafBuffer.maxSize + let leafMaxSize = unsafe leafBuffer.maxSize // Clone the leaf components into the buffer. while true { - let (component, type) = leafBuffer.next() + let (component, type) = unsafe leafBuffer.next() - component.clone( + unsafe component.clone( into: &destBuilder.buffer, endOfReferencePrefix: component.header.endOfReferencePrefix) if let type = type { - destBuilder.push(type) + unsafe destBuilder.push(type) } else { break } } // Append our max size at the end of the buffer before the kvc string. - destBuilder.push(Swift.max(rootMaxSize, leafMaxSize)) + unsafe destBuilder.push(Swift.max(rootMaxSize, leafMaxSize)) - _internalInvariant(destBuilder.buffer.isEmpty, + unsafe _internalInvariant(destBuilder.buffer.isEmpty, "did not fill entire result buffer") } // Build the KVC string if there is one. if root.getOffsetFromStorage() == nil, leaf.getOffsetFromStorage() == nil { - if let kvcStringBuffer = kvcStringBuffer { - let rootPtr = root._kvcKeyPathStringPtr._unsafelyUnwrappedUnchecked - let leafPtr = leaf._kvcKeyPathStringPtr._unsafelyUnwrappedUnchecked - _memcpy( + if let kvcStringBuffer = unsafe kvcStringBuffer { + let rootPtr = unsafe root._kvcKeyPathStringPtr._unsafelyUnwrappedUnchecked + let leafPtr = unsafe leaf._kvcKeyPathStringPtr._unsafelyUnwrappedUnchecked + unsafe _memcpy( dest: kvcStringBuffer, src: rootPtr, size: UInt(rootKVCLength)) - kvcStringBuffer.advanced(by: rootKVCLength) + unsafe kvcStringBuffer.advanced(by: rootKVCLength) .storeBytes(of: 0x2E /* '.' 
*/, as: CChar.self) - _memcpy( + unsafe _memcpy( dest: kvcStringBuffer.advanced(by: rootKVCLength + 1), src: leafPtr, size: UInt(leafKVCLength)) result._kvcKeyPathStringPtr = - UnsafePointer(kvcStringBuffer.assumingMemoryBound(to: CChar.self)) - kvcStringBuffer.advanced(by: rootKVCLength + leafKVCLength + 1) + unsafe UnsafePointer(kvcStringBuffer.assumingMemoryBound(to: CChar.self)) + unsafe kvcStringBuffer.advanced(by: rootKVCLength + leafKVCLength + 1) .storeBytes(of: 0 /* '\0' */, as: CChar.self) } } - return unsafeDowncast(result, to: Result.self) + return unsafe unsafeDowncast(result, to: Result.self) } } _processOffsetForAppendedKeyPath( @@ -2781,7 +2790,7 @@ internal func _appendingKeyPaths< // pointer to the KVC string. internal var keyPathObjectHeaderSize: Int { - return MemoryLayout.size + MemoryLayout.size + return unsafe MemoryLayout.size + MemoryLayout.size } internal var keyPathPatternHeaderSize: Int { @@ -2818,46 +2827,46 @@ public func _swift_getKeyPath(pattern: UnsafeMutableRawPointer, // These are resolved dynamically, so that they always reflect the dynamic // capability of the properties involved. - let oncePtrPtr = pattern - let patternPtr = pattern.advanced(by: 4) + let oncePtrPtr = unsafe pattern + let patternPtr = unsafe pattern.advanced(by: 4) - let bufferHeader = patternPtr.load(fromByteOffset: keyPathPatternHeaderSize, + let bufferHeader = unsafe patternPtr.load(fromByteOffset: keyPathPatternHeaderSize, as: KeyPathBuffer.Header.self) - bufferHeader.validateReservedBits() + unsafe bufferHeader.validateReservedBits() // If the first word is nonzero, it relative-references a cache variable // we can use to reference a single shared instantiation of this key path. - let oncePtrOffset = oncePtrPtr.load(as: Int32.self) + let oncePtrOffset = unsafe oncePtrPtr.load(as: Int32.self) let oncePtr: UnsafeRawPointer? if oncePtrOffset != 0 { - let theOncePtr = _resolveRelativeAddress(oncePtrPtr, oncePtrOffset) - oncePtr = theOncePtr + let theOncePtr = unsafe _resolveRelativeAddress(oncePtrPtr, oncePtrOffset) + unsafe oncePtr = unsafe theOncePtr // See whether we already instantiated this key path. // This is a non-atomic load because the instantiated pointer will be // written with a release barrier, and loads of the instantiated key path // ought to carry a dependency through this loaded pointer. - let existingInstance = UnsafeRawPointer( + let existingInstance = unsafe UnsafeRawPointer( bitPattern: UInt(Builtin.atomicload_acquire_Word(theOncePtr._rawValue)) ) - if let existingInstance = existingInstance { + if let existingInstance = unsafe existingInstance { // Return the instantiated object at +1. - let object = Unmanaged.fromOpaque(existingInstance) + let object = unsafe Unmanaged.fromOpaque(existingInstance) // TODO: This retain will be unnecessary once we support global objects // with inert refcounting. - _ = object.retain() - return existingInstance + _ = unsafe object.retain() + return unsafe existingInstance } } else { - oncePtr = nil + unsafe oncePtr = nil } // Instantiate a new key path object modeled on the pattern. // Do a pass to determine the class of the key path we'll be instantiating // and how much space we'll need for it. let (keyPathClass, rootType, size, sizeWithMaxSize, _) - = _getKeyPathClassAndInstanceSizeFromPattern(patternPtr, arguments) + = unsafe _getKeyPathClassAndInstanceSizeFromPattern(patternPtr, arguments) var pureStructOffset: UInt32? 
= nil @@ -2866,7 +2875,7 @@ public func _swift_getKeyPath(pattern: UnsafeMutableRawPointer, capacityInBytes: sizeWithMaxSize ) { instanceData in // Instantiate the pattern into the instance. - pureStructOffset = _instantiateKeyPathBuffer( + pureStructOffset = unsafe _instantiateKeyPathBuffer( patternPtr, instanceData, rootType, @@ -2876,28 +2885,28 @@ public func _swift_getKeyPath(pattern: UnsafeMutableRawPointer, } // Adopt the KVC string from the pattern. - let kvcStringBase = patternPtr.advanced(by: 12) - let kvcStringOffset = kvcStringBase.load(as: Int32.self) + let kvcStringBase = unsafe patternPtr.advanced(by: 12) + let kvcStringOffset = unsafe kvcStringBase.load(as: Int32.self) if kvcStringOffset == 0 { // Null pointer. instance._kvcKeyPathStringPtr = nil } else { - let kvcStringPtr = _resolveRelativeAddress(kvcStringBase, kvcStringOffset) + let kvcStringPtr = unsafe _resolveRelativeAddress(kvcStringBase, kvcStringOffset) instance._kvcKeyPathStringPtr = - kvcStringPtr.assumingMemoryBound(to: CChar.self) + unsafe kvcStringPtr.assumingMemoryBound(to: CChar.self) } - if instance._kvcKeyPathStringPtr == nil, let offset = pureStructOffset { + if unsafe instance._kvcKeyPathStringPtr == nil, let offset = pureStructOffset { instance.assignOffsetToStorage(offset: Int(offset)) } // If we can cache this instance as a shared instance, do so. - if let oncePtr = oncePtr { + if let oncePtr = unsafe oncePtr { // Try to replace a null pointer in the cache variable with the instance // pointer. - let instancePtr = Unmanaged.passRetained(instance) + let instancePtr = unsafe Unmanaged.passRetained(instance) while true { - let (oldValue, won) = Builtin.cmpxchg_release_monotonic_Word( + let (oldValue, won) = unsafe Builtin.cmpxchg_release_monotonic_Word( oncePtr._rawValue, 0._builtinWordValue, UInt(bitPattern: instancePtr.toOpaque())._builtinWordValue) @@ -2911,15 +2920,15 @@ public func _swift_getKeyPath(pattern: UnsafeMutableRawPointer, // Otherwise, someone raced with us to instantiate the key path pattern // and won. Their instance should be just as good as ours, so we can take // that one and let ours get deallocated. - if let existingInstance = UnsafeRawPointer(bitPattern: Int(oldValue)) { + if let existingInstance = unsafe UnsafeRawPointer(bitPattern: Int(oldValue)) { // Return the instantiated object at +1. - let object = Unmanaged.fromOpaque(existingInstance) + let object = unsafe Unmanaged.fromOpaque(existingInstance) // TODO: This retain will be unnecessary once we support global objects // with inert refcounting. - _ = object.retain() + _ = unsafe object.retain() // Release the instance we created. - instancePtr.release() - return existingInstance + unsafe instancePtr.release() + return unsafe existingInstance } else { // Try the cmpxchg again if it spuriously failed. continue @@ -2927,7 +2936,7 @@ public func _swift_getKeyPath(pattern: UnsafeMutableRawPointer, } } - return UnsafeRawPointer(Unmanaged.passRetained(instance).toOpaque()) + return unsafe UnsafeRawPointer(Unmanaged.passRetained(instance).toOpaque()) } // A reference to metadata, which is a pointer to a mangled name. @@ -2935,20 +2944,20 @@ internal typealias MetadataReference = UnsafeRawPointer // Determine the length of the given mangled name. 
internal func _getSymbolicMangledNameLength(_ base: UnsafeRawPointer) -> Int { - var end = base - while let current = Optional(end.load(as: UInt8.self)), current != 0 { + var end = unsafe base + while let current = unsafe Optional(end.load(as: UInt8.self)), current != 0 { // Skip the current character - end = end + 1 + unsafe end = unsafe end + 1 // Skip over a symbolic reference if current >= 0x1 && current <= 0x17 { - end += 4 + unsafe end += 4 } else if current >= 0x18 && current <= 0x1F { - end += MemoryLayout.size + unsafe end += MemoryLayout.size } } - return end - base + return unsafe end - base } // Resolve a mangled name in a generic environment, described by either a @@ -2960,14 +2969,14 @@ internal func _getTypeByMangledNameInEnvironmentOrContext( genericEnvironmentOrContext: UnsafeRawPointer?, genericArguments: UnsafeRawPointer?) -> Any.Type? { - let taggedPointer = UInt(bitPattern: genericEnvironmentOrContext) + let taggedPointer = unsafe UInt(bitPattern: genericEnvironmentOrContext) if taggedPointer & 1 == 0 { - return _getTypeByMangledNameInEnvironment(name, nameLength, + return unsafe _getTypeByMangledNameInEnvironment(name, nameLength, genericEnvironment: genericEnvironmentOrContext, genericArguments: genericArguments) } else { - let context = UnsafeRawPointer(bitPattern: taggedPointer & ~1) - return _getTypeByMangledNameInContext(name, nameLength, + let context = unsafe UnsafeRawPointer(bitPattern: taggedPointer & ~1) + return unsafe _getTypeByMangledNameInContext(name, nameLength, genericContext: context, genericArguments: genericArguments) } @@ -2981,50 +2990,50 @@ internal func _resolveKeyPathGenericArgReference( arguments: UnsafeRawPointer?) -> UnsafeRawPointer { // If the low bit is clear, it's a direct reference to the argument. - if (UInt(bitPattern: reference) & 0x01 == 0) { - return reference + if unsafe (UInt(bitPattern: reference) & 0x01 == 0) { + return unsafe reference } // Adjust the reference. - let referenceStart = reference - 1 + let referenceStart = unsafe reference - 1 // If we have a symbolic reference to an accessor, call it. - let first = referenceStart.load(as: UInt8.self) - if first == 255 && reference.load(as: UInt8.self) == 9 { + let first = unsafe referenceStart.load(as: UInt8.self) + if unsafe first == 255 && reference.load(as: UInt8.self) == 9 { typealias MetadataAccessor = @convention(c) (UnsafeRawPointer?) -> UnsafeRawPointer // Unaligned load of the offset. 
- let pointerReference = reference + 1 + let pointerReference = unsafe reference + 1 var offset: Int32 = 0 - _memcpy(dest: &offset, src: pointerReference, size: 4) + unsafe _memcpy(dest: &offset, src: pointerReference, size: 4) - let accessorPtrRaw = _resolveCompactFunctionPointer(pointerReference, offset) + let accessorPtrRaw = unsafe _resolveCompactFunctionPointer(pointerReference, offset) let accessorPtrSigned = - _PtrAuth.sign(pointer: accessorPtrRaw, + unsafe _PtrAuth.sign(pointer: accessorPtrRaw, key: .processIndependentCode, discriminator: _PtrAuth.discriminator(for: MetadataAccessor.self)) - let accessor = unsafeBitCast(accessorPtrSigned, to: MetadataAccessor.self) - return accessor(arguments) + let accessor = unsafe unsafeBitCast(accessorPtrSigned, to: MetadataAccessor.self) + return unsafe accessor(arguments) } - let nameLength = _getSymbolicMangledNameLength(referenceStart) - let namePtr = referenceStart.bindMemory(to: UInt8.self, + let nameLength = unsafe _getSymbolicMangledNameLength(referenceStart) + let namePtr = unsafe referenceStart.bindMemory(to: UInt8.self, capacity: nameLength + 1) // FIXME: Could extract this information from the mangled name. guard let result = - _getTypeByMangledNameInEnvironmentOrContext(namePtr, UInt(nameLength), + unsafe _getTypeByMangledNameInEnvironmentOrContext(namePtr, UInt(nameLength), genericEnvironmentOrContext: genericEnvironment, genericArguments: arguments) else { - let nameStr = String._fromUTF8Repairing( + let nameStr = unsafe String._fromUTF8Repairing( UnsafeBufferPointer(start: namePtr, count: nameLength) ).0 fatalError("could not demangle keypath type from '\(nameStr)'") } - return unsafeBitCast(result, to: UnsafeRawPointer.self) + return unsafe unsafeBitCast(result, to: UnsafeRawPointer.self) } // Resolve the given metadata reference to (type) metadata. @@ -3034,7 +3043,7 @@ internal func _resolveKeyPathMetadataReference( genericEnvironment: UnsafeRawPointer?, arguments: UnsafeRawPointer?) -> Any.Type { - return unsafeBitCast( + return unsafe unsafeBitCast( _resolveKeyPathGenericArgReference( reference, genericEnvironment: genericEnvironment, @@ -3045,6 +3054,7 @@ internal func _resolveKeyPathMetadataReference( internal enum KeyPathStructOrClass { case `struct`, `class` } +@unsafe internal enum KeyPathPatternStoredOffset { case inline(UInt32) case outOfLine(UInt32) @@ -3052,6 +3062,7 @@ internal enum KeyPathPatternStoredOffset { case unresolvedIndirectOffset(UnsafePointer) } @_unavailableInEmbedded +@unsafe internal struct KeyPathPatternComputedArguments { var getLayout: KeyPathComputedArgumentLayoutFn var witnesses: ComputedArgumentWitnessesPtr @@ -3088,7 +3099,7 @@ internal protocol KeyPathPatternVisitor { internal func _resolveRelativeAddress(_ base: UnsafeRawPointer, _ offset: Int32) -> UnsafeRawPointer { // Sign-extend the offset to pointer width and add with wrap on overflow. - return UnsafeRawPointer(bitPattern: Int(bitPattern: base) &+ Int(offset)) + return unsafe UnsafeRawPointer(bitPattern: Int(bitPattern: base) &+ Int(offset)) ._unsafelyUnwrappedUnchecked } internal func _resolveRelativeIndirectableAddress(_ base: UnsafeRawPointer, @@ -3096,10 +3107,10 @@ internal func _resolveRelativeIndirectableAddress(_ base: UnsafeRawPointer, -> UnsafeRawPointer { // Low bit indicates whether the reference is indirected or not. 
if offset & 1 != 0 { - let ptrToPtr = _resolveRelativeAddress(base, offset - 1) - return ptrToPtr.load(as: UnsafeRawPointer.self) + let ptrToPtr = unsafe _resolveRelativeAddress(base, offset - 1) + return unsafe ptrToPtr.load(as: UnsafeRawPointer.self) } - return _resolveRelativeAddress(base, offset) + return unsafe _resolveRelativeAddress(base, offset) } internal func _resolveCompactFunctionPointer(_ base: UnsafeRawPointer, _ offset: Int32) @@ -3107,15 +3118,15 @@ internal func _resolveCompactFunctionPointer(_ base: UnsafeRawPointer, _ offset: #if SWIFT_COMPACT_ABSOLUTE_FUNCTION_POINTER return UnsafeRawPointer(bitPattern: Int(offset))._unsafelyUnwrappedUnchecked #else - return _resolveRelativeAddress(base, offset) + return unsafe _resolveRelativeAddress(base, offset) #endif } internal func _loadRelativeAddress(at: UnsafeRawPointer, fromByteOffset: Int = 0, as: T.Type) -> T { - let offset = at.load(fromByteOffset: fromByteOffset, as: Int32.self) - return unsafeBitCast(_resolveRelativeAddress(at + fromByteOffset, offset), + let offset = unsafe at.load(fromByteOffset: fromByteOffset, as: Int32.self) + return unsafe unsafeBitCast(_resolveRelativeAddress(at + fromByteOffset, offset), to: T.self) } @@ -3124,16 +3135,16 @@ internal func _walkKeyPathPattern( _ pattern: UnsafeRawPointer, walker: inout W) { // Visit the header. - let genericEnvironment = _loadRelativeAddress(at: pattern, + let genericEnvironment = unsafe _loadRelativeAddress(at: pattern, as: UnsafeRawPointer.self) - let rootMetadataRef = _loadRelativeAddress(at: pattern, fromByteOffset: 4, + let rootMetadataRef = unsafe _loadRelativeAddress(at: pattern, fromByteOffset: 4, as: MetadataReference.self) - let leafMetadataRef = _loadRelativeAddress(at: pattern, fromByteOffset: 8, + let leafMetadataRef = unsafe _loadRelativeAddress(at: pattern, fromByteOffset: 8, as: MetadataReference.self) - let kvcString = _loadRelativeAddress(at: pattern, fromByteOffset: 12, + let kvcString = unsafe _loadRelativeAddress(at: pattern, fromByteOffset: 12, as: UnsafeRawPointer.self) - walker.visitHeader(genericEnvironment: genericEnvironment, + unsafe walker.visitHeader(genericEnvironment: genericEnvironment, rootMetadataRef: rootMetadataRef, leafMetadataRef: leafMetadataRef, kvcCompatibilityString: kvcString) @@ -3144,26 +3155,26 @@ internal func _walkKeyPathPattern( // header word, or else be stored out-of-line, or need instantiation of some // kind. 
let offset: KeyPathPatternStoredOffset - switch header.storedOffsetPayload { - case RawKeyPathComponent.Header.outOfLineOffsetPayload: - offset = .outOfLine(_pop(from: &componentBuffer, + switch unsafe header.storedOffsetPayload { + case unsafe RawKeyPathComponent.Header.outOfLineOffsetPayload: + unsafe offset = unsafe .outOfLine(_pop(from: &componentBuffer, as: UInt32.self)) - case RawKeyPathComponent.Header.unresolvedFieldOffsetPayload: - offset = .unresolvedFieldOffset(_pop(from: &componentBuffer, + case unsafe RawKeyPathComponent.Header.unresolvedFieldOffsetPayload: + unsafe offset = unsafe .unresolvedFieldOffset(_pop(from: &componentBuffer, as: UInt32.self)) - case RawKeyPathComponent.Header.unresolvedIndirectOffsetPayload: - let base = componentBuffer.baseAddress._unsafelyUnwrappedUnchecked - let relativeOffset = _pop(from: &componentBuffer, + case unsafe RawKeyPathComponent.Header.unresolvedIndirectOffsetPayload: + let base = unsafe componentBuffer.baseAddress._unsafelyUnwrappedUnchecked + let relativeOffset = unsafe _pop(from: &componentBuffer, as: Int32.self) - let ptr = _resolveRelativeIndirectableAddress(base, relativeOffset) - offset = .unresolvedIndirectOffset( + let ptr = unsafe _resolveRelativeIndirectableAddress(base, relativeOffset) + unsafe offset = unsafe .unresolvedIndirectOffset( ptr.assumingMemoryBound(to: UInt.self)) default: - offset = .inline(header.storedOffsetPayload) + unsafe offset = unsafe .inline(header.storedOffsetPayload) } - let kind: KeyPathStructOrClass = header.kind == .struct + let kind: KeyPathStructOrClass = unsafe header.kind == .struct ? .struct : .class - walker.visitStoredComponent(kind: kind, + unsafe walker.visitStoredComponent(kind: kind, mutable: header.isStoredMutable, offset: offset) } @@ -3174,57 +3185,57 @@ internal func _walkKeyPathPattern( idValue: Int32, getter: UnsafeRawPointer, setter: UnsafeRawPointer?) { - let idValueBase = componentBuffer.baseAddress._unsafelyUnwrappedUnchecked - let idValue = _pop(from: &componentBuffer, as: Int32.self) - let getterBase = componentBuffer.baseAddress._unsafelyUnwrappedUnchecked - let getterRef = _pop(from: &componentBuffer, as: Int32.self) - let getter = _resolveCompactFunctionPointer(getterBase, getterRef) + let idValueBase = unsafe componentBuffer.baseAddress._unsafelyUnwrappedUnchecked + let idValue = unsafe _pop(from: &componentBuffer, as: Int32.self) + let getterBase = unsafe componentBuffer.baseAddress._unsafelyUnwrappedUnchecked + let getterRef = unsafe _pop(from: &componentBuffer, as: Int32.self) + let getter = unsafe _resolveCompactFunctionPointer(getterBase, getterRef) let setter: UnsafeRawPointer? - if header.isComputedSettable { - let setterBase = componentBuffer.baseAddress._unsafelyUnwrappedUnchecked - let setterRef = _pop(from: &componentBuffer, as: Int32.self) - setter = _resolveCompactFunctionPointer(setterBase, setterRef) + if unsafe header.isComputedSettable { + let setterBase = unsafe componentBuffer.baseAddress._unsafelyUnwrappedUnchecked + let setterRef = unsafe _pop(from: &componentBuffer, as: Int32.self) + unsafe setter = unsafe _resolveCompactFunctionPointer(setterBase, setterRef) } else { - setter = nil + unsafe setter = nil } - return (idValueBase: idValueBase, idValue: idValue, + return unsafe (idValueBase: idValueBase, idValue: idValue, getter: getter, setter: setter) } func popComputedArguments(header: RawKeyPathComponent.Header, componentBuffer: inout UnsafeRawBufferPointer) -> KeyPathPatternComputedArguments? 
{ - if header.hasComputedArguments { - let getLayoutBase = componentBuffer.baseAddress._unsafelyUnwrappedUnchecked - let getLayoutRef = _pop(from: &componentBuffer, as: Int32.self) - let getLayoutRaw = _resolveCompactFunctionPointer(getLayoutBase, getLayoutRef) - let getLayoutSigned = _PtrAuth.sign(pointer: getLayoutRaw, + if unsafe header.hasComputedArguments { + let getLayoutBase = unsafe componentBuffer.baseAddress._unsafelyUnwrappedUnchecked + let getLayoutRef = unsafe _pop(from: &componentBuffer, as: Int32.self) + let getLayoutRaw = unsafe _resolveCompactFunctionPointer(getLayoutBase, getLayoutRef) + let getLayoutSigned = unsafe _PtrAuth.sign(pointer: getLayoutRaw, key: .processIndependentCode, discriminator: _PtrAuth.discriminator(for: KeyPathComputedArgumentLayoutFn.self)) - let getLayout = unsafeBitCast(getLayoutSigned, + let getLayout = unsafe unsafeBitCast(getLayoutSigned, to: KeyPathComputedArgumentLayoutFn.self) - let witnessesBase = componentBuffer.baseAddress._unsafelyUnwrappedUnchecked - let witnessesRef = _pop(from: &componentBuffer, as: Int32.self) + let witnessesBase = unsafe componentBuffer.baseAddress._unsafelyUnwrappedUnchecked + let witnessesRef = unsafe _pop(from: &componentBuffer, as: Int32.self) let witnesses: UnsafeRawPointer if witnessesRef == 0 { - witnesses = __swift_keyPathGenericWitnessTable_addr() + unsafe witnesses = unsafe __swift_keyPathGenericWitnessTable_addr() } else { - witnesses = _resolveRelativeAddress(witnessesBase, witnessesRef) + unsafe witnesses = unsafe _resolveRelativeAddress(witnessesBase, witnessesRef) } - let initializerBase = componentBuffer.baseAddress._unsafelyUnwrappedUnchecked - let initializerRef = _pop(from: &componentBuffer, as: Int32.self) - let initializerRaw = _resolveCompactFunctionPointer(initializerBase, + let initializerBase = unsafe componentBuffer.baseAddress._unsafelyUnwrappedUnchecked + let initializerRef = unsafe _pop(from: &componentBuffer, as: Int32.self) + let initializerRaw = unsafe _resolveCompactFunctionPointer(initializerBase, initializerRef) - let initializerSigned = _PtrAuth.sign(pointer: initializerRaw, + let initializerSigned = unsafe _PtrAuth.sign(pointer: initializerRaw, key: .processIndependentCode, discriminator: _PtrAuth.discriminator(for: KeyPathComputedArgumentInitializerFn.self)) - let initializer = unsafeBitCast(initializerSigned, + let initializer = unsafe unsafeBitCast(initializerSigned, to: KeyPathComputedArgumentInitializerFn.self) - return KeyPathPatternComputedArguments(getLayout: getLayout, + return unsafe KeyPathPatternComputedArguments(getLayout: getLayout, witnesses: ComputedArgumentWitnessesPtr(witnesses), initializer: initializer) } else { @@ -3234,13 +3245,13 @@ internal func _walkKeyPathPattern( // We declare this down here to avoid the temptation to use it within // the functions above. 
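Both accessor hunks above share one pattern: a compact relative reference is resolved to a raw code address, then re-signed with a type-specific pointer-authentication discriminator before being bit-cast to a callable function type. Sketched in isolation (the wrapper function is illustrative; `_resolveCompactFunctionPointer` and `_PtrAuth` are the stdlib-internal helpers already used in this file):

```swift
// Illustrative only: resolve a relative function-pointer field, then sign it
// for the calling convention before treating it as a callable value.
typealias Resolver = @convention(c) (UnsafeRawPointer?) -> UnsafeRawPointer?

func loadSignedResolver(_ fieldBase: UnsafeRawPointer,
                        _ relativeOffset: Int32) -> Resolver {
  let raw = unsafe _resolveCompactFunctionPointer(fieldBase, relativeOffset)
  let signed = unsafe _PtrAuth.sign(
    pointer: raw,
    key: .processIndependentCode,
    discriminator: _PtrAuth.discriminator(for: Resolver.self))
  return unsafe unsafeBitCast(signed, to: Resolver.self)
}
```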
- let bufferPtr = pattern.advanced(by: keyPathPatternHeaderSize) - let bufferHeader = bufferPtr.load(as: KeyPathBuffer.Header.self) - var buffer = UnsafeRawBufferPointer(start: bufferPtr + 4, + let bufferPtr = unsafe pattern.advanced(by: keyPathPatternHeaderSize) + let bufferHeader = unsafe bufferPtr.load(as: KeyPathBuffer.Header.self) + var buffer = unsafe UnsafeRawBufferPointer(start: bufferPtr + 4, count: bufferHeader.size) - while !buffer.isEmpty { - let header = _pop(from: &buffer, + while unsafe !buffer.isEmpty { + let header = unsafe _pop(from: &buffer, as: RawKeyPathComponent.Header.self) // Ensure that we pop an amount of data consistent with what @@ -3250,23 +3261,23 @@ internal func _walkKeyPathPattern( _internalInvariant({ bufferSizeBefore = buffer.count - expectedPop = header.patternComponentBodySize + expectedPop = unsafe header.patternComponentBodySize return true }()) - switch header.kind { + switch unsafe header.kind { case .class, .struct: - visitStored(header: header, componentBuffer: &buffer) + unsafe visitStored(header: header, componentBuffer: &buffer) case .computed: let (idValueBase, idValue, getter, setter) - = popComputedAccessors(header: header, + = unsafe popComputedAccessors(header: header, componentBuffer: &buffer) // If there are arguments, gather those too. - let arguments = popComputedArguments(header: header, + let arguments = unsafe popComputedArguments(header: header, componentBuffer: &buffer) - walker.visitComputedComponent(mutating: header.isComputedMutating, + unsafe walker.visitComputedComponent(mutating: header.isComputedMutating, idKind: header.computedIDKind, idResolution: header.computedIDResolution, idValueBase: idValueBase, @@ -3285,85 +3296,85 @@ internal func _walkKeyPathPattern( case .external: // Look at the external property descriptor to see if we should take it // over the component given in the pattern. - let genericParamCount = Int(header.payload) - let descriptorBase = buffer.baseAddress._unsafelyUnwrappedUnchecked - let descriptorOffset = _pop(from: &buffer, + let genericParamCount = unsafe Int(header.payload) + let descriptorBase = unsafe buffer.baseAddress._unsafelyUnwrappedUnchecked + let descriptorOffset = unsafe _pop(from: &buffer, as: Int32.self) let descriptor = - _resolveRelativeIndirectableAddress(descriptorBase, descriptorOffset) + unsafe _resolveRelativeIndirectableAddress(descriptorBase, descriptorOffset) let descriptorHeader: RawKeyPathComponent.Header - if descriptor != UnsafeRawPointer(bitPattern: 0) { - descriptorHeader = descriptor.load(as: RawKeyPathComponent.Header.self) - if descriptorHeader.isTrivialPropertyDescriptor { + if unsafe descriptor != UnsafeRawPointer(bitPattern: 0) { + unsafe descriptorHeader = unsafe descriptor.load(as: RawKeyPathComponent.Header.self) + if unsafe descriptorHeader.isTrivialPropertyDescriptor { // If the descriptor is trivial, then use the local candidate. // Skip the external generic parameter accessors to get to it. - _ = _pop(from: &buffer, as: Int32.self, count: genericParamCount) + _ = unsafe _pop(from: &buffer, as: Int32.self, count: genericParamCount) continue } } else { // If the external property descriptor is nil, skip it to access // the local candidate header. - _ = _pop(from: &buffer, as: Int32.self, count: genericParamCount) + _ = unsafe _pop(from: &buffer, as: Int32.self, count: genericParamCount) continue } // Grab the generic parameter accessors to pass to the external component. 
- let externalArgs = _pop(from: &buffer, as: Int32.self, + let externalArgs = unsafe _pop(from: &buffer, as: Int32.self, count: genericParamCount) // Grab the header for the local candidate in case we need it for // a computed property. - let localCandidateHeader = _pop(from: &buffer, + let localCandidateHeader = unsafe _pop(from: &buffer, as: RawKeyPathComponent.Header.self) - let localCandidateSize = localCandidateHeader.patternComponentBodySize + let localCandidateSize = unsafe localCandidateHeader.patternComponentBodySize _internalInvariant({ expectedPop += localCandidateSize + 4 return true }()) - let descriptorSize = descriptorHeader.propertyDescriptorBodySize - var descriptorBuffer = UnsafeRawBufferPointer(start: descriptor + 4, + let descriptorSize = unsafe descriptorHeader.propertyDescriptorBodySize + var descriptorBuffer = unsafe UnsafeRawBufferPointer(start: descriptor + 4, count: descriptorSize) // Look at what kind of component the external property has. - switch descriptorHeader.kind { + switch unsafe descriptorHeader.kind { case .struct, .class: // A stored component. We can instantiate it // without help from the local candidate. - _ = _pop(from: &buffer, as: UInt8.self, count: localCandidateSize) + _ = unsafe _pop(from: &buffer, as: UInt8.self, count: localCandidateSize) - visitStored(header: descriptorHeader, + unsafe visitStored(header: descriptorHeader, componentBuffer: &descriptorBuffer) case .computed: // A computed component. The accessors come from the descriptor. let (idValueBase, idValue, getter, setter) - = popComputedAccessors(header: descriptorHeader, + = unsafe popComputedAccessors(header: descriptorHeader, componentBuffer: &descriptorBuffer) // Get the arguments from the external descriptor and/or local candidate // component. let arguments: KeyPathPatternComputedArguments? - if localCandidateHeader.kind == .computed + if unsafe localCandidateHeader.kind == .computed && localCandidateHeader.hasComputedArguments { // If both have arguments, then we have to build a bit of a chimera. // The canonical identity and accessors come from the descriptor, // but the argument equality/hash handling is still as described // in the local candidate. // We don't need the local candidate's accessors. - _ = popComputedAccessors(header: localCandidateHeader, + _ = unsafe popComputedAccessors(header: localCandidateHeader, componentBuffer: &buffer) // We do need the local arguments. - arguments = popComputedArguments(header: localCandidateHeader, + unsafe arguments = unsafe popComputedArguments(header: localCandidateHeader, componentBuffer: &buffer) } else { // If the local candidate doesn't have arguments, we don't need // anything from it at all. - _ = _pop(from: &buffer, as: UInt8.self, count: localCandidateSize) - arguments = nil + _ = unsafe _pop(from: &buffer, as: UInt8.self, count: localCandidateSize) + unsafe arguments = nil } - walker.visitComputedComponent( + unsafe walker.visitComputedComponent( mutating: descriptorHeader.isComputedMutating, idKind: descriptorHeader.computedIDKind, idResolution: descriptorHeader.computedIDResolution, @@ -3392,24 +3403,25 @@ internal func _walkKeyPathPattern( """) // Break if this is the last component. - if buffer.isEmpty { break } + if unsafe buffer.isEmpty { break } // Otherwise, pop the intermediate component type accessor and // go around again. 
- let componentTypeBase = buffer.baseAddress._unsafelyUnwrappedUnchecked - let componentTypeOffset = _pop(from: &buffer, as: Int32.self) - let componentTypeRef = _resolveRelativeAddress(componentTypeBase, + let componentTypeBase = unsafe buffer.baseAddress._unsafelyUnwrappedUnchecked + let componentTypeOffset = unsafe _pop(from: &buffer, as: Int32.self) + let componentTypeRef = unsafe _resolveRelativeAddress(componentTypeBase, componentTypeOffset) - walker.visitIntermediateComponentType(metadataRef: componentTypeRef) - _internalInvariant(!buffer.isEmpty) + unsafe walker.visitIntermediateComponentType(metadataRef: componentTypeRef) + unsafe _internalInvariant(!buffer.isEmpty) } // We should have walked the entire pattern. - _internalInvariant(buffer.isEmpty, "did not walk entire pattern buffer") + unsafe _internalInvariant(buffer.isEmpty, "did not walk entire pattern buffer") walker.finish() } @_unavailableInEmbedded +@unsafe internal struct GetKeyPathClassAndInstanceSizeFromPattern : KeyPathPatternVisitor { // start with one word for the header @@ -3426,25 +3438,25 @@ internal struct GetKeyPathClassAndInstanceSizeFromPattern var isPureStruct: [Bool] = [] init(patternArgs: UnsafeRawPointer?) { - self.patternArgs = patternArgs + unsafe self.patternArgs = unsafe patternArgs } mutating func roundUpToPointerAlignment() { - size = MemoryLayout._roundingUpToAlignment(size) + unsafe size = unsafe MemoryLayout._roundingUpToAlignment(size) } mutating func visitHeader(genericEnvironment: UnsafeRawPointer?, rootMetadataRef: MetadataReference, leafMetadataRef: MetadataReference, kvcCompatibilityString: UnsafeRawPointer?) { - self.genericEnvironment = genericEnvironment + unsafe self.genericEnvironment = unsafe genericEnvironment // Get the root and leaf type metadata so we can form the class type // for the entire key path. - root = _resolveKeyPathMetadataReference( + unsafe root = unsafe _resolveKeyPathMetadataReference( rootMetadataRef, genericEnvironment: genericEnvironment, arguments: patternArgs) - leaf = _resolveKeyPathMetadataReference( + unsafe leaf = unsafe _resolveKeyPathMetadataReference( leafMetadataRef, genericEnvironment: genericEnvironment, arguments: patternArgs) @@ -3458,23 +3470,23 @@ internal struct GetKeyPathClassAndInstanceSizeFromPattern if mutable { switch kind { case .class: - capability = .reference + unsafe capability = .reference case .struct: break } } else { // Immutable properties can only be read. - capability = .readOnly + unsafe capability = .readOnly } // The size of the instantiated component depends on whether we can fit // the offset inline. - switch offset { + switch unsafe offset { case .inline: - size += 4 + unsafe size += 4 case .outOfLine, .unresolvedFieldOffset, .unresolvedIndirectOffset: - size += 8 + unsafe size += 8 } } @@ -3487,15 +3499,15 @@ internal struct GetKeyPathClassAndInstanceSizeFromPattern setter: UnsafeRawPointer?, arguments: KeyPathPatternComputedArguments?, externalArgs: UnsafeBufferPointer?) { - let settable = setter != nil + let settable = unsafe setter != nil switch (settable, mutating) { case (false, false): // If the property is get-only, the capability becomes read-only, unless // we get another reference-writable component. - capability = .readOnly + unsafe capability = .readOnly case (true, false): - capability = .reference + unsafe capability = .reference case (true, true): // Writable if the base is. No effect. break @@ -3504,88 +3516,88 @@ internal struct GetKeyPathClassAndInstanceSizeFromPattern } // Save space for the header... 
- size += 4 - roundUpToPointerAlignment() + unsafe size += 4 + unsafe roundUpToPointerAlignment() // ...id, getter, and maybe setter... - size += MemoryLayout.size * 2 + unsafe size += MemoryLayout.size * 2 if settable { - size += MemoryLayout.size + unsafe size += MemoryLayout.size } // ...and the arguments, if any. let argumentHeaderSize = MemoryLayout.size * 2 - switch (arguments, externalArgs) { + switch unsafe (arguments, externalArgs) { case (nil, nil): break case (let arguments?, nil): - size += argumentHeaderSize + unsafe size += argumentHeaderSize // If we have arguments, calculate how much space they need by invoking // the layout function. - let (addedSize, addedAlignmentMask) = arguments.getLayout(patternArgs) + let (addedSize, addedAlignmentMask) = unsafe arguments.getLayout(patternArgs) // TODO: Handle over-aligned values _internalInvariant(addedAlignmentMask < MemoryLayout.alignment, "overaligned computed property element not supported") - size += addedSize + unsafe size += addedSize case (let arguments?, let externalArgs?): // If we're referencing an external declaration, and it takes captured // arguments, then we have to build a bit of a chimera. The canonical // identity and accessors come from the descriptor, but the argument // handling is still as described in the local candidate. - size += argumentHeaderSize - let (addedSize, addedAlignmentMask) = arguments.getLayout(patternArgs) + unsafe size += argumentHeaderSize + let (addedSize, addedAlignmentMask) = unsafe arguments.getLayout(patternArgs) // TODO: Handle over-aligned values _internalInvariant(addedAlignmentMask < MemoryLayout.alignment, "overaligned computed property element not supported") - size += addedSize + unsafe size += addedSize // We also need to store the size of the local arguments so we can // find the external component arguments. - roundUpToPointerAlignment() - size += RawKeyPathComponent.Header.externalWithArgumentsExtraSize - size += MemoryLayout.size * externalArgs.count + unsafe roundUpToPointerAlignment() + unsafe size += RawKeyPathComponent.Header.externalWithArgumentsExtraSize + unsafe size += MemoryLayout.size * externalArgs.count case (nil, let externalArgs?): // If we're instantiating an external property with a local // candidate that has no arguments, then things are a little // easier. We only need to instantiate the generic // arguments for the external component's accessors. - size += argumentHeaderSize - size += MemoryLayout.size * externalArgs.count + unsafe size += argumentHeaderSize + unsafe size += MemoryLayout.size * externalArgs.count } } mutating func visitOptionalChainComponent() { // Optional chaining forces the entire keypath to be read-only, even if // there are further reference-writable components. - didChain = true - capability = .readOnly - size += 4 + unsafe didChain = true + unsafe capability = .readOnly + unsafe size += 4 } mutating func visitOptionalWrapComponent() { // Optional chaining forces the entire keypath to be read-only, even if // there are further reference-writable components. - didChain = true - capability = .readOnly - size += 4 + unsafe didChain = true + unsafe capability = .readOnly + unsafe size += 4 } mutating func visitOptionalForceComponent() { // Force-unwrapping passes through the mutability of the preceding keypath. - size += 4 + unsafe size += 4 } mutating func visitIntermediateComponentType(metadataRef _: MetadataReference) { // The instantiated component type will be stored in the instantiated // object. 
- roundUpToPointerAlignment() - size += MemoryLayout.size + unsafe roundUpToPointerAlignment() + unsafe size += MemoryLayout.size } mutating func finish() { - sizeWithMaxSize = size - sizeWithMaxSize = MemoryLayout._roundingUpToAlignment(sizeWithMaxSize) - sizeWithMaxSize &+= MemoryLayout.size + unsafe sizeWithMaxSize = unsafe size + unsafe sizeWithMaxSize = unsafe MemoryLayout._roundingUpToAlignment(sizeWithMaxSize) + unsafe sizeWithMaxSize &+= MemoryLayout.size } } @@ -3600,18 +3612,18 @@ internal func _getKeyPathClassAndInstanceSizeFromPattern( sizeWithMaxSize: Int, alignmentMask: Int ) { - var walker = GetKeyPathClassAndInstanceSizeFromPattern(patternArgs: arguments) - _walkKeyPathPattern(pattern, walker: &walker) + var walker = unsafe GetKeyPathClassAndInstanceSizeFromPattern(patternArgs: arguments) + unsafe _walkKeyPathPattern(pattern, walker: &walker) // Chaining always renders the whole key path read-only. - if walker.didChain { - walker.capability = .readOnly + if unsafe walker.didChain { + unsafe walker.capability = .readOnly } // Grab the class object for the key path type we'll end up with. func openRoot(_: Root.Type) -> AnyKeyPath.Type { func openLeaf(_: Leaf.Type) -> AnyKeyPath.Type { - switch walker.capability { + switch unsafe walker.capability { case .readOnly: return KeyPath.self case .value: @@ -3620,11 +3632,11 @@ internal func _getKeyPathClassAndInstanceSizeFromPattern( return ReferenceWritableKeyPath.self } } - return _openExistential(walker.leaf!, do: openLeaf) + return unsafe _openExistential(walker.leaf!, do: openLeaf) } - let classTy = _openExistential(walker.root!, do: openRoot) + let classTy = unsafe _openExistential(walker.root!, do: openRoot) - return (keyPathClass: classTy, + return unsafe (keyPathClass: classTy, rootType: walker.root!, size: walker.size, sizeWithMaxSize: walker.sizeWithMaxSize, @@ -3637,6 +3649,7 @@ internal func _getTypeSize(_: Type.Type) -> Int { } @_unavailableInEmbedded +@unsafe internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { var destData: UnsafeMutableRawBufferPointer var genericEnvironment: UnsafeRawPointer? @@ -3649,11 +3662,11 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { init(destData: UnsafeMutableRawBufferPointer, patternArgs: UnsafeRawPointer?, root: Any.Type) { - self.destData = destData - self.patternArgs = patternArgs - self.base = root + unsafe self.destData = unsafe destData + unsafe self.patternArgs = unsafe patternArgs + unsafe self.base = root - self.maxSize = _openExistential(root, do: _getTypeSize(_:)) + unsafe self.maxSize = _openExistential(root, do: _getTypeSize(_:)) } // Track the triviality of the resulting object data. 
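The sizing pass above and the instantiation pass that follows rely on the same two low-level moves: round the running byte count up to pointer alignment before a pointer-sized field, and append a value's raw bytes at a suitably aligned destination. Restated with public API for reference (illustrative helpers; the stdlib versions use `MemoryLayout._roundingUpToAlignment` and `_memcpy`):

```swift
// Round `size` up to the next multiple of `alignment` (a power of two).
// For example, roundedUp(12, toAlignment: 8) == 16.
func roundedUp(_ size: Int, toAlignment alignment: Int) -> Int {
  precondition(alignment > 0 && alignment & (alignment - 1) == 0)
  return (size + alignment - 1) & ~(alignment - 1)
}

// Append one value's bytes to a raw destination buffer, assuming `dest`
// is already aligned for `T`; mirrors what `pushDest` does below.
func push<T>(_ value: T, into dest: inout UnsafeMutableRawBufferPointer) {
  let size = MemoryLayout<T>.size
  precondition(dest.count >= size)
  unsafe withUnsafeBytes(of: value) { source in
    unsafe dest.baseAddress!.copyMemory(
      from: source.baseAddress!, byteCount: size)
  }
  dest = unsafe UnsafeMutableRawBufferPointer(
    start: dest.baseAddress! + size,
    count: dest.count - size)
}
```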
@@ -3668,22 +3681,22 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { misalign: Int ) { let alignment = MemoryLayout.alignment - var baseAddress = destData.baseAddress._unsafelyUnwrappedUnchecked - var misalign = Int(bitPattern: baseAddress) & (alignment - 1) + var baseAddress = unsafe destData.baseAddress._unsafelyUnwrappedUnchecked + var misalign = unsafe Int(bitPattern: baseAddress) & (alignment - 1) if misalign != 0 { misalign = alignment - misalign - baseAddress = baseAddress.advanced(by: misalign) + unsafe baseAddress = unsafe baseAddress.advanced(by: misalign) } - return (baseAddress, misalign) + return unsafe (baseAddress, misalign) } mutating func pushDest(_ value: T) { let size = MemoryLayout.size - let (baseAddress, misalign) = adjustDestForAlignment(of: T.self) - _withUnprotectedUnsafeBytes(of: value) { - _memcpy(dest: baseAddress, src: $0.baseAddress._unsafelyUnwrappedUnchecked, + let (baseAddress, misalign) = unsafe adjustDestForAlignment(of: T.self) + unsafe _withUnprotectedUnsafeBytes(of: value) { + unsafe _memcpy(dest: baseAddress, src: $0.baseAddress._unsafelyUnwrappedUnchecked, size: UInt(size)) } - destData = UnsafeMutableRawBufferPointer( + unsafe destData = unsafe UnsafeMutableRawBufferPointer( start: baseAddress + size, count: destData.count - size - misalign) } @@ -3691,29 +3704,29 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { _ unsignedPointer: UnsafeRawPointer, discriminator: UInt64 ) { - let size = MemoryLayout.size + let size = unsafe MemoryLayout.size let (baseAddress, misalign) = - adjustDestForAlignment(of: UnsafeRawPointer.self) - baseAddress._storeFunctionPointerWithAddressDiscrimination( + unsafe adjustDestForAlignment(of: UnsafeRawPointer.self) + unsafe baseAddress._storeFunctionPointerWithAddressDiscrimination( unsignedPointer, discriminator: discriminator) - destData = UnsafeMutableRawBufferPointer( + unsafe destData = unsafe UnsafeMutableRawBufferPointer( start: baseAddress + size, count: destData.count - size - misalign) } mutating func updatePreviousComponentAddr() -> UnsafeMutableRawPointer? { - let oldValue = previousComponentAddr - previousComponentAddr = destData.baseAddress._unsafelyUnwrappedUnchecked - return oldValue + let oldValue = unsafe previousComponentAddr + unsafe previousComponentAddr = unsafe destData.baseAddress._unsafelyUnwrappedUnchecked + return unsafe oldValue } mutating func visitHeader(genericEnvironment: UnsafeRawPointer?, rootMetadataRef: MetadataReference, leafMetadataRef: MetadataReference, kvcCompatibilityString: UnsafeRawPointer?) 
{ - self.genericEnvironment = genericEnvironment + unsafe self.genericEnvironment = unsafe genericEnvironment - let leaf = _resolveKeyPathMetadataReference( + let leaf = unsafe _resolveKeyPathMetadataReference( leafMetadataRef, genericEnvironment: genericEnvironment, arguments: patternArgs @@ -3721,74 +3734,74 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { let size = _openExistential(leaf, do: _getTypeSize(_:)) - maxSize = Swift.max(maxSize, size) + unsafe maxSize = unsafe Swift.max(maxSize, size) } mutating func visitStoredComponent(kind: KeyPathStructOrClass, mutable: Bool, offset: KeyPathPatternStoredOffset) { - let previous = updatePreviousComponentAddr() + let previous = unsafe updatePreviousComponentAddr() switch kind { case .struct: - isPureStruct.append(true) + unsafe isPureStruct.append(true) default: - isPureStruct.append(false) + unsafe isPureStruct.append(false) } switch kind { case .class: // A mutable class property can end the reference prefix. if mutable { - endOfReferencePrefixComponent = previous + unsafe endOfReferencePrefixComponent = unsafe previous } fallthrough case .struct: // Resolve the offset. - switch offset { + switch unsafe offset { case .inline(let value): - let header = RawKeyPathComponent.Header(stored: kind, + let header = unsafe RawKeyPathComponent.Header(stored: kind, mutable: mutable, inlineOffset: value) - pushDest(header) + unsafe pushDest(header) switch kind { case .struct: - structOffset += value + unsafe structOffset += value default: break } case .outOfLine(let offset): - let header = RawKeyPathComponent.Header(storedWithOutOfLineOffset: kind, + let header = unsafe RawKeyPathComponent.Header(storedWithOutOfLineOffset: kind, mutable: mutable) - pushDest(header) - pushDest(offset) + unsafe pushDest(header) + unsafe pushDest(offset) case .unresolvedFieldOffset(let offsetOfOffset): // Look up offset in the type metadata. The value in the pattern is // the offset within the metadata object. - let metadataPtr = unsafeBitCast(base, to: UnsafeRawPointer.self) + let metadataPtr = unsafe unsafeBitCast(base, to: UnsafeRawPointer.self) let offset: UInt32 switch kind { case .class: - offset = UInt32(metadataPtr.load(fromByteOffset: Int(offsetOfOffset), + offset = unsafe UInt32(metadataPtr.load(fromByteOffset: Int(offsetOfOffset), as: UInt.self)) case .struct: - offset = UInt32(metadataPtr.load(fromByteOffset: Int(offsetOfOffset), + offset = unsafe UInt32(metadataPtr.load(fromByteOffset: Int(offsetOfOffset), as: UInt32.self)) - structOffset += offset + unsafe structOffset += offset } - let header = RawKeyPathComponent.Header(storedWithOutOfLineOffset: kind, + let header = unsafe RawKeyPathComponent.Header(storedWithOutOfLineOffset: kind, mutable: mutable) - pushDest(header) - pushDest(offset) + unsafe pushDest(header) + unsafe pushDest(offset) case .unresolvedIndirectOffset(let pointerToOffset): // Look up offset in the indirectly-referenced variable we have a // pointer. 
- _internalInvariant(pointerToOffset.pointee <= UInt32.max) - let offset = UInt32(truncatingIfNeeded: pointerToOffset.pointee) - let header = RawKeyPathComponent.Header(storedWithOutOfLineOffset: kind, + unsafe _internalInvariant(pointerToOffset.pointee <= UInt32.max) + let offset = unsafe UInt32(truncatingIfNeeded: pointerToOffset.pointee) + let header = unsafe RawKeyPathComponent.Header(storedWithOutOfLineOffset: kind, mutable: mutable) - pushDest(header) - pushDest(offset) + unsafe pushDest(header) + unsafe pushDest(offset) } } } @@ -3802,12 +3815,12 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { setter: UnsafeRawPointer?, arguments: KeyPathPatternComputedArguments?, externalArgs: UnsafeBufferPointer?) { - isPureStruct.append(false) - let previous = updatePreviousComponentAddr() - let settable = setter != nil + unsafe isPureStruct.append(false) + let previous = unsafe updatePreviousComponentAddr() + let settable = unsafe setter != nil // A nonmutating settable property can end the reference prefix. if settable && !mutating { - endOfReferencePrefixComponent = previous + unsafe endOfReferencePrefixComponent = unsafe previous } // Resolve the ID. @@ -3818,64 +3831,64 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { _internalInvariant(idResolution == .resolved) // Zero-extend the integer value to get the instantiated id. let value = UInt(UInt32(bitPattern: idValue)) - resolvedID = UnsafeRawPointer(bitPattern: value) + unsafe resolvedID = unsafe UnsafeRawPointer(bitPattern: value) case .pointer: // If the pointer ID is unresolved, then it needs work to get to // the final value. switch idResolution { case .resolved: - resolvedID = _resolveRelativeAddress(idValueBase, idValue) + unsafe resolvedID = unsafe _resolveRelativeAddress(idValueBase, idValue) break case .resolvedAbsolute: let value = UInt(UInt32(bitPattern: idValue)) - resolvedID = UnsafeRawPointer(bitPattern: value) + unsafe resolvedID = unsafe UnsafeRawPointer(bitPattern: value) break case .indirectPointer: // The pointer in the pattern is an indirect pointer to the real // identifier pointer. - let absoluteID = _resolveRelativeAddress(idValueBase, idValue) - resolvedID = absoluteID + let absoluteID = unsafe _resolveRelativeAddress(idValueBase, idValue) + unsafe resolvedID = unsafe absoluteID .load(as: UnsafeRawPointer?.self) case .functionCall: // The pointer in the pattern is to a function that generates the // identifier pointer. typealias Resolver = @convention(c) (UnsafeRawPointer?) -> UnsafeRawPointer? - let absoluteID = _resolveCompactFunctionPointer(idValueBase, idValue) - let resolverSigned = _PtrAuth.sign( + let absoluteID = unsafe _resolveCompactFunctionPointer(idValueBase, idValue) + let resolverSigned = unsafe _PtrAuth.sign( pointer: absoluteID, key: .processIndependentCode, discriminator: _PtrAuth.discriminator(for: Resolver.self)) - let resolverFn = unsafeBitCast(resolverSigned, + let resolverFn = unsafe unsafeBitCast(resolverSigned, to: Resolver.self) - resolvedID = resolverFn(patternArgs) + unsafe resolvedID = unsafe resolverFn(patternArgs) } } // Bring over the header, getter, and setter. 
- let header = RawKeyPathComponent.Header(computedWithIDKind: idKind, + let header = unsafe RawKeyPathComponent.Header(computedWithIDKind: idKind, mutating: mutating, settable: settable, hasArguments: arguments != nil || externalArgs != nil, instantiatedFromExternalWithArguments: arguments != nil && externalArgs != nil) - pushDest(header) - pushDest(resolvedID) - pushAddressDiscriminatedFunctionPointer(getter, + unsafe pushDest(header) + unsafe pushDest(resolvedID) + unsafe pushAddressDiscriminatedFunctionPointer(getter, discriminator: ComputedAccessorsPtr.getterPtrAuthKey) - if let setter = setter { - pushAddressDiscriminatedFunctionPointer(setter, + if let setter = unsafe setter { + unsafe pushAddressDiscriminatedFunctionPointer(setter, discriminator: mutating ? ComputedAccessorsPtr.mutatingSetterPtrAuthKey : ComputedAccessorsPtr.nonmutatingSetterPtrAuthKey) } - if let arguments = arguments { + if let arguments = unsafe arguments { // Instantiate the arguments. - let (baseSize, alignmentMask) = arguments.getLayout(patternArgs) + let (baseSize, alignmentMask) = unsafe arguments.getLayout(patternArgs) _internalInvariant(alignmentMask < MemoryLayout.alignment, "overaligned computed arguments not implemented yet") @@ -3884,104 +3897,104 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { // If an external property descriptor also has arguments, they'll be // added to the end with pointer alignment. - if let externalArgs = externalArgs { + if let externalArgs = unsafe externalArgs { totalSize = MemoryLayout._roundingUpToAlignment(totalSize) totalSize += MemoryLayout.size * externalArgs.count } - pushDest(totalSize) - pushDest(arguments.witnesses) + unsafe pushDest(totalSize) + unsafe pushDest(arguments.witnesses) // A nonnull destructor in the witnesses file indicates the instantiated // payload is nontrivial. - if let _ = arguments.witnesses.destroy { - isTrivial = false + if let _ = unsafe arguments.witnesses.destroy { + unsafe isTrivial = false } // If the descriptor has arguments, store the size of its specific // arguments here, so we can drop them when trying to invoke // the component's witnesses. - if let externalArgs = externalArgs { - pushDest(externalArgs.count * MemoryLayout.size) + if let externalArgs = unsafe externalArgs { + unsafe pushDest(externalArgs.count * MemoryLayout.size) } // Initialize the local candidate arguments here. - _internalInvariant(Int(bitPattern: destData.baseAddress) & alignmentMask == 0, + unsafe _internalInvariant(Int(bitPattern: destData.baseAddress) & alignmentMask == 0, "argument destination not aligned") - arguments.initializer(patternArgs, + unsafe arguments.initializer(patternArgs, destData.baseAddress._unsafelyUnwrappedUnchecked) - destData = UnsafeMutableRawBufferPointer( + unsafe destData = unsafe UnsafeMutableRawBufferPointer( start: destData.baseAddress._unsafelyUnwrappedUnchecked + baseSize, count: destData.count - baseSize) } - if let externalArgs = externalArgs { - if arguments == nil { + if let externalArgs = unsafe externalArgs { + if unsafe arguments == nil { // If we're instantiating an external property without any local // arguments, then we only need to instantiate the arguments to the // property descriptor. 
let stride = MemoryLayout.size * externalArgs.count - pushDest(stride) - pushDest(__swift_keyPathGenericWitnessTable_addr()) + unsafe pushDest(stride) + unsafe pushDest(__swift_keyPathGenericWitnessTable_addr()) } // Write the descriptor's generic arguments, which should all be relative // references to metadata accessor functions. for i in externalArgs.indices { - let base = externalArgs.baseAddress._unsafelyUnwrappedUnchecked + i - let offset = base.pointee - let metadataRef = _resolveRelativeAddress(UnsafeRawPointer(base), offset) - let result = _resolveKeyPathGenericArgReference( + let base = unsafe externalArgs.baseAddress._unsafelyUnwrappedUnchecked + i + let offset = unsafe base.pointee + let metadataRef = unsafe _resolveRelativeAddress(UnsafeRawPointer(base), offset) + let result = unsafe _resolveKeyPathGenericArgReference( metadataRef, genericEnvironment: genericEnvironment, arguments: patternArgs) - pushDest(result) + unsafe pushDest(result) } } } mutating func visitOptionalChainComponent() { - isPureStruct.append(false) - let _ = updatePreviousComponentAddr() - let header = RawKeyPathComponent.Header(optionalChain: ()) - pushDest(header) + unsafe isPureStruct.append(false) + let _ = unsafe updatePreviousComponentAddr() + let header = unsafe RawKeyPathComponent.Header(optionalChain: ()) + unsafe pushDest(header) } mutating func visitOptionalWrapComponent() { - isPureStruct.append(false) - let _ = updatePreviousComponentAddr() - let header = RawKeyPathComponent.Header(optionalWrap: ()) - pushDest(header) + unsafe isPureStruct.append(false) + let _ = unsafe updatePreviousComponentAddr() + let header = unsafe RawKeyPathComponent.Header(optionalWrap: ()) + unsafe pushDest(header) } mutating func visitOptionalForceComponent() { - isPureStruct.append(false) - let _ = updatePreviousComponentAddr() - let header = RawKeyPathComponent.Header(optionalForce: ()) - pushDest(header) + unsafe isPureStruct.append(false) + let _ = unsafe updatePreviousComponentAddr() + let header = unsafe RawKeyPathComponent.Header(optionalForce: ()) + unsafe pushDest(header) } mutating func visitIntermediateComponentType(metadataRef: MetadataReference) { // Get the metadata for the intermediate type. - let metadata = _resolveKeyPathMetadataReference( + let metadata = unsafe _resolveKeyPathMetadataReference( metadataRef, genericEnvironment: genericEnvironment, arguments: patternArgs) - pushDest(metadata) - base = metadata + unsafe pushDest(metadata) + unsafe base = metadata let size = _openExistential(metadata, do: _getTypeSize(_:)) - maxSize = Swift.max(maxSize, size) + unsafe maxSize = unsafe Swift.max(maxSize, size) } mutating func finish() { // Finally, push our max size at the end of the buffer (and round up if // necessary). - pushDest(maxSize) + unsafe pushDest(maxSize) // Should have filled the entire buffer by the time we reach the end of the // pattern. - _internalInvariant(destData.isEmpty, + unsafe _internalInvariant(destData.isEmpty, "should have filled entire destination buffer") } } @@ -3990,6 +4003,7 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor { // In debug builds of the standard library, check that instantiation produces // components whose sizes are consistent with the sizing visitor pass. 
@_unavailableInEmbedded +@unsafe internal struct ValidatingInstantiateKeyPathBuffer: KeyPathPatternVisitor { var sizeVisitor: GetKeyPathClassAndInstanceSizeFromPattern var instantiateVisitor: InstantiateKeyPathBuffer @@ -3999,20 +4013,20 @@ internal struct ValidatingInstantiateKeyPathBuffer: KeyPathPatternVisitor { init(sizeVisitor: GetKeyPathClassAndInstanceSizeFromPattern, instantiateVisitor: InstantiateKeyPathBuffer) { - self.sizeVisitor = sizeVisitor - self.instantiateVisitor = instantiateVisitor - origDest = self.instantiateVisitor.destData.baseAddress._unsafelyUnwrappedUnchecked + unsafe self.sizeVisitor = unsafe sizeVisitor + unsafe self.instantiateVisitor = unsafe instantiateVisitor + unsafe origDest = unsafe self.instantiateVisitor.destData.baseAddress._unsafelyUnwrappedUnchecked } mutating func visitHeader(genericEnvironment: UnsafeRawPointer?, rootMetadataRef: MetadataReference, leafMetadataRef: MetadataReference, kvcCompatibilityString: UnsafeRawPointer?) { - sizeVisitor.visitHeader(genericEnvironment: genericEnvironment, + unsafe sizeVisitor.visitHeader(genericEnvironment: genericEnvironment, rootMetadataRef: rootMetadataRef, leafMetadataRef: leafMetadataRef, kvcCompatibilityString: kvcCompatibilityString) - instantiateVisitor.visitHeader(genericEnvironment: genericEnvironment, + unsafe instantiateVisitor.visitHeader(genericEnvironment: genericEnvironment, rootMetadataRef: rootMetadataRef, leafMetadataRef: leafMetadataRef, kvcCompatibilityString: kvcCompatibilityString) @@ -4020,13 +4034,13 @@ internal struct ValidatingInstantiateKeyPathBuffer: KeyPathPatternVisitor { mutating func visitStoredComponent(kind: KeyPathStructOrClass, mutable: Bool, offset: KeyPathPatternStoredOffset) { - sizeVisitor.visitStoredComponent(kind: kind, mutable: mutable, + unsafe sizeVisitor.visitStoredComponent(kind: kind, mutable: mutable, offset: offset) - instantiateVisitor.visitStoredComponent(kind: kind, mutable: mutable, + unsafe instantiateVisitor.visitStoredComponent(kind: kind, mutable: mutable, offset: offset) - checkSizeConsistency() - structOffset = instantiateVisitor.structOffset - isPureStruct.append(contentsOf: instantiateVisitor.isPureStruct) + unsafe checkSizeConsistency() + unsafe structOffset = unsafe instantiateVisitor.structOffset + unsafe isPureStruct.append(contentsOf: instantiateVisitor.isPureStruct) } mutating func visitComputedComponent(mutating: Bool, idKind: KeyPathComputedIDKind, @@ -4037,7 +4051,7 @@ internal struct ValidatingInstantiateKeyPathBuffer: KeyPathPatternVisitor { setter: UnsafeRawPointer?, arguments: KeyPathPatternComputedArguments?, externalArgs: UnsafeBufferPointer?) { - sizeVisitor.visitComputedComponent(mutating: mutating, + unsafe sizeVisitor.visitComputedComponent(mutating: mutating, idKind: idKind, idResolution: idResolution, idValueBase: idValueBase, @@ -4046,7 +4060,7 @@ internal struct ValidatingInstantiateKeyPathBuffer: KeyPathPatternVisitor { setter: setter, arguments: arguments, externalArgs: externalArgs) - instantiateVisitor.visitComputedComponent(mutating: mutating, + unsafe instantiateVisitor.visitComputedComponent(mutating: mutating, idKind: idKind, idResolution: idResolution, idValueBase: idValueBase, @@ -4058,49 +4072,49 @@ internal struct ValidatingInstantiateKeyPathBuffer: KeyPathPatternVisitor { // Note: For this function and the ones below, modification of structOffset // is omitted since these types of KeyPaths won't have a pureStruct // offset anyway. 
- isPureStruct.append(contentsOf: instantiateVisitor.isPureStruct) - checkSizeConsistency() + unsafe isPureStruct.append(contentsOf: instantiateVisitor.isPureStruct) + unsafe checkSizeConsistency() } mutating func visitOptionalChainComponent() { - sizeVisitor.visitOptionalChainComponent() - instantiateVisitor.visitOptionalChainComponent() - isPureStruct.append(contentsOf: instantiateVisitor.isPureStruct) - checkSizeConsistency() + unsafe sizeVisitor.visitOptionalChainComponent() + unsafe instantiateVisitor.visitOptionalChainComponent() + unsafe isPureStruct.append(contentsOf: instantiateVisitor.isPureStruct) + unsafe checkSizeConsistency() } mutating func visitOptionalWrapComponent() { - sizeVisitor.visitOptionalWrapComponent() - instantiateVisitor.visitOptionalWrapComponent() - isPureStruct.append(contentsOf: instantiateVisitor.isPureStruct) - checkSizeConsistency() + unsafe sizeVisitor.visitOptionalWrapComponent() + unsafe instantiateVisitor.visitOptionalWrapComponent() + unsafe isPureStruct.append(contentsOf: instantiateVisitor.isPureStruct) + unsafe checkSizeConsistency() } mutating func visitOptionalForceComponent() { - sizeVisitor.visitOptionalForceComponent() - instantiateVisitor.visitOptionalForceComponent() - isPureStruct.append(contentsOf: instantiateVisitor.isPureStruct) - checkSizeConsistency() + unsafe sizeVisitor.visitOptionalForceComponent() + unsafe instantiateVisitor.visitOptionalForceComponent() + unsafe isPureStruct.append(contentsOf: instantiateVisitor.isPureStruct) + unsafe checkSizeConsistency() } mutating func visitIntermediateComponentType(metadataRef: MetadataReference) { - sizeVisitor.visitIntermediateComponentType(metadataRef: metadataRef) - instantiateVisitor.visitIntermediateComponentType(metadataRef: metadataRef) - isPureStruct.append(contentsOf: instantiateVisitor.isPureStruct) - checkSizeConsistency() + unsafe sizeVisitor.visitIntermediateComponentType(metadataRef: metadataRef) + unsafe instantiateVisitor.visitIntermediateComponentType(metadataRef: metadataRef) + unsafe isPureStruct.append(contentsOf: instantiateVisitor.isPureStruct) + unsafe checkSizeConsistency() } mutating func finish() { - sizeVisitor.finish() - instantiateVisitor.finish() - isPureStruct.append(contentsOf: instantiateVisitor.isPureStruct) - checkSizeConsistency(checkMaxSize: true) + unsafe sizeVisitor.finish() + unsafe instantiateVisitor.finish() + unsafe isPureStruct.append(contentsOf: instantiateVisitor.isPureStruct) + unsafe checkSizeConsistency(checkMaxSize: true) } func checkSizeConsistency(checkMaxSize: Bool = false) { - let nextDest = instantiateVisitor.destData.baseAddress._unsafelyUnwrappedUnchecked - let curSize = nextDest - origDest + MemoryLayout.size + let nextDest = unsafe instantiateVisitor.destData.baseAddress._unsafelyUnwrappedUnchecked + let curSize = unsafe nextDest - origDest + MemoryLayout.size let sizeVisitorSize = if checkMaxSize { - sizeVisitor.sizeWithMaxSize + unsafe sizeVisitor.sizeWithMaxSize } else { - sizeVisitor.size + unsafe sizeVisitor.size } _internalInvariant(curSize == sizeVisitorSize, @@ -4117,22 +4131,22 @@ internal func _instantiateKeyPathBuffer( _ arguments: UnsafeRawPointer, _ sizeBeforeMaxSize: Int ) -> UInt32? 
{ - let destHeaderPtr = origDestData.baseAddress._unsafelyUnwrappedUnchecked - var destData = UnsafeMutableRawBufferPointer( + let destHeaderPtr = unsafe origDestData.baseAddress._unsafelyUnwrappedUnchecked + var destData = unsafe UnsafeMutableRawBufferPointer( start: destHeaderPtr.advanced(by: MemoryLayout.size), count: origDestData.count &- MemoryLayout.size) #if INTERNAL_CHECKS_ENABLED // If checks are enabled, use a validating walker that ensures that the // size pre-walk and instantiation walk are in sync. - let sizeWalker = GetKeyPathClassAndInstanceSizeFromPattern( + let sizeWalker = unsafe GetKeyPathClassAndInstanceSizeFromPattern( patternArgs: arguments) - let instantiateWalker = InstantiateKeyPathBuffer( + let instantiateWalker = unsafe InstantiateKeyPathBuffer( destData: destData, patternArgs: arguments, root: rootType) - var walker = ValidatingInstantiateKeyPathBuffer(sizeVisitor: sizeWalker, + var walker = unsafe ValidatingInstantiateKeyPathBuffer(sizeVisitor: sizeWalker, instantiateVisitor: instantiateWalker) #else var walker = InstantiateKeyPathBuffer( @@ -4141,44 +4155,44 @@ internal func _instantiateKeyPathBuffer( root: rootType) #endif - _walkKeyPathPattern(pattern, walker: &walker) + unsafe _walkKeyPathPattern(pattern, walker: &walker) #if INTERNAL_CHECKS_ENABLED - let isTrivial = walker.instantiateVisitor.isTrivial + let isTrivial = unsafe walker.instantiateVisitor.isTrivial let endOfReferencePrefixComponent = - walker.instantiateVisitor.endOfReferencePrefixComponent + unsafe walker.instantiateVisitor.endOfReferencePrefixComponent #else let isTrivial = walker.isTrivial let endOfReferencePrefixComponent = walker.endOfReferencePrefixComponent #endif // Write out the header. - let destHeader = KeyPathBuffer.Header( + let destHeader = unsafe KeyPathBuffer.Header( size: sizeBeforeMaxSize &- MemoryLayout.size, trivial: isTrivial, hasReferencePrefix: endOfReferencePrefixComponent != nil, isSingleComponent: walker.isPureStruct.count == 1 ) - destHeaderPtr.storeBytes(of: destHeader, as: KeyPathBuffer.Header.self) + unsafe destHeaderPtr.storeBytes(of: destHeader, as: KeyPathBuffer.Header.self) // Mark the reference prefix if there is one. - if let endOfReferencePrefixComponent = endOfReferencePrefixComponent { - var componentHeader = endOfReferencePrefixComponent + if let endOfReferencePrefixComponent = unsafe endOfReferencePrefixComponent { + var componentHeader = unsafe endOfReferencePrefixComponent .load(as: RawKeyPathComponent.Header.self) - componentHeader.endOfReferencePrefix = true - endOfReferencePrefixComponent.storeBytes(of: componentHeader, + unsafe componentHeader.endOfReferencePrefix = true + unsafe endOfReferencePrefixComponent.storeBytes(of: componentHeader, as: RawKeyPathComponent.Header.self) } var isPureStruct = true var offset: UInt32? = nil - for value in walker.isPureStruct { + for value in unsafe walker.isPureStruct { isPureStruct = isPureStruct && value } if isPureStruct { - offset = walker.structOffset + offset = unsafe walker.structOffset } return offset @@ -4206,28 +4220,28 @@ public func _createOffsetBasedKeyPath( // boundary. 
let kpBufferSize = MemoryLayout.size + MemoryLayout.size let kp = kpTy._create(capacityInBytes: kpBufferSize) { - var builder = KeyPathBuffer.Builder($0) - let header = KeyPathBuffer.Header( + var builder = unsafe KeyPathBuffer.Builder($0) + let header = unsafe KeyPathBuffer.Header( size: kpBufferSize - MemoryLayout.size, trivial: true, hasReferencePrefix: false, isSingleComponent: true ) - builder.pushHeader(header) + unsafe builder.pushHeader(header) - let componentHeader = RawKeyPathComponent.Header( + let componentHeader = unsafe RawKeyPathComponent.Header( stored: _MetadataKind(root) == .struct ? .struct : .class, mutable: false, inlineOffset: UInt32(offset) ) - let component = RawKeyPathComponent( + let component = unsafe RawKeyPathComponent( header: componentHeader, body: UnsafeRawBufferPointer(start: nil, count: 0) ) - component.clone(into: &builder.buffer, endOfReferencePrefix: false) + unsafe component.clone(into: &builder.buffer, endOfReferencePrefix: false) } if _MetadataKind(root) == .struct { @@ -4249,7 +4263,7 @@ public func _rerootKeyPath( isSingleComponent, componentSize ) = existingKp.withBuffer { - ($0.trivial, $0.hasReferencePrefix, $0.isSingleComponent, $0.data.count) + unsafe ($0.trivial, $0.hasReferencePrefix, $0.isSingleComponent, $0.data.count) } let existingKpTy = type(of: existingKp) @@ -4283,23 +4297,23 @@ public func _rerootKeyPath( return newKpTy._create( capacityInBytes: capacity ) { - var builder = KeyPathBuffer.Builder($0) - let header = KeyPathBuffer.Header( + var builder = unsafe KeyPathBuffer.Builder($0) + let header = unsafe KeyPathBuffer.Header( size: componentSize, trivial: isTrivial, hasReferencePrefix: hasReferencePrefix, isSingleComponent: isSingleComponent ) - builder.pushHeader(header) + unsafe builder.pushHeader(header) existingKp.withBuffer { - var existingBuffer = $0 + var existingBuffer = unsafe $0 while true { - let (rawComponent, componentTy) = existingBuffer.next() + let (rawComponent, componentTy) = unsafe existingBuffer.next() - rawComponent.clone( + unsafe rawComponent.clone( into: &builder.buffer, endOfReferencePrefix: rawComponent.header.endOfReferencePrefix ) @@ -4311,7 +4325,7 @@ public func _rerootKeyPath( // Append the max size at the end of the existing keypath's buffer to the // end of the new keypath's buffer. - builder.push(existingBuffer.maxSize) + unsafe builder.push(existingBuffer.maxSize) } } as! 
PartialKeyPath } @@ -4336,23 +4350,23 @@ fileprivate func dynamicLibraryAddress( _: Base.Type, _ leaf: Leaf.Type ) -> String { - let getter: ComputedAccessorsPtr.Getter = pointer.getter() - let pointer = unsafeBitCast(getter, to: UnsafeRawPointer.self) - if let cString = keyPath_copySymbolName(UnsafeRawPointer(pointer)) { + let getter: ComputedAccessorsPtr.Getter = unsafe pointer.getter() + let pointer = unsafe unsafeBitCast(getter, to: UnsafeRawPointer.self) + if let cString = unsafe keyPath_copySymbolName(UnsafeRawPointer(pointer)) { defer { - keyPath_freeSymbolName(cString) + unsafe keyPath_freeSymbolName(cString) } - if let demangled = demangle(name: cString) + if let demangled = unsafe demangle(name: cString) .map({ pointer in defer { - pointer.deallocate() + unsafe pointer.deallocate() } - return String(cString: pointer) + return unsafe String(cString: pointer) }) { return demangled } } - return "" + return unsafe "" } #endif @@ -4366,23 +4380,23 @@ extension AnyKeyPath: CustomDebugStringConvertible { public var debugDescription: String { var description = "\\\(String(describing: Self.rootType))" return withBuffer { - var buffer = $0 - if buffer.data.isEmpty { + var buffer = unsafe $0 + if unsafe buffer.data.isEmpty { description.append(".self") return description } var valueType: Any.Type = Self.rootType while true { - let (rawComponent, optNextType) = buffer.next() + let (rawComponent, optNextType) = unsafe buffer.next() let hasEnded = optNextType == nil let nextType = optNextType ?? Self.valueType - switch rawComponent.value { + switch unsafe rawComponent.value { case .optionalForce, .optionalWrap, .optionalChain: break default: description.append(".") } - switch rawComponent.value { + switch unsafe rawComponent.value { case .class(let offset), .struct(let offset): let count = _getRecursiveChildCount(valueType) @@ -4394,16 +4408,16 @@ extension AnyKeyPath: CustomDebugStringConvertible { ) == offset }) if let index = index { - var field = _FieldReflectionMetadata() - _ = _getChildMetadata( + var field = unsafe _FieldReflectionMetadata() + _ = unsafe _getChildMetadata( valueType, index: index, fieldMetadata: &field ) defer { - field.freeFunc?(field.name) + unsafe field.freeFunc?(field.name) } - description.append(String(cString: field.name)) + unsafe description.append(String(cString: field.name)) } else { description.append("") } @@ -4412,7 +4426,7 @@ extension AnyKeyPath: CustomDebugStringConvertible { .mutatingGetSet(_, let accessors, _): func project(base: Base.Type) -> String { func project2(leaf: Leaf.Type) -> String { - dynamicLibraryAddress( + unsafe dynamicLibraryAddress( of: accessors, base, leaf diff --git a/stdlib/public/core/LazySequence.swift b/stdlib/public/core/LazySequence.swift index f4464b5767d5e..6139a1a64fa38 100644 --- a/stdlib/public/core/LazySequence.swift +++ b/stdlib/public/core/LazySequence.swift @@ -209,7 +209,7 @@ extension LazySequence: Sequence { public __consuming func _copyContents( initializing buf: UnsafeMutableBufferPointer ) -> (Iterator, UnsafeMutableBufferPointer.Index) { - return _base._copyContents(initializing: buf) + return unsafe _base._copyContents(initializing: buf) } @inlinable // lazy-performance diff --git a/stdlib/public/core/LifetimeManager.swift b/stdlib/public/core/LifetimeManager.swift index 4123125eee92f..fb16a2c09618f 100644 --- a/stdlib/public/core/LifetimeManager.swift +++ b/stdlib/public/core/LifetimeManager.swift @@ -103,7 +103,7 @@ public func withUnsafeMutablePointer< to value: inout T, _ body: (UnsafeMutablePointer) 
throws(E) -> Result ) throws(E) -> Result { - try body(UnsafeMutablePointer(Builtin.addressof(&value))) + try unsafe body(UnsafeMutablePointer(Builtin.addressof(&value))) } @_spi(SwiftStdlibLegacyABI) @available(swift, obsoleted: 1) @@ -113,7 +113,7 @@ internal func __abi_se0413_withUnsafeMutablePointer( to value: inout T, _ body: (UnsafeMutablePointer) throws -> Result ) throws -> Result { - return try body(UnsafeMutablePointer(Builtin.addressof(&value))) + return try unsafe body(UnsafeMutablePointer(Builtin.addressof(&value))) } /// Calls the given closure with a mutable pointer to the given argument. @@ -129,7 +129,7 @@ public func _withUnprotectedUnsafeMutablePointer< ) throws(E) -> Result { #if $BuiltinUnprotectedAddressOf - return try body(UnsafeMutablePointer(Builtin.unprotectedAddressOf(&value))) + return try unsafe body(UnsafeMutablePointer(Builtin.unprotectedAddressOf(&value))) #else return try body(UnsafeMutablePointer(Builtin.addressof(&value))) #endif @@ -161,7 +161,7 @@ public func withUnsafePointer( _ body: (UnsafePointer) throws(E) -> Result ) throws(E) -> Result { - return try body(UnsafePointer(Builtin.addressOfBorrow(value))) + return try unsafe body(UnsafePointer(Builtin.addressOfBorrow(value))) } /// ABI: Historical withUnsafePointer(to:_:) rethrows, expressed as "throws", @@ -174,7 +174,7 @@ internal func __abi_withUnsafePointer( _ body: (UnsafePointer) throws -> Result ) throws -> Result { - return try body(UnsafePointer(Builtin.addressOfBorrow(value))) + return try unsafe body(UnsafePointer(Builtin.addressOfBorrow(value))) } /// Invokes the given closure with a pointer to the given argument. @@ -206,7 +206,7 @@ public func withUnsafePointer( to value: inout T, _ body: (UnsafePointer) throws(E) -> Result ) throws(E) -> Result { - try body(UnsafePointer(Builtin.addressof(&value))) + try unsafe body(UnsafePointer(Builtin.addressof(&value))) } /// ABI: Historical withUnsafePointer(to:_:) rethrows, @@ -218,7 +218,7 @@ internal func __abi_se0413_withUnsafePointer( to value: inout T, _ body: (UnsafePointer) throws -> Result ) throws -> Result { - return try body(UnsafePointer(Builtin.addressof(&value))) + return try unsafe body(UnsafePointer(Builtin.addressof(&value))) } /// Invokes the given closure with a pointer to the given argument. 
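The `withUnsafePointer`/`withUnsafeMutablePointer` family above now wraps its `body` invocations in `unsafe` expressions internally. Callers that build with `-strict-memory-safety` acknowledge the pointer use the same way at their own call sites; a minimal sketch:

```swift
// Minimal sketch of a strictly-checked caller: both producing the pointer
// and reading through it are unsafe operations, acknowledged with `unsafe`.
func byteSwapped(_ value: inout UInt32) -> UInt32 {
  unsafe withUnsafeMutablePointer(to: &value) { pointer in
    unsafe pointer.pointee.byteSwapped
  }
}
```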
@@ -233,7 +233,7 @@ public func _withUnprotectedUnsafePointer< _ body: (UnsafePointer) throws(E) -> Result ) throws(E) -> Result { #if $BuiltinUnprotectedAddressOf - return try body(UnsafePointer(Builtin.unprotectedAddressOf(&value))) + return try unsafe body(UnsafePointer(Builtin.unprotectedAddressOf(&value))) #else return try body(UnsafePointer(Builtin.addressof(&value))) #endif @@ -250,7 +250,7 @@ public func _withUnprotectedUnsafePointer< to value: borrowing T, _ body: (UnsafePointer) throws(E) -> Result ) throws(E) -> Result { - return try body(UnsafePointer(Builtin.unprotectedAddressOfBorrow(value))) + return try unsafe body(UnsafePointer(Builtin.unprotectedAddressOfBorrow(value))) } @available(*, deprecated, message: "Use the copy operator") diff --git a/stdlib/public/core/ManagedBuffer.swift b/stdlib/public/core/ManagedBuffer.swift index e5f9cb3cdea9d..6c13bb1155220 100644 --- a/stdlib/public/core/ManagedBuffer.swift +++ b/stdlib/public/core/ManagedBuffer.swift @@ -91,7 +91,7 @@ extension ManagedBuffer where Element: ~Copyable { minimumCapacity._builtinWordValue, Element.self) let initHeaderVal = try factory(p) - p.headerAddress.initialize(to: initHeaderVal) + unsafe p.headerAddress.initialize(to: initHeaderVal) // The _fixLifetime is not really needed, because p is used afterwards. // But let's be conservative and fix the lifetime after we use the // headerAddress. @@ -109,8 +109,8 @@ extension ManagedBuffer where Element: ~Copyable { @available(OpenBSD, unavailable, message: "malloc_size is unavailable.") public final var capacity: Int { let storageAddr = UnsafeMutableRawPointer(Builtin.bridgeToRawPointer(self)) - let endAddr = storageAddr + _swift_stdlib_malloc_size(storageAddr) - let realCapacity = endAddr.assumingMemoryBound(to: Element.self) - + let endAddr = unsafe storageAddr + _swift_stdlib_malloc_size(storageAddr) + let realCapacity = unsafe endAddr.assumingMemoryBound(to: Element.self) - firstElementAddress return realCapacity } @@ -118,14 +118,14 @@ extension ManagedBuffer where Element: ~Copyable { @_preInverseGenerics @inlinable internal final var firstElementAddress: UnsafeMutablePointer { - return UnsafeMutablePointer( + return unsafe UnsafeMutablePointer( Builtin.projectTailElems(self, Element.self)) } @_preInverseGenerics @inlinable internal final var headerAddress: UnsafeMutablePointer
<Header> { - return UnsafeMutablePointer<Header>(Builtin.addressof(&header)) + return unsafe UnsafeMutablePointer<Header>(Builtin.addressof(&header)) } } @@ -140,7 +140,7 @@ extension ManagedBuffer where Element: ~Copyable { public final func withUnsafeMutablePointerToHeader( _ body: (UnsafeMutablePointer<Header>
) throws(E) -> R ) throws(E) -> R { - try withUnsafeMutablePointers { (v, _) throws(E) in try body(v) } + try unsafe withUnsafeMutablePointers { (v, _) throws(E) in try unsafe body(v) } } /// Call `body` with an `UnsafeMutablePointer` to the `Element` @@ -153,7 +153,7 @@ extension ManagedBuffer where Element: ~Copyable { public final func withUnsafeMutablePointerToElements( _ body: (UnsafeMutablePointer) throws(E) -> R ) throws(E) -> R { - try withUnsafeMutablePointers { (_, v) throws(E) in try body(v) } + try unsafe withUnsafeMutablePointers { (_, v) throws(E) in try unsafe body(v) } } /// Call `body` with `UnsafeMutablePointer`s to the stored `Header` @@ -169,7 +169,7 @@ extension ManagedBuffer where Element: ~Copyable { ) throws(E) -> R ) throws(E) -> R { defer { _fixLifetime(self) } - return try body(headerAddress, firstElementAddress) + return try unsafe body(headerAddress, firstElementAddress) } } @@ -180,7 +180,7 @@ extension ManagedBuffer { internal final func __legacy_withUnsafeMutablePointerToHeader( _ body: (UnsafeMutablePointer
) throws -> R ) rethrows -> R { - return try withUnsafeMutablePointers { (v, _) in return try body(v) } + return try unsafe withUnsafeMutablePointers { (v, _) in return try unsafe body(v) } } @_spi(SwiftStdlibLegacyABI) @available(swift, obsoleted: 1) @@ -189,7 +189,7 @@ extension ManagedBuffer { internal final func __legacy_withUnsafeMutablePointerToElements( _ body: (UnsafeMutablePointer) throws -> R ) rethrows -> R { - return try withUnsafeMutablePointers { return try body($1) } + return try unsafe withUnsafeMutablePointers { return try unsafe body($1) } } @_spi(SwiftStdlibLegacyABI) @available(swift, obsoleted: 1) @@ -201,7 +201,7 @@ extension ManagedBuffer { ) throws -> R ) rethrows -> R { defer { _fixLifetime(self) } - return try body(headerAddress, firstElementAddress) + return try unsafe body(headerAddress, firstElementAddress) } } @@ -278,8 +278,8 @@ public struct ManagedBufferPointer< bufferClass: bufferClass, minimumCapacity: minimumCapacity) // initialize the header field - try withUnsafeMutablePointerToHeader { - $0.initialize(to: + try unsafe withUnsafeMutablePointerToHeader { + unsafe $0.initialize(to: try factory( self.buffer, { @@ -393,10 +393,10 @@ extension ManagedBufferPointer where Element: ~Copyable { @inlinable public var header: Header { _read { - yield _headerPointer.pointee + yield unsafe _headerPointer.pointee } _modify { - yield &_headerPointer.pointee + yield unsafe &_headerPointer.pointee } } } @@ -432,7 +432,7 @@ extension ManagedBufferPointer where Element: ~Copyable { public func withUnsafeMutablePointerToHeader( _ body: (UnsafeMutablePointer
) throws(E) -> R ) throws(E) -> R { - try withUnsafeMutablePointers { (v, _) throws(E) in try body(v) } + try unsafe withUnsafeMutablePointers { (v, _) throws(E) in try unsafe body(v) } } /// Call `body` with an `UnsafeMutablePointer` to the `Element` @@ -444,7 +444,7 @@ extension ManagedBufferPointer where Element: ~Copyable { public func withUnsafeMutablePointerToElements( _ body: (UnsafeMutablePointer) throws(E) -> R ) throws(E) -> R { - try withUnsafeMutablePointers { (_, v) throws(E) in try body(v) } + try unsafe withUnsafeMutablePointers { (_, v) throws(E) in try unsafe body(v) } } /// Call `body` with `UnsafeMutablePointer`s to the stored `Header` @@ -459,7 +459,7 @@ extension ManagedBufferPointer where Element: ~Copyable { ) throws(E) -> R ) throws(E) -> R { defer { _fixLifetime(_nativeBuffer) } - return try body(_headerPointer, _elementPointer) + return try unsafe body(_headerPointer, _elementPointer) } /// Returns `true` if `self` holds the only strong reference to its @@ -480,7 +480,7 @@ extension ManagedBufferPointer { internal func withUnsafeMutablePointerToHeader( _ body: (UnsafeMutablePointer
) throws -> R ) rethrows -> R { - try withUnsafeMutablePointers { (v, _) in try body(v) } + try unsafe withUnsafeMutablePointers { (v, _) in try unsafe body(v) } } @_spi(SwiftStdlibLegacyABI) @available(swift, obsoleted: 1) @@ -489,7 +489,7 @@ extension ManagedBufferPointer { internal func withUnsafeMutablePointerToElements( _ body: (UnsafeMutablePointer) throws -> R ) rethrows -> R { - try withUnsafeMutablePointers { (_, v) in try body(v) } + try unsafe withUnsafeMutablePointers { (_, v) in try unsafe body(v) } } @_spi(SwiftStdlibLegacyABI) @available(swift, obsoleted: 1) @@ -501,7 +501,7 @@ extension ManagedBufferPointer { ) throws -> R ) rethrows -> R { defer { _fixLifetime(_nativeBuffer) } - return try body(_headerPointer, _elementPointer) + return try unsafe body(_headerPointer, _elementPointer) } } @@ -511,7 +511,7 @@ extension ManagedBufferPointer where Element: ~Copyable { internal static func _checkValidBufferClass( _ bufferClass: AnyClass, creating: Bool = false ) { - _debugPrecondition( + unsafe _debugPrecondition( _class_getInstancePositiveExtentSize(bufferClass) == MemoryLayout<_HeapObject>.size || ( (!creating || bufferClass is ManagedBuffer.Type) @@ -530,7 +530,7 @@ extension ManagedBufferPointer where Element: ~Copyable { internal static func _internalInvariantValidBufferClass( _ bufferClass: AnyClass, creating: Bool = false ) { - _internalInvariant( + unsafe _internalInvariant( _class_getInstancePositiveExtentSize(bufferClass) == MemoryLayout<_HeapObject>.size || ( (!creating || bufferClass is ManagedBuffer.Type) @@ -550,7 +550,7 @@ extension ManagedBufferPointer where Element: ~Copyable { @_preInverseGenerics @inlinable internal static var _alignmentMask: Int { - return max( + return unsafe max( MemoryLayout<_HeapObject>.alignment, max(MemoryLayout
.alignment, MemoryLayout.alignment)) &- 1 } @@ -560,7 +560,7 @@ extension ManagedBufferPointer where Element: ~Copyable { @inlinable @available(OpenBSD, unavailable, message: "malloc_size is unavailable.") internal var _capacityInBytes: Int { - return _swift_stdlib_malloc_size(_address) + return unsafe _swift_stdlib_malloc_size(_address) } /// The address of this instance in a convenient pointer-to-bytes form @@ -575,7 +575,7 @@ extension ManagedBufferPointer where Element: ~Copyable { @inlinable internal static var _headerOffset: Int { _onFastPath() - return _roundUp( + return unsafe _roundUp( MemoryLayout<_HeapObject>.size, toAlignment: MemoryLayout
<Header>.alignment) } @@ -587,7 +587,7 @@ @inlinable internal var _headerPointer: UnsafeMutablePointer<Header>
{ _onFastPath() - return (_address + ManagedBufferPointer._headerOffset).assumingMemoryBound( + return unsafe (_address + ManagedBufferPointer._headerOffset).assumingMemoryBound( to: Header.self) } @@ -598,7 +598,7 @@ extension ManagedBufferPointer where Element: ~Copyable { @inlinable internal var _elementPointer: UnsafeMutablePointer { _onFastPath() - return (_address + ManagedBufferPointer._elementOffset).assumingMemoryBound( + return unsafe (_address + ManagedBufferPointer._elementOffset).assumingMemoryBound( to: Element.self) } @@ -619,7 +619,7 @@ extension ManagedBufferPointer: Equatable { lhs: ManagedBufferPointer, rhs: ManagedBufferPointer ) -> Bool { - return lhs._address == rhs._address + return unsafe lhs._address == rhs._address } } diff --git a/stdlib/public/core/MemoryLayout.swift b/stdlib/public/core/MemoryLayout.swift index 73dd24230e8a8..5eeab8027228f 100644 --- a/stdlib/public/core/MemoryLayout.swift +++ b/stdlib/public/core/MemoryLayout.swift @@ -258,44 +258,44 @@ extension MemoryLayout where T: ~Copyable { } internal static func _roundingUpToAlignment(_ value: UnsafeRawPointer) -> UnsafeRawPointer { - return UnsafeRawPointer(bitPattern: + return unsafe UnsafeRawPointer(bitPattern: _roundingUpToAlignment(UInt(bitPattern: value))).unsafelyUnwrapped } internal static func _roundingDownToAlignment(_ value: UnsafeRawPointer) -> UnsafeRawPointer { - return UnsafeRawPointer(bitPattern: + return unsafe UnsafeRawPointer(bitPattern: _roundingDownToAlignment(UInt(bitPattern: value))).unsafelyUnwrapped } internal static func _roundingUpToAlignment(_ value: UnsafeMutableRawPointer) -> UnsafeMutableRawPointer { - return UnsafeMutableRawPointer(bitPattern: + return unsafe UnsafeMutableRawPointer(bitPattern: _roundingUpToAlignment(UInt(bitPattern: value))).unsafelyUnwrapped } internal static func _roundingDownToAlignment(_ value: UnsafeMutableRawPointer) -> UnsafeMutableRawPointer { - return UnsafeMutableRawPointer(bitPattern: + return unsafe UnsafeMutableRawPointer(bitPattern: _roundingDownToAlignment(UInt(bitPattern: value))).unsafelyUnwrapped } internal static func _roundingUpBaseToAlignment(_ value: UnsafeRawBufferPointer) -> UnsafeRawBufferPointer { - let baseAddressBits = Int(bitPattern: value.baseAddress) + let baseAddressBits = unsafe Int(bitPattern: value.baseAddress) var misalignment = baseAddressBits & _alignmentMask if misalignment != 0 { misalignment = _alignmentMask & -misalignment - return UnsafeRawBufferPointer( + return unsafe UnsafeRawBufferPointer( start: UnsafeRawPointer(bitPattern: baseAddressBits + misalignment), count: value.count - misalignment) } - return value + return unsafe value } internal static func _roundingUpBaseToAlignment(_ value: UnsafeMutableRawBufferPointer) -> UnsafeMutableRawBufferPointer { - let baseAddressBits = Int(bitPattern: value.baseAddress) + let baseAddressBits = unsafe Int(bitPattern: value.baseAddress) var misalignment = baseAddressBits & _alignmentMask if misalignment != 0 { misalignment = _alignmentMask & -misalignment - return UnsafeMutableRawBufferPointer( + return unsafe UnsafeMutableRawBufferPointer( start: UnsafeMutableRawPointer(bitPattern: baseAddressBits + misalignment), count: value.count - misalignment) } - return value + return unsafe value } } diff --git a/stdlib/public/core/MigrationSupport.swift b/stdlib/public/core/MigrationSupport.swift index ab436e4cc5751..5957ca74b53f7 100644 --- a/stdlib/public/core/MigrationSupport.swift +++ b/stdlib/public/core/MigrationSupport.swift @@ -351,18 +351,18 @@ extension Collection { 
extension UnsafeMutablePointer { @available(swift, deprecated: 4.1, obsoleted: 5.0, renamed: "initialize(repeating:count:)") public func initialize(to newValue: Pointee, count: Int = 1) { - initialize(repeating: newValue, count: count) + unsafe initialize(repeating: newValue, count: count) } @available(swift, deprecated: 4.1, obsoleted: 5.0, message: "the default argument to deinitialize(count:) has been removed, please specify the count explicitly") @discardableResult public func deinitialize() -> UnsafeMutableRawPointer { - return deinitialize(count: 1) + return unsafe deinitialize(count: 1) } @available(swift, deprecated: 4.1, obsoleted: 5.0, message: "Swift currently only supports freeing entire heap blocks, use deallocate() instead") public func deallocate(capacity _: Int) { - self.deallocate() + unsafe self.deallocate() } /// Initializes memory starting at this pointer's address with the elements @@ -379,8 +379,8 @@ extension UnsafeMutablePointer { @available(swift, deprecated: 4.2, obsoleted: 5.0, message: "it will be removed in Swift 5.0. Please use 'UnsafeMutableBufferPointer.initialize(from:)' instead") public func initialize(from source: C) where C.Element == Pointee { - let buf = UnsafeMutableBufferPointer(start: self, count: numericCast(source.count)) - var (remainders,writtenUpTo) = source._copyContents(initializing: buf) + let buf = unsafe UnsafeMutableBufferPointer(start: self, count: numericCast(source.count)) + var (remainders,writtenUpTo) = unsafe source._copyContents(initializing: buf) // ensure that exactly rhs.count elements were written _precondition(remainders.next() == nil, "rhs underreported its count") _precondition(writtenUpTo == buf.endIndex, "rhs overreported its count") @@ -401,7 +401,7 @@ extension UnsafeMutableRawPointer { public init?(@_nonEphemeral _ from: UnsafePointer?) 
{ Builtin.unreachable() } } -extension UnsafeRawPointer: _CustomPlaygroundQuickLookable { +extension UnsafeRawPointer: @unsafe _CustomPlaygroundQuickLookable { internal var summary: String { let ptrValue = UInt64( bitPattern: Int64(Int(Builtin.ptrtoint_Word(_rawValue)))) @@ -412,11 +412,11 @@ extension UnsafeRawPointer: _CustomPlaygroundQuickLookable { @available(swift, deprecated: 4.2/*, obsoleted: 5.0*/, message: "UnsafeRawPointer.customPlaygroundQuickLook will be removed in a future Swift version") public var customPlaygroundQuickLook: _PlaygroundQuickLook { - return .text(summary) + return unsafe .text(summary) } } -extension UnsafeMutableRawPointer: _CustomPlaygroundQuickLookable { +extension UnsafeMutableRawPointer: @unsafe _CustomPlaygroundQuickLookable { private var summary: String { let ptrValue = UInt64( bitPattern: Int64(Int(Builtin.ptrtoint_Word(_rawValue)))) @@ -427,11 +427,11 @@ extension UnsafeMutableRawPointer: _CustomPlaygroundQuickLookable { @available(swift, deprecated: 4.2/*, obsoleted: 5.0*/, message: "UnsafeMutableRawPointer.customPlaygroundQuickLook will be removed in a future Swift version") public var customPlaygroundQuickLook: _PlaygroundQuickLook { - return .text(summary) + return unsafe .text(summary) } } -extension UnsafePointer: _CustomPlaygroundQuickLookable { +extension UnsafePointer: @unsafe _CustomPlaygroundQuickLookable { private var summary: String { let ptrValue = UInt64(bitPattern: Int64(Int(Builtin.ptrtoint_Word(_rawValue)))) return ptrValue == 0 @@ -441,11 +441,11 @@ extension UnsafePointer: _CustomPlaygroundQuickLookable { @available(swift, deprecated: 4.2/*, obsoleted: 5.0*/, message: "UnsafePointer.customPlaygroundQuickLook will be removed in a future Swift version") public var customPlaygroundQuickLook: PlaygroundQuickLook { - return .text(summary) + return unsafe .text(summary) } } -extension UnsafeMutablePointer: _CustomPlaygroundQuickLookable { +extension UnsafeMutablePointer: @unsafe _CustomPlaygroundQuickLookable { private var summary: String { let ptrValue = UInt64(bitPattern: Int64(Int(Builtin.ptrtoint_Word(_rawValue)))) return ptrValue == 0 @@ -455,7 +455,7 @@ extension UnsafeMutablePointer: _CustomPlaygroundQuickLookable { @available(swift, deprecated: 4.2/*, obsoleted: 5.0*/, message: "UnsafeMutablePointer.customPlaygroundQuickLook will be removed in a future Swift version") public var customPlaygroundQuickLook: PlaygroundQuickLook { - return .text(summary) + return unsafe .text(summary) } } @@ -476,12 +476,12 @@ extension UnsafeMutableRawPointer { @available(swift, deprecated: 4.1, obsoleted: 5.0, renamed: "deallocate()", message: "Swift currently only supports freeing entire heap blocks, use deallocate() instead") public func deallocate(bytes _: Int, alignedTo _: Int) { - self.deallocate() + unsafe self.deallocate() } @available(swift, deprecated: 4.1, obsoleted: 5.0, renamed: "copyMemory(from:byteCount:)") public func copyBytes(from source: UnsafeRawPointer, count: Int) { - copyMemory(from: source, byteCount: count) + unsafe copyMemory(from: source, byteCount: count) } @available(swift, deprecated: 4.1, obsoleted: 5.0, renamed: "initializeMemory(as:repeating:count:)") @@ -489,7 +489,7 @@ extension UnsafeMutableRawPointer { public func initializeMemory( as type: T.Type, at offset: Int = 0, count: Int = 1, to repeatedValue: T ) -> UnsafeMutablePointer { - return (self + offset * MemoryLayout.stride).initializeMemory( + return unsafe (self + offset * MemoryLayout.stride).initializeMemory( as: type, repeating: repeatedValue, count: 
count) } @@ -500,12 +500,12 @@ extension UnsafeMutableRawPointer { ) -> UnsafeMutablePointer { // TODO: Optimize where `C` is a `ContiguousArrayBuffer`. // Initialize and bind each element of the container. - var ptr = self + var ptr = unsafe self for element in source { - ptr.initializeMemory(as: C.Element.self, repeating: element, count: 1) - ptr += MemoryLayout.stride + unsafe ptr.initializeMemory(as: C.Element.self, repeating: element, count: 1) + unsafe ptr += MemoryLayout.stride } - return UnsafeMutablePointer(_rawValue) + return unsafe UnsafeMutablePointer(_rawValue) } } @@ -518,7 +518,7 @@ extension UnsafeMutableRawBufferPointer { @available(swift, deprecated: 4.1, obsoleted: 5.0, renamed: "copyMemory(from:)") public func copyBytes(from source: UnsafeRawBufferPointer) { - copyMemory(from: source) + unsafe copyMemory(from: source) } } diff --git a/stdlib/public/core/Misc.swift b/stdlib/public/core/Misc.swift index 8da2db8f32943..8500cad2bdc07 100644 --- a/stdlib/public/core/Misc.swift +++ b/stdlib/public/core/Misc.swift @@ -62,8 +62,8 @@ public // SPI (Distributed) func _getFunctionFullNameFromMangledName(mangledName: String) -> String? { let mangledNameUTF8 = Array(mangledName.utf8) let (stringPtr, count) = - mangledNameUTF8.withUnsafeBufferPointer { (mangledNameUTF8) in - return _getFunctionFullNameFromMangledNameImpl( + unsafe mangledNameUTF8.withUnsafeBufferPointer { (mangledNameUTF8) in + return unsafe _getFunctionFullNameFromMangledNameImpl( mangledNameUTF8.baseAddress!, UInt(mangledNameUTF8.endIndex)) } @@ -72,7 +72,7 @@ func _getFunctionFullNameFromMangledName(mangledName: String) -> String? { return nil } - return String._fromUTF8Repairing( + return unsafe String._fromUTF8Repairing( UnsafeBufferPointer(start: stringPtr, count: Int(count))).0 } @@ -90,8 +90,8 @@ public func _getTypeName(_ type: Any.Type, qualified: Bool) @_unavailableInEmbedded public // @testable func _typeName(_ type: Any.Type, qualified: Bool = true) -> String { - let (stringPtr, count) = _getTypeName(type, qualified: qualified) - return String._fromUTF8Repairing( + let (stringPtr, count) = unsafe _getTypeName(type, qualified: qualified) + return unsafe String._fromUTF8Repairing( UnsafeBufferPointer(start: stringPtr, count: count)).0 } @@ -107,12 +107,12 @@ public func _getMangledTypeName(_ type: any ~Copyable.Type) @_preInverseGenerics public // SPI func _mangledTypeName(_ type: any ~Copyable.Type) -> String? { - let (stringPtr, count) = _getMangledTypeName(type) + let (stringPtr, count) = unsafe _getMangledTypeName(type) guard count > 0 else { return nil } - let (result, repairsMade) = String._fromUTF8Repairing( + let (result, repairsMade) = unsafe String._fromUTF8Repairing( UnsafeBufferPointer(start: stringPtr, count: count)) _precondition(!repairsMade, "repairs made to _mangledTypeName, this is not expected since names should be valid UTF-8") @@ -126,8 +126,8 @@ func _mangledTypeName(_ type: any ~Copyable.Type) -> String? { public // SPI(Foundation) func _typeByName(_ name: String) -> Any.Type? 
{ let nameUTF8 = Array(name.utf8) - return nameUTF8.withUnsafeBufferPointer { (nameUTF8) in - return _getTypeByMangledNameUntrusted(nameUTF8.baseAddress!, + return unsafe nameUTF8.withUnsafeBufferPointer { (nameUTF8) in + return unsafe _getTypeByMangledNameUntrusted(nameUTF8.baseAddress!, UInt(nameUTF8.endIndex)) } } diff --git a/stdlib/public/core/NFD.swift b/stdlib/public/core/NFD.swift index 1ccba3282b925..1d46a6e6bdef5 100644 --- a/stdlib/public/core/NFD.swift +++ b/stdlib/public/core/NFD.swift @@ -366,11 +366,11 @@ extension Unicode._NFDNormalizer { return } - var utf8 = decompEntry.utf8 + var utf8 = unsafe decompEntry.utf8 while utf8.count > 0 { - let (scalar, len) = _decodeScalar(utf8, startingAt: 0) - utf8 = UnsafeBufferPointer(rebasing: utf8[len...]) + let (scalar, len) = unsafe _decodeScalar(utf8, startingAt: 0) + unsafe utf8 = unsafe UnsafeBufferPointer(rebasing: utf8[len...]) // Fast path: Because this will be emitted into the completed NFD buffer, // we don't need to look at NFD_QC anymore which lets us do a larger diff --git a/stdlib/public/core/NativeDictionary.swift b/stdlib/public/core/NativeDictionary.swift index 983d3774a3f49..68dba26e934b2 100644 --- a/stdlib/public/core/NativeDictionary.swift +++ b/stdlib/public/core/NativeDictionary.swift @@ -14,6 +14,7 @@ /// implementation of Dictionary. @usableFromInline @frozen +@safe internal struct _NativeDictionary { @usableFromInline internal typealias Element = (key: Key, value: Value) @@ -26,21 +27,21 @@ internal struct _NativeDictionary { /// Constructs an instance from the empty singleton. @inlinable internal init() { - self._storage = __RawDictionaryStorage.empty + self._storage = unsafe __RawDictionaryStorage.empty } /// Constructs a dictionary adopting the given storage. @inlinable internal init(_ storage: __owned __RawDictionaryStorage) { - self._storage = storage + self._storage = unsafe storage } @inlinable internal init(capacity: Int) { if capacity == 0 { - self._storage = __RawDictionaryStorage.empty + self._storage = unsafe __RawDictionaryStorage.empty } else { - self._storage = _DictionaryStorage.allocate(capacity: capacity) + self._storage = unsafe _DictionaryStorage.allocate(capacity: capacity) } } @@ -53,11 +54,11 @@ internal struct _NativeDictionary { @inlinable internal init(_ cocoa: __owned __CocoaDictionary, capacity: Int) { if capacity == 0 { - self._storage = __RawDictionaryStorage.empty + self._storage = unsafe __RawDictionaryStorage.empty } else { _internalInvariant(cocoa.count <= capacity) self._storage = - _DictionaryStorage.convert(cocoa, capacity: capacity) + unsafe _DictionaryStorage.convert(cocoa, capacity: capacity) for (key, value) in cocoa { insertNew( key: _forceBridgeFromObjectiveC(key, Key.self), @@ -79,39 +80,39 @@ extension _NativeDictionary { // Primitive fields internal var capacity: Int { @inline(__always) get { - return _assumeNonNegative(_storage._capacity) + return unsafe _assumeNonNegative(_storage._capacity) } } @inlinable internal var hashTable: _HashTable { @inline(__always) get { - return _storage._hashTable + return unsafe _storage._hashTable } } @inlinable internal var age: Int32 { @inline(__always) get { - return _storage._age + return unsafe _storage._age } } // This API is unsafe and needs a `_fixLifetime` in the caller. 
@inlinable internal var _keys: UnsafeMutablePointer { - return _storage._rawKeys.assumingMemoryBound(to: Key.self) + return unsafe _storage._rawKeys.assumingMemoryBound(to: Key.self) } @inlinable internal var _values: UnsafeMutablePointer { - return _storage._rawValues.assumingMemoryBound(to: Value.self) + return unsafe _storage._rawValues.assumingMemoryBound(to: Value.self) } @inlinable @inline(__always) internal func invalidateIndices() { - _storage._age &+= 1 + unsafe _storage._age &+= 1 } } @@ -120,16 +121,16 @@ extension _NativeDictionary { // Low-level unchecked operations @inline(__always) internal func uncheckedKey(at bucket: Bucket) -> Key { defer { _fixLifetime(self) } - _internalInvariant(hashTable.isOccupied(bucket)) - return _keys[bucket.offset] + unsafe _internalInvariant(hashTable.isOccupied(bucket)) + return unsafe _keys[bucket.offset] } @inlinable @inline(__always) internal func uncheckedValue(at bucket: Bucket) -> Value { defer { _fixLifetime(self) } - _internalInvariant(hashTable.isOccupied(bucket)) - return _values[bucket.offset] + unsafe _internalInvariant(hashTable.isOccupied(bucket)) + return unsafe _values[bucket.offset] } @inlinable // FIXME(inline-always) was usableFromInline @@ -139,18 +140,18 @@ extension _NativeDictionary { // Low-level unchecked operations toKey key: __owned Key, value: __owned Value) { defer { _fixLifetime(self) } - _internalInvariant(hashTable.isValid(bucket)) - (_keys + bucket.offset).initialize(to: key) - (_values + bucket.offset).initialize(to: value) + unsafe _internalInvariant(hashTable.isValid(bucket)) + unsafe (_keys + bucket.offset).initialize(to: key) + unsafe (_values + bucket.offset).initialize(to: value) } @inlinable // FIXME(inline-always) was usableFromInline @inline(__always) internal func uncheckedDestroy(at bucket: Bucket) { defer { _fixLifetime(self) } - _internalInvariant(hashTable.isValid(bucket)) - (_keys + bucket.offset).deinitialize(count: 1) - (_values + bucket.offset).deinitialize(count: 1) + unsafe _internalInvariant(hashTable.isValid(bucket)) + unsafe (_keys + bucket.offset).deinitialize(count: 1) + unsafe (_values + bucket.offset).deinitialize(count: 1) } } @@ -158,13 +159,13 @@ extension _NativeDictionary { // Low-level lookup operations @inlinable @inline(__always) internal func hashValue(for key: Key) -> Int { - return key._rawHashValue(seed: _storage._seed) + return unsafe key._rawHashValue(seed: _storage._seed) } @inlinable @inline(__always) internal func find(_ key: Key) -> (bucket: Bucket, found: Bool) { - return _storage.find(key) + return unsafe _storage.find(key) } /// Search for a given element, assuming it has the specified hash value. 
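> Editor's note: `_NativeDictionary` is now annotated `@safe` even though its internals traffic in raw storage pointers; the safety obligations are discharged inside the type, one `unsafe` expression at a time. A rough sketch of the same pattern outside the stdlib (type and names here are hypothetical, illustrative only):

```swift
// A type whose public interface is safe while its storage is an unsafe
// pointer. Every touch of the pointer is acknowledged with `unsafe`; the
// bounds check is what makes the subscript safe to expose.
// (A real container would also arrange deallocation, e.g. via a class or a
// ~Copyable type with a deinit.)
@safe
struct FixedIntBuffer {
  private let storage: UnsafeMutablePointer<Int>
  let count: Int

  init(repeating value: Int, count: Int) {
    self.count = count
    storage = unsafe UnsafeMutablePointer<Int>.allocate(capacity: count)
    unsafe storage.initialize(repeating: value, count: count)
  }

  subscript(position: Int) -> Int {
    precondition(position >= 0 && position < count, "index out of range")
    return unsafe storage[position]
  }
}
```
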
@@ -177,7 +178,7 @@ extension _NativeDictionary { // Low-level lookup operations _ key: Key, hashValue: Int ) -> (bucket: Bucket, found: Bool) { - return _storage.find(key, hashValue: hashValue) + return unsafe _storage.find(key, hashValue: hashValue) } } @@ -189,18 +190,18 @@ extension _NativeDictionary { // ensureUnique moveElements: Bool ) { let capacity = Swift.max(capacity, self.capacity) - let newStorage = _DictionaryStorage.resize( + let newStorage = unsafe _DictionaryStorage.resize( original: _storage, capacity: capacity, move: moveElements) let result = _NativeDictionary(newStorage) if count > 0 { - for bucket in hashTable { + for unsafe bucket in unsafe hashTable { let key: Key let value: Value if moveElements { - key = (_keys + bucket.offset).move() - value = (_values + bucket.offset).move() + key = unsafe (_keys + bucket.offset).move() + value = unsafe (_values + bucket.offset).move() } else { key = self.uncheckedKey(at: bucket) value = self.uncheckedValue(at: bucket) @@ -210,8 +211,8 @@ extension _NativeDictionary { // ensureUnique if moveElements { // Clear out old storage, ensuring that its deinit won't overrelease the // elements we've just moved out. - _storage._hashTable.clear() - _storage._count = 0 + unsafe _storage._hashTable.clear() + unsafe _storage._count = 0 } } _storage = result._storage @@ -230,15 +231,15 @@ extension _NativeDictionary { // ensureUnique @inlinable @_semantics("optimize.sil.specialize.generic.size.never") internal mutating func copy() { - let newStorage = _DictionaryStorage.copy(original: _storage) - _internalInvariant(newStorage._scale == _storage._scale) - _internalInvariant(newStorage._age == _storage._age) - _internalInvariant(newStorage._seed == _storage._seed) + let newStorage = unsafe _DictionaryStorage.copy(original: _storage) + unsafe _internalInvariant(newStorage._scale == _storage._scale) + unsafe _internalInvariant(newStorage._age == _storage._age) + unsafe _internalInvariant(newStorage._seed == _storage._seed) let result = _NativeDictionary(newStorage) if count > 0 { - result.hashTable.copyContents(of: hashTable) - result._storage._count = self.count - for bucket in hashTable { + unsafe result.hashTable.copyContents(of: hashTable) + unsafe result._storage._count = self.count + for unsafe bucket in unsafe hashTable { let key = uncheckedKey(at: bucket) let value = uncheckedValue(at: bucket) result.uncheckedInitialize(at: bucket, toKey: key, value: value) @@ -278,9 +279,9 @@ extension _NativeDictionary { @inlinable @inline(__always) func validatedBucket(for index: _HashTable.Index) -> Bucket { - _precondition(hashTable.isOccupied(index.bucket) && index.age == age, + unsafe _precondition(hashTable.isOccupied(index.bucket) && index.age == age, "Attempting to access Dictionary elements using an invalid index") - return index.bucket + return unsafe index.bucket } @inlinable @@ -297,14 +298,14 @@ extension _NativeDictionary { let key = _forceBridgeFromObjectiveC(cocoa.key, Key.self) let (bucket, found) = find(key) if found { - return bucket + return unsafe bucket } } _preconditionFailure( "Attempting to access Dictionary elements using an invalid index") } #endif - return validatedBucket(for: index._asNative) + return unsafe validatedBucket(for: index._asNative) } } @@ -314,14 +315,14 @@ extension _NativeDictionary: _DictionaryBuffer { @inlinable internal var startIndex: Index { - let bucket = hashTable.startBucket - return Index(_native: _HashTable.Index(bucket: bucket, age: age)) + let bucket = unsafe hashTable.startBucket + return unsafe 
Index(_native: _HashTable.Index(bucket: bucket, age: age)) } @inlinable internal var endIndex: Index { - let bucket = hashTable.endBucket - return Index(_native: _HashTable.Index(bucket: bucket, age: age)) + let bucket = unsafe hashTable.endBucket + return unsafe Index(_native: _HashTable.Index(bucket: bucket, age: age)) } @inlinable @@ -333,9 +334,9 @@ extension _NativeDictionary: _DictionaryBuffer { return Index(_cocoa: i.dictionary.index(after: i)) } #endif - let bucket = validatedBucket(for: index._asNative) - let next = hashTable.occupiedBucket(after: bucket) - return Index(_native: _HashTable.Index(bucket: next, age: age)) + let bucket = unsafe validatedBucket(for: index._asNative) + let next = unsafe hashTable.occupiedBucket(after: bucket) + return unsafe Index(_native: _HashTable.Index(bucket: next, age: age)) } @inlinable @@ -346,13 +347,13 @@ extension _NativeDictionary: _DictionaryBuffer { } let (bucket, found) = find(key) guard found else { return nil } - return Index(_native: _HashTable.Index(bucket: bucket, age: age)) + return unsafe Index(_native: _HashTable.Index(bucket: bucket, age: age)) } @inlinable internal var count: Int { @inline(__always) get { - return _assumeNonNegative(_storage._count) + return unsafe _assumeNonNegative(_storage._count) } } @@ -415,14 +416,14 @@ extension _NativeDictionary { let (bucket, found) = mutatingFind(key, isUnique: isUnique) // If found, move the old value out of storage, wrapping it into an // optional before yielding it. - var value: Value? = (found ? (_values + bucket.offset).move() : nil) + var value: Value? = unsafe (found ? (_values + bucket.offset).move() : nil) defer { // This is in a defer block because yield might throw, and we need to // preserve Dictionary invariants when that happens. if let value = value { if found { // **Mutation.** Initialize storage to new value. - (_values + bucket.offset).initialize(to: value) + unsafe (_values + bucket.offset).initialize(to: value) } else { // **Insertion.** Insert the new entry at the correct place. Note // that `mutatingFind` already ensured that we have enough capacity. @@ -432,7 +433,7 @@ extension _NativeDictionary { if found { // **Removal.** We've already deinitialized the value; deinitialize // the key too and register the removal. - (_keys + bucket.offset).deinitialize(count: 1) + unsafe (_keys + bucket.offset).deinitialize(count: 1) _delete(at: bucket) } else { // Noop @@ -482,13 +483,13 @@ extension _NativeDictionary { // Insertions fatalError("duplicate keys in a Dictionary") #endif } - hashTable.insert(bucket) + unsafe hashTable.insert(bucket) uncheckedInitialize(at: bucket, toKey: key, value: value) } else { - let bucket = hashTable.insertNew(hashValue: hashValue) + let bucket = unsafe hashTable.insertNew(hashValue: hashValue) uncheckedInitialize(at: bucket, toKey: key, value: value) } - _storage._count &+= 1 + unsafe _storage._count &+= 1 } /// Insert a new element into uniquely held storage, replacing an existing @@ -505,8 +506,8 @@ extension _NativeDictionary { // Insertions // collisions arising from equality transitions during bridging, and in // that case it is desirable to keep values paired with their original // keys. This is not how `updateValue(_:, forKey:)` works. 
- (_keys + bucket.offset).pointee = key - (_values + bucket.offset).pointee = value + unsafe (_keys + bucket.offset).pointee = key + unsafe (_values + bucket.offset).pointee = value } else { _precondition(count < capacity) _insert(at: bucket, key: key, value: value) @@ -542,7 +543,7 @@ extension _NativeDictionary { // Insertions let rehashed = ensureUnique( isUnique: isUnique, capacity: count + (found ? 0 : 1)) - guard rehashed else { return (bucket, found) } + guard rehashed else { return unsafe (bucket, found) } let (b, f) = find(key) if f != found { #if !$Embedded @@ -551,7 +552,7 @@ extension _NativeDictionary { // Insertions fatalError("duplicate keys in a Dictionary") #endif } - return (b, found) + return unsafe (b, found) } @inlinable @@ -560,9 +561,9 @@ extension _NativeDictionary { // Insertions key: __owned Key, value: __owned Value) { _internalInvariant(count < capacity) - hashTable.insert(bucket) + unsafe hashTable.insert(bucket) uncheckedInitialize(at: bucket, toKey: key, value: value) - _storage._count += 1 + unsafe _storage._count += 1 } @inlinable @@ -573,8 +574,8 @@ extension _NativeDictionary { // Insertions ) -> Value? { let (bucket, found) = mutatingFind(key, isUnique: isUnique) if found { - let oldValue = (_values + bucket.offset).move() - (_values + bucket.offset).initialize(to: value) + let oldValue = unsafe (_values + bucket.offset).move() + unsafe (_values + bucket.offset).initialize(to: value) return oldValue } _insert(at: bucket, key: key, value: value) @@ -589,7 +590,7 @@ extension _NativeDictionary { // Insertions ) { let (bucket, found) = mutatingFind(key, isUnique: isUnique) if found { - (_values + bucket.offset).pointee = value + unsafe (_values + bucket.offset).pointee = value } else { _insert(at: bucket, key: key, value: value) } @@ -606,10 +607,10 @@ extension _NativeDictionary { ) { let rehashed = ensureUnique(isUnique: isUnique, capacity: capacity) _internalInvariant(!rehashed) - _internalInvariant(hashTable.isOccupied(a) && hashTable.isOccupied(b)) - let value = (_values + a.offset).move() - (_values + a.offset).moveInitialize(from: _values + b.offset, count: 1) - (_values + b.offset).initialize(to: value) + unsafe _internalInvariant(hashTable.isOccupied(a) && hashTable.isOccupied(b)) + let value = unsafe (_values + a.offset).move() + unsafe (_values + a.offset).moveInitialize(from: _values + b.offset, count: 1) + unsafe (_values + b.offset).initialize(to: value) } @_alwaysEmitIntoClient @@ -621,9 +622,9 @@ extension _NativeDictionary { if count == 0 { return _NativeDictionary() } if count == self.count { return self } let result = _NativeDictionary(capacity: count) - for offset in bitset { - let key = self.uncheckedKey(at: Bucket(offset: offset)) - let value = self.uncheckedValue(at: Bucket(offset: offset)) + for unsafe offset in unsafe bitset { + let key = unsafe self.uncheckedKey(at: Bucket(offset: offset)) + let value = unsafe self.uncheckedValue(at: Bucket(offset: offset)) result._unsafeInsertNew(key: key, value: value) // The hash table can have set bits after the end of the bitmap. // Ignore them. 
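> Editor's note: the loops above use the `for unsafe ... in unsafe ...` spelling, acknowledging that the sequence being iterated (the hash table or a temporary bitset) involves unsafe types. Roughly the same spelling applies to user code iterating an unsafe buffer under strict checking; a hedged sketch, assuming a caller built with `-strict-memory-safety`:

```swift
// The parameter type is unsafe, so the function itself is marked @unsafe;
// callers will in turn acknowledge calls to it with `unsafe`.
@unsafe
func checksum(_ bytes: UnsafeBufferPointer<UInt8>) -> UInt8 {
  var result: UInt8 = 0
  // Iterating an UnsafeBufferPointer goes through its unsafe Sequence
  // conformance, which the `for unsafe` / `unsafe` spellings acknowledge.
  for unsafe byte in unsafe bytes {
    result ^= byte
  }
  return result
}
```
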
@@ -638,7 +639,7 @@ extension _NativeDictionary where Value: Equatable { @inlinable @inline(__always) func isEqual(to other: _NativeDictionary) -> Bool { - if self._storage === other._storage { return true } + if unsafe self._storage === other._storage { return true } if self.count != other.count { return false } for (key, value) in self { @@ -656,7 +657,7 @@ extension _NativeDictionary where Value: Equatable { if self.count != other.count { return false } defer { _fixLifetime(self) } - for bucket in self.hashTable { + for bucket in unsafe self.hashTable { let key = self.uncheckedKey(at: bucket) let value = self.uncheckedValue(at: bucket) guard @@ -681,21 +682,21 @@ extension _NativeDictionary: _HashTableDelegate { @inlinable @inline(__always) internal func moveEntry(from source: Bucket, to target: Bucket) { - _internalInvariant(hashTable.isValid(source)) - _internalInvariant(hashTable.isValid(target)) - (_keys + target.offset) + unsafe _internalInvariant(hashTable.isValid(source)) + unsafe _internalInvariant(hashTable.isValid(target)) + unsafe (_keys + target.offset) .moveInitialize(from: _keys + source.offset, count: 1) - (_values + target.offset) + unsafe (_values + target.offset) .moveInitialize(from: _values + source.offset, count: 1) } @inlinable @inline(__always) internal func swapEntry(_ left: Bucket, with right: Bucket) { - _internalInvariant(hashTable.isValid(left)) - _internalInvariant(hashTable.isValid(right)) - swap(&_keys[left.offset], &_keys[right.offset]) - swap(&_values[left.offset], &_values[right.offset]) + unsafe _internalInvariant(hashTable.isValid(left)) + unsafe _internalInvariant(hashTable.isValid(right)) + unsafe swap(&_keys[left.offset], &_keys[right.offset]) + unsafe swap(&_values[left.offset], &_values[right.offset]) } } @@ -704,9 +705,9 @@ extension _NativeDictionary { // Deletion @_effects(releasenone) @_semantics("optimize.sil.specialize.generic.size.never") internal func _delete(at bucket: Bucket) { - hashTable.delete(at: bucket, with: self) - _storage._count -= 1 - _internalInvariant(_storage._count >= 0) + unsafe hashTable.delete(at: bucket, with: self) + unsafe _storage._count -= 1 + unsafe _internalInvariant(_storage._count >= 0) invalidateIndices() } @@ -716,11 +717,11 @@ extension _NativeDictionary { // Deletion at bucket: Bucket, isUnique: Bool ) -> Element { - _internalInvariant(hashTable.isOccupied(bucket)) + unsafe _internalInvariant(hashTable.isOccupied(bucket)) let rehashed = ensureUnique(isUnique: isUnique, capacity: capacity) _internalInvariant(!rehashed) - let oldKey = (_keys + bucket.offset).move() - let oldValue = (_values + bucket.offset).move() + let oldKey = unsafe (_keys + bucket.offset).move() + let oldValue = unsafe (_values + bucket.offset).move() _delete(at: bucket) return (oldKey, oldValue) } @@ -728,19 +729,19 @@ extension _NativeDictionary { // Deletion @usableFromInline internal mutating func removeAll(isUnique: Bool) { guard isUnique else { - let scale = self._storage._scale - _storage = _DictionaryStorage.allocate( + let scale = unsafe self._storage._scale + _storage = unsafe _DictionaryStorage.allocate( scale: scale, age: nil, seed: nil) return } - for bucket in hashTable { - (_keys + bucket.offset).deinitialize(count: 1) - (_values + bucket.offset).deinitialize(count: 1) + for unsafe bucket in unsafe hashTable { + unsafe (_keys + bucket.offset).deinitialize(count: 1) + unsafe (_values + bucket.offset).deinitialize(count: 1) } - hashTable.clear() - _storage._count = 0 + unsafe hashTable.clear() + unsafe _storage._count = 0 
invalidateIndices() } } @@ -750,13 +751,13 @@ extension _NativeDictionary { // High-level operations internal func mapValues( _ transform: (Value) throws -> T ) rethrows -> _NativeDictionary { - let resultStorage = _DictionaryStorage.copy(original: _storage) - _internalInvariant(resultStorage._seed == _storage._seed) + let resultStorage = unsafe _DictionaryStorage.copy(original: _storage) + unsafe _internalInvariant(resultStorage._seed == _storage._seed) let result = _NativeDictionary(resultStorage) // Because the current and new buffer have the same scale and seed, we can // initialize to the same locations in the new buffer, skipping hash value // recalculations. - for bucket in hashTable { + for unsafe bucket in unsafe hashTable { let key = self.uncheckedKey(at: bucket) let value = self.uncheckedValue(at: bucket) try result._insert(at: bucket, key: key, value: transform(value)) @@ -777,7 +778,7 @@ extension _NativeDictionary { // High-level operations if found { do { let newValue = try combine(uncheckedValue(at: bucket), value) - _values[bucket.offset] = newValue + unsafe _values[bucket.offset] = newValue } catch _MergeError.keyCollision { #if !$Embedded fatalError("Duplicate values for key: '\(key)'") @@ -805,7 +806,7 @@ extension _NativeDictionary { // High-level operations if found { do throws(_MergeError) { let newValue = try combine(uncheckedValue(at: bucket), value) - _values[bucket.offset] = newValue + unsafe _values[bucket.offset] = newValue } catch { #if !$Embedded fatalError("Duplicate values for key: '\(key)'") @@ -831,7 +832,7 @@ extension _NativeDictionary { // High-level operations let key = try keyForValue(value) let (bucket, found) = mutatingFind(key, isUnique: true) if found { - _values[bucket.offset].append(value) + unsafe _values[bucket.offset].append(value) } else { _insert(at: bucket, key: key, value: [value]) } @@ -842,15 +843,15 @@ extension _NativeDictionary { // High-level operations internal func filter( _ isIncluded: (Element) throws -> Bool ) rethrows -> _NativeDictionary { - try _UnsafeBitset.withTemporaryBitset( + try unsafe _UnsafeBitset.withTemporaryBitset( capacity: _storage._bucketCount ) { bitset in var count = 0 - for bucket in hashTable { + for unsafe bucket in unsafe hashTable { if try isIncluded( (uncheckedKey(at: bucket), uncheckedValue(at: bucket)) ) { - bitset.uncheckedInsert(bucket.offset) + unsafe bitset.uncheckedInsert(bucket.offset) count += 1 } } @@ -862,6 +863,7 @@ extension _NativeDictionary { // High-level operations extension _NativeDictionary: Sequence { @usableFromInline @frozen + @safe internal struct Iterator { // The iterator is iterating over a frozen view of the collection state, so // it keeps its own reference to the dictionary. @@ -874,7 +876,7 @@ extension _NativeDictionary: Sequence { @inline(__always) init(_ base: __owned _NativeDictionary) { self.base = base - self.iterator = base.hashTable.makeIterator() + self.iterator = unsafe base.hashTable.makeIterator() } } @@ -894,21 +896,21 @@ extension _NativeDictionary.Iterator: IteratorProtocol { @inlinable @inline(__always) internal mutating func nextKey() -> Key? { - guard let index = iterator.next() else { return nil } + guard let index = unsafe iterator.next() else { return nil } return base.uncheckedKey(at: index) } @inlinable @inline(__always) internal mutating func nextValue() -> Value? 
{ - guard let index = iterator.next() else { return nil } + guard let index = unsafe iterator.next() else { return nil } return base.uncheckedValue(at: index) } @inlinable @inline(__always) internal mutating func next() -> Element? { - guard let index = iterator.next() else { return nil } + guard let index = unsafe iterator.next() else { return nil } let key = base.uncheckedKey(at: index) let value = base.uncheckedValue(at: index) return (key, value) diff --git a/stdlib/public/core/NativeSet.swift b/stdlib/public/core/NativeSet.swift index 9dbdde7bab05e..65de2c5c0f079 100644 --- a/stdlib/public/core/NativeSet.swift +++ b/stdlib/public/core/NativeSet.swift @@ -14,6 +14,7 @@ /// implementation of Set. @usableFromInline @frozen +@safe internal struct _NativeSet { /// See the comments on __RawSetStorage and its subclasses to understand why we /// store an untyped storage here. @@ -24,22 +25,22 @@ internal struct _NativeSet { @inlinable @inline(__always) internal init() { - self._storage = __RawSetStorage.empty + self._storage = unsafe __RawSetStorage.empty } /// Constructs a native set adopting the given storage. @inlinable @inline(__always) internal init(_ storage: __owned __RawSetStorage) { - self._storage = storage + self._storage = unsafe storage } @inlinable internal init(capacity: Int) { if capacity == 0 { - self._storage = __RawSetStorage.empty + self._storage = unsafe __RawSetStorage.empty } else { - self._storage = _SetStorage.allocate(capacity: capacity) + self._storage = unsafe _SetStorage.allocate(capacity: capacity) } } @@ -52,10 +53,10 @@ internal struct _NativeSet { @inlinable internal init(_ cocoa: __owned __CocoaSet, capacity: Int) { if capacity == 0 { - self._storage = __RawSetStorage.empty + self._storage = unsafe __RawSetStorage.empty } else { _internalInvariant(cocoa.count <= capacity) - self._storage = _SetStorage.convert(cocoa, capacity: capacity) + self._storage = unsafe _SetStorage.convert(cocoa, capacity: capacity) for element in cocoa { let nativeElement = _forceBridgeFromObjectiveC(element, Element.self) insertNew(nativeElement, isUnique: true) @@ -76,40 +77,40 @@ extension _NativeSet { // Primitive fields internal var capacity: Int { @inline(__always) get { - return _assumeNonNegative(_storage._capacity) + return unsafe _assumeNonNegative(_storage._capacity) } } @_alwaysEmitIntoClient @inline(__always) internal var bucketCount: Int { - _assumeNonNegative(_storage._bucketCount) + unsafe _assumeNonNegative(_storage._bucketCount) } @inlinable internal var hashTable: _HashTable { @inline(__always) get { - return _storage._hashTable + return unsafe _storage._hashTable } } @inlinable internal var age: Int32 { @inline(__always) get { - return _storage._age + return unsafe _storage._age } } // This API is unsafe and needs a `_fixLifetime` in the caller. 
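> Editor's note: the "needs a `_fixLifetime` in the caller" caveat above is why the public stdlib surface hands out interior pointers only through scoped `with...` closures. A caller-side sketch of that discipline (hypothetical code, compiled with `-strict-memory-safety`):

```swift
// The buffer pointer is only valid inside the closure, where the array is
// guaranteed to be kept alive; escaping it would leave a dangling pointer.
// Each pointer-related expression is acknowledged with `unsafe`.
func firstAndLast(of values: [Int]) -> (first: Int, last: Int)? {
  guard !values.isEmpty else { return nil }
  return unsafe values.withUnsafeBufferPointer { buffer in
    unsafe (first: buffer[0], last: buffer[buffer.count - 1])
  }
}
```
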
@inlinable internal var _elements: UnsafeMutablePointer { - return _storage._rawElements.assumingMemoryBound(to: Element.self) + return unsafe _storage._rawElements.assumingMemoryBound(to: Element.self) } @inlinable @inline(__always) internal func invalidateIndices() { - _storage._age &+= 1 + unsafe _storage._age &+= 1 } } @@ -118,8 +119,8 @@ extension _NativeSet { // Low-level unchecked operations @inline(__always) internal func uncheckedElement(at bucket: Bucket) -> Element { defer { _fixLifetime(self) } - _internalInvariant(hashTable.isOccupied(bucket)) - return _elements[bucket.offset] + unsafe _internalInvariant(hashTable.isOccupied(bucket)) + return unsafe _elements[bucket.offset] } @inlinable @@ -128,8 +129,8 @@ extension _NativeSet { // Low-level unchecked operations at bucket: Bucket, to element: __owned Element ) { - _internalInvariant(hashTable.isValid(bucket)) - (_elements + bucket.offset).initialize(to: element) + unsafe _internalInvariant(hashTable.isValid(bucket)) + unsafe (_elements + bucket.offset).initialize(to: element) } @_alwaysEmitIntoClient @inlinable // Introduced in 5.1 @@ -138,8 +139,8 @@ extension _NativeSet { // Low-level unchecked operations at bucket: Bucket, to element: __owned Element ) { - _internalInvariant(hashTable.isOccupied(bucket)) - (_elements + bucket.offset).pointee = element + unsafe _internalInvariant(hashTable.isOccupied(bucket)) + unsafe (_elements + bucket.offset).pointee = element } } @@ -147,7 +148,7 @@ extension _NativeSet { // Low-level lookup operations @inlinable @inline(__always) internal func hashValue(for element: Element) -> Int { - return element._rawHashValue(seed: _storage._seed) + return unsafe element._rawHashValue(seed: _storage._seed) } @inlinable @@ -167,14 +168,14 @@ extension _NativeSet { // Low-level lookup operations hashValue: Int ) -> (bucket: Bucket, found: Bool) { let hashTable = self.hashTable - var bucket = hashTable.idealBucket(forHashValue: hashValue) - while hashTable._isOccupied(bucket) { + var bucket = unsafe hashTable.idealBucket(forHashValue: hashValue) + while unsafe hashTable._isOccupied(bucket) { if uncheckedElement(at: bucket) == element { - return (bucket, true) + return unsafe (bucket, true) } - bucket = hashTable.bucket(wrappedAfter: bucket) + unsafe bucket = unsafe hashTable.bucket(wrappedAfter: bucket) } - return (bucket, false) + return unsafe (bucket, false) } } @@ -182,19 +183,19 @@ extension _NativeSet { // ensureUnique @inlinable internal mutating func resize(capacity: Int) { let capacity = Swift.max(capacity, self.capacity) - let result = _NativeSet(_SetStorage.resize( + let result = unsafe _NativeSet(_SetStorage.resize( original: _storage, capacity: capacity, move: true)) if count > 0 { - for bucket in hashTable { - let element = (self._elements + bucket.offset).move() + for unsafe bucket in unsafe hashTable { + let element = unsafe (self._elements + bucket.offset).move() result._unsafeInsertNew(element) } // Clear out old storage, ensuring that its deinit won't overrelease the // elements we've just moved out. 
- _storage._hashTable.clear() - _storage._count = 0 + unsafe _storage._hashTable.clear() + unsafe _storage._count = 0 } _storage = result._storage } @@ -202,12 +203,12 @@ extension _NativeSet { // ensureUnique @inlinable internal mutating func copyAndResize(capacity: Int) { let capacity = Swift.max(capacity, self.capacity) - let result = _NativeSet(_SetStorage.resize( + let result = unsafe _NativeSet(_SetStorage.resize( original: _storage, capacity: capacity, move: false)) if count > 0 { - for bucket in hashTable { + for unsafe bucket in unsafe hashTable { result._unsafeInsertNew(self.uncheckedElement(at: bucket)) } } @@ -216,15 +217,15 @@ extension _NativeSet { // ensureUnique @inlinable internal mutating func copy() { - let newStorage = _SetStorage.copy(original: _storage) - _internalInvariant(newStorage._scale == _storage._scale) - _internalInvariant(newStorage._age == _storage._age) - _internalInvariant(newStorage._seed == _storage._seed) + let newStorage = unsafe _SetStorage.copy(original: _storage) + unsafe _internalInvariant(newStorage._scale == _storage._scale) + unsafe _internalInvariant(newStorage._age == _storage._age) + unsafe _internalInvariant(newStorage._seed == _storage._seed) let result = _NativeSet(newStorage) if count > 0 { - result.hashTable.copyContents(of: hashTable) - result._storage._count = self.count - for bucket in hashTable { + unsafe result.hashTable.copyContents(of: hashTable) + unsafe result._storage._count = self.count + for unsafe bucket in unsafe hashTable { let element = uncheckedElement(at: bucket) result.uncheckedInitialize(at: bucket, to: element) } @@ -263,9 +264,9 @@ extension _NativeSet { @inlinable @inline(__always) func validatedBucket(for index: _HashTable.Index) -> Bucket { - _precondition(hashTable.isOccupied(index.bucket) && index.age == age, + unsafe _precondition(hashTable.isOccupied(index.bucket) && index.age == age, "Attempting to access Set elements using an invalid index") - return index.bucket + return unsafe index.bucket } @inlinable @@ -281,14 +282,14 @@ extension _NativeSet { let element = _forceBridgeFromObjectiveC(cocoa.element, Element.self) let (bucket, found) = find(element) if found { - return bucket + return unsafe bucket } } _preconditionFailure( "Attempting to access Set elements using an invalid index") } #endif - return validatedBucket(for: index._asNative) + return unsafe validatedBucket(for: index._asNative) } } @@ -298,22 +299,22 @@ extension _NativeSet: _SetBuffer { @inlinable internal var startIndex: Index { - let bucket = hashTable.startBucket - return Index(_native: _HashTable.Index(bucket: bucket, age: age)) + let bucket = unsafe hashTable.startBucket + return unsafe Index(_native: _HashTable.Index(bucket: bucket, age: age)) } @inlinable internal var endIndex: Index { - let bucket = hashTable.endBucket - return Index(_native: _HashTable.Index(bucket: bucket, age: age)) + let bucket = unsafe hashTable.endBucket + return unsafe Index(_native: _HashTable.Index(bucket: bucket, age: age)) } @inlinable internal func index(after index: Index) -> Index { // Note that _asNative forces this not to work on Cocoa indices. 
- let bucket = validatedBucket(for: index._asNative) - let next = hashTable.occupiedBucket(after: bucket) - return Index(_native: _HashTable.Index(bucket: next, age: age)) + let bucket = unsafe validatedBucket(for: index._asNative) + let next = unsafe hashTable.occupiedBucket(after: bucket) + return unsafe Index(_native: _HashTable.Index(bucket: next, age: age)) } @inlinable @@ -325,13 +326,13 @@ extension _NativeSet: _SetBuffer { } let (bucket, found) = find(element) guard found else { return nil } - return Index(_native: _HashTable.Index(bucket: bucket, age: age)) + return unsafe Index(_native: _HashTable.Index(bucket: bucket, age: age)) } @inlinable internal var count: Int { @inline(__always) get { - return _assumeNonNegative(_storage._count) + return unsafe _assumeNonNegative(_storage._count) } } @@ -389,13 +390,13 @@ extension _NativeSet { // Insertions fatalError("duplicate elements in a Set") #endif } - hashTable.insert(bucket) + unsafe hashTable.insert(bucket) uncheckedInitialize(at: bucket, to: element) } else { - let bucket = hashTable.insertNew(hashValue: hashValue) + let bucket = unsafe hashTable.insertNew(hashValue: hashValue) uncheckedInitialize(at: bucket, to: element) } - _storage._count &+= 1 + unsafe _storage._count &+= 1 } /// Insert a new element into uniquely held storage. @@ -409,9 +410,9 @@ extension _NativeSet { // Insertions @inlinable internal func _unsafeInsertNew(_ element: __owned Element, at bucket: Bucket) { - hashTable.insert(bucket) + unsafe hashTable.insert(bucket) uncheckedInitialize(at: bucket, to: element) - _storage._count += 1 + unsafe _storage._count += 1 } @inlinable @@ -420,8 +421,8 @@ extension _NativeSet { // Insertions at bucket: Bucket, isUnique: Bool ) { - _internalInvariant(!hashTable.isOccupied(bucket)) - var bucket = bucket + unsafe _internalInvariant(!hashTable.isOccupied(bucket)) + var bucket = unsafe bucket let rehashed = ensureUnique(isUnique: isUnique, capacity: count + 1) if rehashed { let (b, f) = find(element) @@ -432,7 +433,7 @@ extension _NativeSet { // Insertions fatalError("duplicate elements in a Set") #endif } - bucket = b + unsafe bucket = unsafe b } _unsafeInsertNew(element, at: bucket) } @@ -455,10 +456,10 @@ extension _NativeSet { // Insertions fatalError("duplicate elements in a Set") #endif } - bucket = b + unsafe bucket = unsafe b } if found { - let old = (_elements + bucket.offset).move() + let old = unsafe (_elements + bucket.offset).move() uncheckedInitialize(at: bucket, to: element) return old } @@ -486,7 +487,7 @@ extension _NativeSet { @inlinable @inline(__always) func isEqual(to other: _NativeSet) -> Bool { - if self._storage === other._storage { return true } + if unsafe self._storage === other._storage { return true } if self.count != other.count { return false } for member in self { @@ -501,7 +502,7 @@ extension _NativeSet { if self.count != other.count { return false } defer { _fixLifetime(self) } - for bucket in self.hashTable { + for bucket in unsafe self.hashTable { let key = self.uncheckedElement(at: bucket) let bridgedKey = _bridgeAnythingToObjectiveC(key) guard other.contains(bridgedKey) else { return false } @@ -521,7 +522,7 @@ extension _NativeSet: _HashTableDelegate { @inlinable @inline(__always) internal func moveEntry(from source: Bucket, to target: Bucket) { - (_elements + target.offset) + unsafe (_elements + target.offset) .moveInitialize(from: _elements + source.offset, count: 1) } } @@ -530,9 +531,9 @@ extension _NativeSet { // Deletion @inlinable @_effects(releasenone) internal mutating func 
_delete(at bucket: Bucket) { - hashTable.delete(at: bucket, with: self) - _storage._count -= 1 - _internalInvariant(_storage._count >= 0) + unsafe hashTable.delete(at: bucket, with: self) + unsafe _storage._count -= 1 + unsafe _internalInvariant(_storage._count >= 0) invalidateIndices() } @@ -541,10 +542,10 @@ extension _NativeSet { // Deletion internal mutating func uncheckedRemove( at bucket: Bucket, isUnique: Bool) -> Element { - _internalInvariant(hashTable.isOccupied(bucket)) + unsafe _internalInvariant(hashTable.isOccupied(bucket)) let rehashed = ensureUnique(isUnique: isUnique, capacity: capacity) _internalInvariant(!rehashed) - let old = (_elements + bucket.offset).move() + let old = unsafe (_elements + bucket.offset).move() _delete(at: bucket) return old } @@ -552,18 +553,18 @@ extension _NativeSet { // Deletion @usableFromInline internal mutating func removeAll(isUnique: Bool) { guard isUnique else { - let scale = self._storage._scale - _storage = _SetStorage.allocate( + let scale = unsafe self._storage._scale + _storage = unsafe _SetStorage.allocate( scale: scale, age: nil, seed: nil) return } - for bucket in hashTable { - (_elements + bucket.offset).deinitialize(count: 1) + for unsafe bucket in unsafe hashTable { + unsafe (_elements + bucket.offset).deinitialize(count: 1) } - hashTable.clear() - _storage._count = 0 + unsafe hashTable.clear() + unsafe _storage._count = 0 invalidateIndices() } } @@ -571,6 +572,7 @@ extension _NativeSet { // Deletion extension _NativeSet: Sequence { @usableFromInline @frozen + @safe internal struct Iterator { // The iterator is iterating over a frozen view of the collection state, so // it keeps its own reference to the set. @@ -583,7 +585,7 @@ extension _NativeSet: Sequence { @inline(__always) init(_ base: __owned _NativeSet) { self.base = base - self.iterator = base.hashTable.makeIterator() + self.iterator = unsafe base.hashTable.makeIterator() } } @@ -601,7 +603,7 @@ extension _NativeSet.Iterator: IteratorProtocol { @inlinable @inline(__always) internal mutating func next() -> Element? { - guard let index = iterator.next() else { return nil } + guard let index = unsafe iterator.next() else { return nil } return base.uncheckedElement(at: index) } } @@ -610,13 +612,13 @@ extension _NativeSet { @_alwaysEmitIntoClient internal func isSubset(of possibleSuperset: S) -> Bool where S.Element == Element { - _UnsafeBitset.withTemporaryBitset(capacity: self.bucketCount) { seen in + unsafe _UnsafeBitset.withTemporaryBitset(capacity: self.bucketCount) { seen in // Mark elements in self that we've seen in `possibleSuperset`. var seenCount = 0 for element in possibleSuperset { let (bucket, found) = find(element) guard found else { continue } - let inserted = seen.uncheckedInsert(bucket.offset) + let inserted = unsafe seen.uncheckedInsert(bucket.offset) if inserted { seenCount += 1 if seenCount == self.count { @@ -631,7 +633,7 @@ extension _NativeSet { @_alwaysEmitIntoClient internal func isStrictSubset(of possibleSuperset: S) -> Bool where S.Element == Element { - _UnsafeBitset.withTemporaryBitset(capacity: self.bucketCount) { seen in + unsafe _UnsafeBitset.withTemporaryBitset(capacity: self.bucketCount) { seen in // Mark elements in self that we've seen in `possibleSuperset`. 
var seenCount = 0 var isStrict = false @@ -644,7 +646,7 @@ extension _NativeSet { } continue } - let inserted = seen.uncheckedInsert(bucket.offset) + let inserted = unsafe seen.uncheckedInsert(bucket.offset) if inserted { seenCount += 1 if seenCount == self.count, isStrict { @@ -659,13 +661,13 @@ extension _NativeSet { @_alwaysEmitIntoClient internal func isStrictSuperset(of possibleSubset: S) -> Bool where S.Element == Element { - _UnsafeBitset.withTemporaryBitset(capacity: self.bucketCount) { seen in + unsafe _UnsafeBitset.withTemporaryBitset(capacity: self.bucketCount) { seen in // Mark elements in self that we've seen in `possibleStrictSubset`. var seenCount = 0 for element in possibleSubset { let (bucket, found) = find(element) guard found else { return false } - let inserted = seen.uncheckedInsert(bucket.offset) + let inserted = unsafe seen.uncheckedInsert(bucket.offset) if inserted { seenCount += 1 if seenCount == self.count { @@ -686,8 +688,8 @@ extension _NativeSet { if count == 0 { return _NativeSet() } if count == self.count { return self } let result = _NativeSet(capacity: count) - for offset in bitset { - result._unsafeInsertNew(self.uncheckedElement(at: Bucket(offset: offset))) + for unsafe offset in unsafe bitset { + unsafe result._unsafeInsertNew(self.uncheckedElement(at: Bucket(offset: offset))) // The hash table can have set bits after the end of the bitmap. // Ignore them. count -= 1 @@ -707,33 +709,33 @@ extension _NativeSet { while let next = it.next() { let (b, found) = find(next) if found { - bucket = b + unsafe bucket = unsafe b break } } - guard let bucket = bucket else { return self } + guard let bucket = unsafe bucket else { return self } // Rather than directly creating a new set, calculate the difference in a // bitset first. This ensures we hash each element (in both sets) only once, // and that we'll have an exact count for the result set, preventing // rehashings during insertions. - return _UnsafeBitset.withTemporaryCopy(of: hashTable.bitset) { difference in + return unsafe _UnsafeBitset.withTemporaryCopy(of: hashTable.bitset) { difference in var remainingCount = self.count - let removed = difference.uncheckedRemove(bucket.offset) + let removed = unsafe difference.uncheckedRemove(bucket.offset) _internalInvariant(removed) remainingCount -= 1 while let element = it.next() { let (bucket, found) = find(element) if found { - if difference.uncheckedRemove(bucket.offset) { + if unsafe difference.uncheckedRemove(bucket.offset) { remainingCount -= 1 if remainingCount == 0 { return _NativeSet() } } } } - _internalInvariant(difference.count > 0) + unsafe _internalInvariant(difference.count > 0) return extractSubset(using: difference, count: remainingCount) } } @@ -742,11 +744,11 @@ extension _NativeSet { internal __consuming func filter( _ isIncluded: (Element) throws -> Bool ) rethrows -> _NativeSet { - try _UnsafeBitset.withTemporaryBitset(capacity: bucketCount) { bitset in + try unsafe _UnsafeBitset.withTemporaryBitset(capacity: bucketCount) { bitset in var count = 0 - for bucket in hashTable { + for unsafe bucket in unsafe hashTable { if try isIncluded(uncheckedElement(at: bucket)) { - bitset.uncheckedInsert(bucket.offset) + unsafe bitset.uncheckedInsert(bucket.offset) count += 1 } } @@ -762,7 +764,7 @@ extension _NativeSet { // bitset first. This minimizes hashing, and ensures that we'll have an // exact count for the result set, preventing rehashings during // insertions. 
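
The hunks above and below share one pattern: the set-algebra methods compute their result inside a scoped temporary bitset, and every operation on that bitset is now acknowledged with an `unsafe` expression. A minimal standalone sketch of the same pattern using the public `withUnsafeTemporaryAllocation` API (the `histogram` helper is hypothetical and not part of this patch; it assumes a Swift 6.2 toolchain, and the `unsafe` markers only matter under `-strict-memory-safety`):

```swift
func histogram(of values: [UInt8]) -> [Int] {
    // Scoped temporary buffer; the pointer must not escape the closure.
    return unsafe withUnsafeTemporaryAllocation(of: Int.self, capacity: 256) { counts -> [Int] in
        unsafe counts.initialize(repeating: 0)
        for value in values {
            // The index is always within 0..<256, but the subscript is still
            // an unsafe-pointer operation and has to be acknowledged.
            unsafe counts[Int(value)] += 1
        }
        return unsafe Array(counts)
    }
}
```
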
- _UnsafeBitset.withTemporaryBitset(capacity: bucketCount) { bitset in + unsafe _UnsafeBitset.withTemporaryBitset(capacity: bucketCount) { bitset in var count = 0 // Prefer to iterate over the smaller set. However, we must be careful to // only include elements from `self`, not `other`. @@ -771,14 +773,14 @@ extension _NativeSet { let (bucket, found) = find(element) if found { // `other` is a `Set`, so we can assume it doesn't have duplicates. - bitset.uncheckedInsert(bucket.offset) + unsafe bitset.uncheckedInsert(bucket.offset) count += 1 } } } else { - for bucket in hashTable { + for unsafe bucket in unsafe hashTable { if other.find(uncheckedElement(at: bucket)).found { - bitset.uncheckedInsert(bucket.offset) + unsafe bitset.uncheckedInsert(bucket.offset) count += 1 } } @@ -795,13 +797,13 @@ extension _NativeSet { // Rather than directly creating a new set, mark common elements in a bitset // first. This minimizes hashing, and ensures that we'll have an exact count // for the result set, preventing rehashings during insertions. - _UnsafeBitset.withTemporaryBitset(capacity: bucketCount) { bitset in + unsafe _UnsafeBitset.withTemporaryBitset(capacity: bucketCount) { bitset in var count = 0 for element in other { let (bucket, found) = find(element) // Note: we need to be careful not to increment `count` here if the // element is a duplicate item. - if found, bitset.uncheckedInsert(bucket.offset) { + if found, unsafe bitset.uncheckedInsert(bucket.offset) { count += 1 } } diff --git a/stdlib/public/core/NewtypeWrapper.swift b/stdlib/public/core/NewtypeWrapper.swift index ac6464198ad07..ba21f17aa24ad 100644 --- a/stdlib/public/core/NewtypeWrapper.swift +++ b/stdlib/public/core/NewtypeWrapper.swift @@ -87,11 +87,11 @@ where Base: _SwiftNewtypeWrapper & Hashable, Base.RawValue: Hashable { func _downCastConditional(into result: UnsafeMutablePointer) -> Bool { if let value = _value as? T { - result.initialize(to: value) + unsafe result.initialize(to: value) return true } if let value = _value.rawValue as? T { - result.initialize(to: value) + unsafe result.initialize(to: value) return true } return false diff --git a/stdlib/public/core/ObjectIdentifier.swift b/stdlib/public/core/ObjectIdentifier.swift index 32ce456ee6d5c..b03ed5920ac15 100644 --- a/stdlib/public/core/ObjectIdentifier.swift +++ b/stdlib/public/core/ObjectIdentifier.swift @@ -63,7 +63,7 @@ public struct ObjectIdentifier: Sendable { /// - x: A metatype. @inlinable // trivial-implementation public init(_ x: Any.Type) { - self._value = unsafeBitCast(x, to: Builtin.RawPointer.self) + self._value = unsafe unsafeBitCast(x, to: Builtin.RawPointer.self) } } @@ -81,7 +81,7 @@ public struct ObjectIdentifier: Sendable { @inlinable // trivial-implementation public init(_ x: Object.Type) { - self._value = unsafeBitCast(x, to: Builtin.RawPointer.self) + self._value = unsafe unsafeBitCast(x, to: Builtin.RawPointer.self) } } diff --git a/stdlib/public/core/OutputStream.swift b/stdlib/public/core/OutputStream.swift index dc32a240d16c4..39c8d01308f1d 100644 --- a/stdlib/public/core/OutputStream.swift +++ b/stdlib/public/core/OutputStream.swift @@ -81,7 +81,7 @@ extension TextOutputStream { public mutating func _unlock() {} public mutating func _writeASCII(_ buffer: UnsafeBufferPointer) { - write(String._fromASCII(buffer)) + unsafe write(String._fromASCII(buffer)) } } @@ -284,13 +284,13 @@ internal func _opaqueSummary(_ metadata: Any.Type) -> UnsafePointer? internal func _fallbackEnumRawValue(_ value: T) -> Int64? 
{ switch MemoryLayout.size(ofValue: value) { case 8: - return unsafeBitCast(value, to:Int64.self) + return unsafe unsafeBitCast(value, to:Int64.self) case 4: - return Int64(unsafeBitCast(value, to:Int32.self)) + return unsafe Int64(unsafeBitCast(value, to:Int32.self)) case 2: - return Int64(unsafeBitCast(value, to:Int16.self)) + return unsafe Int64(unsafeBitCast(value, to:Int16.self)) case 1: - return Int64(unsafeBitCast(value, to:Int8.self)) + return unsafe Int64(unsafeBitCast(value, to:Int8.self)) default: return nil } @@ -354,8 +354,8 @@ internal func _adHocPrint_unlocked( } target.write(")") case .enum: - if let cString = _getEnumCaseName(value), - let caseName = String(validatingCString: cString) { + if let cString = unsafe _getEnumCaseName(value), + let caseName = unsafe String(validatingCString: cString) { // Write the qualified type name in debugPrint. if isDebugPrint { printTypeName(mirror.subjectType) @@ -389,8 +389,8 @@ internal func _adHocPrint_unlocked( printTypeName(metatypeValue) } else { // Fall back to the type or an opaque summary of the kind - if let cString = _opaqueSummary(mirror.subjectType), - let opaqueSummary = String(validatingCString: cString) { + if let cString = unsafe _opaqueSummary(mirror.subjectType), + let opaqueSummary = unsafe String(validatingCString: cString) { target.write(opaqueSummary) } else { target.write(_typeName(mirror.subjectType, qualified: true)) @@ -533,8 +533,8 @@ internal func _dumpPrint_unlocked( return case .`enum`: target.write(_typeName(mirror.subjectType, qualified: true)) - if let cString = _getEnumCaseName(value), - let caseName = String(validatingCString: cString) { + if let cString = unsafe _getEnumCaseName(value), + let caseName = unsafe String(validatingCString: cString) { target.write(".") target.write(caseName) } @@ -568,7 +568,7 @@ internal struct _Stdout: TextOutputStream { var string = string _ = string.withUTF8 { utf8 in - _swift_stdlib_fwrite_stdout(utf8.baseAddress!, 1, utf8.count) + unsafe _swift_stdlib_fwrite_stdout(utf8.baseAddress!, 1, utf8.count) } } } @@ -582,7 +582,7 @@ extension String: TextOutputStream { } public mutating func _writeASCII(_ buffer: UnsafeBufferPointer) { - self._guts.append(_StringGuts(buffer, isASCII: true)) + unsafe self._guts.append(_StringGuts(buffer, isASCII: true)) } } diff --git a/stdlib/public/core/Pointer.swift b/stdlib/public/core/Pointer.swift index b5ec5e79cc064..9f98888504d7d 100644 --- a/stdlib/public/core/Pointer.swift +++ b/stdlib/public/core/Pointer.swift @@ -50,7 +50,7 @@ extension _Pointer { /// - Parameter from: The opaque pointer to convert to a typed pointer. @_transparent public init(_ from: OpaquePointer) { - self.init(from._rawValue) + unsafe self.init(from._rawValue) } /// Creates a new typed pointer from the given opaque pointer. @@ -59,8 +59,8 @@ extension _Pointer { /// `from` is `nil`, the result of this initializer is `nil`. @_transparent public init?(_ from: OpaquePointer?) 
{ - guard let unwrapped = from else { return nil } - self.init(unwrapped) + guard let unwrapped = unsafe from else { return nil } + unsafe self.init(unwrapped) } /// Creates a new pointer from the given address, specified as a bit @@ -432,14 +432,14 @@ func _convertConstArrayToPointerArgument< FromElement, ToPointer: _Pointer >(_ arr: [FromElement]) -> (AnyObject?, ToPointer) { - let (owner, opaquePointer) = arr._cPointerArgs() + let (owner, opaquePointer) = unsafe arr._cPointerArgs() let validPointer: ToPointer - if let addr = opaquePointer { + if let addr = unsafe opaquePointer { validPointer = ToPointer(addr._rawValue) } else { let lastAlignedValue = ~(MemoryLayout.alignment - 1) - let lastAlignedPointer = UnsafeRawPointer(bitPattern: lastAlignedValue)! + let lastAlignedPointer = unsafe UnsafeRawPointer(bitPattern: lastAlignedValue)! validPointer = ToPointer(lastAlignedPointer._rawValue) } return (owner, validPointer) @@ -451,14 +451,14 @@ func _convertConstArrayToPointerArgument< FromElement, ToPointer: _Pointer >(_ arr: [FromElement]) -> (Builtin.NativeObject?, ToPointer) { - let (owner, opaquePointer) = arr._cPointerArgs() + let (owner, opaquePointer) = unsafe arr._cPointerArgs() let validPointer: ToPointer - if let addr = opaquePointer { + if let addr = unsafe opaquePointer { validPointer = ToPointer(addr._rawValue) } else { let lastAlignedValue = ~(MemoryLayout.alignment - 1) - let lastAlignedPointer = UnsafeRawPointer(bitPattern: lastAlignedValue)! + let lastAlignedPointer = unsafe UnsafeRawPointer(bitPattern: lastAlignedValue)! validPointer = ToPointer(lastAlignedPointer._rawValue) } return (owner, validPointer) @@ -480,7 +480,7 @@ func _convertMutableArrayToPointerArgument< // Call reserve to force contiguous storage. a.reserveCapacity(0) - _debugPrecondition(a._baseAddressIfContiguous != nil || a.isEmpty) + unsafe _debugPrecondition(a._baseAddressIfContiguous != nil || a.isEmpty) return _convertConstArrayToPointerArgument(a) } @@ -496,7 +496,7 @@ func _convertMutableArrayToPointerArgument< // Call reserve to force contiguous storage. 
a.reserveCapacity(0) - _debugPrecondition(a._baseAddressIfContiguous != nil || a.isEmpty) + _debugPrecondition(unsafe a._baseAddressIfContiguous != nil || a.isEmpty) return _convertConstArrayToPointerArgument(a) } diff --git a/stdlib/public/core/Prespecialize.swift b/stdlib/public/core/Prespecialize.swift index bd37c86400170..5fad8802d4c24 100644 --- a/stdlib/public/core/Prespecialize.swift +++ b/stdlib/public/core/Prespecialize.swift @@ -103,7 +103,7 @@ internal func _prespecialize() { consume(Optional>.self) consume(Optional.self) consume(Optional.self) - consume(Optional>.self) + unsafe consume(Optional>.self) consume(PartialRangeFrom.self) consume(Range.self) consume(ReversedCollection>.self) @@ -112,10 +112,10 @@ internal func _prespecialize() { consume(Set.self) consume(Set.Iterator.self) consume(Set.self) - consume(Unmanaged.self) - consume(UnsafeBufferPointer.self) - consume(UnsafeBufferPointer.self) - consume(UnsafePointer.self) + unsafe consume(Unmanaged.self) + unsafe consume(UnsafeBufferPointer.self) + unsafe consume(UnsafeBufferPointer.self) + unsafe consume(UnsafePointer.self) } @_specializeExtension diff --git a/stdlib/public/core/PtrAuth.swift b/stdlib/public/core/PtrAuth.swift index c54d2175b50d9..80207f67cc732 100644 --- a/stdlib/public/core/PtrAuth.swift +++ b/stdlib/public/core/PtrAuth.swift @@ -90,7 +90,7 @@ internal enum _PtrAuth { static func blend(pointer: UnsafeRawPointer, discriminator: UInt64) -> UInt64 { return UInt64(Builtin.int_ptrauth_blend( - UInt64(UInt(bitPattern: pointer))._value, + UInt64(unsafe UInt(bitPattern: pointer))._value, discriminator._value)) } @@ -101,11 +101,11 @@ internal enum _PtrAuth { key: Key, discriminator: UInt64) -> UnsafeRawPointer { let bitPattern = UInt64(Builtin.int_ptrauth_sign( - UInt64(UInt(bitPattern: pointer))._value, + UInt64(unsafe UInt(bitPattern: pointer))._value, key._value._value, discriminator._value)) - return UnsafeRawPointer(bitPattern: + return unsafe UnsafeRawPointer(bitPattern: UInt(truncatingIfNeeded: bitPattern)).unsafelyUnwrapped } @@ -118,13 +118,13 @@ internal enum _PtrAuth { newKey: Key, newDiscriminator: UInt64) -> UnsafeRawPointer { let bitPattern = UInt64(Builtin.int_ptrauth_resign( - UInt64(UInt(bitPattern: pointer))._value, + UInt64(unsafe UInt(bitPattern: pointer))._value, oldKey._value._value, oldDiscriminator._value, newKey._value._value, newDiscriminator._value)) - return UnsafeRawPointer(bitPattern: + return unsafe UnsafeRawPointer(bitPattern: UInt(truncatingIfNeeded: bitPattern)).unsafelyUnwrapped } @@ -150,7 +150,7 @@ internal enum _PtrAuth { static func sign(pointer: UnsafeRawPointer, key: Key, discriminator: UInt64) -> UnsafeRawPointer { - return pointer + return unsafe pointer } /// Authenticate a pointer using one scheme and resign it using another. @@ -160,7 +160,7 @@ internal enum _PtrAuth { oldDiscriminator: UInt64, newKey: Key, newDiscriminator: UInt64) -> UnsafeRawPointer { - return pointer + return unsafe pointer } /// Get the type-specific discriminator for a function type. 
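
The pointer-authentication shims above round-trip pointers through their integer bit patterns, and each conversion is now spelled with `unsafe`. A self-contained sketch of that conversion in isolation (the `bitPatternRoundTrip` function is hypothetical; Swift 6.2 assumed, and diagnostics for the unannotated form appear only under `-strict-memory-safety`):

```swift
func bitPatternRoundTrip() -> Bool {
    let pointer = unsafe UnsafeMutableRawPointer.allocate(byteCount: 8, alignment: 8)
    defer { unsafe pointer.deallocate() }

    // Pointer -> integer: discards provenance, so it is a memory-unsafe operation.
    let bits = unsafe UInt(bitPattern: pointer)

    // Integer -> pointer: yields an optional, nil only for a zero bit pattern.
    let recovered = unsafe UnsafeMutableRawPointer(bitPattern: bits)

    return unsafe recovered == pointer
}
```
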
@@ -185,19 +185,19 @@ extension UnsafeRawPointer { as type: T.Type, discriminator: UInt64 ) -> T { - let src = self + offset + let src = unsafe self + offset - let srcDiscriminator = _PtrAuth.blend(pointer: src, + let srcDiscriminator = unsafe _PtrAuth.blend(pointer: src, discriminator: discriminator) - let ptr = src.load(as: UnsafeRawPointer.self) - let resigned = _PtrAuth.authenticateAndResign( + let ptr = unsafe src.load(as: UnsafeRawPointer.self) + let resigned = unsafe _PtrAuth.authenticateAndResign( pointer: ptr, oldKey: .processIndependentCode, oldDiscriminator: srcDiscriminator, newKey: .processIndependentCode, newDiscriminator: _PtrAuth.discriminator(for: type)) - return unsafeBitCast(resigned, to: type) + return unsafe unsafeBitCast(resigned, to: type) } @_semantics("no.preserve.debugger") // Don't keep the generic version alive @@ -207,21 +207,21 @@ extension UnsafeRawPointer { as type: Optional.Type, discriminator: UInt64 ) -> Optional { - let src = self + offset + let src = unsafe self + offset - let srcDiscriminator = _PtrAuth.blend(pointer: src, + let srcDiscriminator = unsafe _PtrAuth.blend(pointer: src, discriminator: discriminator) - guard let ptr = src.load(as: Optional.self) else { + guard let ptr = unsafe src.load(as: Optional.self) else { return nil } - let resigned = _PtrAuth.authenticateAndResign( + let resigned = unsafe _PtrAuth.authenticateAndResign( pointer: ptr, oldKey: .processIndependentCode, oldDiscriminator: srcDiscriminator, newKey: .processIndependentCode, newDiscriminator: _PtrAuth.discriminator(for: T.self)) - return .some(unsafeBitCast(resigned, to: T.self)) + return unsafe .some(unsafeBitCast(resigned, to: T.self)) } } @@ -233,22 +233,22 @@ extension UnsafeMutableRawPointer { from src: UnsafeRawPointer, discriminator: UInt64 ) { - if src == UnsafeRawPointer(self) { return } + if unsafe src == UnsafeRawPointer(self) { return } - let srcDiscriminator = _PtrAuth.blend(pointer: src, + let srcDiscriminator = unsafe _PtrAuth.blend(pointer: src, discriminator: discriminator) - let destDiscriminator = _PtrAuth.blend(pointer: self, + let destDiscriminator = unsafe _PtrAuth.blend(pointer: self, discriminator: discriminator) - let ptr = src.load(as: UnsafeRawPointer.self) - let resigned = _PtrAuth.authenticateAndResign( + let ptr = unsafe src.load(as: UnsafeRawPointer.self) + let resigned = unsafe _PtrAuth.authenticateAndResign( pointer: ptr, oldKey: .processIndependentCode, oldDiscriminator: srcDiscriminator, newKey: .processIndependentCode, newDiscriminator: destDiscriminator) - storeBytes(of: resigned, as: UnsafeRawPointer.self) + unsafe storeBytes(of: resigned, as: UnsafeRawPointer.self) } @_transparent @@ -256,11 +256,11 @@ extension UnsafeMutableRawPointer { _ unsignedPointer: UnsafeRawPointer, discriminator: UInt64 ) { - let destDiscriminator = _PtrAuth.blend(pointer: self, + let destDiscriminator = unsafe _PtrAuth.blend(pointer: self, discriminator: discriminator) - let signed = _PtrAuth.sign(pointer: unsignedPointer, + let signed = unsafe _PtrAuth.sign(pointer: unsignedPointer, key: .processIndependentCode, discriminator: destDiscriminator) - storeBytes(of: signed, as: UnsafeRawPointer.self) + unsafe storeBytes(of: signed, as: UnsafeRawPointer.self) } } diff --git a/stdlib/public/core/Random.swift b/stdlib/public/core/Random.swift index d0895664ffdb0..3e2459e043756 100644 --- a/stdlib/public/core/Random.swift +++ b/stdlib/public/core/Random.swift @@ -158,8 +158,8 @@ public struct SystemRandomNumberGenerator: RandomNumberGenerator, Sendable { @inlinable 
public mutating func next() -> UInt64 { var random: UInt64 = 0 - _withUnprotectedUnsafeMutablePointer(to: &random) { - swift_stdlib_random($0, MemoryLayout.size) + unsafe _withUnprotectedUnsafeMutablePointer(to: &random) { + unsafe swift_stdlib_random($0, MemoryLayout.size) } return random } diff --git a/stdlib/public/core/RandomAccessCollection.swift b/stdlib/public/core/RandomAccessCollection.swift index 0ac96e36d1767..ba141440e2e68 100644 --- a/stdlib/public/core/RandomAccessCollection.swift +++ b/stdlib/public/core/RandomAccessCollection.swift @@ -293,7 +293,7 @@ where Index: Strideable, @inlinable public func index(after i: Index) -> Index { // FIXME: swift-3-indexing-model: tests for the trap. - _failEarlyRangeCheck( + unsafe _failEarlyRangeCheck( i, bounds: Range(uncheckedBounds: (startIndex, endIndex))) return i.advanced(by: 1) } @@ -307,7 +307,7 @@ where Index: Strideable, public func index(before i: Index) -> Index { let result = i.advanced(by: -1) // FIXME: swift-3-indexing-model: tests for the trap. - _failEarlyRangeCheck( + unsafe _failEarlyRangeCheck( result, bounds: Range(uncheckedBounds: (startIndex, endIndex))) return result } @@ -343,7 +343,7 @@ where Index: Strideable, // compute those bounds, which is probably too slow in the general // case. // FIXME: swift-3-indexing-model: tests for the trap. - _failEarlyRangeCheck( + unsafe _failEarlyRangeCheck( result, bounds: ClosedRange(uncheckedBounds: (startIndex, endIndex))) return result } @@ -360,9 +360,9 @@ where Index: Strideable, @inlinable public func distance(from start: Index, to end: Index) -> Index.Stride { // FIXME: swift-3-indexing-model: tests for traps. - _failEarlyRangeCheck( + unsafe _failEarlyRangeCheck( start, bounds: ClosedRange(uncheckedBounds: (startIndex, endIndex))) - _failEarlyRangeCheck( + unsafe _failEarlyRangeCheck( end, bounds: ClosedRange(uncheckedBounds: (startIndex, endIndex))) return start.distance(to: end) } diff --git a/stdlib/public/core/RangeSetRanges.swift b/stdlib/public/core/RangeSetRanges.swift index 32078b0c6aaf7..7d5797f8bcdeb 100644 --- a/stdlib/public/core/RangeSetRanges.swift +++ b/stdlib/public/core/RangeSetRanges.swift @@ -75,7 +75,7 @@ extension RangeSet { let newUpper = Swift.max( _storage[lastValid].upperBound, _storage[current].upperBound) - _storage[lastValid] = Range( + _storage[lastValid] = unsafe Range( uncheckedBounds: (_storage[lastValid].lowerBound, newUpper)) } else { // Otherwise, this is a valid new range to add to the range set: diff --git a/stdlib/public/core/ReflectionMirror.swift b/stdlib/public/core/ReflectionMirror.swift index 3ea429fe20d59..308cce2353122 100644 --- a/stdlib/public/core/ReflectionMirror.swift +++ b/stdlib/public/core/ReflectionMirror.swift @@ -17,7 +17,7 @@ import SwiftShims internal func _isClassType(_ type: Any.Type) -> Bool { // a thick metatype is represented with a pointer metadata structure, // so this unsafeBitCast is a safe operation here. - return swift_isClassType(unsafeBitCast(type, to: UnsafeRawPointer.self)) + return unsafe swift_isClassType(unsafeBitCast(type, to: UnsafeRawPointer.self)) } @_silgen_name("swift_getMetadataKind") @@ -64,10 +64,10 @@ internal func getChild(of value: T, type: Any.Type, index: Int) -> (label: St var nameC: UnsafePointer? = nil var freeFunc: NameFreeFunc? 
= nil - let value = _getChild(of: value, type: type, index: index, outName: &nameC, outFreeFunc: &freeFunc) + let value = unsafe _getChild(of: value, type: type, index: index, outName: &nameC, outFreeFunc: &freeFunc) - let name = nameC.flatMap({ String(validatingCString: $0) }) - freeFunc?(nameC) + let name = unsafe nameC.flatMap({ unsafe String(validatingCString: $0) }) + unsafe freeFunc?(nameC) return (name, value) } @@ -79,8 +79,8 @@ internal func _getQuickLookObject(_: T) -> AnyObject? internal func _isImpl(_ object: AnyObject, kindOf: UnsafePointer) -> Bool internal func _is(_ object: AnyObject, kindOf `class`: String) -> Bool { - return `class`.withCString { - return _isImpl(object, kindOf: $0) + return unsafe `class`.withCString { + return unsafe _isImpl(object, kindOf: $0) } } @@ -88,8 +88,8 @@ internal func _getClassPlaygroundQuickLook( _ object: AnyObject ) -> _PlaygroundQuickLook? { if _is(object, kindOf: "NSNumber") { - let number: _NSNumber = unsafeBitCast(object, to: _NSNumber.self) - switch UInt8(number.objCType[0]) { + let number: _NSNumber = unsafe unsafeBitCast(object, to: _NSNumber.self) + switch unsafe UInt8(number.objCType[0]) { case UInt8(ascii: "d"): return .double(number.doubleValue) case UInt8(ascii: "f"): @@ -285,17 +285,17 @@ public func _forEachField( for i in 0..( for i in 0..( default: supportedType = false } - if !supportedType || !field.isStrong { + if unsafe !supportedType || !field.isStrong { if !ignoreUnknown { return false } continue; } func keyPathType(for: Leaf.Type) -> PartialKeyPath.Type { - if field.isVar { return WritableKeyPath.self } + if unsafe field.isVar { return WritableKeyPath.self } return KeyPath.self } let resultSize = MemoryLayout.size + MemoryLayout.size let partialKeyPath = _openExistential(childType, do: keyPathType) ._create(capacityInBytes: resultSize) { - var destBuilder = KeyPathBuffer.Builder($0) - destBuilder.pushHeader(KeyPathBuffer.Header( + var destBuilder = unsafe KeyPathBuffer.Builder($0) + unsafe destBuilder.pushHeader(KeyPathBuffer.Header( size: resultSize - MemoryLayout.size, trivial: true, hasReferencePrefix: false, isSingleComponent: true )) - let component = RawKeyPathComponent( + let component = unsafe RawKeyPathComponent( header: RawKeyPathComponent.Header(stored: .struct, mutable: field.isVar, inlineOffset: UInt32(offset)), body: UnsafeRawBufferPointer(start: nil, count: 0)) - component.clone( + unsafe component.clone( into: &destBuilder.buffer, endOfReferencePrefix: false) } - if let name = field.name { - if !body(name, partialKeyPath) { + if let name = unsafe field.name { + if unsafe !body(name, partialKeyPath) { return false } } else { - if !body("", partialKeyPath) { + if unsafe !body("", partialKeyPath) { return false } } diff --git a/stdlib/public/core/Runtime.swift b/stdlib/public/core/Runtime.swift index ccd68b6353079..e391f60d30483 100644 --- a/stdlib/public/core/Runtime.swift +++ b/stdlib/public/core/Runtime.swift @@ -28,11 +28,11 @@ func _stdlib_atomicCompareExchangeStrongPtr( desired: UnsafeRawPointer? ) -> Bool { // We use Builtin.Word here because Builtin.RawPointer can't be nil. 
- let (oldValue, won) = Builtin.cmpxchg_seqcst_seqcst_Word( + let (oldValue, won) = unsafe Builtin.cmpxchg_seqcst_seqcst_Word( target._rawValue, UInt(bitPattern: expected.pointee)._builtinWordValue, UInt(bitPattern: desired)._builtinWordValue) - expected.pointee = UnsafeRawPointer(bitPattern: Int(oldValue)) + unsafe expected.pointee = UnsafeRawPointer(bitPattern: Int(oldValue)) return Bool(won) } @@ -70,11 +70,11 @@ func _stdlib_atomicCompareExchangeStrongPtr( expected: UnsafeMutablePointer>, desired: UnsafeMutablePointer ) -> Bool { - let rawTarget = UnsafeMutableRawPointer(target).assumingMemoryBound( + let rawTarget = unsafe UnsafeMutableRawPointer(target).assumingMemoryBound( to: Optional.self) - let rawExpected = UnsafeMutableRawPointer(expected).assumingMemoryBound( + let rawExpected = unsafe UnsafeMutableRawPointer(expected).assumingMemoryBound( to: Optional.self) - return _stdlib_atomicCompareExchangeStrongPtr( + return unsafe _stdlib_atomicCompareExchangeStrongPtr( object: rawTarget, expected: rawExpected, desired: UnsafeRawPointer(desired)) @@ -114,11 +114,11 @@ func _stdlib_atomicCompareExchangeStrongPtr( expected: UnsafeMutablePointer?>, desired: UnsafeMutablePointer? ) -> Bool { - let rawTarget = UnsafeMutableRawPointer(target).assumingMemoryBound( + let rawTarget = unsafe UnsafeMutableRawPointer(target).assumingMemoryBound( to: Optional.self) - let rawExpected = UnsafeMutableRawPointer(expected).assumingMemoryBound( + let rawExpected = unsafe UnsafeMutableRawPointer(expected).assumingMemoryBound( to: Optional.self) - return _stdlib_atomicCompareExchangeStrongPtr( + return unsafe _stdlib_atomicCompareExchangeStrongPtr( object: rawTarget, expected: rawExpected, desired: UnsafeRawPointer(desired)) @@ -135,19 +135,19 @@ func _stdlib_atomicInitializeARCRef( // Note: this assumes that AnyObject? is layout-compatible with a RawPointer // that simply points to the same memory. var expected: UnsafeRawPointer? = nil - let unmanaged = Unmanaged.passRetained(desired) - let desiredPtr = unmanaged.toOpaque() - let rawTarget = UnsafeMutableRawPointer(target).assumingMemoryBound( + let unmanaged = unsafe Unmanaged.passRetained(desired) + let desiredPtr = unsafe unmanaged.toOpaque() + let rawTarget = unsafe UnsafeMutableRawPointer(target).assumingMemoryBound( to: Optional.self) - let wonRace = withUnsafeMutablePointer(to: &expected) { - _stdlib_atomicCompareExchangeStrongPtr( + let wonRace = unsafe withUnsafeMutablePointer(to: &expected) { + unsafe _stdlib_atomicCompareExchangeStrongPtr( object: rawTarget, expected: $0, desired: desiredPtr ) } if !wonRace { // Some other thread initialized the value. Balance the retain that we // performed on 'desired'. - unmanaged.release() + unsafe unmanaged.release() } return wonRace } @@ -159,8 +159,8 @@ func _stdlib_atomicLoadARCRef( object target: UnsafeMutablePointer ) -> AnyObject? { let value = Builtin.atomicload_seqcst_Word(target._rawValue) - if let unwrapped = UnsafeRawPointer(bitPattern: Int(value)) { - return Unmanaged.fromOpaque(unwrapped).takeUnretainedValue() + if let unwrapped = unsafe UnsafeRawPointer(bitPattern: Int(value)) { + return unsafe Unmanaged.fromOpaque(unwrapped).takeUnretainedValue() } return nil } @@ -175,21 +175,21 @@ public func _stdlib_atomicAcquiringInitializeARCRef( // Note: this assumes that AnyObject? is layout-compatible with a RawPointer // that simply points to the same memory, and that `nil` is represented by an // all-zero bit pattern. 
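
`_stdlib_atomicInitializeARCRef` and its acquiring variant below hand an object through an opaque pointer via `Unmanaged`, balancing the retain when the compare-and-exchange loses the race; every `Unmanaged` step is now an `unsafe` expression. A simplified, non-atomic sketch of just the `Unmanaged` round trip (the `Token` class and `opaqueRoundTrip` function are hypothetical; Swift 6.2 assumed):

```swift
final class Token {}

func opaqueRoundTrip() -> Bool {
    let token = Token()

    // +1 retain, then erase the type into a raw pointer.
    let unmanaged = unsafe Unmanaged.passRetained(token)
    let opaque = unsafe unmanaged.toOpaque()

    // Recover the object and balance the retain taken above.
    let recovered = unsafe Unmanaged<Token>.fromOpaque(opaque).takeRetainedValue()
    return recovered === token
}
```
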
- let unmanaged = Unmanaged.passRetained(desired) - let desiredPtr = unmanaged.toOpaque() + let unmanaged = unsafe Unmanaged.passRetained(desired) + let desiredPtr = unsafe unmanaged.toOpaque() let (value, won) = Builtin.cmpxchg_acqrel_acquire_Word( target._rawValue, 0._builtinWordValue, Builtin.ptrtoint_Word(desiredPtr._rawValue)) - if Bool(won) { return unmanaged } + if Bool(won) { return unsafe unmanaged } // Some other thread initialized the value before us. Balance the retain that // we performed on 'desired', and return what we loaded. - unmanaged.release() + unsafe unmanaged.release() let ptr = UnsafeRawPointer(Builtin.inttoptr_Word(value)) - return Unmanaged.fromOpaque(ptr) + return unsafe Unmanaged.fromOpaque(ptr) } @_alwaysEmitIntoClient @@ -200,7 +200,7 @@ public func _stdlib_atomicAcquiringLoadARCRef( let value = Builtin.atomicload_acquire_Word(target._rawValue) if Int(value) == 0 { return nil } let opaque = UnsafeRawPointer(Builtin.inttoptr_Word(value)) - return Unmanaged.fromOpaque(opaque) + return unsafe Unmanaged.fromOpaque(opaque) } //===----------------------------------------------------------------------===// @@ -247,8 +247,8 @@ internal struct _Buffer32 { internal mutating func withBytes( _ body: (UnsafeMutablePointer) throws -> Result ) rethrows -> Result { - return try withUnsafeMutablePointer(to: &self) { - try body(UnsafeMutableRawPointer($0).assumingMemoryBound(to: UInt8.self)) + return try unsafe withUnsafeMutablePointer(to: &self) { + try unsafe body(UnsafeMutableRawPointer($0).assumingMemoryBound(to: UInt8.self)) } } } @@ -333,8 +333,8 @@ internal struct _Buffer72 { internal mutating func withBytes( _ body: (UnsafeMutablePointer) throws -> Result ) rethrows -> Result { - return try withUnsafeMutablePointer(to: &self) { - try body(UnsafeMutableRawPointer($0).assumingMemoryBound(to: UInt8.self)) + return try unsafe withUnsafeMutablePointer(to: &self) { + try unsafe body(UnsafeMutableRawPointer($0).assumingMemoryBound(to: UInt8.self)) } } } @@ -366,8 +366,8 @@ internal func _float16ToString( ) -> (buffer: _Buffer32, length: Int) { _internalInvariant(MemoryLayout<_Buffer32>.size == 32) var buffer = _Buffer32() - let length = buffer.withBytes { (bufferPtr) in - _float16ToStringImpl(bufferPtr, 32, _CFloat16Argument(value), debug) + let length = unsafe buffer.withBytes { (bufferPtr) in + unsafe _float16ToStringImpl(bufferPtr, 32, _CFloat16Argument(value), debug) } return (buffer, length) } @@ -391,7 +391,7 @@ internal func _float32ToString( ) -> (buffer: _Buffer32, length: Int) { _internalInvariant(MemoryLayout<_Buffer32>.size == 32) var buffer = _Buffer32() - let length = buffer.withBytes { (bufferPtr) in Int( + let length = unsafe buffer.withBytes { (bufferPtr) in unsafe Int( truncatingIfNeeded: _float32ToStringImpl(bufferPtr, 32, value, debug) )} return (buffer, length) @@ -415,7 +415,7 @@ internal func _float64ToString( ) -> (buffer: _Buffer32, length: Int) { _internalInvariant(MemoryLayout<_Buffer32>.size == 32) var buffer = _Buffer32() - let length = buffer.withBytes { (bufferPtr) in Int( + let length = unsafe buffer.withBytes { (bufferPtr) in unsafe Int( truncatingIfNeeded: _float64ToStringImpl(bufferPtr, 32, value, debug) )} return (buffer, length) @@ -442,8 +442,8 @@ internal func _float80ToString( ) -> (buffer: _Buffer32, length: Int) { _internalInvariant(MemoryLayout<_Buffer32>.size == 32) var buffer = _Buffer32() - let length = buffer.withBytes { (bufferPtr) in Int( - truncatingIfNeeded: _float80ToStringImpl(bufferPtr, 32, value, debug) + let length = 
unsafe buffer.withBytes { (bufferPtr) in Int( + truncatingIfNeeded: unsafe _float80ToStringImpl(bufferPtr, 32, value, debug) )} return (buffer, length) } @@ -470,7 +470,7 @@ internal func _int64ToStringImpl( _ radix: Int64, _ uppercase: Bool ) -> UInt64 { - return UInt64(value._toStringImpl(buffer, bufferLength, Int(radix), uppercase)) + return UInt64(unsafe value._toStringImpl(buffer, bufferLength, Int(radix), uppercase)) } #endif @@ -481,17 +481,17 @@ internal func _int64ToString( ) -> String { if radix >= 10 { var buffer = _Buffer32() - return buffer.withBytes { (bufferPtr) in - let actualLength = _int64ToStringImpl(bufferPtr, 32, value, radix, uppercase) - return String._fromASCII(UnsafeBufferPointer( + return unsafe buffer.withBytes { (bufferPtr) in + let actualLength = unsafe _int64ToStringImpl(bufferPtr, 32, value, radix, uppercase) + return unsafe String._fromASCII(UnsafeBufferPointer( start: bufferPtr, count: Int(truncatingIfNeeded: actualLength) )) } } else { var buffer = _Buffer72() - return buffer.withBytes { (bufferPtr) in - let actualLength = _int64ToStringImpl(bufferPtr, 72, value, radix, uppercase) - return String._fromASCII(UnsafeBufferPointer( + return unsafe buffer.withBytes { (bufferPtr) in + let actualLength = unsafe _int64ToStringImpl(bufferPtr, 72, value, radix, uppercase) + return unsafe String._fromASCII(UnsafeBufferPointer( start: bufferPtr, count: Int(truncatingIfNeeded: actualLength) )) } @@ -519,7 +519,7 @@ internal func _uint64ToStringImpl( _ radix: Int64, _ uppercase: Bool ) -> UInt64 { - return UInt64(value._toStringImpl(buffer, bufferLength, Int(radix), uppercase)) + return unsafe UInt64(value._toStringImpl(buffer, bufferLength, Int(radix), uppercase)) } #endif @@ -531,17 +531,17 @@ func _uint64ToString( ) -> String { if radix >= 10 { var buffer = _Buffer32() - return buffer.withBytes { (bufferPtr) in - let actualLength = _uint64ToStringImpl(bufferPtr, 32, value, radix, uppercase) - return String._fromASCII(UnsafeBufferPointer( + return unsafe buffer.withBytes { (bufferPtr) in + let actualLength = unsafe _uint64ToStringImpl(bufferPtr, 32, value, radix, uppercase) + return unsafe String._fromASCII(UnsafeBufferPointer( start: bufferPtr, count: Int(truncatingIfNeeded: actualLength) )) } } else { var buffer = _Buffer72() - return buffer.withBytes { (bufferPtr) in - let actualLength = _uint64ToStringImpl(bufferPtr, 72, value, radix, uppercase) - return String._fromASCII(UnsafeBufferPointer( + return unsafe buffer.withBytes { (bufferPtr) in + let actualLength = unsafe _uint64ToStringImpl(bufferPtr, 72, value, radix, uppercase) + return unsafe String._fromASCII(UnsafeBufferPointer( start: bufferPtr, count: Int(truncatingIfNeeded: actualLength) )) } @@ -550,13 +550,13 @@ func _uint64ToString( @inlinable internal func _rawPointerToString(_ value: Builtin.RawPointer) -> String { - var result = _uint64ToString( + var result = unsafe _uint64ToString( UInt64( UInt(bitPattern: UnsafeRawPointer(value))), radix: 16, uppercase: false ) - for _ in 0..<(2 * MemoryLayout.size - result.utf16.count) { + for _ in unsafe 0..<(2 * MemoryLayout.size - result.utf16.count) { result = "0" + result } return "0x" + result diff --git a/stdlib/public/core/RuntimeFunctionCounters.swift b/stdlib/public/core/RuntimeFunctionCounters.swift index 89cceeb2b2850..6e5324dc2805d 100644 --- a/stdlib/public/core/RuntimeFunctionCounters.swift +++ b/stdlib/public/core/RuntimeFunctionCounters.swift @@ -44,23 +44,23 @@ internal func _collectAllReferencesInsideObjectImpl( if type(of: value) is 
AnyObject.Type { // Object is a class (but not an ObjC-bridged struct) let toAnyObject = _unsafeDowncastToAnyObject(fromAny: value) - ref = UnsafeRawPointer(Unmanaged.passUnretained(toAnyObject).toOpaque()) + unsafe ref = unsafe UnsafeRawPointer(Unmanaged.passUnretained(toAnyObject).toOpaque()) id = ObjectIdentifier(toAnyObject) } else if type(of: value) is Builtin.BridgeObject.Type { - ref = UnsafeRawPointer( + unsafe ref = UnsafeRawPointer( Builtin.bridgeToRawPointer(value as! Builtin.BridgeObject)) id = nil } else if type(of: value) is Builtin.NativeObject.Type { - ref = UnsafeRawPointer( + unsafe ref = UnsafeRawPointer( Builtin.bridgeToRawPointer(value as! Builtin.NativeObject)) id = nil } else if let metatypeInstance = value as? Any.Type { // Object is a metatype id = ObjectIdentifier(metatypeInstance) - ref = nil + unsafe ref = nil } else { id = nil - ref = nil + unsafe ref = nil } if let theId = id { @@ -74,8 +74,8 @@ internal func _collectAllReferencesInsideObjectImpl( } // If it is a reference, add it to the result. - if let ref = ref { - references.append(ref) + if let ref = unsafe ref { + unsafe references.append(ref) } // Recursively visit the children of the current value. @@ -84,7 +84,7 @@ internal func _collectAllReferencesInsideObjectImpl( for _ in 0..> public static func getRuntimeFunctionNames() -> [String] { - let names = _RuntimeFunctionCounters._getRuntimeFunctionNames() + let names = unsafe _RuntimeFunctionCounters._getRuntimeFunctionNames() let numRuntimeFunctionCounters = Int(_RuntimeFunctionCounters.getNumRuntimeFunctionCounters()) var functionNames: [String] = [] functionNames.reserveCapacity(numRuntimeFunctionCounters) for index in 0.. [UnsafeRawPointer] { var visited: [ObjectIdentifier: Int] = [:] - var references: [UnsafeRawPointer] = [] - _collectAllReferencesInsideObjectImpl( + var references: [UnsafeRawPointer] = unsafe [] + unsafe _collectAllReferencesInsideObjectImpl( value, references: &references, visitedItems: &visited) - return references + return unsafe references } /// Build a map from counter name to counter index inside the state struct. 
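
The next hunk reads individual runtime counters by viewing a larger struct's memory as a sequence of `UInt32` values, wrapping the pointer work in `unsafe`. A small self-contained sketch of the same idea using the public `withUnsafeBytes(of:)` API (the `Header` type and `loadVersion` helper are hypothetical; Swift 6.2 assumed):

```swift
struct Header {
    var magic: UInt32 = 0x1234_5678
    var version: UInt32 = 2
}

// Reads one field out of the raw bytes of a trivial struct; both the call and
// the load involve unsafe pointer types, so each is spelled with `unsafe`.
func loadVersion(of header: Header) -> UInt32 {
    return unsafe withUnsafeBytes(of: header) { bytes in
        unsafe bytes.load(fromByteOffset: 4, as: UInt32.self)
    }
}
```
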
@@ -292,9 +292,9 @@ internal struct _RuntimeFunctionCountersState: _RuntimeFunctionCountersStats { "0..<\(_RuntimeFunctionCounters.numRuntimeFunctionCounters)") } var tmpCounters = counters - let counter: UInt32 = withUnsafePointer(to: &tmpCounters) { ptr in - return ptr.withMemoryRebound(to: UInt32.self, capacity: 64) { buf in - return buf[index] + let counter: UInt32 = unsafe withUnsafePointer(to: &tmpCounters) { ptr in + return unsafe ptr.withMemoryRebound(to: UInt32.self, capacity: 64) { buf in + return unsafe buf[index] } } return counter @@ -306,9 +306,9 @@ internal struct _RuntimeFunctionCountersState: _RuntimeFunctionCountersStats { fatalError("Counter index should be in the range " + "0..<\(_RuntimeFunctionCounters.numRuntimeFunctionCounters)") } - withUnsafeMutablePointer(to: &counters) { - $0.withMemoryRebound(to: UInt32.self, capacity: 64) { - $0[index] = newValue + unsafe withUnsafeMutablePointer(to: &counters) { + unsafe $0.withMemoryRebound(to: UInt32.self, capacity: 64) { + unsafe $0[index] = newValue } } } @@ -383,7 +383,7 @@ extension _RuntimeFunctionCountersStats { print("counter \(i) : " + "\(Counters.runtimeFunctionNames[i])" + " at offset: " + - "\(Counters.runtimeFunctionCountersOffsets[i]):" + + "\(unsafe Counters.runtimeFunctionCountersOffsets[i]):" + " \(self[i])", to: &to) } } @@ -403,7 +403,7 @@ extension _RuntimeFunctionCountersStats { print("counter \(i) : " + "\(Counters.runtimeFunctionNames[i])" + " at offset: " + - "\(Counters.runtimeFunctionCountersOffsets[i]): " + + "\(unsafe Counters.runtimeFunctionCountersOffsets[i]): " + "before \(self[i]) " + "after \(after[i])" + " diff=\(after[i]-self[i])", to: &to) } @@ -464,18 +464,18 @@ struct _ObjectRuntimeFunctionCountersState: _RuntimeFunctionCountersStats { // Initialize with the counters for a given object. 
public init(_ p: UnsafeRawPointer) { - getObjectRuntimeFunctionCounters(p) + unsafe getObjectRuntimeFunctionCounters(p) } public init() { } mutating public func getObjectRuntimeFunctionCounters(_ o: UnsafeRawPointer) { - _RuntimeFunctionCounters.getObjectRuntimeFunctionCounters(o, &state) + unsafe _RuntimeFunctionCounters.getObjectRuntimeFunctionCounters(o, &state) } mutating public func setObjectRuntimeFunctionCounters(_ o: UnsafeRawPointer) { - _RuntimeFunctionCounters.setObjectRuntimeFunctionCounters(o, &state) + unsafe _RuntimeFunctionCounters.setObjectRuntimeFunctionCounters(o, &state) } public subscript(_ index: String) -> UInt32 { @@ -504,9 +504,9 @@ public // @testable func _collectReferencesInsideObject(_ value: Any) -> [UnsafeRawPointer] { let savedMode = _RuntimeFunctionCounters.disableRuntimeFunctionCountersUpdates() // Collect all references inside the object - let refs = _RuntimeFunctionCounters.collectAllReferencesInsideObject(value) + let refs = unsafe _RuntimeFunctionCounters.collectAllReferencesInsideObject(value) _RuntimeFunctionCounters.enableRuntimeFunctionCountersUpdates(mode: savedMode) - return refs + return unsafe refs } /// A helper method to measure how global and per-object function counters @@ -521,8 +521,8 @@ func _measureRuntimeFunctionCountersDiffs( _RuntimeFunctionCounters.disableRuntimeFunctionCountersUpdates() let globalCountersBefore = _GlobalRuntimeFunctionCountersState() var objectsCountersBefore: [_ObjectRuntimeFunctionCountersState] = [] - for object in objects { - objectsCountersBefore.append(_ObjectRuntimeFunctionCountersState(object)) + for object in unsafe objects { + unsafe objectsCountersBefore.append(_ObjectRuntimeFunctionCountersState(object)) } // Enable counters updates. _RuntimeFunctionCounters.enableRuntimeFunctionCountersUpdates( @@ -535,8 +535,8 @@ func _measureRuntimeFunctionCountersDiffs( let globalCountersAfter = _GlobalRuntimeFunctionCountersState() var objectsCountersDiff: [_ObjectRuntimeFunctionCountersState] = [] - for (idx, object) in objects.enumerated() { - let objectCountersAfter = _ObjectRuntimeFunctionCountersState(object) + for (idx, object) in unsafe objects.enumerated() { + let objectCountersAfter = unsafe _ObjectRuntimeFunctionCountersState(object) objectsCountersDiff.append( objectsCountersBefore[idx].diff(objectCountersAfter)) } diff --git a/stdlib/public/core/Sequence.swift b/stdlib/public/core/Sequence.swift index db71a64fc682e..e34bb9d8df267 100644 --- a/stdlib/public/core/Sequence.swift +++ b/stdlib/public/core/Sequence.swift @@ -445,6 +445,7 @@ public protocol Sequence { /// - Returns: The value returned from `body`, unless the sequence doesn't /// support contiguous storage, in which case the method ignores `body` and /// returns `nil`. + @safe func withContiguousStorageIfAvailable( _ body: (_ buffer: UnsafeBufferPointer) throws -> R ) rethrows -> R? @@ -1224,7 +1225,7 @@ extension Sequence { public __consuming func _copyContents( initializing buffer: UnsafeMutableBufferPointer ) -> (Iterator, UnsafeMutableBufferPointer.Index) { - return _copySequenceContents(initializing: buffer) + return unsafe _copySequenceContents(initializing: buffer) } @_alwaysEmitIntoClient @@ -1237,13 +1238,14 @@ extension Sequence { guard let x = it.next() else { return (it, idx) } - ptr.initialize(to: x) - ptr += 1 + unsafe ptr.initialize(to: x) + unsafe ptr += 1 } return (it, buffer.endIndex) } @inlinable + @safe public func withContiguousStorageIfAvailable( _ body: (UnsafeBufferPointer) throws -> R ) rethrows -> R? 
{ diff --git a/stdlib/public/core/Set.swift b/stdlib/public/core/Set.swift index b12ee8df1e150..8b57ffde828ed 100644 --- a/stdlib/public/core/Set.swift +++ b/stdlib/public/core/Set.swift @@ -500,7 +500,7 @@ internal struct _SetAnyHashableBox: _AnyHashableBox { into result: UnsafeMutablePointer ) -> Bool { guard let value = _value as? T else { return false } - result.initialize(to: value) + unsafe result.initialize(to: value) return true } } @@ -1289,6 +1289,7 @@ extension Set { @frozen @usableFromInline + @safe internal enum _Variant { case native(_HashTable.Index) #if _runtime(_ObjC) @@ -1364,7 +1365,7 @@ extension Set.Index { internal var _asNative: _HashTable.Index { switch _variant { case .native(let nativeIndex): - return nativeIndex + return unsafe nativeIndex #if _runtime(_ObjC) case .cocoa: _preconditionFailure( @@ -1391,7 +1392,7 @@ extension Set.Index { _preconditionFailure( "Attempting to access Set elements using an invalid index") } - let dummy = _HashTable.Index(bucket: _HashTable.Bucket(offset: 0), age: 0) + let dummy = unsafe _HashTable.Index(bucket: _HashTable.Bucket(offset: 0), age: 0) _variant = .native(dummy) defer { _variant = .cocoa(cocoa) } yield &cocoa @@ -1408,7 +1409,7 @@ extension Set.Index: Equatable { ) -> Bool { switch (lhs._variant, rhs._variant) { case (.native(let lhsNative), .native(let rhsNative)): - return lhsNative == rhsNative + return unsafe lhsNative == rhsNative #if _runtime(_ObjC) case (.cocoa(let lhsCocoa), .cocoa(let rhsCocoa)): lhs._cocoaPath() @@ -1428,7 +1429,7 @@ extension Set.Index: Comparable { ) -> Bool { switch (lhs._variant, rhs._variant) { case (.native(let lhsNative), .native(let rhsNative)): - return lhsNative < rhsNative + return unsafe lhsNative < rhsNative #if _runtime(_ObjC) case (.cocoa(let lhsCocoa), .cocoa(let rhsCocoa)): lhs._cocoaPath() @@ -1455,9 +1456,9 @@ extension Set.Index: Hashable { return } hasher.combine(0 as UInt8) - hasher.combine(_asNative.bucket.offset) + unsafe hasher.combine(_asNative.bucket.offset) #else - hasher.combine(_asNative.bucket.offset) + unsafe hasher.combine(_asNative.bucket.offset) #endif } } diff --git a/stdlib/public/core/SetBridging.swift b/stdlib/public/core/SetBridging.swift index ec6d77601a1e0..578fb156c82d8 100644 --- a/stdlib/public/core/SetBridging.swift +++ b/stdlib/public/core/SetBridging.swift @@ -17,10 +17,10 @@ import SwiftShims /// Equivalent to `NSSet.allObjects`, but does not leave objects on the /// autorelease pool. internal func _stdlib_NSSet_allObjects(_ object: AnyObject) -> _BridgingBuffer { - let nss = unsafeBitCast(object, to: _NSSet.self) + let nss = unsafe unsafeBitCast(object, to: _NSSet.self) let count = nss.count let storage = _BridgingBuffer(count) - nss.getObjects(storage.baseAddress) + unsafe nss.getObjects(storage.baseAddress) return storage } @@ -35,10 +35,10 @@ extension _NativeSet { // Bridging // Temporary var for SOME type safety. let nsSet: _NSSetCore - if _storage === __RawSetStorage.empty || count == 0 { - nsSet = __RawSetStorage.empty + if unsafe _storage === __RawSetStorage.empty || count == 0 { + unsafe nsSet = __RawSetStorage.empty } else if _isBridgedVerbatimToObjectiveC(Element.self) { - nsSet = unsafeDowncast(_storage, to: _SetStorage.self) + unsafe nsSet = unsafeDowncast(_storage, to: _SetStorage.self) } else { nsSet = _SwiftDeferredNSSet(self) } @@ -52,6 +52,7 @@ extension _NativeSet { // Bridging /// An NSEnumerator that works with any _NativeSet of verbatim bridgeable /// elements. Used by the various NSSet impls. 
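
Several internal types in this patch (`_NativeSet.Iterator`, `Set.Index._Variant`, and the bridging enumerators below) gain a `@safe` annotation: they hold unsafe state internally, but their operations maintain the invariants needed for memory safety, so clients are not forced to write `unsafe` at every use site. A rough sketch of that shape (the `BoundsCheckedBuffer` type is hypothetical, and the exact placement of `@safe` accepted by a given compiler may differ; Swift 6.2 assumed):

```swift
@safe
struct BoundsCheckedBuffer<Element> {
    private let base: UnsafeBufferPointer<Element>
    let count: Int

    // Documented (unchecked) precondition: `base` must remain valid and
    // immutable for the lifetime of this value.
    init(_ base: UnsafeBufferPointer<Element>) {
        self.base = unsafe base
        self.count = unsafe base.count
    }

    subscript(index: Int) -> Element {
        // Bounds are validated before touching the pointer.
        precondition(index >= 0 && index < count, "index out of range")
        return unsafe base[index]
    }
}
```
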
+@safe final internal class _SwiftSetNSEnumerator : __SwiftNativeNSEnumerator, _NSEnumerator { @@ -70,8 +71,8 @@ final internal class _SwiftSetNSEnumerator _internalInvariant(_orphanedFoundationSubclassesReparented) self.base = base self.bridgedElements = nil - self.nextBucket = base.hashTable.startBucket - self.endBucket = base.hashTable.endBucket + self.nextBucket = unsafe base.hashTable.startBucket + self.endBucket = unsafe base.hashTable.endBucket super.init() } @@ -80,16 +81,16 @@ final internal class _SwiftSetNSEnumerator _internalInvariant(!_isBridgedVerbatimToObjectiveC(Element.self)) _internalInvariant(_orphanedFoundationSubclassesReparented) self.base = deferred.native - self.bridgedElements = deferred.bridgeElements() - self.nextBucket = base.hashTable.startBucket - self.endBucket = base.hashTable.endBucket + self.bridgedElements = unsafe deferred.bridgeElements() + self.nextBucket = unsafe base.hashTable.startBucket + self.endBucket = unsafe base.hashTable.endBucket super.init() } private func bridgedElement(at bucket: _HashTable.Bucket) -> AnyObject { - _internalInvariant(base.hashTable.isOccupied(bucket)) + unsafe _internalInvariant(base.hashTable.isOccupied(bucket)) if let bridgedElements = self.bridgedElements { - return bridgedElements[bucket] + return unsafe bridgedElements[bucket] } return _bridgeAnythingToObjectiveC(base.uncheckedElement(at: bucket)) } @@ -102,11 +103,11 @@ final internal class _SwiftSetNSEnumerator @objc internal func nextObject() -> AnyObject? { - if nextBucket == endBucket { + if unsafe nextBucket == endBucket { return nil } let bucket = nextBucket - nextBucket = base.hashTable.occupiedBucket(after: nextBucket) + nextBucket = unsafe base.hashTable.occupiedBucket(after: nextBucket) return self.bridgedElement(at: bucket) } @@ -116,24 +117,24 @@ final internal class _SwiftSetNSEnumerator objects: UnsafeMutablePointer, count: Int ) -> Int { - var theState = state.pointee - if theState.state == 0 { - theState.state = 1 // Arbitrary non-zero value. - theState.itemsPtr = AutoreleasingUnsafeMutablePointer(objects) - theState.mutationsPtr = _fastEnumerationStorageMutationsPtr + var theState = unsafe state.pointee + if unsafe theState.state == 0 { + unsafe theState.state = 1 // Arbitrary non-zero value. + unsafe theState.itemsPtr = AutoreleasingUnsafeMutablePointer(objects) + unsafe theState.mutationsPtr = _fastEnumerationStorageMutationsPtr } - if nextBucket == endBucket { - state.pointee = theState + if unsafe nextBucket == endBucket { + unsafe state.pointee = theState return 0 } // Return only a single element so that code can start iterating via fast // enumeration, terminate it, and continue via NSEnumerator. - let unmanagedObjects = _UnmanagedAnyObjectArray(objects) - unmanagedObjects[0] = self.bridgedElement(at: nextBucket) - nextBucket = base.hashTable.occupiedBucket(after: nextBucket) - state.pointee = theState + let unmanagedObjects = unsafe _UnmanagedAnyObjectArray(objects) + unsafe unmanagedObjects[0] = self.bridgedElement(at: nextBucket) + nextBucket = unsafe base.hashTable.occupiedBucket(after: nextBucket) + unsafe state.pointee = theState return 1 } } @@ -167,44 +168,44 @@ final internal class _SwiftDeferredNSSet /// Set elements. @nonobjc private var _bridgedElementsPtr: UnsafeMutablePointer { - return _getUnsafePointerToStoredProperties(self) + return unsafe _getUnsafePointerToStoredProperties(self) .assumingMemoryBound(to: Optional.self) } /// The buffer for bridged Set elements, if present. 
@nonobjc private var _bridgedElements: __BridgingHashBuffer? { - guard let ref = _stdlib_atomicLoadARCRef(object: _bridgedElementsPtr) else { + guard let ref = unsafe _stdlib_atomicLoadARCRef(object: _bridgedElementsPtr) else { return nil } - return unsafeDowncast(ref, to: __BridgingHashBuffer.self) + return unsafe unsafeDowncast(ref, to: __BridgingHashBuffer.self) } /// Attach a buffer for bridged Set elements. @nonobjc private func _initializeBridgedElements(_ storage: __BridgingHashBuffer) { - _stdlib_atomicInitializeARCRef( + unsafe _stdlib_atomicInitializeARCRef( object: _bridgedElementsPtr, desired: storage) } @nonobjc internal func bridgeElements() -> __BridgingHashBuffer { - if let bridgedElements = _bridgedElements { return bridgedElements } + if let bridgedElements = unsafe _bridgedElements { return unsafe bridgedElements } // Allocate and initialize heap storage for bridged objects. - let bridged = __BridgingHashBuffer.allocate( + let bridged = unsafe __BridgingHashBuffer.allocate( owner: native._storage, hashTable: native.hashTable) - for bucket in native.hashTable { + for bucket in unsafe native.hashTable { let object = _bridgeAnythingToObjectiveC( native.uncheckedElement(at: bucket)) - bridged.initialize(at: bucket, to: object) + unsafe bridged.initialize(at: bucket, to: object) } // Atomically put the bridged elements in place. - _initializeBridgedElements(bridged) - return _bridgedElements! + unsafe _initializeBridgedElements(bridged) + return unsafe _bridgedElements! } @objc @@ -226,8 +227,8 @@ final internal class _SwiftDeferredNSSet let (bucket, found) = native.find(element) guard found else { return nil } - let bridged = bridgeElements() - return bridged[bucket] + let bridged = unsafe bridgeElements() + return unsafe bridged[bucket] } @objc @@ -249,39 +250,39 @@ final internal class _SwiftDeferredNSSet defer { _fixLifetime(self) } let hashTable = native.hashTable - var theState = state.pointee - if theState.state == 0 { - theState.state = 1 // Arbitrary non-zero value. - theState.itemsPtr = AutoreleasingUnsafeMutablePointer(objects) - theState.mutationsPtr = _fastEnumerationStorageMutationsPtr - theState.extra.0 = CUnsignedLong(hashTable.startBucket.offset) + var theState = unsafe state.pointee + if unsafe theState.state == 0 { + unsafe theState.state = 1 // Arbitrary non-zero value. + unsafe theState.itemsPtr = AutoreleasingUnsafeMutablePointer(objects) + unsafe theState.mutationsPtr = _fastEnumerationStorageMutationsPtr + unsafe theState.extra.0 = CUnsignedLong(hashTable.startBucket.offset) } // Test 'objects' rather than 'count' because (a) this is very rare anyway, // and (b) the optimizer should then be able to optimize away the // unwrapping check below. - if _slowPath(objects == nil) { + if unsafe _slowPath(objects == nil) { return 0 } - let unmanagedObjects = _UnmanagedAnyObjectArray(objects!) - var bucket = _HashTable.Bucket(offset: Int(theState.extra.0)) - let endBucket = hashTable.endBucket - _precondition(bucket == endBucket || hashTable.isOccupied(bucket), + let unmanagedObjects = unsafe _UnmanagedAnyObjectArray(objects!) + var bucket = unsafe _HashTable.Bucket(offset: Int(theState.extra.0)) + let endBucket = unsafe hashTable.endBucket + unsafe _precondition(bucket == endBucket || hashTable.isOccupied(bucket), "Invalid fast enumeration state") // Only need to bridge once, so we can hoist it out of the loop. - let bridgedElements = bridgeElements() + let bridgedElements = unsafe bridgeElements() var stored = 0 for i in 0.. AnyObject? 
{ - let nss = unsafeBitCast(object, to: _NSSet.self) + let nss = unsafe unsafeBitCast(object, to: _NSSet.self) return nss.member(element) } } @@ -393,13 +394,13 @@ extension __CocoaSet: _SetBuffer { @usableFromInline internal var count: Int { - let nss = unsafeBitCast(object, to: _NSSet.self) + let nss = unsafe unsafeBitCast(object, to: _NSSet.self) return nss.count } @usableFromInline internal func contains(_ element: AnyObject) -> Bool { - let nss = unsafeBitCast(object, to: _NSSet.self) + let nss = unsafe unsafeBitCast(object, to: _NSSet.self) return nss.member(element) != nil } @@ -423,7 +424,7 @@ extension __CocoaSet { @inline(__always) get { let storage = _bridgeObject(toNative: _storage) - return unsafeDowncast(storage, to: Storage.self) + return unsafe unsafeDowncast(storage, to: Storage.self) } } @@ -467,7 +468,7 @@ extension __CocoaSet.Index { internal var handleBitPattern: UInt { @_effects(readonly) get { - return unsafeBitCast(storage, to: UInt.self) + return unsafe unsafeBitCast(storage, to: UInt.self) } } } @@ -489,7 +490,7 @@ extension __CocoaSet.Index { internal var age: Int32 { @_effects(releasenone) get { - return _HashTable.age(for: storage.base.object) + return unsafe _HashTable.age(for: storage.base.object) } } } @@ -515,6 +516,7 @@ extension __CocoaSet.Index: Comparable { } extension __CocoaSet: Sequence { + @safe @usableFromInline final internal class Iterator { // Cocoa Set iterator has to be a class, otherwise we cannot @@ -524,23 +526,23 @@ extension __CocoaSet: Sequence { // This stored property should be stored at offset zero. There's code below // relying on this. internal var _fastEnumerationState: _SwiftNSFastEnumerationState = - _makeSwiftNSFastEnumerationState() + unsafe _makeSwiftNSFastEnumerationState() // This stored property should be stored right after // `_fastEnumerationState`. There's code below relying on this. - internal var _fastEnumerationStackBuf = _CocoaFastEnumerationStackBuf() + internal var _fastEnumerationStackBuf = unsafe _CocoaFastEnumerationStackBuf() internal let base: __CocoaSet internal var _fastEnumerationStatePtr: UnsafeMutablePointer<_SwiftNSFastEnumerationState> { - return _getUnsafePointerToStoredProperties(self).assumingMemoryBound( + return unsafe _getUnsafePointerToStoredProperties(self).assumingMemoryBound( to: _SwiftNSFastEnumerationState.self) } internal var _fastEnumerationStackBufPtr: UnsafeMutablePointer<_CocoaFastEnumerationStackBuf> { - return UnsafeMutableRawPointer(_fastEnumerationStatePtr + 1) + return unsafe UnsafeMutableRawPointer(_fastEnumerationStatePtr + 1) .assumingMemoryBound(to: _CocoaFastEnumerationStackBuf.self) } @@ -576,12 +578,12 @@ extension __CocoaSet.Iterator: IteratorProtocol { } let base = self.base if itemIndex == itemCount { - let stackBufCount = _fastEnumerationStackBuf.count + let stackBufCount = unsafe _fastEnumerationStackBuf.count // We can't use `withUnsafeMutablePointer` here to get pointers to // properties, because doing so might introduce a writeback storage, but // fast enumeration relies on the pointer identity of the enumeration // state struct. - itemCount = base.object.countByEnumerating( + itemCount = unsafe base.object.countByEnumerating( with: _fastEnumerationStatePtr, objects: UnsafeMutableRawPointer(_fastEnumerationStackBufPtr) .assumingMemoryBound(to: AnyObject.self), @@ -593,10 +595,10 @@ extension __CocoaSet.Iterator: IteratorProtocol { itemIndex = 0 } let itemsPtrUP = - UnsafeMutableRawPointer(_fastEnumerationState.itemsPtr!) 
+ unsafe UnsafeMutableRawPointer(_fastEnumerationState.itemsPtr!) .assumingMemoryBound(to: AnyObject.self) - let itemsPtr = _UnmanagedAnyObjectArray(itemsPtrUP) - let key: AnyObject = itemsPtr[itemIndex] + let itemsPtr = unsafe _UnmanagedAnyObjectArray(itemsPtrUP) + let key: AnyObject = unsafe itemsPtr[itemIndex] itemIndex += 1 return key } @@ -625,11 +627,11 @@ extension Set { return Set(_native: deferred.native) } - if let nativeStorage = s as? _SetStorage { + if let nativeStorage = unsafe s as? _SetStorage { return Set(_native: _NativeSet(nativeStorage)) } - if s === __RawSetStorage.empty { + if unsafe s === __RawSetStorage.empty { return Set() } diff --git a/stdlib/public/core/SetCasting.swift b/stdlib/public/core/SetCasting.swift index 5dcca7adf151b..d39aba31daf32 100644 --- a/stdlib/public/core/SetCasting.swift +++ b/stdlib/public/core/SetCasting.swift @@ -62,7 +62,7 @@ public func _setUpCast( internal func _setDownCastIndirect( _ source: UnsafePointer>, _ target: UnsafeMutablePointer>) { - target.initialize(to: _setDownCast(source.pointee)) + unsafe target.initialize(to: _setDownCast(source.pointee)) } /// Implements a forced downcast. This operation should have O(1) complexity. @@ -107,8 +107,8 @@ internal func _setDownCastConditionalIndirect( _ source: UnsafePointer>, _ target: UnsafeMutablePointer> ) -> Bool { - if let result: Set = _setDownCastConditional(source.pointee) { - target.initialize(to: result) + if let result: Set = unsafe _setDownCastConditional(source.pointee) { + unsafe target.initialize(to: result) return true } return false diff --git a/stdlib/public/core/SetStorage.swift b/stdlib/public/core/SetStorage.swift index 37d3d326aeb59..02b1eea4354d5 100644 --- a/stdlib/public/core/SetStorage.swift +++ b/stdlib/public/core/SetStorage.swift @@ -22,6 +22,7 @@ import SwiftShims @_fixed_layout @usableFromInline @_objc_non_lazy_realization +@unsafe internal class __RawSetStorage: __SwiftNativeNSSet { // NOTE: The precise layout of this type is relied on in the runtime to // provide a statically allocated empty singleton. See @@ -82,15 +83,15 @@ internal class __RawSetStorage: __SwiftNativeNSSet { @inlinable @nonobjc internal final var _bucketCount: Int { - @inline(__always) get { return 1 &<< _scale } + @inline(__always) get { return unsafe 1 &<< _scale } } @inlinable @nonobjc internal final var _metadata: UnsafeMutablePointer<_HashTable.Word> { @inline(__always) get { - let address = Builtin.projectTailElems(self, _HashTable.Word.self) - return UnsafeMutablePointer(address) + let address = unsafe Builtin.projectTailElems(self, _HashTable.Word.self) + return unsafe UnsafeMutablePointer(address) } } @@ -100,7 +101,7 @@ internal class __RawSetStorage: __SwiftNativeNSSet { @nonobjc internal final var _hashTable: _HashTable { @inline(__always) get { - return _HashTable(words: _metadata, bucketCount: _bucketCount) + return unsafe _HashTable(words: _metadata, bucketCount: _bucketCount) } } } @@ -110,7 +111,7 @@ internal class __RawSetStorage: __SwiftNativeNSSet { // NOTE: older runtimes called this class _EmptySetSingleton. The two // must coexist without conflicting ObjC class names, so it was renamed. // The old names must not be used in the new runtime. 
-@_fixed_layout +@unsafe @_fixed_layout @usableFromInline @_objc_non_lazy_realization internal class __EmptySetSingleton: __RawSetStorage { @@ -134,6 +135,7 @@ internal class __EmptySetSingleton: __RawSetStorage { // // TODO: We should figure out how to make this a constant so that it's placed in // non-writable memory (can't be a let, Builtin.addressof below requires a var). +@unsafe public var _swiftEmptySetSingleton: (Int, Int, Int, Int, UInt8, UInt8, UInt16, UInt32, Int, Int, Int) = ( /*isa*/0, /*refcount*/-1, // HeapObject header @@ -155,19 +157,19 @@ extension __RawSetStorage { @inlinable @nonobjc internal static var empty: __EmptySetSingleton { - return Builtin.bridgeFromRawPointer( + return unsafe Builtin.bridgeFromRawPointer( Builtin.addressof(&_swiftEmptySetSingleton)) } } -extension __EmptySetSingleton: _NSSetCore { +extension __EmptySetSingleton: @unsafe _NSSetCore { #if _runtime(_ObjC) // // NSSet implementation, assuming Self is the empty singleton // @objc(copyWithZone:) internal func copy(with zone: _SwiftNSZone?) -> AnyObject { - return self + return unsafe self } @objc @@ -192,21 +194,21 @@ extension __EmptySetSingleton: _NSSetCore { ) -> Int { // Even though we never do anything in here, we need to update the // state so that callers know we actually ran. - var theState = state.pointee - if theState.state == 0 { - theState.state = 1 // Arbitrary non-zero value. - theState.itemsPtr = AutoreleasingUnsafeMutablePointer(objects) - theState.mutationsPtr = _fastEnumerationStorageMutationsPtr + var theState = unsafe state.pointee + if unsafe theState.state == 0 { + unsafe theState.state = 1 // Arbitrary non-zero value. + unsafe theState.itemsPtr = AutoreleasingUnsafeMutablePointer(objects) + unsafe theState.mutationsPtr = _fastEnumerationStorageMutationsPtr } - state.pointee = theState + unsafe state.pointee = theState return 0 } #endif } -@usableFromInline +@unsafe @usableFromInline final internal class _SetStorage - : __RawSetStorage, _NSSetCore { + : __RawSetStorage, @unsafe _NSSetCore { // This type is made with allocWithTailElems, so no init is ever called. // But we still need to have an init to satisfy the compiler. @nonobjc @@ -215,21 +217,21 @@ final internal class _SetStorage } deinit { - guard _count > 0 else { return } + guard unsafe _count > 0 else { return } if !_isPOD(Element.self) { - let elements = _elements - for bucket in _hashTable { - (elements + bucket.offset).deinitialize(count: 1) + let elements = unsafe _elements + for unsafe bucket in unsafe _hashTable { + unsafe (elements + bucket.offset).deinitialize(count: 1) } } - _fixLifetime(self) + unsafe _fixLifetime(self) } @inlinable final internal var _elements: UnsafeMutablePointer { @inline(__always) get { - return self._rawElements.assumingMemoryBound(to: Element.self) + return unsafe self._rawElements.assumingMemoryBound(to: Element.self) } } @@ -245,17 +247,17 @@ final internal class _SetStorage @objc(copyWithZone:) internal func copy(with zone: _SwiftNSZone?) 
-> AnyObject { - return self + return unsafe self } @objc internal var count: Int { - return _count + return unsafe _count } @objc internal func objectEnumerator() -> _NSEnumerator { - return _SwiftSetNSEnumerator(asNative) + return unsafe _SwiftSetNSEnumerator(asNative) } @objc(countByEnumeratingWithState:objects:count:) @@ -263,38 +265,38 @@ final internal class _SetStorage with state: UnsafeMutablePointer<_SwiftNSFastEnumerationState>, objects: UnsafeMutablePointer?, count: Int ) -> Int { - defer { _fixLifetime(self) } - let hashTable = _hashTable - var theState = state.pointee - if theState.state == 0 { - theState.state = 1 // Arbitrary non-zero value. - theState.itemsPtr = AutoreleasingUnsafeMutablePointer(objects) - theState.mutationsPtr = _fastEnumerationStorageMutationsPtr - theState.extra.0 = CUnsignedLong(hashTable.startBucket.offset) + defer { unsafe _fixLifetime(self) } + let hashTable = unsafe _hashTable + var theState = unsafe state.pointee + if unsafe theState.state == 0 { + unsafe theState.state = 1 // Arbitrary non-zero value. + unsafe theState.itemsPtr = AutoreleasingUnsafeMutablePointer(objects) + unsafe theState.mutationsPtr = _fastEnumerationStorageMutationsPtr + unsafe theState.extra.0 = CUnsignedLong(hashTable.startBucket.offset) } // Test 'objects' rather than 'count' because (a) this is very rare anyway, // and (b) the optimizer should then be able to optimize away the // unwrapping check below. - if _slowPath(objects == nil) { + if unsafe _slowPath(objects == nil) { return 0 } - let unmanagedObjects = _UnmanagedAnyObjectArray(objects!) - var bucket = _HashTable.Bucket(offset: Int(theState.extra.0)) - let endBucket = hashTable.endBucket - _precondition(bucket == endBucket || hashTable.isOccupied(bucket), + let unmanagedObjects = unsafe _UnmanagedAnyObjectArray(objects!) + var bucket = unsafe _HashTable.Bucket(offset: Int(theState.extra.0)) + let endBucket = unsafe hashTable.endBucket + unsafe _precondition(bucket == endBucket || hashTable.isOccupied(bucket), "Invalid fast enumeration state") var stored = 0 for i in 0.. 
guard let native = _conditionallyBridgeFromObjectiveC(object, Element.self) else { return nil } - let (bucket, found) = asNative.find(native) + let (bucket, found) = unsafe asNative.find(native) guard found else { return nil } - return _bridgeAnythingToObjectiveC(_elements[bucket.offset]) + return unsafe _bridgeAnythingToObjectiveC(_elements[bucket.offset]) } #endif } @@ -314,7 +316,7 @@ extension _SetStorage { @usableFromInline @_effects(releasenone) internal static func copy(original: __RawSetStorage) -> _SetStorage { - return .allocate( + return unsafe .allocate( scale: original._scale, age: original._age, seed: original._seed) @@ -327,15 +329,15 @@ extension _SetStorage { capacity: Int, move: Bool ) -> _SetStorage { - let scale = _HashTable.scale(forCapacity: capacity) - return allocate(scale: scale, age: nil, seed: nil) + let scale = unsafe _HashTable.scale(forCapacity: capacity) + return unsafe allocate(scale: scale, age: nil, seed: nil) } @usableFromInline @_effects(releasenone) static internal func allocate(capacity: Int) -> _SetStorage { - let scale = _HashTable.scale(forCapacity: capacity) - return allocate(scale: scale, age: nil, seed: nil) + let scale = unsafe _HashTable.scale(forCapacity: capacity) + return unsafe allocate(scale: scale, age: nil, seed: nil) } #if _runtime(_ObjC) @@ -345,9 +347,9 @@ extension _SetStorage { _ cocoa: __CocoaSet, capacity: Int ) -> _SetStorage { - let scale = _HashTable.scale(forCapacity: capacity) - let age = _HashTable.age(for: cocoa.object) - return allocate(scale: scale, age: age, seed: nil) + let scale = unsafe _HashTable.scale(forCapacity: capacity) + let age = unsafe _HashTable.age(for: cocoa.object) + return unsafe allocate(scale: scale, age: age, seed: nil) } #endif @@ -361,36 +363,36 @@ extension _SetStorage { _internalInvariant(scale >= 0 && scale < Int.bitWidth - 1) let bucketCount = (1 as Int) &<< scale - let wordCount = _UnsafeBitset.wordCount(forCapacity: bucketCount) - let storage = Builtin.allocWithTailElems_2( + let wordCount = unsafe _UnsafeBitset.wordCount(forCapacity: bucketCount) + let storage = unsafe Builtin.allocWithTailElems_2( _SetStorage.self, wordCount._builtinWordValue, _HashTable.Word.self, bucketCount._builtinWordValue, Element.self) - let metadataAddr = Builtin.projectTailElems(storage, _HashTable.Word.self) - let elementsAddr = Builtin.getTailAddr_Word( + let metadataAddr = unsafe Builtin.projectTailElems(storage, _HashTable.Word.self) + let elementsAddr = unsafe Builtin.getTailAddr_Word( metadataAddr, wordCount._builtinWordValue, _HashTable.Word.self, Element.self) - storage._count = 0 - storage._capacity = _HashTable.capacity(forScale: scale) - storage._scale = scale - storage._reservedScale = 0 - storage._extra = 0 + unsafe storage._count = 0 + unsafe storage._capacity = unsafe _HashTable.capacity(forScale: scale) + unsafe storage._scale = scale + unsafe storage._reservedScale = 0 + unsafe storage._extra = 0 if let age = age { - storage._age = age + unsafe storage._age = age } else { // The default mutation count is simply a scrambled version of the storage // address. - storage._age = Int32( + unsafe storage._age = Int32( truncatingIfNeeded: ObjectIdentifier(storage).hashValue) } - storage._seed = seed ?? _HashTable.hashSeed(for: Builtin.castToNativeObject(storage), scale: scale) - storage._rawElements = UnsafeMutableRawPointer(elementsAddr) + unsafe storage._seed = unsafe seed ?? 
_HashTable.hashSeed(for: Builtin.castToNativeObject(storage), scale: scale) + unsafe storage._rawElements = UnsafeMutableRawPointer(elementsAddr) // Initialize hash table metadata. - storage._hashTable.clear() - return storage + unsafe storage._hashTable.clear() + return unsafe storage } } diff --git a/stdlib/public/core/SetVariant.swift b/stdlib/public/core/SetVariant.swift index 471ea1969004c..fc9615572c60f 100644 --- a/stdlib/public/core/SetVariant.swift +++ b/stdlib/public/core/SetVariant.swift @@ -29,6 +29,7 @@ internal protocol _SetBuffer { extension Set { @usableFromInline @frozen + @safe internal struct _Variant { @usableFromInline internal var object: _BridgeStorage<__RawSetStorage> @@ -37,7 +38,7 @@ extension Set { @inline(__always) init(dummy: ()) { #if _pointerBitWidth(_64) && !$Embedded - self.object = _BridgeStorage(taggedPayload: 0) + self.object = unsafe _BridgeStorage(taggedPayload: 0) #elseif _pointerBitWidth(_32) || $Embedded self.init(native: _NativeSet()) #else @@ -48,14 +49,14 @@ extension Set { @inlinable @inline(__always) init(native: __owned _NativeSet) { - self.object = _BridgeStorage(native: native._storage) + self.object = unsafe _BridgeStorage(native: native._storage) } #if _runtime(_ObjC) @inlinable @inline(__always) init(cocoa: __owned __CocoaSet) { - self.object = _BridgeStorage(objC: cocoa.object) + self.object = unsafe _BridgeStorage(objC: cocoa.object) } #endif } @@ -72,7 +73,7 @@ extension Set._Variant { @inlinable internal mutating func isUniquelyReferenced() -> Bool { - return object.isUniquelyReferencedUnflaggedNative() + return unsafe object.isUniquelyReferencedUnflaggedNative() } #if _runtime(_ObjC) @@ -86,18 +87,18 @@ extension Set._Variant { @usableFromInline @_transparent internal var asNative: _NativeSet { get { - return _NativeSet(object.unflaggedNativeInstance) + return unsafe _NativeSet(object.unflaggedNativeInstance) } set { self = .init(native: newValue) } _modify { - var native = _NativeSet(object.unflaggedNativeInstance) + var native = unsafe _NativeSet(object.unflaggedNativeInstance) self = .init(dummy: ()) defer { // This is in a defer block because yield might throw, and we need to // preserve Set's storage invariants when that happens. - object = .init(native: native._storage) + object = unsafe .init(native: native._storage) } yield &native } @@ -106,7 +107,7 @@ extension Set._Variant { #if _runtime(_ObjC) @inlinable internal var asCocoa: __CocoaSet { - return __CocoaSet(object.objCInstance) + return unsafe __CocoaSet(object.objCInstance) } #endif diff --git a/stdlib/public/core/Shims.swift b/stdlib/public/core/Shims.swift index 0815bb8ee400d..27807475bc2f2 100644 --- a/stdlib/public/core/Shims.swift +++ b/stdlib/public/core/Shims.swift @@ -20,7 +20,7 @@ import SwiftShims @inlinable internal func _makeSwiftNSFastEnumerationState() -> _SwiftNSFastEnumerationState { - return _SwiftNSFastEnumerationState( + return unsafe _SwiftNSFastEnumerationState( state: 0, itemsPtr: nil, mutationsPtr: nil, extra: (0, 0, 0, 0, 0)) } @@ -34,10 +34,10 @@ internal var _fastEnumerationStorageMutationsTarget: CUnsignedLong = 0 /// implementations. @usableFromInline internal let _fastEnumerationStorageMutationsPtr = - UnsafeMutablePointer(Builtin.addressof(&_fastEnumerationStorageMutationsTarget)) + unsafe UnsafeMutablePointer(Builtin.addressof(&_fastEnumerationStorageMutationsTarget)) #endif @usableFromInline @_alwaysEmitIntoClient internal func _mallocSize(ofAllocation ptr: UnsafeRawPointer) -> Int? { - return _swift_stdlib_has_malloc_size() ? 
_swift_stdlib_malloc_size(ptr) : nil + return unsafe _swift_stdlib_has_malloc_size() ? _swift_stdlib_malloc_size(ptr) : nil } diff --git a/stdlib/public/core/Slice.swift b/stdlib/public/core/Slice.swift index 8825f0ee04eb0..00ed55130f17e 100644 --- a/stdlib/public/core/Slice.swift +++ b/stdlib/public/core/Slice.swift @@ -228,8 +228,8 @@ extension Slice: Collection { try _base.withContiguousStorageIfAvailable { buffer in let start = _base.distance(from: _base.startIndex, to: _startIndex) let count = _base.distance(from: _startIndex, to: _endIndex) - let slice = UnsafeBufferPointer(rebasing: buffer[start ..< start + count]) - return try body(slice) + let slice = unsafe UnsafeBufferPointer(rebasing: buffer[start ..< start + count]) + return try unsafe body(slice) } } } @@ -239,14 +239,14 @@ extension Slice { public __consuming func _copyContents( initializing buffer: UnsafeMutableBufferPointer ) -> (Iterator, UnsafeMutableBufferPointer.Index) { - if let (_, copied) = self.withContiguousStorageIfAvailable({ - $0._copyContents(initializing: buffer) + if let (_, copied) = unsafe self.withContiguousStorageIfAvailable({ + unsafe $0._copyContents(initializing: buffer) }) { let position = index(startIndex, offsetBy: copied) return (Iterator(_elements: self, _position: position), copied) } - return _copySequenceContents(initializing: buffer) + return unsafe _copySequenceContents(initializing: buffer) } } @@ -300,23 +300,23 @@ extension Slice: MutableCollection where Base: MutableCollection { // that we don't calculate index distances unless we know we'll use them. // The expectation here is that the base collection will make itself // contiguous on the first try and the second call will be relatively cheap. - guard _base.withContiguousMutableStorageIfAvailable({ _ in }) != nil + guard unsafe _base.withContiguousMutableStorageIfAvailable({ _ in }) != nil else { return nil } let start = _base.distance(from: _base.startIndex, to: _startIndex) let count = _base.distance(from: _startIndex, to: _endIndex) - return try _base.withContiguousMutableStorageIfAvailable { buffer in - var slice = UnsafeMutableBufferPointer( + return try unsafe _base.withContiguousMutableStorageIfAvailable { buffer in + var slice = unsafe UnsafeMutableBufferPointer( rebasing: buffer[start ..< start + count]) - let copy = slice + let copy = unsafe slice defer { - _precondition( + unsafe _precondition( slice.baseAddress == copy.baseAddress && slice.count == copy.count, "Slice.withContiguousMutableStorageIfAvailable: replacing the buffer is not allowed") } - return try body(&slice) + return try unsafe body(&slice) } } } diff --git a/stdlib/public/core/SliceBuffer.swift b/stdlib/public/core/SliceBuffer.swift index ac9ef6b142ea7..b32d106f7f39f 100644 --- a/stdlib/public/core/SliceBuffer.swift +++ b/stdlib/public/core/SliceBuffer.swift @@ -13,6 +13,7 @@ /// Buffer type for `ArraySlice`. 
@frozen @usableFromInline +@safe internal struct _SliceBuffer : _ArrayBufferProtocol, RandomAccessCollection @@ -51,7 +52,7 @@ internal struct _SliceBuffer endIndexAndFlags: UInt ) { self.owner = owner - self.subscriptBaseAddress = subscriptBaseAddress + self.subscriptBaseAddress = unsafe subscriptBaseAddress self.startIndex = startIndex self.endIndexAndFlags = endIndexAndFlags } @@ -62,7 +63,7 @@ internal struct _SliceBuffer indices: Range, hasNativeBuffer: Bool ) { self.owner = owner - self.subscriptBaseAddress = subscriptBaseAddress + self.subscriptBaseAddress = unsafe subscriptBaseAddress self.startIndex = indices.lowerBound let bufferFlag = UInt(hasNativeBuffer ? 1 : 0) self.endIndexAndFlags = (UInt(indices.upperBound) << 1) | bufferFlag @@ -77,7 +78,7 @@ internal struct _SliceBuffer #else self.owner = _emptyArrayStorage #endif - self.subscriptBaseAddress = empty.firstElementAddress + self.subscriptBaseAddress = unsafe empty.firstElementAddress self.startIndex = empty.startIndex self.endIndexAndFlags = 1 _invariantCheck() @@ -86,7 +87,7 @@ internal struct _SliceBuffer @inlinable internal init(_buffer buffer: NativeBuffer, shiftedToStartIndex: Int) { let shift = buffer.startIndex - shiftedToStartIndex - self.init( + unsafe self.init( owner: buffer.owner, subscriptBaseAddress: buffer.subscriptBaseAddress + shift, indices: shiftedToStartIndex.. return NativeBuffer( owner as? __ContiguousArrayStorageBase ?? _emptyArrayStorage) #else - return NativeBuffer(unsafeBitCast(_nativeObject(toNative: owner), + return NativeBuffer(unsafe unsafeBitCast(_nativeObject(toNative: owner), to: __ContiguousArrayStorageBase.self)) #endif } @@ -154,7 +155,7 @@ internal struct _SliceBuffer let oldCount = count var native = nativeBuffer - let hiddenElementCount = firstElementAddress - native.firstElementAddress + let hiddenElementCount = unsafe firstElementAddress - native.firstElementAddress _internalInvariant(native.count + growth <= native.capacity) @@ -180,7 +181,7 @@ internal struct _SliceBuffer @inlinable internal var firstElementAddress: UnsafeMutablePointer { - return subscriptBaseAddress + startIndex + return unsafe subscriptBaseAddress + startIndex } @inlinable @@ -205,7 +206,7 @@ internal struct _SliceBuffer // function isn't called for subscripting, this won't slow // down that case. 
var native = nativeBuffer - let offset = self.firstElementAddress - native.firstElementAddress + let offset = unsafe self.firstElementAddress - native.firstElementAddress let backingCount = native.count let myCount = count @@ -260,8 +261,8 @@ internal struct _SliceBuffer _internalInvariant(bounds.upperBound >= bounds.lowerBound) _internalInvariant(bounds.upperBound <= endIndex) let c = bounds.count - target.initialize(from: subscriptBaseAddress + bounds.lowerBound, count: c) - return target + c + unsafe target.initialize(from: subscriptBaseAddress + bounds.lowerBound, count: c) + return unsafe target + c } @inlinable @@ -271,7 +272,7 @@ internal struct _SliceBuffer _invariantCheck() guard buffer.count > 0 else { return (makeIterator(), 0) } let c = Swift.min(self.count, buffer.count) - buffer.baseAddress!.initialize( + unsafe buffer.baseAddress!.initialize( from: firstElementAddress, count: c) _fixLifetime(owner) @@ -314,8 +315,8 @@ internal struct _SliceBuffer return count } let n = nativeBuffer - let nativeEnd = n.firstElementAddress + n.count - if (firstElementAddress + count) == nativeEnd { + let nativeEnd = unsafe n.firstElementAddress + n.count + if unsafe (firstElementAddress + count) == nativeEnd { return count + (n.capacity - n.count) } return count @@ -373,7 +374,7 @@ internal struct _SliceBuffer internal func getElement(_ i: Int) -> Element { _internalInvariant(i >= startIndex, "slice index is out of range (before startIndex)") _internalInvariant(i < endIndex, "slice index is out of range") - return subscriptBaseAddress[i] + return unsafe subscriptBaseAddress[i] } /// Access the element at `position`. @@ -388,7 +389,7 @@ internal struct _SliceBuffer nonmutating set { _internalInvariant(position >= startIndex, "slice index is out of range (before startIndex)") _internalInvariant(position < endIndex, "slice index is out of range") - subscriptBaseAddress[position] = newValue + unsafe subscriptBaseAddress[position] = newValue } } @@ -438,7 +439,7 @@ internal struct _SliceBuffer _ body: (UnsafeBufferPointer) throws -> R ) rethrows -> R { defer { _fixLifetime(self) } - return try body(UnsafeBufferPointer(start: firstElementAddress, + return try unsafe body(UnsafeBufferPointer(start: firstElementAddress, count: count)) } @@ -449,7 +450,7 @@ internal struct _SliceBuffer _ body: (UnsafeBufferPointer) throws(E) -> R ) throws(E) -> R { defer { _fixLifetime(self) } - return try body(UnsafeBufferPointer(start: firstElementAddress, + return try unsafe body(UnsafeBufferPointer(start: firstElementAddress, count: count)) } @@ -461,7 +462,7 @@ internal struct _SliceBuffer _ body: (UnsafeMutableBufferPointer) throws -> R ) rethrows -> R { defer { _fixLifetime(self) } - return try body( + return try unsafe body( UnsafeMutableBufferPointer(start: firstElementAddress, count: count)) } @@ -472,14 +473,14 @@ internal struct _SliceBuffer _ body: (UnsafeMutableBufferPointer) throws(E) -> R ) throws(E) -> R { defer { _fixLifetime(self) } - return try body( + return try unsafe body( UnsafeMutableBufferPointer(start: firstElementAddress, count: count)) } @inlinable internal func unsafeCastElements(to type: T.Type) -> _SliceBuffer { _internalInvariant(_isClassOrObjCExistential(T.self)) - let baseAddress = UnsafeMutableRawPointer(self.subscriptBaseAddress) + let baseAddress = unsafe UnsafeMutableRawPointer(self.subscriptBaseAddress) .assumingMemoryBound(to: T.self) return _SliceBuffer( owner: self.owner, @@ -505,7 +506,7 @@ extension _SliceBuffer { let result = _ContiguousArrayBuffer( _uninitializedCount: 
count, minimumCapacity: 0) - result.firstElementAddress.initialize( + unsafe result.firstElementAddress.initialize( from: firstElementAddress, count: count) return ContiguousArray(_buffer: result) } diff --git a/stdlib/public/core/SmallString.swift index 99d08a06b49d5..87420bc3d277b 100644 --- a/stdlib/public/core/SmallString.swift +++ b/stdlib/public/core/SmallString.swift @@ -145,9 +145,9 @@ extension _SmallString { // No bits should be set between the last code unit and the discriminator var copy = self - withUnsafeBytes(of: &copy._storage) { - _internalInvariant( - $0[count..<_SmallString.capacity].allSatisfy { $0 == 0 }) + unsafe withUnsafeBytes(of: &copy._storage) { + unsafe _internalInvariant( + $0[count..<_SmallString.capacity].allSatisfy { unsafe $0 == 0 }) } } #endif // INTERNAL_CHECKS_ENABLED @@ -205,8 +205,8 @@ extension _SmallString: RandomAccessCollection, MutableCollection { get { // TODO(String performance): In-vector-register operation return self.withUTF8 { utf8 in - let rebased = UnsafeBufferPointer(rebasing: utf8[bounds]) - return _SmallString(rebased)._unsafelyUnwrappedUnchecked + let rebased = unsafe UnsafeBufferPointer(rebasing: utf8[bounds]) + return unsafe _SmallString(rebased)._unsafelyUnwrappedUnchecked } } // This setter is required for _SmallString to be a valid MutableCollection. @@ -218,21 +218,22 @@ extension _SmallString { @inlinable @inline(__always) + @safe internal func withUTF8( _ f: (UnsafeBufferPointer) throws -> Result ) rethrows -> Result { let count = self.count var raw = self.zeroTerminatedRawCodeUnits - return try Swift._withUnprotectedUnsafeBytes(of: &raw) { - let rawPtr = $0.baseAddress._unsafelyUnwrappedUnchecked + return try unsafe Swift._withUnprotectedUnsafeBytes(of: &raw) { + let rawPtr = unsafe $0.baseAddress._unsafelyUnwrappedUnchecked // Rebind the underlying (UInt64, UInt64) tuple to UInt8 for the // duration of the closure. Accessing self after this rebind is undefined. - let ptr = rawPtr.bindMemory(to: UInt8.self, capacity: count) + let ptr = unsafe rawPtr.bindMemory(to: UInt8.self, capacity: count) defer { // Restore the memory type of self._storage - _ = rawPtr.bindMemory(to: RawBitPattern.self, capacity: 1) + _ = unsafe rawPtr.bindMemory(to: RawBitPattern.self, capacity: 1) } - return try f(UnsafeBufferPointer(_uncheckedStart: ptr, count: count)) + return try unsafe f(unsafe UnsafeBufferPointer(_uncheckedStart: ptr, count: count)) } } @@ -243,8 +244,8 @@ extension _SmallString { fileprivate mutating func withMutableCapacity( _ f: (UnsafeMutableRawBufferPointer) throws -> Int ) rethrows { - let len = try withUnsafeMutableBytes(of: &_storage) { - try f(.init(start: $0.baseAddress, count: _SmallString.capacity)) + let len = try unsafe withUnsafeMutableBytes(of: &_storage) { + try unsafe f(.init(start: $0.baseAddress, count: _SmallString.capacity)) } if len <= 0 { @@ -300,9 +301,9 @@ extension _SmallString { // TODO(SIMD): The below can be replaced with just be a masked unaligned // vector load - let ptr = input.baseAddress._unsafelyUnwrappedUnchecked - let leading = _bytesToUInt64(ptr, Swift.min(input.count, 8)) - let trailing = count > 8 ? _bytesToUInt64(ptr + 8, count &- 8) : 0 + let ptr = unsafe input.baseAddress._unsafelyUnwrappedUnchecked + let leading = unsafe _bytesToUInt64(ptr, Swift.min(input.count, 8)) + let trailing = unsafe count > 8 ? 
_bytesToUInt64(ptr + 8, count &- 8) : 0 self.init(leading: leading, trailing: trailing, count: count) } @@ -314,8 +315,8 @@ extension _SmallString { ) throws -> Int ) rethrows { self.init() - try self.withMutableCapacity { - try $0.withMemoryRebound(to: UInt8.self, initializer) + try unsafe self.withMutableCapacity { + try unsafe $0.withMemoryRebound(to: UInt8.self, initializer) } self._invariantCheck() } @@ -350,12 +351,12 @@ extension _SmallString { internal init?(taggedCocoa cocoa: AnyObject) { self.init() var success = true - self.withMutableCapacity { + unsafe self.withMutableCapacity { /* For regular NSTaggedPointerStrings we will always succeed here, but tagged NSLocalizedStrings may not fit in a SmallString */ - if let len = _bridgeTagged(cocoa, intoUTF8: $0) { + if let len = unsafe _bridgeTagged(cocoa, intoUTF8: $0) { return len } success = false @@ -371,12 +372,12 @@ extension _SmallString { internal init?(taggedASCIICocoa cocoa: AnyObject) { self.init() var success = true - self.withMutableCapacity { + unsafe self.withMutableCapacity { /* For regular NSTaggedPointerStrings we will always succeed here, but tagged NSLocalizedStrings may not fit in a SmallString */ - if let len = _bridgeTaggedASCII(cocoa, intoUTF8: $0) { + if let len = unsafe _bridgeTaggedASCII(cocoa, intoUTF8: $0) { return len } success = false @@ -432,7 +433,7 @@ internal func _bytesToUInt64( var r: UInt64 = 0 var shift: Int = 0 for idx in 0.. Bool ) rethrows { let didSortUnsafeBuffer: Void? = - try withContiguousMutableStorageIfAvailable { buffer in - try buffer._stableSortImpl(by: areInIncreasingOrder) + try unsafe withContiguousMutableStorageIfAvailable { buffer in + try unsafe buffer._stableSortImpl(by: areInIncreasingOrder) } if didSortUnsafeBuffer == nil { // Fallback since we can't use an unsafe buffer: sort into an outside @@ -351,19 +351,19 @@ internal func _merge( buffer: UnsafeMutablePointer, by areInIncreasingOrder: (Element, Element) throws -> Bool ) rethrows -> Bool { - let lowCount = mid - low - let highCount = high - mid + let lowCount = unsafe mid - low + let highCount = unsafe high - mid - var destLow = low // Lower bound of uninitialized storage - var bufferLow = buffer // Lower bound of the initialized buffer - var bufferHigh = buffer // Upper bound of the initialized buffer + var destLow = unsafe low // Lower bound of uninitialized storage + var bufferLow = unsafe buffer // Lower bound of the initialized buffer + var bufferHigh = unsafe buffer // Upper bound of the initialized buffer // When we exit the merge, move any remaining elements from the buffer back // into `destLow` (aka the collection we're sorting). The buffer can have // remaining elements if `areIncreasingOrder` throws, or more likely if the // merge runs out of elements from the array before exhausting the buffer. defer { - destLow.moveInitialize(from: bufferLow, count: bufferHigh - bufferLow) + unsafe destLow.moveInitialize(from: bufferLow, count: bufferHigh - bufferLow) } if lowCount < highCount { @@ -380,24 +380,24 @@ internal func _merge( // Buffer: [4, 4, 7, 8, 9, x, ...] // ^ ^ // bufferLow bufferHigh - buffer.moveInitialize(from: low, count: lowCount) - bufferHigh = bufferLow + lowCount + unsafe buffer.moveInitialize(from: low, count: lowCount) + unsafe bufferHigh = unsafe bufferLow + lowCount - var srcLow = mid + var srcLow = unsafe mid // Each iteration moves the element that compares lower into `destLow`, // preferring the buffer when equal to maintain stability. 
Elements are // moved from either `bufferLow` or `srcLow`, with those pointers // incrementing as elements are moved. - while bufferLow < bufferHigh && srcLow < high { - if try areInIncreasingOrder(srcLow.pointee, bufferLow.pointee) { - destLow.moveInitialize(from: srcLow, count: 1) - srcLow += 1 + while unsafe bufferLow < bufferHigh && srcLow < high { + if try unsafe areInIncreasingOrder(srcLow.pointee, bufferLow.pointee) { + unsafe destLow.moveInitialize(from: srcLow, count: 1) + unsafe srcLow += 1 } else { - destLow.moveInitialize(from: bufferLow, count: 1) - bufferLow += 1 + unsafe destLow.moveInitialize(from: bufferLow, count: 1) + unsafe bufferLow += 1 } - destLow += 1 + unsafe destLow += 1 } } else { // Move the higher group of elements into the buffer, then traverse from @@ -413,12 +413,12 @@ internal func _merge( // Buffer: [8, 8, 10, 12, 15, x, ...] // ^ ^ // bufferLow bufferHigh - buffer.moveInitialize(from: mid, count: highCount) - bufferHigh = bufferLow + highCount + unsafe buffer.moveInitialize(from: mid, count: highCount) + unsafe bufferHigh = unsafe bufferLow + highCount - var destHigh = high - var srcHigh = mid - destLow = mid + var destHigh = unsafe high + var srcHigh = unsafe mid + unsafe destLow = unsafe mid // Each iteration moves the element that compares higher into `destHigh`, // preferring the buffer when equal to maintain stability. Elements are @@ -427,20 +427,20 @@ internal func _merge( // // Note: At the start of each iteration, each `...High` pointer points one // past the element they're referring to. - while bufferHigh > bufferLow && srcHigh > low { - destHigh -= 1 - if try areInIncreasingOrder( + while unsafe bufferHigh > bufferLow && srcHigh > low { + unsafe destHigh -= 1 + if try unsafe areInIncreasingOrder( (bufferHigh - 1).pointee, (srcHigh - 1).pointee ) { - srcHigh -= 1 - destHigh.moveInitialize(from: srcHigh, count: 1) + unsafe srcHigh -= 1 + unsafe destHigh.moveInitialize(from: srcHigh, count: 1) // Moved an element from the lower initialized portion to the upper, // sorted, initialized portion, so `destLow` moves down one. - destLow -= 1 + unsafe destLow -= 1 } else { - bufferHigh -= 1 - destHigh.moveInitialize(from: bufferHigh, count: 1) + unsafe bufferHigh -= 1 + unsafe destHigh.moveInitialize(from: bufferHigh, count: 1) } } } @@ -530,20 +530,20 @@ extension UnsafeMutableBufferPointer { buffer: UnsafeMutablePointer, by areInIncreasingOrder: (Element, Element) throws -> Bool ) rethrows -> Bool { - _internalInvariant(runs[i - 1].upperBound == runs[i].lowerBound) - let low = runs[i - 1].lowerBound - let middle = runs[i].lowerBound - let high = runs[i].upperBound + unsafe _internalInvariant(runs[i - 1].upperBound == runs[i].lowerBound) + let low = unsafe runs[i - 1].lowerBound + let middle = unsafe runs[i].lowerBound + let high = unsafe runs[i].upperBound - try _merge( + try unsafe _merge( low: baseAddress! + low, mid: baseAddress! + middle, high: baseAddress! + high, buffer: buffer, by: areInIncreasingOrder) - runs[i - 1] = low.. 1 { - var lastIndex = runs.count - 1 + while unsafe runs.count > 1 { + var lastIndex = unsafe runs.count - 1 // Check for the three invariant-breaking conditions, and break out of // the while loop if none are met. - if lastIndex >= 3 && + if unsafe lastIndex >= 3 && (runs[lastIndex - 3].count <= runs[lastIndex - 2].count + runs[lastIndex - 1].count) { // Second-to-last three runs do not follow W > X + Y. // Always merge Y with the smaller of X or Z. 
- if runs[lastIndex - 2].count < runs[lastIndex].count { + if unsafe runs[lastIndex - 2].count < runs[lastIndex].count { lastIndex -= 1 } - } else if lastIndex >= 2 && + } else if unsafe lastIndex >= 2 && (runs[lastIndex - 2].count <= runs[lastIndex - 1].count + runs[lastIndex].count) { // Last three runs do not follow X > Y + Z. // Always merge Y with the smaller of X or Z. - if runs[lastIndex - 2].count < runs[lastIndex].count { + if unsafe runs[lastIndex - 2].count < runs[lastIndex].count { lastIndex -= 1 } - } else if runs[lastIndex - 1].count <= runs[lastIndex].count { + } else if unsafe runs[lastIndex - 1].count <= runs[lastIndex].count { // Last two runs do not follow Y > Z, so merge Y and Z. // This block is intentionally blank--the merge happens below. } else { @@ -620,7 +620,7 @@ extension UnsafeMutableBufferPointer { } // Merge the runs at `i` and `i - 1`. - try _mergeRuns( + try unsafe _mergeRuns( &runs, at: lastIndex, buffer: buffer, by: areInIncreasingOrder) } @@ -645,8 +645,8 @@ extension UnsafeMutableBufferPointer { buffer: UnsafeMutablePointer, by areInIncreasingOrder: (Element, Element) throws -> Bool ) rethrows -> Bool { - while runs.count > 1 { - try _mergeRuns( + while unsafe runs.count > 1 { + try unsafe _mergeRuns( &runs, at: runs.count - 1, buffer: buffer, by: areInIncreasingOrder) } @@ -664,7 +664,7 @@ extension UnsafeMutableBufferPointer { ) rethrows { let minimumRunLength = _minimumMergeRunLength(count) if count <= minimumRunLength { - try _insertionSort( + try unsafe _insertionSort( within: startIndex..(_unsafeUninitializedCapacity: count / 2) { + _ = try unsafe Array(_unsafeUninitializedCapacity: count / 2) { buffer, _ in - var runs: [Range] = [] + var runs: [Range] = unsafe [] var start = startIndex while start < endIndex { // Find the next consecutive run, reversing it if necessary. var (end, descending) = - try _findNextRun(in: self, from: start, by: areInIncreasingOrder) + unsafe try _findNextRun(in: self, from: start, by: areInIncreasingOrder) if descending { - _reverse(within: start.. UnsafeRawPointer { - _pointer._unsafelyUnwrappedUnchecked + unsafe _pointer._unsafelyUnwrappedUnchecked } /// The number of bytes in this `RawSpan`. @@ -73,7 +74,7 @@ public struct RawSpan: ~Escapable, Copyable, BitwiseCopyable { _unchecked pointer: UnsafeRawPointer?, byteCount: Int ) { - _pointer = pointer + _pointer = unsafe pointer _count = byteCount } } @@ -101,7 +102,7 @@ extension RawSpan { let span = RawSpan(_unchecked: baseAddress, byteCount: buffer.count) // As a trivial value, 'baseAddress' does not formally depend on the // lifetime of 'buffer'. Make the dependence explicit. - self = _overrideLifetime(span, borrowing: buffer) + self = unsafe _overrideLifetime(span, borrowing: buffer) } /// Unsafely create a `RawSpan` over initialized memory. @@ -117,11 +118,11 @@ extension RawSpan { public init( _unsafeBytes buffer: borrowing Slice ) { - let rawBuffer = UnsafeRawBufferPointer(rebasing: buffer) + let rawBuffer = unsafe UnsafeRawBufferPointer(rebasing: buffer) let span = RawSpan(_unsafeBytes: rawBuffer) // As a trivial value, 'rawBuffer' does not formally depend on the // lifetime of 'buffer'. Make the dependence explicit. - self = _overrideLifetime(span, borrowing: buffer) + self = unsafe _overrideLifetime(span, borrowing: buffer) } /// Unsafely create a `RawSpan` over initialized memory. @@ -141,7 +142,7 @@ extension RawSpan { let span = RawSpan(_unsafeBytes: rawBuffer) // As a trivial value, 'rawBuffer' does not formally depend on the // lifetime of 'buffer'. 
Make the dependence explicit. - self = _overrideLifetime(span, borrowing: buffer) + self = unsafe _overrideLifetime(span, borrowing: buffer) } @_alwaysEmitIntoClient @@ -150,11 +151,11 @@ extension RawSpan { _unsafeBytes buffer: borrowing Slice ) { let rawBuffer = - UnsafeRawBufferPointer(UnsafeMutableRawBufferPointer(rebasing: buffer)) + UnsafeRawBufferPointer(unsafe UnsafeMutableRawBufferPointer(rebasing: buffer)) let span = RawSpan(_unsafeBytes: rawBuffer) // As a trivial value, 'rawBuffer' does not formally depend on the // lifetime of 'buffer'. Make the dependence explicit. - self = _overrideLifetime(span, borrowing: buffer) + self = unsafe _overrideLifetime(span, borrowing: buffer) } /// Unsafely create a `RawSpan` over initialized memory. @@ -194,7 +195,7 @@ extension RawSpan { let span = RawSpan(_unsafeBytes: rawBuffer) // As a trivial value, 'rawBuffer' does not formally depend on the // lifetime of 'buffer'. Make the dependence explicit. - self = _overrideLifetime(span, borrowing: buffer) + self = unsafe _overrideLifetime(span, borrowing: buffer) } /// Unsafely create a `RawSpan` over initialized memory. @@ -210,11 +211,11 @@ extension RawSpan { public init( _unsafeElements buffer: borrowing Slice> ) { - let rawBuffer = UnsafeRawBufferPointer(UnsafeBufferPointer(rebasing: buffer)) + let rawBuffer = UnsafeRawBufferPointer(unsafe UnsafeBufferPointer(rebasing: buffer)) let span = RawSpan(_unsafeBytes: rawBuffer) // As a trivial value, 'rawBuffer' does not formally depend on the // lifetime of 'buffer'. Make the dependence explicit. - self = _overrideLifetime(span, borrowing: buffer) + self = unsafe _overrideLifetime(span, borrowing: buffer) } /// Unsafely create a `RawSpan` over initialized memory. @@ -234,7 +235,7 @@ extension RawSpan { let span = RawSpan(_unsafeBytes: rawBuffer) // As a trivial value, 'rawBuffer' does not formally depend on the // lifetime of 'buffer'. Make the dependence explicit. - self = _overrideLifetime(span, borrowing: buffer) + self = unsafe _overrideLifetime(span, borrowing: buffer) } /// Unsafely create a `RawSpan` over initialized memory. @@ -251,11 +252,11 @@ extension RawSpan { _unsafeElements buffer: borrowing Slice> ) { let rawBuffer = - UnsafeRawBufferPointer(UnsafeMutableBufferPointer(rebasing: buffer)) + UnsafeRawBufferPointer(unsafe UnsafeMutableBufferPointer(rebasing: buffer)) let span = RawSpan(_unsafeBytes: rawBuffer) // As a trivial value, 'rawBuffer' does not formally depend on the // lifetime of 'buffer'. Make the dependence explicit. - self = _overrideLifetime(span, borrowing: buffer) + self = unsafe _overrideLifetime(span, borrowing: buffer) } /// Unsafely create a `RawSpan` over initialized memory. 
@@ -350,7 +351,7 @@ extension RawSpan { UInt(bitPattern: bounds.upperBound) <= UInt(bitPattern: _count), "Byte offset range out of bounds" ) - return _extracting(unchecked: bounds) + return unsafe _extracting(unchecked: bounds) } /// Constructs a new span over the bytes within the supplied range of @@ -372,9 +373,9 @@ extension RawSpan { @_alwaysEmitIntoClient @lifetime(self) public func _extracting(unchecked bounds: Range) -> Self { - let newStart = _pointer?.advanced(by: bounds.lowerBound) + let newStart = unsafe _pointer?.advanced(by: bounds.lowerBound) let newSpan = RawSpan(_unchecked: newStart, byteCount: bounds.count) - return _overrideLifetime(newSpan, copying: self) + return unsafe _overrideLifetime(newSpan, copying: self) } /// Constructs a new span over the bytes within the supplied range of @@ -417,7 +418,7 @@ extension RawSpan { public func _extracting( unchecked bounds: some RangeExpression ) -> Self { - _extracting(unchecked: bounds.relative(to: byteOffsets)) + unsafe _extracting(unchecked: bounds.relative(to: byteOffsets)) } /// Constructs a new span over all the bytes of this span. @@ -457,7 +458,7 @@ extension RawSpan { public func withUnsafeBytes( _ body: (_ buffer: UnsafeRawBufferPointer) throws(E) -> Result ) throws(E) -> Result { - try body(.init(start: _pointer, count: byteCount)) + try unsafe body(.init(start: _pointer, count: byteCount)) } } @@ -484,11 +485,11 @@ extension RawSpan { consuming public func _unsafeView( as type: T.Type ) -> Span { - let rawBuffer = UnsafeRawBufferPointer(start: _pointer, count: _count) + let rawBuffer = unsafe UnsafeRawBufferPointer(start: _pointer, count: _count) let newSpan = Span(_unsafeBytes: rawBuffer) // As a trivial value, 'rawBuffer' does not formally depend on the // lifetime of 'self'. Make the dependence explicit. - return _overrideLifetime(newSpan, copying: self) + return unsafe _overrideLifetime(newSpan, copying: self) } } @@ -523,7 +524,7 @@ extension RawSpan { MemoryLayout.size <= (_count &- offset), "Byte offset range out of bounds" ) - return unsafeLoad(fromUncheckedByteOffset: offset, as: T.self) + return unsafe unsafeLoad(fromUncheckedByteOffset: offset, as: T.self) } /// Returns a new instance of the given type, constructed from the raw memory @@ -549,7 +550,7 @@ extension RawSpan { public func unsafeLoad( fromUncheckedByteOffset offset: Int, as: T.Type ) -> T { - _start().load(fromByteOffset: offset, as: T.self) + unsafe _start().load(fromByteOffset: offset, as: T.self) } /// Returns a new instance of the given type, constructed from the raw memory @@ -578,7 +579,7 @@ extension RawSpan { MemoryLayout.size <= (_count &- offset), "Byte offset range out of bounds" ) - return unsafeLoadUnaligned(fromUncheckedByteOffset: offset, as: T.self) + return unsafe unsafeLoadUnaligned(fromUncheckedByteOffset: offset, as: T.self) } /// Returns a new instance of the given type, constructed from the raw memory @@ -603,7 +604,7 @@ extension RawSpan { public func unsafeLoadUnaligned( fromUncheckedByteOffset offset: Int, as: T.Type ) -> T { - _start().loadUnaligned(fromByteOffset: offset, as: T.self) + unsafe _start().loadUnaligned(fromByteOffset: offset, as: T.self) } } @@ -613,7 +614,7 @@ extension RawSpan { /// refer to the same region in memory. 
@_alwaysEmitIntoClient public func isIdentical(to other: Self) -> Bool { - (self._pointer == other._pointer) && (self._count == other._count) + unsafe (self._pointer == other._pointer) && (self._count == other._count) } /// Returns the offsets where the memory of `span` is located within @@ -628,12 +629,12 @@ public func byteOffsets(of other: borrowing Self) -> Range? { if other._count > _count { return nil } guard let spanStart = other._pointer, _count > 0 else { - return _pointer == other._pointer ? Range(_uncheckedBounds: (0, 0)) : nil + return unsafe _pointer == other._pointer ? Range(_uncheckedBounds: (0, 0)) : nil } let start = _start() - let spanEnd = spanStart + other._count - if spanStart < start || (start + _count) < spanEnd { return nil } - let lower = start.distance(to: spanStart) + let spanEnd = unsafe spanStart + other._count + if unsafe spanStart < start || (start + _count) < spanEnd { return nil } + let lower = unsafe start.distance(to: spanStart) return Range(_uncheckedBounds: (lower, lower &+ other._count)) } } @@ -707,11 +708,11 @@ extension RawSpan { public func _extracting(last maxLength: Int) -> Self { _precondition(maxLength >= 0, "Can't have a suffix of negative length") let newCount = min(maxLength, byteCount) - let newStart = _pointer?.advanced(by: byteCount &- newCount) + let newStart = unsafe _pointer?.advanced(by: byteCount &- newCount) let newSpan = RawSpan(_unchecked: newStart, byteCount: newCount) // As a trivial value, 'newStart' does not formally depend on the // lifetime of 'self'. Make the dependence explicit. - return _overrideLifetime(newSpan, copying: self) + return unsafe _overrideLifetime(newSpan, copying: self) } /// Returns a span over all but the given number of initial bytes. @@ -733,10 +734,10 @@ public func _extracting(droppingFirst k: Int) -> Self { _precondition(k >= 0, "Can't drop a negative number of elements") let droppedCount = min(k, byteCount) - let newStart = _pointer?.advanced(by: droppedCount) + let newStart = unsafe _pointer?.advanced(by: droppedCount) let newSpan = RawSpan(_unchecked: newStart, byteCount: byteCount &- droppedCount) // As a trivial value, 'newStart' does not formally depend on the // lifetime of 'self'. Make the dependence explicit. - return _overrideLifetime(newSpan, copying: self) + return unsafe _overrideLifetime(newSpan, copying: self) } } diff --git a/stdlib/public/core/Span/Span.swift index 7ec8b84e5ce93..2ec10c026d765 100644 --- a/stdlib/public/core/Span/Span.swift +++ b/stdlib/public/core/Span/Span.swift @@ -20,6 +20,7 @@ /// ensuring spatial safety and avoiding buffer overflow errors. @frozen @available(SwiftStdlib 6.1, *) +@safe public struct Span : ~Escapable, Copyable, BitwiseCopyable { @@ -35,7 +36,7 @@ public struct Span @_alwaysEmitIntoClient internal func _start() -> UnsafeRawPointer { - _pointer._unsafelyUnwrappedUnchecked + unsafe _pointer._unsafelyUnwrappedUnchecked } /// The number of elements in this `Span`. 
@@ -75,7 +76,7 @@ public struct Span _unchecked pointer: UnsafeRawPointer?, count: Int ) { - _pointer = pointer + _pointer = unsafe pointer _count = count } } @@ -100,8 +101,8 @@ extension Span where Element: ~Copyable { _unsafeElements buffer: UnsafeBufferPointer ) { //FIXME: Workaround for https://github.com/swiftlang/swift/issues/77235 - let baseAddress = UnsafeRawPointer(buffer.baseAddress) - _precondition( + let baseAddress = unsafe UnsafeRawPointer(buffer.baseAddress) + unsafe _precondition( ((Int(bitPattern: baseAddress) & (MemoryLayout.alignment &- 1)) == 0), "baseAddress must be properly aligned to access Element" @@ -109,7 +110,7 @@ extension Span where Element: ~Copyable { let span = Span(_unchecked: baseAddress, count: buffer.count) // As a trivial value, 'baseAddress' does not formally depend on the // lifetime of 'buffer'. Make the dependence explicit. - self = _overrideLifetime(span, borrowing: buffer) + self = unsafe _overrideLifetime(span, borrowing: buffer) } /// Unsafely create a `Span` over initialized memory. @@ -129,7 +130,7 @@ extension Span where Element: ~Copyable { let span = Span(_unsafeElements: buf) // As a trivial value, 'buf' does not formally depend on the // lifetime of 'buffer'. Make the dependence explicit. - self = _overrideLifetime(span, borrowing: buffer) + self = unsafe _overrideLifetime(span, borrowing: buffer) } /// Unsafely create a `Span` over initialized memory. @@ -149,11 +150,11 @@ extension Span where Element: ~Copyable { count: Int ) { _precondition(count >= 0, "Count must not be negative") - let buf = UnsafeBufferPointer(start: pointer, count: count) + let buf = unsafe UnsafeBufferPointer(start: pointer, count: count) let span = Span(_unsafeElements: buf) // As a trivial value, 'buf' does not formally depend on the // lifetime of 'pointer'. Make the dependence explicit. - self = _overrideLifetime(span, borrowing: pointer) + self = unsafe _overrideLifetime(span, borrowing: pointer) } } @@ -173,11 +174,11 @@ extension Span { public init( _unsafeElements buffer: borrowing Slice> ) { - let buf = UnsafeBufferPointer(rebasing: buffer) + let buf = unsafe UnsafeBufferPointer(rebasing: buffer) let span = Span(_unsafeElements: buf) // As a trivial value, 'buf' does not formally depend on the // lifetime of 'buffer'. Make the dependence explicit. - self = _overrideLifetime(span, borrowing: buffer) + self = unsafe _overrideLifetime(span, borrowing: buffer) } /// Unsafely create a `Span` over initialized memory. @@ -193,11 +194,11 @@ extension Span { public init( _unsafeElements buffer: borrowing Slice> ) { - let buf = UnsafeBufferPointer(rebasing: buffer) + let buf = unsafe UnsafeBufferPointer(rebasing: buffer) let span = Span(_unsafeElements: buf) // As a trivial value, 'buf' does not formally depend on the // lifetime of 'buffer'. Make the dependence explicit. - self = _overrideLifetime(span, borrowing: buffer) + self = unsafe _overrideLifetime(span, borrowing: buffer) } } @@ -223,7 +224,7 @@ extension Span where Element: BitwiseCopyable { ) { //FIXME: Workaround for https://github.com/swiftlang/swift/issues/77235 let baseAddress = buffer.baseAddress - _precondition( + unsafe _precondition( ((Int(bitPattern: baseAddress) & (MemoryLayout.alignment &- 1)) == 0), "baseAddress must be properly aligned to access Element" @@ -236,7 +237,7 @@ extension Span where Element: BitwiseCopyable { let span = Span(_unchecked: baseAddress, count: count) // As a trivial value, 'baseAddress' does not formally depend on the // lifetime of 'buffer'. 
Make the dependence explicit. - self = _overrideLifetime(span, borrowing: buffer) + self = unsafe _overrideLifetime(span, borrowing: buffer) } /// Unsafely create a `Span` over initialized memory. @@ -260,7 +261,7 @@ extension Span where Element: BitwiseCopyable { let span = Span(_unsafeBytes: rawBuffer) // As a trivial value, 'buf' does not formally depend on the // lifetime of 'buffer'. Make the dependence explicit. - self = _overrideLifetime(span, borrowing: buffer) + self = unsafe _overrideLifetime(span, borrowing: buffer) } /// Unsafely create a `Span` over initialized memory. @@ -284,11 +285,11 @@ extension Span where Element: BitwiseCopyable { byteCount: Int ) { _precondition(byteCount >= 0, "Count must not be negative") - let rawBuffer = UnsafeRawBufferPointer(start: pointer, count: byteCount) + let rawBuffer = unsafe UnsafeRawBufferPointer(start: pointer, count: byteCount) let span = Span(_unsafeBytes: rawBuffer) // As a trivial value, 'rawBuffer' does not formally depend on the // lifetime of 'pointer'. Make the dependence explicit. - self = _overrideLifetime(span, borrowing: pointer) + self = unsafe _overrideLifetime(span, borrowing: pointer) } /// Unsafely create a `Span` over initialized memory. @@ -308,11 +309,11 @@ extension Span where Element: BitwiseCopyable { public init( _unsafeBytes buffer: borrowing Slice ) { - let rawBuffer = UnsafeRawBufferPointer(rebasing: buffer) + let rawBuffer = unsafe UnsafeRawBufferPointer(rebasing: buffer) let span = Span(_unsafeBytes: rawBuffer) // As a trivial value, 'rawBuffer' does not formally depend on the // lifetime of 'buffer'. Make the dependence explicit. - self = _overrideLifetime(span, borrowing: buffer) + self = unsafe _overrideLifetime(span, borrowing: buffer) } /// Unsafely create a `Span` over initialized memory. @@ -332,11 +333,11 @@ extension Span where Element: BitwiseCopyable { public init( _unsafeBytes buffer: borrowing Slice ) { - let rawBuffer = UnsafeRawBufferPointer(rebasing: buffer) + let rawBuffer = unsafe UnsafeRawBufferPointer(rebasing: buffer) let span = Span(_unsafeBytes: rawBuffer) // As a trivial value, 'rawBuffer' does not formally depend on the // lifetime of 'buffer'. Make the dependence explicit. - self = _overrideLifetime(span, borrowing: buffer) + self = unsafe _overrideLifetime(span, borrowing: buffer) } /// Create a `Span` over the bytes represented by a `RawSpan` @@ -348,11 +349,11 @@ extension Span where Element: BitwiseCopyable { @lifetime(bytes) public init(_bytes bytes: consuming RawSpan) { let rawBuffer = - UnsafeRawBufferPointer(start: bytes._pointer, count: bytes.byteCount) + unsafe UnsafeRawBufferPointer(start: bytes._pointer, count: bytes.byteCount) let span = Span(_unsafeBytes: rawBuffer) // As a trivial value, 'rawBuffer' does not formally depend on the // lifetime of 'bytes'. Make the dependence explicit. 
- self = _overrideLifetime(span, copying: bytes) + self = unsafe _overrideLifetime(span, copying: bytes) } } @@ -401,7 +402,7 @@ extension Span where Element: ~Copyable { //FIXME: change to unsafeRawAddress when ready unsafeAddress { _precondition(indices.contains(position), "Index out of bounds") - return _unsafeAddressOfElement(unchecked: position) + return unsafe _unsafeAddressOfElement(unchecked: position) } } @@ -419,7 +420,7 @@ extension Span where Element: ~Copyable { public subscript(unchecked position: Index) -> Element { //FIXME: change to unsafeRawAddress when ready unsafeAddress { - _unsafeAddressOfElement(unchecked: position) + unsafe _unsafeAddressOfElement(unchecked: position) } } @@ -429,8 +430,8 @@ extension Span where Element: ~Copyable { unchecked position: Index ) -> UnsafePointer { let elementOffset = position &* MemoryLayout.stride - let address = _start().advanced(by: elementOffset) - return address.assumingMemoryBound(to: Element.self) + let address = unsafe _start().advanced(by: elementOffset) + return unsafe address.assumingMemoryBound(to: Element.self) } } @@ -450,7 +451,7 @@ extension Span where Element: BitwiseCopyable { UInt(bitPattern: position) < UInt(bitPattern: _count), "Index out of bounds" ) - return self[unchecked: position] + return unsafe self[unchecked: position] } } @@ -468,8 +469,8 @@ extension Span where Element: BitwiseCopyable { public subscript(unchecked position: Index) -> Element { get { let elementOffset = position &* MemoryLayout.stride - let address = _start().advanced(by: elementOffset) - return address.loadUnaligned(as: Element.self) + let address = unsafe _start().advanced(by: elementOffset) + return unsafe address.loadUnaligned(as: Element.self) } } } @@ -499,7 +500,7 @@ extension Span where Element: ~Copyable { UInt(bitPattern: bounds.upperBound) <= UInt(bitPattern: _count), "Index range out of bounds" ) - return _extracting(unchecked: bounds) + return unsafe _extracting(unchecked: bounds) } /// Constructs a new span over the items within the supplied range of @@ -522,11 +523,11 @@ extension Span where Element: ~Copyable { @lifetime(self) public func _extracting(unchecked bounds: Range) -> Self { let delta = bounds.lowerBound &* MemoryLayout.stride - let newStart = _pointer?.advanced(by: delta) + let newStart = unsafe _pointer?.advanced(by: delta) let newSpan = Span(_unchecked: newStart, count: bounds.count) // As a trivial value, 'newStart' does not formally depend on the // lifetime of 'self'. Make the dependence explicit. - return _overrideLifetime(newSpan, copying: self) + return unsafe _overrideLifetime(newSpan, copying: self) } /// Constructs a new span over the items within the supplied range of @@ -569,7 +570,7 @@ extension Span where Element: ~Copyable { public func _extracting( unchecked bounds: some RangeExpression ) -> Self { - _extracting(unchecked: bounds.relative(to: indices)) + unsafe _extracting(unchecked: bounds.relative(to: indices)) } /// Constructs a new span over all the items of this span. 
@@ -609,13 +610,13 @@ extension Span where Element: ~Copyable { _ body: (_ buffer: UnsafeBufferPointer) throws(E) -> Result ) throws(E) -> Result { guard let pointer = _pointer else { - return try body(.init(start: nil, count: 0)) + return try unsafe body(.init(start: nil, count: 0)) } let binding = Builtin.bindMemory( pointer._rawValue, count._builtinWordValue, Element.self ) defer { Builtin.rebindMemory(pointer._rawValue, binding) } - return try body(.init(start: .init(pointer._rawValue), count: count)) + return try unsafe body(.init(start: .init(pointer._rawValue), count: count)) } } @@ -640,7 +641,7 @@ extension Span where Element: BitwiseCopyable { public func withUnsafeBytes( _ body: (_ buffer: UnsafeRawBufferPointer) throws(E) -> Result ) throws(E) -> Result { - try body( + try unsafe body( .init(start: _pointer, count: _count * MemoryLayout.stride) ) } @@ -652,7 +653,7 @@ extension Span where Element: ~Copyable { /// refer to the same region in memory. @_alwaysEmitIntoClient public func isIdentical(to other: Self) -> Bool { - (self._pointer == other._pointer) && (self._count == other._count) + unsafe (self._pointer == other._pointer) && (self._count == other._count) } /// Returns the indices within `self` where the memory represented by `span` @@ -665,13 +666,13 @@ extension Span where Element: ~Copyable { public func indices(of other: borrowing Self) -> Range? { if other._count > _count { return nil } guard let spanStart = other._pointer, _count > 0 else { - return _pointer == other._pointer ? Range(_uncheckedBounds: (0, 0)) : nil + return unsafe _pointer == other._pointer ? Range(_uncheckedBounds: (0, 0)) : nil } let start = _start() let stride = MemoryLayout.stride - let spanEnd = spanStart + stride &* other._count - if spanStart < start || spanEnd > (start + stride &* _count) { return nil } - let byteOffset = start.distance(to: spanStart) + let spanEnd = unsafe spanStart + stride &* other._count + if unsafe spanStart < start || spanEnd > (start + stride &* _count) { return nil } + let byteOffset = unsafe start.distance(to: spanStart) let (lower, r) = byteOffset.quotientAndRemainder(dividingBy: stride) guard r == 0 else { return nil } return Range(_uncheckedBounds: (lower, lower &+ other._count)) @@ -748,11 +749,11 @@ extension Span where Element: ~Copyable { _precondition(maxLength >= 0, "Can't have a suffix of negative length") let newCount = min(maxLength, count) let offset = (count &- newCount) * MemoryLayout.stride - let newStart = _pointer?.advanced(by: offset) + let newStart = unsafe _pointer?.advanced(by: offset) let newSpan = Span(_unchecked: newStart, count: newCount) // As a trivial value, 'newStart' does not formally depend on the // lifetime of 'buffer'. Make the dependence explicit. - return _overrideLifetime(newSpan, copying: self) + return unsafe _overrideLifetime(newSpan, copying: self) } /// Returns a span over all but the given number of initial elements. @@ -775,10 +776,10 @@ extension Span where Element: ~Copyable { _precondition(k >= 0, "Can't drop a negative number of elements") let droppedCount = min(k, count) let offset = droppedCount * MemoryLayout.stride - let newStart = _pointer?.advanced(by: offset) + let newStart = unsafe _pointer?.advanced(by: offset) let newSpan = Span(_unchecked: newStart, count: count &- droppedCount) // As a trivial value, 'newStart' does not formally depend on the // lifetime of 'buffer'. Make the dependence explicit. 
- return _overrideLifetime(newSpan, copying: self) + return unsafe _overrideLifetime(newSpan, copying: self) } } diff --git a/stdlib/public/core/StaticBigInt.swift b/stdlib/public/core/StaticBigInt.swift index 4112d9ffb68f0..36fbde06e671a 100644 --- a/stdlib/public/core/StaticBigInt.swift +++ b/stdlib/public/core/StaticBigInt.swift @@ -150,11 +150,11 @@ extension StaticBigInt: CustomDebugStringConvertible { // Equivalent to `ceil(bitWidthExcludingSignBit / fourBitsPerHexDigit)`. // Underestimated for `-(16 ** y)` values (e.g. "-0x1", "-0x10", "-0x100"). let capacity = indicator.utf8.count + (((bitWidth - 1) + 3) / 4) - var result = String(unsafeUninitializedCapacity: capacity) { utf8 in + var result = unsafe String(unsafeUninitializedCapacity: capacity) { utf8 in // Pre-initialize with zeros, ignoring extra capacity. - var utf8 = utf8.prefix(capacity) - utf8.initialize(repeating: UInt8(ascii: "0")) + var utf8 = unsafe utf8.prefix(capacity) + unsafe utf8.initialize(repeating: UInt8(ascii: "0")) // Use a 32-bit element type, to generate small hexadecimal strings. typealias Element = UInt32 @@ -176,8 +176,8 @@ extension StaticBigInt: CustomDebugStringConvertible { // Overwrite trailing zeros with hexadecimal digits. let hexDigits = String(element, radix: 16, uppercase: true).utf8 - _ = utf8.suffix(hexDigits.count).update(fromContentsOf: hexDigits) - utf8 = utf8.dropLast(hexDigitsPerElement) + _ = unsafe utf8.suffix(hexDigits.count).update(fromContentsOf: hexDigits) + unsafe utf8 = unsafe utf8.dropLast(hexDigitsPerElement) } return capacity } diff --git a/stdlib/public/core/StaticString.swift b/stdlib/public/core/StaticString.swift index f84587e92ee5a..bf01fb9557339 100644 --- a/stdlib/public/core/StaticString.swift +++ b/stdlib/public/core/StaticString.swift @@ -133,7 +133,7 @@ public struct StaticString: Sendable { _precondition( hasPointerRepresentation, "StaticString should have pointer representation") - return UnsafePointer(bitPattern: UInt(_startPtrOrData))! + return unsafe UnsafePointer(bitPattern: UInt(_startPtrOrData))! } /// A single Unicode scalar value. @@ -198,17 +198,18 @@ public struct StaticString: Sendable { /// duration of the method's execution. /// - Returns: The return value, if any, of the `body` closure. @_transparent + @safe public func withUTF8Buffer( _ body: (UnsafeBufferPointer) -> R ) -> R { if hasPointerRepresentation { - return body(UnsafeBufferPointer( + return unsafe body(UnsafeBufferPointer( start: utf8Start, count: utf8CodeUnitCount)) } else { #if $Embedded fatalError("non-pointer representation not supported in embedded Swift") #else - return unicodeScalar.withUTF8CodeUnits { body($0) } + return unicodeScalar.withUTF8CodeUnits { unsafe body($0) } #endif } } @@ -300,7 +301,7 @@ extension StaticString: CustomStringConvertible { /// A textual representation of the static string. 
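Note the `@safe` attribute added to `withUTF8Buffer` above: it states that a declaration whose signature mentions unsafe pointer types is nonetheless safe to call, so callers are not asked to acknowledge the call itself, only their own uses of the pointer inside the closure. A hedged sketch of a helper that would earn the same annotation (the name and shape are assumptions, not from the patch):

```swift
// Hypothetical helper: the pointer it vends is valid exactly for the duration
// of `body`, so the call itself is safe even though the signature mentions an
// unsafe type.
@safe
func withScratchBuffer<R>(
  count: Int,
  _ body: (UnsafeMutableBufferPointer<UInt8>) -> R
) -> R {
  let buffer = unsafe UnsafeMutableBufferPointer<UInt8>.allocate(capacity: count)
  unsafe buffer.initialize(repeating: 0)
  defer {
    unsafe buffer.deinitialize()
    unsafe buffer.deallocate()
  }
  // Calling `body` still involves an unsafe type, so it is acknowledged here,
  // inside the implementation, rather than at every call site.
  return unsafe body(buffer)
}
```

Callers that never touch the pointer directly compile without any `unsafe` markers; uses of the buffer inside their closure still need them, just as the `description` getter in the next hunk re-acknowledges `$0` inside `withUTF8Buffer`.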
public var description: String { - return withUTF8Buffer { String._uncheckedFromUTF8($0) } + return withUTF8Buffer { unsafe String._uncheckedFromUTF8($0) } } } diff --git a/stdlib/public/core/String.swift b/stdlib/public/core/String.swift index aca5fde7dbe86..b4132341e8985 100644 --- a/stdlib/public/core/String.swift +++ b/stdlib/public/core/String.swift @@ -424,9 +424,9 @@ extension String { C: Collection >(_ input: C) -> (result: String, repairsMade: Bool) { _internalInvariant(C.Element.self == UInt8.self) - return Array(input).withUnsafeBufferPointer { - UnsafeRawBufferPointer($0).withMemoryRebound(to: UInt8.self) { - String._fromUTF8Repairing($0) + return unsafe Array(input).withUnsafeBufferPointer { + unsafe UnsafeRawBufferPointer($0).withMemoryRebound(to: UInt8.self) { + unsafe String._fromUTF8Repairing($0) } } } @@ -460,7 +460,7 @@ extension String { (buffer: UnsafeBufferPointer) -> String in Builtin.onFastPath() // encourage SIL Optimizer to inline this closure :-( let rawBufPtr = UnsafeRawBufferPointer(buffer) - return String._fromUTF8Repairing( + return unsafe String._fromUTF8Repairing( UnsafeBufferPointer( start: rawBufPtr.baseAddress?.assumingMemoryBound(to: UInt8.self), count: rawBufPtr.count)).0 @@ -476,7 +476,7 @@ extension String { { self = contigBytes.withUnsafeBytes { rawBufPtr in Builtin.onFastPath() // encourage SIL Optimizer to inline this closure - return String._fromUTF8Repairing( + return unsafe String._fromUTF8Repairing( UnsafeBufferPointer( start: rawBufPtr.baseAddress?.assumingMemoryBound(to: UInt8.self), count: rawBufPtr.count)).0 @@ -519,7 +519,7 @@ extension String { as encoding: Encoding.Type ) { let contiguousResult = codeUnits.withContiguousStorageIfAvailable { - String._validate($0, as: Encoding.self) + unsafe String._validate($0, as: Encoding.self) } if let validationResult = contiguousResult { guard let validatedString = validationResult else { @@ -545,8 +545,8 @@ extension String { } ) if error { return nil } - self = transcoded.withUnsafeBufferPointer{ - String._uncheckedFromUTF8($0, asciiPreScanResult: isASCII) + self = unsafe transcoded.withUnsafeBufferPointer{ + unsafe String._uncheckedFromUTF8($0, asciiPreScanResult: isASCII) } } @@ -581,8 +581,8 @@ extension String { as encoding: Encoding.Type ) where Encoding: Unicode.Encoding, Encoding.CodeUnit == UInt8 { let contiguousResult = codeUnits.withContiguousStorageIfAvailable { - $0.withMemoryRebound(to: UInt8.self) { - String._validate($0, as: Encoding.self) + unsafe $0.withMemoryRebound(to: UInt8.self) { + unsafe String._validate($0, as: Encoding.self) } } if let validationResult = contiguousResult { @@ -649,7 +649,7 @@ extension String { _ buffer: UnsafeMutableBufferPointer ) throws -> Int ) rethrows { - self = try String( + self = try unsafe String( _uninitializedCapacity: capacity, initializingUTF8With: initializer ) @@ -663,20 +663,20 @@ extension String { ) throws -> Int ) rethrows { if _fastPath(capacity <= _SmallString.capacity) { - let smol = try _SmallString(initializingUTF8With: { - try initializer(.init(start: $0.baseAddress, count: capacity)) + let smol = try unsafe _SmallString(initializingUTF8With: { + try unsafe initializer(.init(start: $0.baseAddress, count: capacity)) }) // Fast case where we fit in a _SmallString and don't need UTF8 validation if _fastPath(smol.isASCII) { self = String(_StringGuts(smol)) } else { // We succeeded in making a _SmallString, but may need to repair UTF8 - self = smol.withUTF8 { String._fromUTF8Repairing($0).result } + self = smol.withUTF8 { unsafe 
String._fromUTF8Repairing($0).result } } return } - self = try String._fromLargeUTF8Repairing( + self = try unsafe String._fromLargeUTF8Repairing( uninitializedCapacity: capacity, initializingWith: initializer) } @@ -698,7 +698,7 @@ extension String { public func withCString( _ body: (UnsafePointer) throws -> Result ) rethrows -> Result { - return try _guts.withCString(body) + return try unsafe _guts.withCString(body) } /// Calls the given closure with a pointer to the contents of the string, @@ -724,15 +724,15 @@ extension String { _ body: (UnsafePointer) throws -> Result ) rethrows -> Result { if targetEncoding == UTF8.self { - return try self.withCString { + return try unsafe self.withCString { (cPtr: UnsafePointer) -> Result in _internalInvariant(UInt8.self == TargetEncoding.CodeUnit.self) - let ptr = UnsafeRawPointer(cPtr).assumingMemoryBound( + let ptr = unsafe UnsafeRawPointer(cPtr).assumingMemoryBound( to: TargetEncoding.CodeUnit.self) - return try body(ptr) + return try unsafe body(ptr) } } - return try _slowWithCString(encodedAs: targetEncoding, body) + return try unsafe _slowWithCString(encodedAs: targetEncoding, body) } @usableFromInline @inline(never) // slow-path @@ -745,7 +745,7 @@ extension String { return try copy.withUTF8 { utf8 in var arg = Array() arg.reserveCapacity(1 &+ self._guts.count / 4) - let repaired = transcode( + let repaired = unsafe transcode( utf8.makeIterator(), from: UTF8.self, to: targetEncoding, @@ -753,7 +753,7 @@ extension String { into: { arg.append($0) }) arg.append(TargetEncoding.CodeUnit(0)) _internalInvariant(!repaired) - return try body(arg) + return try unsafe body(arg) } } } @@ -767,7 +767,7 @@ extension String: _ExpressibleByBuiltinUnicodeScalarLiteral { @inlinable @inline(__always) public init(_ scalar: Unicode.Scalar) { - self = scalar.withUTF8CodeUnits { String._uncheckedFromUTF8($0) } + self = scalar.withUTF8CodeUnits { unsafe String._uncheckedFromUTF8($0) } } } @@ -794,14 +794,14 @@ extension String: _ExpressibleByBuiltinStringLiteral { utf8CodeUnitCount: Builtin.Word, isASCII: Builtin.Int1 ) { - let bufPtr = UnsafeBufferPointer( + let bufPtr = unsafe UnsafeBufferPointer( start: UnsafeRawPointer(start).assumingMemoryBound(to: UInt8.self), count: Int(utf8CodeUnitCount)) - if let smol = _SmallString(bufPtr) { + if let smol = unsafe _SmallString(bufPtr) { self = String(_StringGuts(smol)) return } - self.init(_StringGuts(bufPtr, isASCII: Bool(isASCII))) + unsafe self.init(_StringGuts(bufPtr, isASCII: Bool(isASCII))) } } @@ -1025,10 +1025,10 @@ extension String { @_effects(releasenone) public func lowercased() -> String { if _fastPath(_guts.isFastASCII) { - return _guts.withFastUTF8 { utf8 in - return String(_uninitializedCapacity: utf8.count) { buffer in + return unsafe _guts.withFastUTF8 { utf8 in + return unsafe String(_uninitializedCapacity: utf8.count) { buffer in for i in 0 ..< utf8.count { - buffer[i] = _lowercaseASCII(utf8[i]) + unsafe buffer[i] = unsafe _lowercaseASCII(utf8[i]) } return utf8.count } @@ -1059,10 +1059,10 @@ extension String { @_effects(releasenone) public func uppercased() -> String { if _fastPath(_guts.isFastASCII) { - return _guts.withFastUTF8 { utf8 in - return String(_uninitializedCapacity: utf8.count) { buffer in + return unsafe _guts.withFastUTF8 { utf8 in + return unsafe String(_uninitializedCapacity: utf8.count) { buffer in for i in 0 ..< utf8.count { - buffer[i] = _uppercaseASCII(utf8[i]) + unsafe buffer[i] = unsafe _uppercaseASCII(utf8[i]) } return utf8.count } @@ -1149,8 +1149,8 @@ extension 
_StringGutsSlice { // Because we have access to the fastUTF8, we can go through that instead // of accessing the UTF8 view on String. if isNFCQC { - try withFastUTF8 { - for byte in $0 { + try unsafe withFastUTF8 { + for unsafe byte in unsafe $0 { try f(byte) } } @@ -1176,7 +1176,7 @@ extension _StringGutsSlice { for scalar in substring.unicodeScalars._internalNFC { try scalar.withUTF8CodeUnits { - for byte in $0 { + for unsafe byte in unsafe $0 { try f(byte) } } @@ -1184,16 +1184,16 @@ extension _StringGutsSlice { } internal func _fastNFCCheck(_ isNFCQC: inout Bool, _ prevCCC: inout UInt8) { - withFastUTF8 { utf8 in + unsafe withFastUTF8 { utf8 in var position = 0 while position < utf8.count { // If our first byte is less than 0xCC, then it means we're under the // 0x300 scalar value and everything up to 0x300 is NFC already. - if utf8[position] < 0xCC { + if unsafe utf8[position] < 0xCC { // If our first byte is less than 0xC0, then it means it is ASCII // and only takes up a single byte. - if utf8[position] < 0xC0 { + if unsafe utf8[position] < 0xC0 { position &+= 1 } else { // Otherwise, this is a 2 byte < 0x300 sequence. @@ -1205,7 +1205,7 @@ extension _StringGutsSlice { continue } - let (scalar, len) = _decodeScalar(utf8, startingAt: position) + let (scalar, len) = unsafe _decodeScalar(utf8, startingAt: position) if !_isScalarNFCQC(scalar, &prevCCC) { isNFCQC = false diff --git a/stdlib/public/core/StringBridge.swift b/stdlib/public/core/StringBridge.swift index d3cf810c9f738..cb4b53f4d6e93 100644 --- a/stdlib/public/core/StringBridge.swift +++ b/stdlib/public/core/StringBridge.swift @@ -76,12 +76,12 @@ internal typealias _CocoaString = AnyObject */ @inline(__always) private func _objc(_ str: _CocoaString) -> _StringSelectorHolder { - return unsafeBitCast(str, to: _StringSelectorHolder.self) + return unsafe unsafeBitCast(str, to: _StringSelectorHolder.self) } @_effects(releasenone) private func _copyNSString(_ str: _StringSelectorHolder) -> _CocoaString { - return str.copy(with: nil) + return unsafe str.copy(with: nil) } @usableFromInline // @testable @@ -112,7 +112,7 @@ internal func _isNSString(_ str:AnyObject) -> Bool { @_effects(readonly) private func _NSStringCharactersPtr(_ str: _StringSelectorHolder) -> UnsafeMutablePointer? { - return UnsafeMutablePointer(mutating: str._fastCharacterContents()) + return unsafe UnsafeMutablePointer(mutating: str._fastCharacterContents()) } @usableFromInline // @testable @@ -120,7 +120,7 @@ private func _NSStringCharactersPtr(_ str: _StringSelectorHolder) -> UnsafeMutab internal func _stdlib_binary_CFStringGetCharactersPtr( _ source: _CocoaString ) -> UnsafeMutablePointer? { - return _NSStringCharactersPtr(_objc(source)) + return unsafe _NSStringCharactersPtr(_objc(source)) } @_effects(releasenone) @@ -129,7 +129,7 @@ private func _NSStringGetCharacters( range: Range, into destination: UnsafeMutablePointer ) { - source.getCharacters(destination, range: _SwiftNSRange( + unsafe source.getCharacters(destination, range: _SwiftNSRange( location: range.startIndex, length: range.count) ) @@ -143,7 +143,7 @@ internal func _cocoaStringCopyCharacters( range: Range, into destination: UnsafeMutablePointer ) { - _NSStringGetCharacters(from: _objc(source), range: range, into: destination) + unsafe _NSStringGetCharacters(from: _objc(source), range: range, into: destination) } @_effects(readonly) @@ -166,11 +166,11 @@ private func _NSStringCopyBytes( encoding: UInt, into bufPtr: UnsafeMutableRawBufferPointer ) -> Int? 
{ - let ptr = bufPtr.baseAddress._unsafelyUnwrappedUnchecked + let ptr = unsafe bufPtr.baseAddress._unsafelyUnwrappedUnchecked let len = o.length var remainingRange = _SwiftNSRange(location: 0, length: 0) var usedLen = 0 - let success = 0 != o.getBytes( + let success = unsafe 0 != o.getBytes( ptr, maxLength: bufPtr.count, usedLength: &usedLen, @@ -190,7 +190,7 @@ internal func _cocoaStringCopyUTF8( _ target: _CocoaString, into bufPtr: UnsafeMutableRawBufferPointer ) -> Int? { - return _NSStringCopyBytes( + return unsafe _NSStringCopyBytes( _objc(target), encoding: _cocoaUTF8Encoding, into: bufPtr @@ -202,7 +202,7 @@ internal func _cocoaStringCopyASCII( _ target: _CocoaString, into bufPtr: UnsafeMutableRawBufferPointer ) -> Int? { - return _NSStringCopyBytes( + return unsafe _NSStringCopyBytes( _objc(target), encoding: _cocoaASCIIEncoding, into: bufPtr @@ -216,7 +216,7 @@ private func _NSStringUTF8Count( ) -> Int? { var remainingRange = _SwiftNSRange(location: 0, length: 0) var usedLen = 0 - let success = 0 != o.getBytes( + let success = unsafe 0 != o.getBytes( UnsafeMutableRawPointer(Builtin.inttoptr_Word(0._builtinWordValue)), maxLength: 0, usedLength: &usedLen, @@ -267,7 +267,7 @@ internal func _cocoaHashString( internal func _cocoaHashASCIIBytes( _ bytes: UnsafePointer, length: Int ) -> UInt { - return _swift_stdlib_CFStringHashCString(bytes, length) + return unsafe _swift_stdlib_CFStringHashCString(bytes, length) } // These "trampolines" are effectively objc_msgSend_super. @@ -277,7 +277,7 @@ internal func _cocoaHashASCIIBytes( internal func _cocoaCStringUsingEncodingTrampoline( _ string: _CocoaString, _ encoding: UInt ) -> UnsafePointer? { - return _swift_stdlib_NSStringCStringUsingEncodingTrampoline(string, encoding) + return unsafe _swift_stdlib_NSStringCStringUsingEncodingTrampoline(string, encoding) } @_effects(releasenone) @@ -287,7 +287,7 @@ internal func _cocoaGetCStringTrampoline( _ maxLength: Int, _ encoding: UInt ) -> Int8 { - return Int8(_swift_stdlib_NSStringGetCStringTrampoline( + return unsafe Int8(_swift_stdlib_NSStringGetCStringTrampoline( string, buffer, maxLength, encoding)) } @@ -321,10 +321,10 @@ internal enum _KnownCocoaString { } #endif - switch unsafeBitCast(_swift_classOfObjCHeapObject(str), to: UInt.self) { - case unsafeBitCast(__StringStorage.self, to: UInt.self): + switch unsafe unsafeBitCast(_swift_classOfObjCHeapObject(str), to: UInt.self) { + case unsafe unsafeBitCast(__StringStorage.self, to: UInt.self): self = .storage - case unsafeBitCast(__SharedStringStorage.self, to: UInt.self): + case unsafe unsafeBitCast(__SharedStringStorage.self, to: UInt.self): self = .shared default: self = .cocoa @@ -343,7 +343,7 @@ internal func _bridgeTagged( intoUTF8 bufPtr: UnsafeMutableRawBufferPointer ) -> Int? { _internalInvariant(_isObjCTaggedPointer(cocoa)) - return _cocoaStringCopyUTF8(cocoa, into: bufPtr) + return unsafe _cocoaStringCopyUTF8(cocoa, into: bufPtr) } @_effects(releasenone) // @opaque @@ -352,7 +352,7 @@ internal func _bridgeTaggedASCII( intoUTF8 bufPtr: UnsafeMutableRawBufferPointer ) -> Int? { _internalInvariant(_isObjCTaggedPointer(cocoa)) - return _cocoaStringCopyASCII(cocoa, into: bufPtr) + return unsafe _cocoaStringCopyASCII(cocoa, into: bufPtr) } #endif @@ -360,20 +360,20 @@ internal func _bridgeTaggedASCII( private func _NSStringASCIIPointer(_ str: _StringSelectorHolder) -> UnsafePointer? 
{ //TODO(String bridging): Unconditionally asking for nul-terminated contents is // overly conservative and hurts perf with some NSStrings - return str._fastCStringContents(1)?._asUInt8 + return unsafe str._fastCStringContents(1)?._asUInt8 } @_effects(readonly) private func _NSStringUTF8Pointer(_ str: _StringSelectorHolder) -> UnsafePointer? { //We don't have a way to ask for UTF8 here currently - return _NSStringASCIIPointer(str) + return unsafe _NSStringASCIIPointer(str) } @_effects(readonly) internal func _getNSCFConstantStringContentsPointer( _ cocoa: AnyObject ) -> UnsafePointer { - return unsafeBitCast( + return unsafe unsafeBitCast( cocoa, to: UnsafePointer<_swift_shims_builtin_CFString>.self ).pointee.str @@ -391,15 +391,15 @@ private func _withCocoaASCIIPointer( return nil // tagged pointer strings don't support _fastCStringContents } if let smol = _SmallString(taggedASCIICocoa: str) { - return _StringGuts(smol).withFastUTF8 { - work($0.baseAddress._unsafelyUnwrappedUnchecked) + return unsafe _StringGuts(smol).withFastUTF8 { + unsafe work($0.baseAddress._unsafelyUnwrappedUnchecked) } } } #endif defer { _fixLifetime(str) } - if let ptr = _NSStringASCIIPointer(_objc(str)) { - return work(ptr) + if let ptr = unsafe _NSStringASCIIPointer(_objc(str)) { + return unsafe work(ptr) } return nil } @@ -416,15 +416,15 @@ private func _withCocoaUTF8Pointer( return nil // tagged pointer strings don't support _fastCStringContents } if let smol = _SmallString(taggedCocoa: str) { - return _StringGuts(smol).withFastUTF8 { - work($0.baseAddress._unsafelyUnwrappedUnchecked) + return unsafe _StringGuts(smol).withFastUTF8 { + unsafe work($0.baseAddress._unsafelyUnwrappedUnchecked) } } } #endif defer { _fixLifetime(str) } - if let ptr = _NSStringUTF8Pointer(_objc(str)) { - return work(ptr) + if let ptr = unsafe _NSStringUTF8Pointer(_objc(str)) { + return unsafe work(ptr) } return nil } @@ -434,7 +434,7 @@ internal func withCocoaASCIIPointer( _ str: _CocoaString, work: (UnsafePointer) -> R? ) -> R? { - return _withCocoaASCIIPointer(str, requireStableAddress: false, work: work) + return unsafe _withCocoaASCIIPointer(str, requireStableAddress: false, work: work) } @_effects(readonly) // @opaque @@ -442,21 +442,22 @@ internal func withCocoaUTF8Pointer( _ str: _CocoaString, work: (UnsafePointer) -> R? ) -> R? { - return _withCocoaUTF8Pointer(str, requireStableAddress: false, work: work) + return unsafe _withCocoaUTF8Pointer(str, requireStableAddress: false, work: work) } @_effects(readonly) internal func stableCocoaASCIIPointer(_ str: _CocoaString) -> UnsafePointer? { - return _withCocoaASCIIPointer(str, requireStableAddress: true, work: { $0 }) + return unsafe _withCocoaASCIIPointer(str, requireStableAddress: true, work: { unsafe $0 }) } @_effects(readonly) internal func stableCocoaUTF8Pointer(_ str: _CocoaString) -> UnsafePointer? 
{ - return _withCocoaUTF8Pointer(str, requireStableAddress: true, work: { $0 }) + return unsafe _withCocoaUTF8Pointer(str, requireStableAddress: true, work: { unsafe $0 }) } +@unsafe private enum CocoaStringPointer { case ascii(UnsafePointer) case utf8(UnsafePointer) @@ -468,12 +469,12 @@ private enum CocoaStringPointer { private func _getCocoaStringPointer( _ cfImmutableValue: _CocoaString ) -> CocoaStringPointer { - if let ascii = stableCocoaASCIIPointer(cfImmutableValue) { - return .ascii(ascii) + if let ascii = unsafe stableCocoaASCIIPointer(cfImmutableValue) { + return unsafe .ascii(ascii) } // We could ask for UTF16 here via _stdlib_binary_CFStringGetCharactersPtr, // but we currently have no use for it - return .none + return unsafe .none } #if !$Embedded @@ -482,10 +483,10 @@ private func _getCocoaStringPointer( internal func _bridgeCocoaString(_ cocoaString: _CocoaString) -> _StringGuts { switch _KnownCocoaString(cocoaString) { case .storage: - return _unsafeUncheckedDowncast( + return unsafe _unsafeUncheckedDowncast( cocoaString, to: __StringStorage.self).asString._guts case .shared: - return _unsafeUncheckedDowncast( + return unsafe _unsafeUncheckedDowncast( cocoaString, to: __SharedStringStorage.self).asString._guts #if _pointerBitWidth(_64) case .tagged: @@ -512,14 +513,14 @@ internal func _bridgeCocoaString(_ cocoaString: _CocoaString) -> _StringGuts { if _isObjCTaggedPointer(immutableCopy) { // Copying a tagged pointer can produce a tagged pointer, but only if it's // small enough to definitely fit in a _SmallString - return _StringGuts( + return unsafe _StringGuts( _SmallString(taggedCocoa: immutableCopy).unsafelyUnwrapped ) } #endif let (fastUTF8, isASCII): (Bool, Bool) - switch _getCocoaStringPointer(immutableCopy) { + switch unsafe _getCocoaStringPointer(immutableCopy) { case .ascii(_): (fastUTF8, isASCII) = (true, true) case .utf8(_): (fastUTF8, isASCII) = (true, false) default: (fastUTF8, isASCII) = (false, false) @@ -574,7 +575,7 @@ private func _createNSString( _ count: Int, _ encoding: UInt32 ) -> AnyObject? { - return receiver.createTaggedString(bytes: ptr, count: count) + return unsafe receiver.createTaggedString(bytes: ptr, count: count) } @_effects(releasenone) @@ -583,7 +584,7 @@ private func _createCFString( _ count: Int, _ encoding: UInt32 ) -> AnyObject? { - return _createNSString( + return unsafe _createNSString( unsafeBitCast(__StringStorage.self as AnyClass, to: _StringSelectorHolder.self), ptr, count, @@ -602,7 +603,7 @@ extension String { // Smol ASCII a) may bridge to tagged pointers, b) can't contain a BOM if _guts.isSmallASCII { let maybeTagged = _guts.asSmall.withUTF8 { bufPtr in - return _createCFString( + return unsafe _createCFString( bufPtr.baseAddress._unsafelyUnwrappedUnchecked, bufPtr.count, kCFStringEncodingUTF8 @@ -625,7 +626,7 @@ extension String { // TODO: We'd rather emit a valid ObjC object statically than create a // shared string class instance. 
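`CocoaStringPointer` gains an `@unsafe` annotation above: its payloads are raw pointers, so the type itself is declared unsafe and every value of it has to be acknowledged, which is why even `return .none` becomes `return unsafe .none`. A sketch of the same idea with hypothetical names, again assuming a Swift 6.2 toolchain with strict checking:

```swift
// Hypothetical type, not from the patch: cases carry raw pointers, so the
// whole enum is marked @unsafe.
@unsafe
enum TextPointer {
  case ascii(UnsafePointer<UInt8>)
  case none
}

func classify(_ text: StaticString) -> TextPointer {
  guard text.hasPointerRepresentation else { return unsafe .none }
  // Reading `utf8Start` and wrapping it in the unsafe enum are both covered
  // by the single `unsafe` expression.
  return unsafe .ascii(text.utf8Start)
}
```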
let gutsCountAndFlags = _guts._object._countAndFlags - return __SharedStringStorage( + return unsafe __SharedStringStorage( immortal: _guts._object.fastUTF8.baseAddress!, countAndFlags: _StringObject.CountAndFlags( sharedCount: _guts.count, isASCII: gutsCountAndFlags.isASCII)) @@ -648,17 +649,17 @@ internal func _SwiftCreateBridgedString_DoNotCall( length: Int, encoding: _swift_shims_CFStringEncoding ) -> Unmanaged { - let bufPtr = UnsafeBufferPointer(start: bytes, count: length) + let bufPtr = unsafe UnsafeBufferPointer(start: bytes, count: length) let str:String switch encoding { case kCFStringEncodingUTF8: - str = String(decoding: bufPtr, as: Unicode.UTF8.self) + str = unsafe String(decoding: bufPtr, as: Unicode.UTF8.self) case kCFStringEncodingASCII: - str = String(decoding: bufPtr, as: Unicode.ASCII.self) + str = unsafe String(decoding: bufPtr, as: Unicode.ASCII.self) default: fatalError("Unsupported encoding in shim") } - return Unmanaged.passRetained(str._bridgeToObjectiveCImpl()) + return unsafe Unmanaged.passRetained(str._bridgeToObjectiveCImpl()) } @available(SwiftStdlib 6.1, *) @@ -666,9 +667,9 @@ internal func _SwiftCreateBridgedString_DoNotCall( buffer: UnsafeBufferPointer, isASCII: Bool ) -> String? { - switch validateUTF8(buffer) { + switch unsafe validateUTF8(buffer) { case .success(let extraInfo): - return String(_StringGuts(buffer, isASCII: extraInfo.isASCII)) + return unsafe String(_StringGuts(buffer, isASCII: extraInfo.isASCII)) default: return nil } @@ -701,7 +702,7 @@ public func _getDescription(_ x: T) -> AnyObject { @available(SwiftStdlib 5.2, *) internal func _NSStringFromUTF8(_ s: UnsafePointer, _ len: Int) -> AnyObject { - return String( + return unsafe String( decoding: UnsafeBufferPointer(start: s, count: len), as: UTF8.self )._bridgeToObjectiveCImpl() @@ -737,31 +738,31 @@ extension StringProtocol { public // SPI(Foundation) func _toUTF16Offsets(_ indices: Range) -> Range { if Self.self == String.self { - let s = unsafeBitCast(self, to: String.self) + let s = unsafe unsafeBitCast(self, to: String.self) return s.utf16._offsetRange(for: indices, from: s.startIndex) } if Self.self == Substring.self { - let s = unsafeBitCast(self, to: Substring.self) + let s = unsafe unsafeBitCast(self, to: Substring.self) return s._slice._base.utf16._offsetRange(for: indices, from: s.startIndex) } let startOffset = _toUTF16Offset(indices.lowerBound) let endOffset = _toUTF16Offset(indices.upperBound) - return Range(uncheckedBounds: (lower: startOffset, upper: endOffset)) + return unsafe Range(uncheckedBounds: (lower: startOffset, upper: endOffset)) } public // SPI(Foundation) func _toUTF16Indices(_ range: Range) -> Range { if Self.self == String.self { - let s = unsafeBitCast(self, to: String.self) + let s = unsafe unsafeBitCast(self, to: String.self) return s.utf16._indexRange(for: range, from: s.startIndex) } if Self.self == Substring.self { - let s = unsafeBitCast(self, to: Substring.self) + let s = unsafe unsafeBitCast(self, to: Substring.self) return s._slice._base.utf16._indexRange(for: range, from: s.startIndex) } let lowerbound = _toUTF16Index(range.lowerBound) let upperbound = _toUTF16Index(range.upperBound) - return Range(uncheckedBounds: (lower: lowerbound, upper: upperbound)) + return unsafe Range(uncheckedBounds: (lower: lowerbound, upper: upperbound)) } } @@ -773,6 +774,6 @@ extension String { ) { _internalInvariant(buffer.count >= range.count) let indexRange = self._toUTF16Indices(range) - self.utf16._nativeCopy(into: buffer, alignedRange: indexRange) + unsafe 
self.utf16._nativeCopy(into: buffer, alignedRange: indexRange) } } diff --git a/stdlib/public/core/StringComparison.swift b/stdlib/public/core/StringComparison.swift index d46383f94f1e6..d218b63ad0292 100644 --- a/stdlib/public/core/StringComparison.swift +++ b/stdlib/public/core/StringComparison.swift @@ -53,9 +53,9 @@ internal func _stringCompareInternal( } let isNFC = lhs.isNFC && rhs.isNFC - return lhs.withFastUTF8 { lhsUTF8 in - return rhs.withFastUTF8 { rhsUTF8 in - return _stringCompareFastUTF8( + return unsafe lhs.withFastUTF8 { lhsUTF8 in + return unsafe rhs.withFastUTF8 { rhsUTF8 in + return unsafe _stringCompareFastUTF8( lhsUTF8, rhsUTF8, expecting: expecting, bothNFC: isNFC) } } @@ -88,9 +88,9 @@ internal func _stringCompareInternal( } let isNFC = lhs.isNFC && rhs.isNFC - return lhs.withFastUTF8(range: lhsRange) { lhsUTF8 in - return rhs.withFastUTF8(range: rhsRange) { rhsUTF8 in - return _stringCompareFastUTF8( + return unsafe lhs.withFastUTF8(range: lhsRange) { lhsUTF8 in + return unsafe rhs.withFastUTF8(range: rhsRange) { rhsUTF8 in + return unsafe _stringCompareFastUTF8( lhsUTF8, rhsUTF8, expecting: expecting, bothNFC: isNFC) } } @@ -115,11 +115,11 @@ private func _stringCompareFastUTF8( if expecting == .equal && utf8Left.count != utf8Right.count { return false } - let cmp = _binaryCompare(utf8Left, utf8Right) + let cmp = unsafe _binaryCompare(utf8Left, utf8Right) return _lexicographicalCompare(cmp, 0, expecting: expecting) } - return _stringCompareFastUTF8Abnormal( + return unsafe _stringCompareFastUTF8Abnormal( utf8Left, utf8Right, expecting: expecting) } @@ -130,7 +130,7 @@ private func _stringCompareFastUTF8Abnormal( expecting: _StringComparisonResult ) -> Bool { // Do a binary-equality prefix scan, to skip over long common prefixes. - guard let diffIdx = _findDiffIdx(utf8Left, utf8Right) else { + guard let diffIdx = unsafe _findDiffIdx(utf8Left, utf8Right) else { // We finished one of our inputs. // // TODO: This gives us a consistent and good ordering, but technically it @@ -140,17 +140,17 @@ private func _stringCompareFastUTF8Abnormal( utf8Left.count, utf8Right.count, expecting: expecting) } - let scalarDiffIdx = _scalarAlign(utf8Left, diffIdx) - _internalInvariant(scalarDiffIdx == _scalarAlign(utf8Right, diffIdx)) + let scalarDiffIdx = unsafe _scalarAlign(utf8Left, diffIdx) + unsafe _internalInvariant(scalarDiffIdx == _scalarAlign(utf8Right, diffIdx)) - let (leftScalar, leftLen) = _decodeScalar(utf8Left, startingAt: scalarDiffIdx) - let (rightScalar, rightLen) = _decodeScalar( + let (leftScalar, leftLen) = unsafe _decodeScalar(utf8Left, startingAt: scalarDiffIdx) + let (rightScalar, rightLen) = unsafe _decodeScalar( utf8Right, startingAt: scalarDiffIdx) // Very frequent fast-path: point of binary divergence is a NFC single-scalar // segment. Check that we diverged at the start of a segment, and the next // scalar is both NFC and its own segment. - if _fastPath( + if unsafe _fastPath( leftScalar._isNFCStarter && rightScalar._isNFCStarter && utf8Left.hasNormalizationBoundary(before: scalarDiffIdx &+ leftLen) && utf8Right.hasNormalizationBoundary(before: scalarDiffIdx &+ rightLen) @@ -166,12 +166,12 @@ private func _stringCompareFastUTF8Abnormal( // Back up to the nearest normalization boundary before doing a slow // normalizing compare. 
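In `_stringCompareInternal` above, each nested `withFastUTF8` closure gets its own `unsafe` marker: an `unsafe` expression covers the expression it prefixes, but not the body of a closure nested inside it. A small sketch of the same nesting over plain byte arrays (hypothetical function, assuming Swift 6.2 with `-strict-memory-safety`):

```swift
func identicalBytes(_ lhs: [UInt8], _ rhs: [UInt8]) -> Bool {
  guard lhs.count == rhs.count else { return false }
  // The outer call, the inner call, and the comparison each involve unsafe
  // raw-buffer types, so each closure re-acknowledges its own uses.
  return unsafe lhs.withUnsafeBytes { lhsBytes in
    unsafe rhs.withUnsafeBytes { rhsBytes in
      unsafe lhsBytes.elementsEqual(rhsBytes)
    }
  }
}
```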
- let boundaryIdx = Swift.min( + let boundaryIdx = unsafe Swift.min( _findBoundary(utf8Left, before: diffIdx), _findBoundary(utf8Right, before: diffIdx)) _internalInvariant(boundaryIdx <= diffIdx) - return _stringCompareSlow( + return unsafe _stringCompareSlow( UnsafeBufferPointer(rebasing: utf8Left[boundaryIdx...]), UnsafeBufferPointer(rebasing: utf8Right[boundaryIdx...]), expecting: expecting) @@ -206,8 +206,8 @@ private func _stringCompareSlow( ) -> Bool { // TODO: Just call the normalizer directly - let left = _StringGutsSlice(_StringGuts(leftUTF8, isASCII: false)) - let right = _StringGutsSlice(_StringGuts(rightUTF8, isASCII: false)) + let left = unsafe _StringGutsSlice(_StringGuts(leftUTF8, isASCII: false)) + let right = unsafe _StringGutsSlice(_StringGuts(rightUTF8, isASCII: false)) return left.compare(with: right, expecting: expecting) } @@ -220,7 +220,7 @@ private func _findDiffIdx( let count = Swift.min(left.count, right.count) var idx = 0 while idx < count { - guard left[_unchecked: idx] == right[_unchecked: idx] else { + guard unsafe left[_unchecked: idx] == right[_unchecked: idx] else { return idx } idx &+= 1 @@ -250,20 +250,20 @@ private func _findBoundary( } // Back up to scalar boundary - while UTF8.isContinuation(utf8[_unchecked: idx]) { + while unsafe UTF8.isContinuation(utf8[_unchecked: idx]) { idx &-= 1 } while true { if idx == 0 { return 0 } - let scalar = _decodeScalar(utf8, startingAt: idx).0 + let scalar = unsafe _decodeScalar(utf8, startingAt: idx).0 if scalar._isNFCStarter { return idx } - idx &-= _utf8ScalarLength(utf8, endingAt: idx) + unsafe idx &-= _utf8ScalarLength(utf8, endingAt: idx) } } @@ -297,7 +297,7 @@ internal enum _StringComparisonResult { internal func _binaryCompare( _ lhs: UnsafeBufferPointer, _ rhs: UnsafeBufferPointer ) -> Int { - var cmp = Int(truncatingIfNeeded: + var cmp = unsafe Int(truncatingIfNeeded: _swift_stdlib_memcmp( lhs.baseAddress._unsafelyUnwrappedUnchecked, rhs.baseAddress._unsafelyUnwrappedUnchecked, @@ -316,9 +316,9 @@ extension _StringGutsSlice { ) -> Bool { if _fastPath(self.isFastUTF8 && other.isFastUTF8) { Builtin.onFastPath() // aggressively inline / optimize - let isEqual = self.withFastUTF8 { utf8Self in - return other.withFastUTF8 { utf8Other in - return 0 == _binaryCompare(utf8Self, utf8Other) + let isEqual = unsafe self.withFastUTF8 { utf8Self in + return unsafe other.withFastUTF8 { utf8Other in + return unsafe 0 == _binaryCompare(utf8Self, utf8Other) } } if isEqual { return expecting == .equal } diff --git a/stdlib/public/core/StringCreate.swift b/stdlib/public/core/StringCreate.swift index 2aaf097629ebc..dafb24b5f85d2 100644 --- a/stdlib/public/core/StringCreate.swift +++ b/stdlib/public/core/StringCreate.swift @@ -20,52 +20,52 @@ internal func _allASCII(_ input: UnsafeBufferPointer) -> Bool { // TODO(String performance): SIMD-ize // let count = input.count - var ptr = UnsafeRawPointer(input.baseAddress._unsafelyUnwrappedUnchecked) + var ptr = unsafe UnsafeRawPointer(input.baseAddress._unsafelyUnwrappedUnchecked) let asciiMask64 = 0x8080_8080_8080_8080 as UInt64 let asciiMask32 = UInt32(truncatingIfNeeded: asciiMask64) let asciiMask16 = UInt16(truncatingIfNeeded: asciiMask64) let asciiMask8 = UInt8(truncatingIfNeeded: asciiMask64) - let end128 = ptr + count & ~(MemoryLayout<(UInt64, UInt64)>.stride &- 1) - let end64 = ptr + count & ~(MemoryLayout.stride &- 1) - let end32 = ptr + count & ~(MemoryLayout.stride &- 1) - let end16 = ptr + count & ~(MemoryLayout.stride &- 1) - let end = ptr + count + let end128 = unsafe 
ptr + count & ~(MemoryLayout<(UInt64, UInt64)>.stride &- 1) + let end64 = unsafe ptr + count & ~(MemoryLayout.stride &- 1) + let end32 = unsafe ptr + count & ~(MemoryLayout.stride &- 1) + let end16 = unsafe ptr + count & ~(MemoryLayout.stride &- 1) + let end = unsafe ptr + count - while ptr < end128 { - let pair = ptr.loadUnaligned(as: (UInt64, UInt64).self) + while unsafe ptr < end128 { + let pair = unsafe ptr.loadUnaligned(as: (UInt64, UInt64).self) let result = (pair.0 | pair.1) & asciiMask64 guard result == 0 else { return false } - ptr = ptr + MemoryLayout<(UInt64, UInt64)>.stride + unsafe ptr = unsafe ptr + MemoryLayout<(UInt64, UInt64)>.stride } // If we had enough bytes for two iterations of this, we would have hit // the loop above, so we only need to do this once - if ptr < end64 { - let value = ptr.loadUnaligned(as: UInt64.self) + if unsafe ptr < end64 { + let value = unsafe ptr.loadUnaligned(as: UInt64.self) guard value & asciiMask64 == 0 else { return false } - ptr = ptr + MemoryLayout.stride + unsafe ptr = unsafe ptr + MemoryLayout.stride } - if ptr < end32 { - let value = ptr.loadUnaligned(as: UInt32.self) + if unsafe ptr < end32 { + let value = unsafe ptr.loadUnaligned(as: UInt32.self) guard value & asciiMask32 == 0 else { return false } - ptr = ptr + MemoryLayout.stride + unsafe ptr = unsafe ptr + MemoryLayout.stride } - if ptr < end16 { - let value = ptr.loadUnaligned(as: UInt16.self) + if unsafe ptr < end16 { + let value = unsafe ptr.loadUnaligned(as: UInt16.self) guard value & asciiMask16 == 0 else { return false } - ptr = ptr + MemoryLayout.stride + unsafe ptr = unsafe ptr + MemoryLayout.stride } - if ptr < end { - let value = ptr.loadUnaligned(fromByteOffset: 0, as: UInt8.self) + if unsafe ptr < end { + let value = unsafe ptr.loadUnaligned(fromByteOffset: 0, as: UInt8.self) guard value & asciiMask8 == 0 else { return false } } - _internalInvariant(ptr == end || ptr + 1 == end) + unsafe _internalInvariant(ptr == end || ptr + 1 == end) return true } @@ -74,11 +74,11 @@ extension String { internal static func _uncheckedFromASCII( _ input: UnsafeBufferPointer ) -> String { - if let smol = _SmallString(input) { + if let smol = unsafe _SmallString(input) { return String(_StringGuts(smol)) } - let storage = __StringStorage.create(initializingFrom: input, isASCII: true) + let storage = unsafe __StringStorage.create(initializingFrom: input, isASCII: true) return storage.asString } @@ -86,39 +86,39 @@ extension String { internal static func _fromASCII( _ input: UnsafeBufferPointer ) -> String { - _internalInvariant(_allASCII(input), "not actually ASCII") - return _uncheckedFromASCII(input) + unsafe _internalInvariant(_allASCII(input), "not actually ASCII") + return unsafe _uncheckedFromASCII(input) } internal static func _fromASCIIValidating( _ input: UnsafeBufferPointer ) -> String? { - if _fastPath(_allASCII(input)) { - return _uncheckedFromASCII(input) + if unsafe _fastPath(_allASCII(input)) { + return unsafe _uncheckedFromASCII(input) } return nil } public // SPI(Foundation) static func _tryFromUTF8(_ input: UnsafeBufferPointer) -> String? 
{ - guard case .success(let extraInfo) = validateUTF8(input) else { + guard case .success(let extraInfo) = unsafe validateUTF8(input) else { return nil } - return String._uncheckedFromUTF8(input, isASCII: extraInfo.isASCII) + return unsafe String._uncheckedFromUTF8(input, isASCII: extraInfo.isASCII) } @usableFromInline internal static func _fromUTF8Repairing( _ input: UnsafeBufferPointer ) -> (result: String, repairsMade: Bool) { - switch validateUTF8(input) { + switch unsafe validateUTF8(input) { case .success(let extraInfo): - return (String._uncheckedFromUTF8( + return unsafe (String._uncheckedFromUTF8( input, asciiPreScanResult: extraInfo.isASCII ), false) case .error(let initialRange): - return (repairUTF8(input, firstKnownBrokenRange: initialRange), true) + return unsafe (repairUTF8(input, firstKnownBrokenRange: initialRange), true) } } @@ -128,11 +128,11 @@ extension String { _ buffer: UnsafeMutableBufferPointer ) throws -> Int ) rethrows -> String { - let result = try __StringStorage.create( + let result = try unsafe __StringStorage.create( uninitializedCodeUnitCapacity: capacity, initializingUncheckedUTF8With: initializer) - switch validateUTF8(result.codeUnits) { + switch unsafe validateUTF8(result.codeUnits) { case .success(let info): result._updateCountAndFlags( newCount: result.count, @@ -142,7 +142,7 @@ extension String { case .error(let initialRange): defer { _fixLifetime(result) } //This could be optimized to use excess tail capacity - return repairUTF8(result.codeUnits, firstKnownBrokenRange: initialRange) + return unsafe repairUTF8(result.codeUnits, firstKnownBrokenRange: initialRange) } } @@ -150,7 +150,7 @@ extension String { internal static func _uncheckedFromUTF8( _ input: UnsafeBufferPointer ) -> String { - return _uncheckedFromUTF8(input, isASCII: _allASCII(input)) + return unsafe _uncheckedFromUTF8(input, isASCII: _allASCII(input)) } @usableFromInline @@ -158,11 +158,11 @@ extension String { _ input: UnsafeBufferPointer, isASCII: Bool ) -> String { - if let smol = _SmallString(input) { + if let smol = unsafe _SmallString(input) { return String(_StringGuts(smol)) } - let storage = __StringStorage.create( + let storage = unsafe __StringStorage.create( initializingFrom: input, isASCII: isASCII) return storage.asString } @@ -172,12 +172,12 @@ extension String { internal static func _uncheckedFromUTF8( _ input: UnsafeBufferPointer, asciiPreScanResult: Bool ) -> String { - if let smol = _SmallString(input) { + if let smol = unsafe _SmallString(input) { return String(_StringGuts(smol)) } let isASCII = asciiPreScanResult - let storage = __StringStorage.create( + let storage = unsafe __StringStorage.create( initializingFrom: input, isASCII: isASCII) return storage.asString } @@ -192,7 +192,7 @@ extension String { // into a StringStorage space. 
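The uninitialized-capacity creation hunks here and in String.swift above sit behind the public `String(unsafeUninitializedCapacity:initializingUTF8With:)` initializer. From client code under strict checking the same call shape looks roughly like this (a hedged sketch; the helper name is illustrative):

```swift
func repeatedDash(count: Int) -> String {
  // The initializer's closure hands over an UnsafeMutableBufferPointer<UInt8>,
  // so both the call and the writes through the buffer are acknowledged.
  return unsafe String(unsafeUninitializedCapacity: count) { buffer in
    for i in 0..<count {
      unsafe buffer[i] = UInt8(ascii: "-")
    }
    return count
  }
}
```

The per-element write loop is the same shape as the `lowercased()` and `uppercased()` fast paths patched earlier in String.swift.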
var contents: [UInt8] = [] contents.reserveCapacity(input.count) - let repaired = transcode( + let repaired = unsafe transcode( input.makeIterator(), from: UTF16.self, to: UTF8.self, @@ -200,7 +200,7 @@ extension String { into: { contents.append($0) }) _internalInvariant(!repaired, "Error present") - return contents.withUnsafeBufferPointer { String._uncheckedFromUTF8($0) } + return unsafe contents.withUnsafeBufferPointer { unsafe String._uncheckedFromUTF8($0) } } @inline(never) // slow path @@ -227,7 +227,7 @@ extension String { into: { contents.append($0) }) guard repair || !repaired else { return nil } - let str = contents.withUnsafeBufferPointer { String._uncheckedFromUTF8($0) } + let str = unsafe contents.withUnsafeBufferPointer { unsafe String._uncheckedFromUTF8($0) } return (str, repaired) } @@ -262,10 +262,10 @@ extension String { if let contigBytes = input as? _HasContiguousBytes, contigBytes._providesContiguousBytesNoCopy { return resultOrSlow(contigBytes.withUnsafeBytes { rawBufPtr in - let buffer = UnsafeBufferPointer( + let buffer = unsafe UnsafeBufferPointer( start: rawBufPtr.baseAddress?.assumingMemoryBound(to: UInt8.self), count: rawBufPtr.count) - return String._fromASCIIValidating(buffer) + return unsafe String._fromASCIIValidating(buffer) }) } #endif @@ -273,15 +273,15 @@ extension String { // Fast path for user-defined Collections if let strOpt = input.withContiguousStorageIfAvailable({ (buffer: UnsafeBufferPointer) -> String? in - return String._fromASCIIValidating( + return unsafe String._fromASCIIValidating( UnsafeRawBufferPointer(buffer).bindMemory(to: UInt8.self)) }) { return resultOrSlow(strOpt) } - return resultOrSlow(Array(input).withUnsafeBufferPointer { - let buffer = UnsafeRawBufferPointer($0).bindMemory(to: UInt8.self) - return String._fromASCIIValidating(buffer) + return unsafe resultOrSlow(Array(input).withUnsafeBufferPointer { + let buffer = unsafe UnsafeRawBufferPointer($0).bindMemory(to: UInt8.self) + return unsafe String._fromASCIIValidating(buffer) }) } @@ -289,7 +289,7 @@ extension String { static func _fromInvalidUTF16( _ utf16: UnsafeBufferPointer ) -> String { - return String._fromCodeUnits(utf16, encoding: UTF16.self, repair: true)!.0 + return unsafe String._fromCodeUnits(utf16, encoding: UTF16.self, repair: true)!.0 } @usableFromInline @@ -312,12 +312,12 @@ extension String { @inline(never) // slow-path internal static func _copying(_ str: Substring) -> String { if _fastPath(str._wholeGuts.isFastUTF8) { - return str._wholeGuts.withFastUTF8(range: str._offsetRange) { - String._uncheckedFromUTF8($0) + return unsafe str._wholeGuts.withFastUTF8(range: str._offsetRange) { + unsafe String._uncheckedFromUTF8($0) } } - return Array(str.utf8).withUnsafeBufferPointer { - String._uncheckedFromUTF8($0) + return unsafe Array(str.utf8).withUnsafeBufferPointer { + unsafe String._uncheckedFromUTF8($0) } } @@ -328,28 +328,28 @@ extension String { as encoding: Encoding.Type ) -> String? 
{ if encoding.CodeUnit.self == UInt8.self { - let bytes = _identityCast(input, to: UnsafeBufferPointer.self) + let bytes = unsafe _identityCast(input, to: UnsafeBufferPointer.self) if encoding.self == UTF8.self { - guard case .success(let info) = validateUTF8(bytes) else { return nil } - return String._uncheckedFromUTF8(bytes, asciiPreScanResult: info.isASCII) + guard case .success(let info) = unsafe validateUTF8(bytes) else { return nil } + return unsafe String._uncheckedFromUTF8(bytes, asciiPreScanResult: info.isASCII) } else if encoding.self == Unicode.ASCII.self { - guard _allASCII(bytes) else { return nil } - return String._uncheckedFromASCII(bytes) + guard unsafe _allASCII(bytes) else { return nil } + return unsafe String._uncheckedFromASCII(bytes) } } // slow-path var isASCII = true var buffer: UnsafeMutableBufferPointer - buffer = UnsafeMutableBufferPointer.allocate(capacity: input.count*3) + unsafe buffer = UnsafeMutableBufferPointer.allocate(capacity: input.count*3) var written = buffer.startIndex var parser = Encoding.ForwardParser() - var input = input.makeIterator() + var input = unsafe input.makeIterator() transcodingLoop: while true { - switch parser.parseScalar(from: &input) { + switch unsafe parser.parseScalar(from: &input) { case .valid(let s): let scalar = Encoding.decode(s) guard let utf8 = Unicode.UTF8.encode(scalar) else { @@ -359,30 +359,30 @@ extension String { if buffer.count < written + utf8.count { let newCapacity = buffer.count + (buffer.count >> 1) let copy: UnsafeMutableBufferPointer - copy = UnsafeMutableBufferPointer.allocate(capacity: newCapacity) - let copied = copy.moveInitialize( + unsafe copy = UnsafeMutableBufferPointer.allocate(capacity: newCapacity) + let copied = unsafe copy.moveInitialize( fromContentsOf: buffer.prefix(upTo: written) ) - buffer.deallocate() - buffer = copy + unsafe buffer.deallocate() + unsafe buffer = unsafe copy written = copied } if isASCII && utf8.count > 1 { isASCII = false } - written = buffer.suffix(from: written).initialize(fromContentsOf: utf8) + written = unsafe buffer.suffix(from: written).initialize(fromContentsOf: utf8) break case .error: // validation error: clean up and return nil - buffer.prefix(upTo: written).deinitialize() - buffer.deallocate() + unsafe buffer.prefix(upTo: written).deinitialize() + unsafe buffer.deallocate() return nil case .emptyInput: break transcodingLoop } } - let storage = buffer.baseAddress.map { + let storage = unsafe buffer.baseAddress.map { __SharedStringStorage( _mortal: $0, countAndFlags: _StringObject.CountAndFlags( diff --git a/stdlib/public/core/StringGraphemeBreaking.swift b/stdlib/public/core/StringGraphemeBreaking.swift index 32bc0df7f25f1..9ceb2030a3b8b 100644 --- a/stdlib/public/core/StringGraphemeBreaking.swift +++ b/stdlib/public/core/StringGraphemeBreaking.swift @@ -201,9 +201,9 @@ extension _StringGuts { internal func _opaqueCharacterStride(startingAt i: Int) -> Int { _internalInvariant(i < endIndex._encodedOffset) if isFastUTF8 { - let fast = withFastUTF8 { utf8 in + let fast = unsafe withFastUTF8 { utf8 in if i &+ 1 == utf8.count { return true } - let pair = UnsafeRawPointer( + let pair = unsafe UnsafeRawPointer( utf8.baseAddress.unsafelyUnwrapped ).loadUnaligned(fromByteOffset: i, as: UInt16.self) //& 0x8080 == 0 is "both not ASCII", != 0x0A0D is "not CRLF" @@ -224,11 +224,11 @@ extension _StringGuts { return _foreignOpaqueCharacterStride(startingAt: i) } - let nextIdx = withFastUTF8 { utf8 in + let nextIdx = unsafe withFastUTF8 { utf8 in nextBoundary(startingAt: i) { j 
in _internalInvariant(j >= 0) guard j < utf8.count else { return nil } - let (scalar, len) = _decodeScalar(utf8, startingAt: j) + let (scalar, len) = unsafe _decodeScalar(utf8, startingAt: j) return (scalar, j &+ len) } } @@ -250,8 +250,8 @@ extension _StringGuts { return i } if isFastUTF8 { - let fast = withFastUTF8 { utf8 in - let pair = UnsafeRawPointer( + let fast = unsafe withFastUTF8 { utf8 in + let pair = unsafe UnsafeRawPointer( utf8.baseAddress.unsafelyUnwrapped ).loadUnaligned(fromByteOffset: i &- 2, as: UInt16.self) //& 0x8080 == 0 is "both not ASCII", != 0x0A0D is "not CRLF" @@ -272,11 +272,11 @@ extension _StringGuts { return _foreignOpaqueCharacterStride(endingAt: i) } - let previousIdx = withFastUTF8 { utf8 in + let previousIdx = unsafe withFastUTF8 { utf8 in previousBoundary(endingAt: i) { j in _internalInvariant(j <= utf8.count) guard j > 0 else { return nil } - let (scalar, len) = _decodeScalar(utf8, endingAt: j) + let (scalar, len) = unsafe _decodeScalar(utf8, endingAt: j) return (scalar, j &- len) } } @@ -301,11 +301,11 @@ extension _StringGuts { return _foreignOpaqueCharacterStride(endingAt: i, in: bounds) } - let previousIdx = withFastUTF8 { utf8 in + let previousIdx = unsafe withFastUTF8 { utf8 in previousBoundary(endingAt: i) { j in _internalInvariant(j <= bounds.upperBound) guard j > bounds.lowerBound else { return nil } - let (scalar, len) = _decodeScalar(utf8, endingAt: j) + let (scalar, len) = unsafe _decodeScalar(utf8, endingAt: j) return (scalar, j &- len) } } @@ -597,7 +597,7 @@ extension Unicode { ) -> Range? { var i = start while i < buffer.endIndex { - let (next, n) = _decodeScalar(buffer, startingAt: i) + let (next, n) = unsafe _decodeScalar(buffer, startingAt: i) if hasBreak(before: next) { return Range(_uncheckedBounds: (i, i &+ n)) } diff --git a/stdlib/public/core/StringGuts.swift b/stdlib/public/core/StringGuts.swift index 9b46c039d6e5e..5d41f3ababcc6 100644 --- a/stdlib/public/core/StringGuts.swift +++ b/stdlib/public/core/StringGuts.swift @@ -52,7 +52,7 @@ extension _StringGuts { @inlinable @inline(__always) internal init(_ bufPtr: UnsafeBufferPointer, isASCII: Bool) { - self.init(_StringObject(immortal: bufPtr, isASCII: isASCII)) + unsafe self.init(_StringObject(immortal: bufPtr, isASCII: isASCII)) } @inline(__always) @@ -166,7 +166,7 @@ extension _StringGuts { if self.isSmall { return try _SmallString(_object).withUTF8(f) } defer { _fixLifetime(self) } - return try f(_object.fastUTF8) + return try unsafe f(_object.fastUTF8) } @inlinable @inline(__always) @@ -174,8 +174,8 @@ extension _StringGuts { range: Range, _ f: (UnsafeBufferPointer) throws -> R ) rethrows -> R { - return try self.withFastUTF8 { wholeUTF8 in - return try f(UnsafeBufferPointer(rebasing: wholeUTF8[range])) + return try unsafe self.withFastUTF8 { wholeUTF8 in + return try unsafe f(unsafe UnsafeBufferPointer(rebasing: wholeUTF8[range])) } } @@ -183,8 +183,8 @@ extension _StringGuts { internal func withFastCChar( _ f: (UnsafeBufferPointer) throws -> R ) rethrows -> R { - return try self.withFastUTF8 { utf8 in - return try utf8.withMemoryRebound(to: CChar.self, f) + return try unsafe self.withFastUTF8 { utf8 in + return try unsafe utf8.withMemoryRebound(to: CChar.self, f) } } } @@ -222,11 +222,11 @@ extension _StringGuts { _ body: (UnsafePointer) throws -> Result ) rethrows -> Result { if _slowPath(!_object.isFastZeroTerminated) { - return try _slowWithCString(body) + return try unsafe _slowWithCString(body) } - return try self.withFastCChar { - return try 
body($0.baseAddress._unsafelyUnwrappedUnchecked) + return try unsafe self.withFastCChar { + return try unsafe body($0.baseAddress._unsafelyUnwrappedUnchecked) } } @@ -237,8 +237,8 @@ extension _StringGuts { ) rethrows -> Result { _internalInvariant(!_object.isFastZeroTerminated) return try String(self).utf8CString.withUnsafeBufferPointer { - let ptr = $0.baseAddress._unsafelyUnwrappedUnchecked - return try body(ptr) + let ptr = unsafe $0.baseAddress._unsafelyUnwrappedUnchecked + return try unsafe body(ptr) } } } @@ -248,18 +248,18 @@ extension _StringGuts { // Contents of the buffer are unspecified if nil is returned. @inlinable internal func copyUTF8(into mbp: UnsafeMutableBufferPointer) -> Int? { - let ptr = mbp.baseAddress._unsafelyUnwrappedUnchecked + let ptr = unsafe mbp.baseAddress._unsafelyUnwrappedUnchecked if _fastPath(self.isFastUTF8) { - return self.withFastUTF8 { utf8 in + return unsafe self.withFastUTF8 { utf8 in guard utf8.count <= mbp.count else { return nil } - let utf8Start = utf8.baseAddress._unsafelyUnwrappedUnchecked - ptr.initialize(from: utf8Start, count: utf8.count) + let utf8Start = unsafe utf8.baseAddress._unsafelyUnwrappedUnchecked + unsafe ptr.initialize(from: utf8Start, count: utf8.count) return utf8.count } } - return _foreignCopyUTF8(into: mbp) + return unsafe _foreignCopyUTF8(into: mbp) } @_effects(releasenone) @usableFromInline @inline(never) // slow-path @@ -269,19 +269,19 @@ extension _StringGuts { #if _runtime(_ObjC) // Currently, foreign means NSString let res = _object.withCocoaObject { - _cocoaStringCopyUTF8($0, into: UnsafeMutableRawBufferPointer(mbp)) + unsafe _cocoaStringCopyUTF8($0, into: UnsafeMutableRawBufferPointer(mbp)) } if let res { return res } // If the NSString contains invalid UTF8 (e.g. unpaired surrogates), we // can get nil from cocoaStringCopyUTF8 in situations where a character by // character loop would get something more useful like repaired contents - var ptr = mbp.baseAddress._unsafelyUnwrappedUnchecked + var ptr = unsafe mbp.baseAddress._unsafelyUnwrappedUnchecked var numWritten = 0 for cu in String(self).utf8 { guard numWritten < mbp.count else { return nil } - ptr.initialize(to: cu) - ptr += 1 + unsafe ptr.initialize(to: cu) + unsafe ptr += 1 numWritten += 1 } @@ -433,7 +433,7 @@ extension _StringGuts { // FIXME: Mark as obsoleted. Still used by swift-corelibs-foundation. @available(*, deprecated) public var startASCII: UnsafeMutablePointer { - return UnsafeMutablePointer(mutating: _object.fastUTF8.baseAddress!) + return unsafe UnsafeMutablePointer(mutating: _object.fastUTF8.baseAddress!) } // FIXME: Previously used by swift-corelibs-foundation. Aging for removal. @@ -446,10 +446,10 @@ extension _StringGuts { // FIXME: Previously used by swift-corelibs-foundation. Aging for removal. @available(*, unavailable) public func _persistCString(_ p: UnsafePointer?) -> [CChar]? 
{ - guard let s = p else { return nil } - let bytesToCopy = UTF8._nullCodeUnitOffset(in: s) + 1 // +1 for the terminating NUL - let result = [CChar](unsafeUninitializedCapacity: bytesToCopy) { buf, initedCount in - buf.baseAddress!.update(from: s, count: bytesToCopy) + guard let s = unsafe p else { return nil } + let bytesToCopy = unsafe UTF8._nullCodeUnitOffset(in: s) + 1 // +1 for the terminating NUL + let result = unsafe [CChar](unsafeUninitializedCapacity: bytesToCopy) { buf, initedCount in + unsafe buf.baseAddress!.update(from: s, count: bytesToCopy) initedCount = bytesToCopy } return result diff --git a/stdlib/public/core/StringGutsRangeReplaceable.swift b/stdlib/public/core/StringGutsRangeReplaceable.swift index 6fa75b682e57b..a96d3dca3182c 100644 --- a/stdlib/public/core/StringGutsRangeReplaceable.swift +++ b/stdlib/public/core/StringGutsRangeReplaceable.swift @@ -124,8 +124,8 @@ extension _StringGuts { // strings or foreign strings that provide contiguous UTF-8 access. if _fastPath(isFastUTF8) { let isASCII = self.isASCII - let storage = self.withFastUTF8 { - __StringStorage.create( + let storage = unsafe self.withFastUTF8 { + unsafe __StringStorage.create( initializingFrom: $0, codeUnitCapacity: growthTarget, isASCII: isASCII) @@ -140,8 +140,8 @@ extension _StringGuts { @inline(never) // slow-path private mutating func _foreignGrow(_ n: Int) { - let newString = String(_uninitializedCapacity: n) { buffer in - guard let count = _foreignCopyUTF8(into: buffer) else { + let newString = unsafe String(_uninitializedCapacity: n) { buffer in + guard let count = unsafe _foreignCopyUTF8(into: buffer) else { fatalError("String capacity was smaller than required") } return count @@ -204,8 +204,8 @@ extension _StringGuts { @inline(never) @_effects(readonly) private func _foreignConvertedToSmall() -> _SmallString { - let smol = String(_uninitializedCapacity: _SmallString.capacity) { buffer in - guard let count = _foreignCopyUTF8(into: buffer) else { + let smol = unsafe String(_uninitializedCapacity: _SmallString.capacity) { buffer in + guard let count = unsafe _foreignCopyUTF8(into: buffer) else { fatalError("String capacity was smaller than required") } return count @@ -220,7 +220,7 @@ extension _StringGuts { return asSmall } if isFastUTF8 { - return withFastUTF8 { _SmallString($0)! } + return unsafe withFastUTF8 { unsafe _SmallString($0)! } } return _foreignConvertedToSmall() } @@ -271,8 +271,8 @@ extension _StringGuts { if slicedOther.isFastUTF8 { let otherIsASCII = slicedOther.isASCII - slicedOther.withFastUTF8 { otherUTF8 in - self.appendInPlace(otherUTF8, isASCII: otherIsASCII) + unsafe slicedOther.withFastUTF8 { otherUTF8 in + unsafe self.appendInPlace(otherUTF8, isASCII: otherIsASCII) } return } @@ -283,7 +283,7 @@ extension _StringGuts { internal mutating func appendInPlace( _ other: UnsafeBufferPointer, isASCII: Bool ) { - updateNativeStorage { $0.appendInPlace(other, isASCII: isASCII) } + updateNativeStorage { unsafe $0.appendInPlace(other, isASCII: isASCII) } } @inline(never) // slow-path @@ -337,15 +337,15 @@ extension _StringGuts { if isUniqueNative { if let repl = newElements as? String { if repl._guts.isFastUTF8 { - return repl._guts.withFastUTF8 { - uniqueNativeReplaceSubrange( + return unsafe repl._guts.withFastUTF8 { + unsafe uniqueNativeReplaceSubrange( bounds, with: $0, isASCII: repl._guts.isASCII) } } } else if let repl = newElements as? 
Substring { if repl._wholeGuts.isFastUTF8 { - return repl._wholeGuts.withFastUTF8(range: repl._offsetRange) { - uniqueNativeReplaceSubrange( + return unsafe repl._wholeGuts.withFastUTF8(range: repl._offsetRange) { + unsafe uniqueNativeReplaceSubrange( bounds, with: $0, isASCII: repl._wholeGuts.isASCII) } } @@ -380,15 +380,15 @@ extension _StringGuts { if isUniqueNative { if let repl = newElements as? String.UnicodeScalarView { if repl._guts.isFastUTF8 { - return repl._guts.withFastUTF8 { - uniqueNativeReplaceSubrange( + return unsafe repl._guts.withFastUTF8 { + unsafe uniqueNativeReplaceSubrange( bounds, with: $0, isASCII: repl._guts.isASCII) } } } else if let repl = newElements as? Substring.UnicodeScalarView { if repl._wholeGuts.isFastUTF8 { - return repl._wholeGuts.withFastUTF8(range: repl._offsetRange) { - uniqueNativeReplaceSubrange( + return unsafe repl._wholeGuts.withFastUTF8(range: repl._offsetRange) { + unsafe uniqueNativeReplaceSubrange( bounds, with: $0, isASCII: repl._wholeGuts.isASCII) } } @@ -402,7 +402,7 @@ extension _StringGuts { var utf8: [UInt8] = [] utf8.reserveCapacity(c) utf8 = newElements.reduce(into: utf8) { utf8, next in - next.withUTF8CodeUnits { utf8.append(contentsOf: $0) } + next.withUTF8CodeUnits { unsafe utf8.append(contentsOf: $0) } } return uniqueNativeReplaceSubrange(bounds, with: utf8) } @@ -441,7 +441,7 @@ extension _StringGuts { let start = bounds.lowerBound._encodedOffset let end = bounds.upperBound._encodedOffset updateNativeStorage { - $0.replace(from: start, to: end, with: codeUnits) + unsafe $0.replace(from: start, to: end, with: codeUnits) } return Range(_uncheckedBounds: (start, start + codeUnits.count)) } diff --git a/stdlib/public/core/StringGutsSlice.swift b/stdlib/public/core/StringGutsSlice.swift index 3f92c40ab408d..ae46230f8bc9f 100644 --- a/stdlib/public/core/StringGutsSlice.swift +++ b/stdlib/public/core/StringGutsSlice.swift @@ -82,7 +82,7 @@ internal struct _StringGutsSlice { internal func withFastUTF8( _ f: (UnsafeBufferPointer) throws -> R ) rethrows -> R { - return try _guts.withFastUTF8(range: _offsetRange, f) + return try unsafe _guts.withFastUTF8(range: _offsetRange, f) } @_effects(releasenone) diff --git a/stdlib/public/core/StringHashable.swift b/stdlib/public/core/StringHashable.swift index 04beb4ac3ddbe..c30971d53d6e4 100644 --- a/stdlib/public/core/StringHashable.swift +++ b/stdlib/public/core/StringHashable.swift @@ -20,8 +20,8 @@ extension String: Hashable { /// of this instance. 
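The hashing hunk that follows feeds a raw view of the UTF-8 bytes into `Hasher.combine(bytes:)`, which takes an `UnsafeRawBufferPointer`. A sketch of the same pattern for a custom `Hashable` type (hypothetical type name, assuming Swift 6.2 with strict checking):

```swift
struct Blob: Hashable {
  var storage: [UInt8]

  func hash(into hasher: inout Hasher) {
    // Feeding raw bytes into Hasher involves the unsafe raw-buffer type, so
    // both the pointer-producing call and the combine are acknowledged.
    unsafe storage.withUnsafeBytes { bytes in
      unsafe hasher.combine(bytes: bytes)
    }
  }
}
```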
public func hash(into hasher: inout Hasher) { if _fastPath(self._guts.isNFCFastUTF8) { - self._guts.withFastUTF8 { - hasher.combine(bytes: UnsafeRawBufferPointer($0)) + unsafe self._guts.withFastUTF8 { + unsafe hasher.combine(bytes: UnsafeRawBufferPointer($0)) } hasher.combine(0xFF as UInt8) // terminator } else { @@ -47,8 +47,8 @@ extension _StringGutsSlice { @_effects(releasenone) @inline(never) // slow-path internal func _normalizedHash(into hasher: inout Hasher) { if self.isNFCFastUTF8 { - self.withFastUTF8 { - hasher.combine(bytes: UnsafeRawBufferPointer($0)) + unsafe self.withFastUTF8 { + unsafe hasher.combine(bytes: UnsafeRawBufferPointer($0)) } } else { _withNFCCodeUnits { diff --git a/stdlib/public/core/StringInterpolation.swift b/stdlib/public/core/StringInterpolation.swift index 55e4472125beb..e664f521e4091 100644 --- a/stdlib/public/core/StringInterpolation.swift +++ b/stdlib/public/core/StringInterpolation.swift @@ -211,7 +211,7 @@ extension DefaultStringInterpolation: TextOutputStream { } public mutating func _writeASCII(_ buffer: UnsafeBufferPointer) { - _storage._guts.append(_StringGuts(buffer, isASCII: true)) + unsafe _storage._guts.append(_StringGuts(buffer, isASCII: true)) } } diff --git a/stdlib/public/core/StringLegacy.swift b/stdlib/public/core/StringLegacy.swift index 30f13ae928901..d9e0bc938af25 100644 --- a/stdlib/public/core/StringLegacy.swift +++ b/stdlib/public/core/StringLegacy.swift @@ -124,10 +124,10 @@ extension String { public func hasPrefix(_ prefix: String) -> Bool { if _fastPath(self._guts.isNFCFastUTF8 && prefix._guts.isNFCFastUTF8) { guard prefix._guts.count <= self._guts.count else { return false } - let isPrefix = prefix._guts.withFastUTF8 { nfcPrefix in + let isPrefix = unsafe prefix._guts.withFastUTF8 { nfcPrefix in let prefixEnd = nfcPrefix.count - return self._guts.withFastUTF8(range: 0..= 0 else { return false } - let isSuffix = suffix._guts.withFastUTF8 { nfcSuffix in - return self._guts.withFastUTF8(range: suffixStart.. 
{ _internalInvariant(largeFastIsTailAllocated) - return UnsafePointer( + return unsafe UnsafePointer( bitPattern: largeAddressBits &+ _StringObject.nativeBias )._unsafelyUnwrappedUnchecked } @@ -962,7 +962,7 @@ extension _StringObject { @inlinable @inline(__always) internal var nativeUTF8: UnsafeBufferPointer { _internalInvariant(largeFastIsTailAllocated) - return UnsafeBufferPointer(start: nativeUTF8Start, count: largeCount) + return unsafe UnsafeBufferPointer(start: nativeUTF8Start, count: largeCount) } // Resilient way to fetch a pointer @@ -972,25 +972,25 @@ extension _StringObject { _internalInvariant(largeFastIsShared) #if _runtime(_ObjC) if largeFastIsConstantCocoa { - return withCocoaObject { - _getNSCFConstantStringContentsPointer($0) + return unsafe withCocoaObject { + unsafe _getNSCFConstantStringContentsPointer($0) } } if largeIsCocoa { - return withCocoaObject { - stableCocoaUTF8Pointer($0)._unsafelyUnwrappedUnchecked + return unsafe withCocoaObject { + unsafe stableCocoaUTF8Pointer($0)._unsafelyUnwrappedUnchecked } } #endif - return withSharedStorage { $0.start } + return unsafe withSharedStorage { $0.start } } @usableFromInline internal var sharedUTF8: UnsafeBufferPointer { @_effects(releasenone) @inline(never) get { _internalInvariant(largeFastIsShared) - let start = self.getSharedUTF8Start() - return UnsafeBufferPointer(start: start, count: largeCount) + let start = unsafe self.getSharedUTF8Start() + return unsafe UnsafeBufferPointer(start: start, count: largeCount) } } @@ -998,8 +998,8 @@ extension _StringObject { internal var nativeStorage: __StringStorage { #if _pointerBitWidth(_64) _internalInvariant(hasNativeStorage) - let unmanaged = Unmanaged<__StringStorage>.fromOpaque(largeAddress) - return unmanaged.takeUnretainedValue() + let unmanaged = unsafe Unmanaged<__StringStorage>.fromOpaque(largeAddress) + return unsafe unmanaged.takeUnretainedValue() #elseif _pointerBitWidth(_32) || _pointerBitWidth(_16) guard case .native(let storage) = _variant else { _internalInvariantFailure() @@ -1022,8 +1022,8 @@ extension _StringObject { ) -> R { #if _pointerBitWidth(_64) _internalInvariant(hasNativeStorage) - let unmanaged = Unmanaged<__StringStorage>.fromOpaque(largeAddress) - return unmanaged._withUnsafeGuaranteedRef { body($0) } + let unmanaged = unsafe Unmanaged<__StringStorage>.fromOpaque(largeAddress) + return unsafe unmanaged._withUnsafeGuaranteedRef { body($0) } #elseif _pointerBitWidth(_32) || _pointerBitWidth(_16) // FIXME: Do this properly. 
return body(nativeStorage) @@ -1037,8 +1037,8 @@ extension _StringObject { #if _pointerBitWidth(_64) _internalInvariant(largeFastIsShared && !largeIsCocoa) _internalInvariant(hasSharedStorage) - let unmanaged = Unmanaged<__SharedStringStorage>.fromOpaque(largeAddress) - return unmanaged.takeUnretainedValue() + let unmanaged = unsafe Unmanaged<__SharedStringStorage>.fromOpaque(largeAddress) + return unsafe unmanaged.takeUnretainedValue() #elseif _pointerBitWidth(_32) || _pointerBitWidth(_16) guard case .native(let storage) = _variant else { _internalInvariantFailure() @@ -1060,8 +1060,8 @@ extension _StringObject { #if _pointerBitWidth(_64) _internalInvariant(largeFastIsShared && !largeIsCocoa) _internalInvariant(hasSharedStorage) - let unmanaged = Unmanaged<__SharedStringStorage>.fromOpaque(largeAddress) - return unmanaged._withUnsafeGuaranteedRef { body($0) } + let unmanaged = unsafe Unmanaged<__SharedStringStorage>.fromOpaque(largeAddress) + return unsafe unmanaged._withUnsafeGuaranteedRef { body($0) } #elseif _pointerBitWidth(_32) || _pointerBitWidth(_16) // FIXME: Do this properly. return body(sharedStorage) @@ -1077,8 +1077,8 @@ extension _StringObject { fatalError("unreachable in embedded Swift") #elseif _pointerBitWidth(_64) _internalInvariant(largeIsCocoa && !isImmortal) - let unmanaged = Unmanaged.fromOpaque(largeAddress) - return unmanaged.takeUnretainedValue() + let unmanaged = unsafe Unmanaged.fromOpaque(largeAddress) + return unsafe unmanaged.takeUnretainedValue() #elseif _pointerBitWidth(_32) || _pointerBitWidth(_16) guard case .bridged(let object) = _variant else { _internalInvariantFailure() @@ -1102,8 +1102,8 @@ extension _StringObject { _internalInvariant( (largeIsCocoa && !isImmortal) || largeFastIsConstantCocoa ) - let unmanaged = Unmanaged.fromOpaque(largeAddress) - return unmanaged._withUnsafeGuaranteedRef { body($0) } + let unmanaged = unsafe Unmanaged.fromOpaque(largeAddress) + return unsafe unmanaged._withUnsafeGuaranteedRef { body($0) } #elseif _pointerBitWidth(_32) || _pointerBitWidth(_16) // FIXME: Do this properly. return body(cocoaObject) @@ -1119,8 +1119,8 @@ extension _StringObject { internal var owner: AnyObject? 
{ guard self.isMortal else { return nil } #if !$Embedded - let unmanaged = Unmanaged.fromOpaque(largeAddress) - return unmanaged.takeUnretainedValue() + let unmanaged = unsafe Unmanaged.fromOpaque(largeAddress) + return unsafe unmanaged.takeUnretainedValue() #else fatalError("unreachable in embedded Swift") #endif @@ -1183,9 +1183,9 @@ extension _StringObject { internal var fastUTF8: UnsafeBufferPointer { _internalInvariant(self.isLarge && self.providesFastUTF8) guard _fastPath(self.largeFastIsTailAllocated) else { - return sharedUTF8 + return unsafe sharedUTF8 } - return UnsafeBufferPointer( + return unsafe UnsafeBufferPointer( _uncheckedStart: self.nativeUTF8Start, count: self.largeCount) } @@ -1206,8 +1206,8 @@ extension _StringObject { fatalError("unreachable in embedded Swift") #else _internalInvariant(hasObjCBridgeableObject) - let unmanaged = Unmanaged.fromOpaque(largeAddress) - return unmanaged.takeUnretainedValue() + let unmanaged = unsafe Unmanaged.fromOpaque(largeAddress) + return unsafe unmanaged.takeUnretainedValue() #endif } @@ -1235,7 +1235,7 @@ extension _StringObject { immortalCount: bufPtr.count, isASCII: isASCII) #if _pointerBitWidth(_64) // We bias to align code paths for mortal and immortal strings - let biasedAddress = UInt( + let biasedAddress = unsafe UInt( bitPattern: bufPtr.baseAddress._unsafelyUnwrappedUnchecked ) &- _StringObject.nativeBias @@ -1422,8 +1422,8 @@ extension _StringObject { } if _countAndFlags.isNativelyStored { #if !$Embedded - let unmanaged = Unmanaged.fromOpaque(largeAddress) - let anyObj = unmanaged.takeUnretainedValue() + let unmanaged = unsafe Unmanaged.fromOpaque(largeAddress) + let anyObj = unsafe unmanaged.takeUnretainedValue() _internalInvariant(anyObj is __StringStorage) #endif } @@ -1468,7 +1468,7 @@ extension _StringObject { case ._small: _SmallString(self)._dump() case ._immortal(address: let address): - print(""" + print(unsafe """ Immortal(\ start: \(UnsafeRawPointer(bitPattern: address)!), \ count: \(repr._count)) @@ -1481,8 +1481,8 @@ extension _StringObject { capacity: \(repr._capacity)) """) case ._cocoa(object: let object): - let address: UnsafeRawPointer = Builtin.reinterpretCast(object) - print("Cocoa(address: \(address))") + let address: UnsafeRawPointer = unsafe Builtin.reinterpretCast(object) + print(unsafe "Cocoa(address: \(address))") } #endif // INTERNAL_CHECKS_ENABLED } diff --git a/stdlib/public/core/StringProtocol.swift b/stdlib/public/core/StringProtocol.swift index 196596ff6b521..2661041ff1e35 100644 --- a/stdlib/public/core/StringProtocol.swift +++ b/stdlib/public/core/StringProtocol.swift @@ -210,11 +210,12 @@ extension String { /// Complexity: O(n) if non-contiguous, O(1) if already contiguous /// @_alwaysEmitIntoClient + @safe public mutating func withUTF8( _ body: (UnsafeBufferPointer) throws -> R ) rethrows -> R { makeContiguousUTF8() - return try _guts.withFastUTF8(body) + return try unsafe _guts.withFastUTF8(body) } } @@ -276,10 +277,11 @@ extension Substring { /// Complexity: O(n) if non-contiguous, O(1) if already contiguous /// @_alwaysEmitIntoClient + @safe public mutating func withUTF8( _ body: (UnsafeBufferPointer) throws -> R ) rethrows -> R { makeContiguousUTF8() - return try _wholeGuts.withFastUTF8(range: _offsetRange, body) + return try unsafe _wholeGuts.withFastUTF8(range: _offsetRange, body) } } diff --git a/stdlib/public/core/StringStorage.swift b/stdlib/public/core/StringStorage.swift index e7caa81d7c4e3..c1fda68c202c6 100644 --- a/stdlib/public/core/StringStorage.swift +++ 
b/stdlib/public/core/StringStorage.swift @@ -211,7 +211,7 @@ fileprivate func _allocate( let allocSize: Int? #if !$Embedded - allocSize = _mallocSize(ofAllocation: UnsafeRawPointer(Builtin.bridgeToRawPointer(object))) + allocSize = unsafe _mallocSize(ofAllocation: UnsafeRawPointer(Builtin.bridgeToRawPointer(object))) #else allocSize = nil #endif @@ -302,7 +302,7 @@ final internal class __StringStorage deinit { if hasBreadcrumbs { - _breadcrumbsAddress.deinitialize(count: 1) + unsafe _breadcrumbsAddress.deinitialize(count: 1) } } } @@ -342,10 +342,10 @@ extension __StringStorage { storage.unusedCapacity == capAndFlags.capacity - countAndFlags.count) if storage.hasBreadcrumbs { - storage._breadcrumbsAddress.initialize(to: nil) + unsafe storage._breadcrumbsAddress.initialize(to: nil) } - storage.terminator.pointee = 0 // nul-terminated + unsafe storage.terminator.pointee = 0 // nul-terminated // We can check layout invariants, but our code units have not yet been // initialized so we can't verify e.g. ASCII-ness @@ -365,9 +365,9 @@ extension __StringStorage { codeUnitCapacity: capacity, countAndFlags: _CountAndFlags(mortalCount: 0, isASCII: false) ) - let buffer = UnsafeMutableBufferPointer(start: storage.mutableStart, + let buffer = unsafe UnsafeMutableBufferPointer(start: storage.mutableStart, count: capacity) - let count = try initializer(buffer) + let count = try unsafe initializer(buffer) let countAndFlags = _CountAndFlags(mortalCount: count, isASCII: false) #if _pointerBitWidth(_64) @@ -379,7 +379,7 @@ extension __StringStorage { #error("Unknown platform") #endif - storage.terminator.pointee = 0 // nul-terminated + unsafe storage.terminator.pointee = 0 // nul-terminated return storage } @@ -394,8 +394,8 @@ extension __StringStorage { _internalInvariant(capacity >= bufPtr.count) let storage = __StringStorage.create( codeUnitCapacity: capacity, countAndFlags: countAndFlags) - let addr = bufPtr.baseAddress._unsafelyUnwrappedUnchecked - storage.mutableStart.initialize(from: addr, count: bufPtr.count) + let addr = unsafe bufPtr.baseAddress._unsafelyUnwrappedUnchecked + unsafe storage.mutableStart.initialize(from: addr, count: bufPtr.count) storage._invariantCheck() return storage } @@ -404,7 +404,7 @@ extension __StringStorage { internal static func create( initializingFrom bufPtr: UnsafeBufferPointer, isASCII: Bool ) -> __StringStorage { - __StringStorage.create( + unsafe __StringStorage.create( initializingFrom: bufPtr, codeUnitCapacity: bufPtr.count, isASCII: isASCII) @@ -417,32 +417,32 @@ extension __StringStorage { @inline(__always) internal var mutableStart: UnsafeMutablePointer { - UnsafeMutablePointer(Builtin.projectTailElems(self, UInt8.self)) + unsafe UnsafeMutablePointer(Builtin.projectTailElems(self, UInt8.self)) } @inline(__always) private var mutableEnd: UnsafeMutablePointer { - mutableStart + count + unsafe mutableStart + count } @inline(__always) internal var start: UnsafePointer { - UnsafePointer(mutableStart) + unsafe UnsafePointer(mutableStart) } @inline(__always) private final var end: UnsafePointer { - UnsafePointer(mutableEnd) + unsafe UnsafePointer(mutableEnd) } // Point to the nul-terminator. 
@inline(__always) internal final var terminator: UnsafeMutablePointer { - mutableEnd + unsafe mutableEnd } @inline(__always) internal var codeUnits: UnsafeBufferPointer { - UnsafeBufferPointer(start: start, count: count) + unsafe UnsafeBufferPointer(start: start, count: count) } // The address after the last bytes of capacity @@ -450,7 +450,7 @@ extension __StringStorage { // If breadcrumbs are present, this will point to them, otherwise it will // point to the end of the allocation (as far as Swift is concerned). fileprivate var _realCapacityEnd: Builtin.RawPointer { - Builtin.getTailAddr_Word( + unsafe Builtin.getTailAddr_Word( start._rawValue, _capacityAndFlags._realCapacity._builtinWordValue, UInt8.self, @@ -461,7 +461,7 @@ extension __StringStorage { fileprivate var _breadcrumbsAddress: UnsafeMutablePointer<_StringBreadcrumbs?> { _precondition( hasBreadcrumbs, "Internal error: string breadcrumbs not present") - return UnsafeMutablePointer(_realCapacityEnd) + return unsafe UnsafeMutablePointer(_realCapacityEnd) } // The total capacity available for code units. Note that this excludes the @@ -476,7 +476,7 @@ extension __StringStorage { // TODO: Refactoring or removing. Excluding the last byte is awkward. @inline(__always) private var unusedStorage: UnsafeMutableBufferPointer { - UnsafeMutableBufferPointer( + unsafe UnsafeMutableBufferPointer( start: mutableEnd, count: unusedCapacity) } @@ -489,22 +489,22 @@ extension __StringStorage { #else internal func _invariantCheck(initialized: Bool = true) { let rawSelf = UnsafeRawPointer(Builtin.bridgeToRawPointer(self)) - let rawStart = UnsafeRawPointer(start) + let rawStart = unsafe UnsafeRawPointer(start) _internalInvariant(unusedCapacity >= 0) _internalInvariant(count <= capacity) - _internalInvariant(rawSelf + Int(_StringObject.nativeBias) == rawStart) + unsafe _internalInvariant(rawSelf + Int(_StringObject.nativeBias) == rawStart) _internalInvariant( self._capacityAndFlags._realCapacity > self.count, "no room for nul-terminator") - _internalInvariant(self.terminator.pointee == 0, "not nul terminated") + unsafe _internalInvariant(self.terminator.pointee == 0, "not nul terminated") let str = asString _internalInvariant(str._guts._object.isPreferredRepresentation) _countAndFlags._invariantCheck() if isASCII && initialized { - _internalInvariant(_allASCII(self.codeUnits)) + unsafe _internalInvariant(_allASCII(self.codeUnits)) } - if hasBreadcrumbs, let crumbs = _breadcrumbsAddress.pointee { + if hasBreadcrumbs, let crumbs = unsafe _breadcrumbsAddress.pointee { crumbs._invariantCheck(for: self.asString) } _internalInvariant(_countAndFlags.isNativelyStored) @@ -512,7 +512,7 @@ extension __StringStorage { // Check that capacity end matches our notion of unused storage, and also // checks that breadcrumbs were dutifully aligned. - _internalInvariant(UnsafeMutablePointer(_realCapacityEnd) + unsafe _internalInvariant(UnsafeMutablePointer(_realCapacityEnd) == unusedStorage.baseAddress! + (unusedStorage.count + 1)) } #endif // INTERNAL_CHECKS_ENABLED @@ -533,11 +533,11 @@ extension __StringStorage { #else #error("Unknown platform") #endif - self.terminator.pointee = 0 + unsafe self.terminator.pointee = 0 // TODO(String performance): Consider updating breadcrumbs when feasible. 
if hasBreadcrumbs { - self._breadcrumbsAddress.pointee = nil + unsafe self._breadcrumbsAddress.pointee = nil } _invariantCheck() } @@ -547,10 +547,10 @@ extension __StringStorage { private func _postAppendAdjust( appendedCount: Int, appendedIsASCII isASCII: Bool ) { - let oldTerminator = self.terminator + let oldTerminator = unsafe self.terminator _updateCountAndFlags( newCount: self.count + appendedCount, newIsASCII: self.isASCII && isASCII) - _internalInvariant(oldTerminator + appendedCount == self.terminator) + unsafe _internalInvariant(oldTerminator + appendedCount == self.terminator) } @_effects(releasenone) @@ -558,9 +558,9 @@ extension __StringStorage { _ other: UnsafeBufferPointer, isASCII: Bool ) { _internalInvariant(self.capacity >= other.count) - let srcAddr = other.baseAddress._unsafelyUnwrappedUnchecked + let srcAddr = unsafe other.baseAddress._unsafelyUnwrappedUnchecked let srcCount = other.count - self.mutableEnd.initialize(from: srcAddr, count: srcCount) + unsafe self.mutableEnd.initialize(from: srcAddr, count: srcCount) _postAppendAdjust(appendedCount: srcCount, appendedIsASCII: isASCII) } @@ -571,7 +571,7 @@ extension __StringStorage { var srcCount = 0 while let cu = other.next() { _internalInvariant(self.unusedCapacity >= 1) - unusedStorage[srcCount] = cu + unsafe unusedStorage[srcCount] = cu srcCount += 1 } _postAppendAdjust(appendedCount: srcCount, appendedIsASCII: isASCII) @@ -588,10 +588,10 @@ extension __StringStorage { internal func remove(from lower: Int, to upper: Int) { _internalInvariant(lower <= upper) - let lowerPtr = mutableStart + lower - let upperPtr = mutableStart + upper - let tailCount = mutableEnd - upperPtr - lowerPtr.moveInitialize(from: upperPtr, count: tailCount) + let lowerPtr = unsafe mutableStart + lower + let upperPtr = unsafe mutableStart + upper + let tailCount = unsafe mutableEnd - upperPtr + unsafe lowerPtr.moveInitialize(from: upperPtr, count: tailCount) _updateCountAndFlags( newCount: self.count &- (upper &- lower), newIsASCII: self.isASCII) @@ -604,9 +604,9 @@ extension __StringStorage { src: UnsafeMutablePointer, dst: UnsafeMutablePointer ) -> Int { - _internalInvariant(dst >= mutableStart && src <= mutableEnd) - let tailCount = mutableEnd - src - dst.moveInitialize(from: src, count: tailCount) + unsafe _internalInvariant(dst >= mutableStart && src <= mutableEnd) + let tailCount = unsafe mutableEnd - src + unsafe dst.moveInitialize(from: src, count: tailCount) return tailCount } @@ -619,17 +619,17 @@ extension __StringStorage { _internalInvariant(replCount - (upper - lower) <= unusedCapacity) // Position the tail. - let lowerPtr = mutableStart + lower - let tailCount = _slideTail( + let lowerPtr = unsafe mutableStart + lower + let tailCount = unsafe _slideTail( src: mutableStart + upper, dst: lowerPtr + replCount) // Copy in the contents. - lowerPtr.moveInitialize( + unsafe lowerPtr.moveInitialize( from: UnsafeMutablePointer( mutating: replacement.baseAddress._unsafelyUnwrappedUnchecked), count: replCount) - let isASCII = self.isASCII && _allASCII(replacement) + let isASCII = unsafe self.isASCII && _allASCII(replacement) _updateCountAndFlags(newCount: lower + replCount + tailCount, newIsASCII: isASCII) } @@ -645,8 +645,8 @@ extension __StringStorage { _internalInvariant(replCount - (upper - lower) <= unusedCapacity) // Position the tail. 
- let lowerPtr = mutableStart + lower - let tailCount = _slideTail( + let lowerPtr = unsafe mutableStart + lower + let tailCount = unsafe _slideTail( src: mutableStart + upper, dst: lowerPtr + replCount) // Copy in the contents. @@ -654,7 +654,7 @@ extension __StringStorage { var srcCount = 0 for cu in replacement { if cu >= 0x80 { isASCII = false } - lowerPtr[srcCount] = cu + unsafe lowerPtr[srcCount] = cu srcCount += 1 } _internalInvariant(srcCount == replCount) @@ -668,6 +668,7 @@ extension __StringStorage { // NOTE: older runtimes called this class _SharedStringStorage. The two // must coexist without conflicting ObjC class names, so it was // renamed. The old name must not be used in the new runtime. +@safe final internal class __SharedStringStorage : __SwiftNativeNSString, _AbstractStringStorage { internal var _owner: AnyObject? @@ -698,7 +699,7 @@ final internal class __SharedStringStorage countAndFlags: _StringObject.CountAndFlags ) { self._owner = nil - self.start = ptr + self.start = unsafe ptr self.immortal = true #if _pointerBitWidth(_64) self._countAndFlags = countAndFlags @@ -727,7 +728,7 @@ final internal class __SharedStringStorage ) { // ptr *must* be the start of an allocation self._owner = nil - self.start = ptr + self.start = unsafe ptr self.immortal = false #if _pointerBitWidth(_64) self._countAndFlags = countAndFlags @@ -743,7 +744,7 @@ final internal class __SharedStringStorage deinit { if (_owner == nil) && !immortal { - start.deallocate() + unsafe start.deallocate() } } } @@ -784,18 +785,18 @@ extension _StringGuts { let mutPtr: UnsafeMutablePointer<_StringBreadcrumbs?> if hasNativeStorage { - mutPtr = _object.withNativeStorage { $0._breadcrumbsAddress } + unsafe mutPtr = unsafe _object.withNativeStorage { unsafe $0._breadcrumbsAddress } } else { - mutPtr = _object.withSharedStorage { - UnsafeMutablePointer(Builtin.addressof(&$0._breadcrumbs)) + unsafe mutPtr = unsafe _object.withSharedStorage { + unsafe UnsafeMutablePointer(Builtin.addressof(&$0._breadcrumbs)) } } - if let breadcrumbs = _stdlib_atomicAcquiringLoadARCRef(object: mutPtr) { - return breadcrumbs + if let breadcrumbs = unsafe _stdlib_atomicAcquiringLoadARCRef(object: mutPtr) { + return unsafe breadcrumbs } let desired = _StringBreadcrumbs(String(self)) - return _stdlib_atomicAcquiringInitializeARCRef( + return unsafe _stdlib_atomicAcquiringInitializeARCRef( object: mutPtr, desired: desired) } } diff --git a/stdlib/public/core/StringStorageBridge.swift b/stdlib/public/core/StringStorageBridge.swift index 26cc5f730f4f1..c94c4800dbe88 100644 --- a/stdlib/public/core/StringStorageBridge.swift +++ b/stdlib/public/core/StringStorageBridge.swift @@ -24,12 +24,12 @@ extension String { let knownOther = _KnownCocoaString(_nativeStorage) switch knownOther { case .storage: - self = _unsafeUncheckedDowncast( + self = unsafe _unsafeUncheckedDowncast( _nativeStorage, to: __StringStorage.self ).asString case .shared: - self = _unsafeUncheckedDowncast( + self = unsafe _unsafeUncheckedDowncast( _nativeStorage, to: __SharedStringStorage.self ).asString @@ -57,7 +57,7 @@ extension _AbstractStringStorage { let range = Range( _uncheckedBounds: (aRange.location, aRange.location+aRange.length)) let str = asString - str._copyUTF16CodeUnits( + unsafe str._copyUTF16CodeUnits( into: UnsafeMutableBufferPointer(start: buffer, count: range.count), range: range) } @@ -71,11 +71,11 @@ extension _AbstractStringStorage { case (_cocoaASCIIEncoding, true), (_cocoaUTF8Encoding, _): guard maxLength >= count + 1 else { return 0 } - 
outputPtr.initialize(from: start, count: count) - outputPtr[count] = 0 + unsafe outputPtr.initialize(from: start, count: count) + unsafe outputPtr[count] = 0 return 1 default: - return _cocoaGetCStringTrampoline(self, outputPtr, maxLength, encoding) + return unsafe _cocoaGetCStringTrampoline(self, outputPtr, maxLength, encoding) } } @@ -85,9 +85,9 @@ extension _AbstractStringStorage { switch (encoding, isASCII) { case (_cocoaASCIIEncoding, true), (_cocoaUTF8Encoding, _): - return start + return unsafe start default: - return _cocoaCStringUsingEncodingTrampoline(self, encoding) + return unsafe _cocoaCStringUsingEncodingTrampoline(self, encoding) } } @@ -98,7 +98,7 @@ extension _AbstractStringStorage { if count != nativeOther.count { return 0 } - return (start == nativeOther.start || + return unsafe (start == nativeOther.start || (memcmp(start, nativeOther.start, count) == 0)) ? 1 : 0 } @@ -118,10 +118,10 @@ extension _AbstractStringStorage { let knownOther = _KnownCocoaString(other) switch knownOther { case .storage: - return _nativeIsEqual( + return unsafe _nativeIsEqual( _unsafeUncheckedDowncast(other, to: __StringStorage.self)) case .shared: - return _nativeIsEqual( + return unsafe _nativeIsEqual( _unsafeUncheckedDowncast(other, to: __SharedStringStorage.self)) default: // We're allowed to crash, but for compatibility reasons NSCFString allows @@ -135,10 +135,10 @@ extension _AbstractStringStorage { // CFString will only give us ASCII bytes here, but that's fine. // We already handled non-ASCII UTF8 strings earlier since they're Swift. - if let asciiEqual = withCocoaASCIIPointer(other, work: { (ascii) -> Bool in + if let asciiEqual = unsafe withCocoaASCIIPointer(other, work: { (ascii) -> Bool in // UTF16 length == UTF8 length iff ASCII if otherUTF16Length == self.count { - return (start == ascii || (memcmp(start, ascii, self.count) == 0)) + return unsafe (start == ascii || (memcmp(start, ascii, self.count) == 0)) } return false }) { @@ -171,7 +171,7 @@ extension __StringStorage { final internal var hash: UInt { @_effects(readonly) get { if isASCII { - return _cocoaHashASCIIBytes(start, length: count) + return unsafe _cocoaHashASCIIBytes(start, length: count) } return _cocoaHashString(self) } @@ -189,7 +189,7 @@ extension __StringStorage { final internal func getCharacters( _ buffer: UnsafeMutablePointer, range aRange: _SwiftNSRange ) { - _getCharacters(buffer, aRange) + unsafe _getCharacters(buffer, aRange) } @objc(_fastCStringContents:) @@ -198,7 +198,7 @@ extension __StringStorage { _ requiresNulTermination: Int8 ) -> UnsafePointer? { if isASCII { - return start._asCChar + return unsafe start._asCChar } return nil } @@ -206,13 +206,13 @@ extension __StringStorage { @objc(UTF8String) @_effects(readonly) final internal func _utf8String() -> UnsafePointer? { - return start + return unsafe start } @objc(cStringUsingEncoding:) @_effects(readonly) final internal func cString(encoding: UInt) -> UnsafePointer? 
{ - return _cString(encoding: encoding) + return unsafe _cString(encoding: encoding) } @objc(getCString:maxLength:encoding:) @@ -220,7 +220,7 @@ extension __StringStorage { final internal func getCString( _ outputPtr: UnsafeMutablePointer, maxLength: Int, encoding: UInt ) -> Int8 { - return _getCString(outputPtr, maxLength, encoding) + return unsafe _getCString(outputPtr, maxLength, encoding) } @objc @@ -267,7 +267,7 @@ extension __SharedStringStorage { final internal var hash: UInt { @_effects(readonly) get { if isASCII { - return _cocoaHashASCIIBytes(start, length: count) + return unsafe _cocoaHashASCIIBytes(start, length: count) } return _cocoaHashString(self) } @@ -285,7 +285,7 @@ extension __SharedStringStorage { final internal func getCharacters( _ buffer: UnsafeMutablePointer, range aRange: _SwiftNSRange ) { - _getCharacters(buffer, aRange) + unsafe _getCharacters(buffer, aRange) } @objc @@ -304,7 +304,7 @@ extension __SharedStringStorage { _ requiresNulTermination: Int8 ) -> UnsafePointer? { if isASCII { - return start._asCChar + return unsafe start._asCChar } return nil } @@ -318,7 +318,7 @@ extension __SharedStringStorage { @objc(cStringUsingEncoding:) @_effects(readonly) final internal func cString(encoding: UInt) -> UnsafePointer? { - return _cString(encoding: encoding) + return unsafe _cString(encoding: encoding) } @objc(getCString:maxLength:encoding:) @@ -326,7 +326,7 @@ extension __SharedStringStorage { final internal func getCString( _ outputPtr: UnsafeMutablePointer, maxLength: Int, encoding: UInt ) -> Int8 { - return _getCString(outputPtr, maxLength, encoding) + return unsafe _getCString(outputPtr, maxLength, encoding) } @objc(isEqualToString:) diff --git a/stdlib/public/core/StringSwitch.swift b/stdlib/public/core/StringSwitch.swift index c9583efc1871a..49befc52b106d 100644 --- a/stdlib/public/core/StringSwitch.swift +++ b/stdlib/public/core/StringSwitch.swift @@ -23,7 +23,7 @@ func _findStringSwitchCase( string: String) -> Int { for (idx, s) in cases.enumerated() { - if String(_builtinStringLiteral: s.utf8Start._rawValue, + if unsafe String(_builtinStringLiteral: s.utf8Start._rawValue, utf8CodeUnitCount: s._utf8CodeUnitCount, isASCII: s.isASCII._value) == string { return idx @@ -41,6 +41,7 @@ struct _OpaqueStringSwitchCache { internal typealias _StringSwitchCache = Dictionary +@unsafe internal struct _StringSwitchContext { internal let cases: [StaticString] internal let cachePtr: UnsafeMutablePointer<_StringSwitchCache> @@ -49,8 +50,8 @@ internal struct _StringSwitchContext { cases: [StaticString], cachePtr: UnsafeMutablePointer<_StringSwitchCache> ){ - self.cases = cases - self.cachePtr = cachePtr + unsafe self.cases = cases + unsafe self.cachePtr = unsafe cachePtr } } @@ -73,15 +74,15 @@ func _findStringSwitchCaseWithCache( #else let ptr = UnsafeMutableRawPointer(Builtin.addressof(&cache)) #endif - let oncePtr = ptr - let cacheRawPtr = oncePtr + MemoryLayout.stride - let cachePtr = cacheRawPtr.bindMemory(to: _StringSwitchCache.self, capacity: 1) - var context = _StringSwitchContext(cases: cases, cachePtr: cachePtr) - withUnsafeMutablePointer(to: &context) { (context) -> () in + let oncePtr = unsafe ptr + let cacheRawPtr = unsafe oncePtr + MemoryLayout.stride + let cachePtr = unsafe cacheRawPtr.bindMemory(to: _StringSwitchCache.self, capacity: 1) + var context = unsafe _StringSwitchContext(cases: cases, cachePtr: cachePtr) + unsafe withUnsafeMutablePointer(to: &context) { (context) -> () in Builtin.onceWithContext(oncePtr._rawValue, _createStringTableCache, 
context._rawValue) } - let cache = cachePtr.pointee; + let cache = unsafe cachePtr.pointee; if let idx = cache[string] { return idx } @@ -90,18 +91,18 @@ func _findStringSwitchCaseWithCache( /// Builds the string switch case. internal func _createStringTableCache(_ cacheRawPtr: Builtin.RawPointer) { - let context = UnsafePointer<_StringSwitchContext>(cacheRawPtr).pointee + let context = unsafe UnsafePointer<_StringSwitchContext>(cacheRawPtr).pointee var cache = _StringSwitchCache() - cache.reserveCapacity(context.cases.count) + unsafe cache.reserveCapacity(context.cases.count) _internalInvariant( MemoryLayout<_StringSwitchCache>.size <= MemoryLayout.size) - for (idx, s) in context.cases.enumerated() { - let key = String(_builtinStringLiteral: s.utf8Start._rawValue, + for (idx, s) in unsafe context.cases.enumerated() { + let key = unsafe String(_builtinStringLiteral: s.utf8Start._rawValue, utf8CodeUnitCount: s._utf8CodeUnitCount, isASCII: s.isASCII._value) cache[key] = idx } - context.cachePtr.initialize(to: cache) + unsafe context.cachePtr.initialize(to: cache) } diff --git a/stdlib/public/core/StringTesting.swift b/stdlib/public/core/StringTesting.swift index 552f50a7c0ed0..930e704ce4786 100644 --- a/stdlib/public/core/StringTesting.swift +++ b/stdlib/public/core/StringTesting.swift @@ -72,7 +72,7 @@ extension String { usesScratch: Bool, allocatedMemory: Bool ) { - _guts._deconstructUTF8(scratch: scratch) + unsafe _guts._deconstructUTF8(scratch: scratch) } #endif } @@ -99,7 +99,7 @@ extension _StringGuts { // TODO: shared native _internalInvariant(_object.providesFastUTF8) if _object.isImmortal { - result._form = ._immortal( + result._form = unsafe ._immortal( address: UInt(bitPattern: _object.nativeUTF8Start)) return result } @@ -155,15 +155,14 @@ extension _StringGuts { // If we're small, try to copy into the scratch space provided if self.isSmall { let smol = self.asSmall - if let scratch = scratch, scratch.count > smol.count { - let scratchStart = - scratch.baseAddress! + if let scratch = unsafe scratch, scratch.count > smol.count { + let scratchStart = scratch.baseAddress! smol.withUTF8 { smolUTF8 -> () in - scratchStart.initializeMemory( + unsafe scratchStart.initializeMemory( as: UInt8.self, from: smolUTF8.baseAddress!, count: smolUTF8.count) } - scratch[smol.count] = 0 - return ( + unsafe scratch[smol.count] = 0 + return unsafe ( owner: nil, _convertPointerToPointerArgument(scratchStart), length: smol.count, @@ -171,7 +170,7 @@ extension _StringGuts { } } else if _fastPath(self.isFastUTF8) { let ptr: ToPointer = - _convertPointerToPointerArgument(self._object.fastUTF8.baseAddress!) + unsafe _convertPointerToPointerArgument(self._object.fastUTF8.baseAddress!) return ( owner: self._object.owner, ptr, @@ -179,8 +178,8 @@ extension _StringGuts { usesScratch: false, allocatedMemory: false) } - let (object, ptr, len) = self._allocateForDeconstruct() - return ( + let (object, ptr, len) = unsafe self._allocateForDeconstruct() + return unsafe ( owner: object, _convertPointerToPointerArgument(ptr), length: len, @@ -198,10 +197,10 @@ extension _StringGuts { ) { let utf8 = Array(String(self).utf8) + [0] let (owner, ptr): (AnyObject?, UnsafeRawPointer) = - _convertConstArrayToPointerArgument(utf8) + unsafe _convertConstArrayToPointerArgument(utf8) // Array's owner cannot be nil, even though it is declared optional... 
- return (owner: owner!, ptr, length: utf8.count - 1) + return unsafe (owner: owner!, ptr, length: utf8.count - 1) } #endif diff --git a/stdlib/public/core/StringUTF16View.swift b/stdlib/public/core/StringUTF16View.swift index fbf5be6d8857b..7ef3949b3f6fe 100644 --- a/stdlib/public/core/StringUTF16View.swift +++ b/stdlib/public/core/StringUTF16View.swift @@ -333,19 +333,19 @@ extension String.UTF16View: BidirectionalCollection { if _slowPath(_guts.isForeign) { let lower = self.index(start, offsetBy: offsets.lowerBound) let upper = _foreignIndex(lower, offsetBy: offsets.count) - return Range(uncheckedBounds: (lower, upper)) + return unsafe Range(uncheckedBounds: (lower, upper)) } if _guts.isASCII { let lower = self.index(start, offsetBy: offsets.lowerBound) let upper = self.index(lower, offsetBy: offsets.count) - return Range(uncheckedBounds: (lower, upper)) + return unsafe Range(uncheckedBounds: (lower, upper)) } if offsets.count < _breadcrumbStride / 2 { let lower = self.index(start, offsetBy: offsets.lowerBound) let upper = _index(lower, offsetBy: offsets.count)._knownUTF8 - return Range(uncheckedBounds: (lower, upper)) + return unsafe Range(uncheckedBounds: (lower, upper)) } let bias = _nativeGetOffset(for: start) @@ -354,7 +354,7 @@ extension String.UTF16View: BidirectionalCollection { ? _index(start, offsetBy: offsets.lowerBound) : _nativeGetIndex(for: bias + offsets.lowerBound)) let upper = _nativeGetIndex(for: bias + offsets.upperBound) - return Range(uncheckedBounds: (lower, upper)) + return unsafe Range(uncheckedBounds: (lower, upper)) } internal func _offsetRange( @@ -377,14 +377,14 @@ extension String.UTF16View: BidirectionalCollection { if _slowPath(_guts.isForeign) { let lowerOffset = _foreignDistance(from: start, to: lower) let distance = _foreignDistance(from: lower, to: upper) - return Range(uncheckedBounds: (lowerOffset, lowerOffset + distance)) + return unsafe Range(uncheckedBounds: (lowerOffset, lowerOffset + distance)) } let utf8Distance = upper._encodedOffset - lower._encodedOffset if _guts.isASCII { let lowerOffset = lower._encodedOffset - start._encodedOffset - return Range(uncheckedBounds: (lowerOffset, lowerOffset + utf8Distance)) + return unsafe Range(uncheckedBounds: (lowerOffset, lowerOffset + utf8Distance)) } if utf8Distance.magnitude <= _breadcrumbStride / 2 { @@ -392,7 +392,7 @@ extension String.UTF16View: BidirectionalCollection { upper = _utf16AlignNativeIndex(upper) let lowerOffset = distance(from: start, to: lower) let distance = _utf16Distance(from: lower, to: upper) - return Range(uncheckedBounds: (lowerOffset, lowerOffset + distance)) + return unsafe Range(uncheckedBounds: (lowerOffset, lowerOffset + distance)) } let bias = _nativeGetOffset(for: start) @@ -402,7 +402,7 @@ extension String.UTF16View: BidirectionalCollection { ? _utf16Distance(from: start, to: lower) : _nativeGetOffset(for: lower) - bias) let upperOffset = _nativeGetOffset(for: upper) - bias - return Range(uncheckedBounds: (lowerOffset, upperOffset)) + return unsafe Range(uncheckedBounds: (lowerOffset, upperOffset)) } /// Accesses the code unit at the given position. 
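Editor's note: the hunks above and below follow one pattern — every call that produces or consumes an unsafe pointer type (`withFastUTF8`, `UnsafeBufferPointer` initializers, unchecked subscripts) is acknowledged with an `unsafe` expression so the file compiles without warnings under `-strict-memory-safety`. A minimal sketch of the same pattern outside the stdlib, with a hypothetical function name:

```swift
// Sketch only; `asciiByteSum` is a hypothetical example, not stdlib API.
func asciiByteSum(_ values: [UInt8]) -> Int {
  // The closure parameter has an unsafe pointer type, so both the call and
  // the uses of `buffer` are wrapped in `unsafe`, mirroring the
  // `unsafe self._guts.withFastUTF8 { unsafe ... }` annotations above.
  unsafe values.withUnsafeBufferPointer { buffer in
    unsafe buffer.reduce(0) { $0 + Int($1) }
  }
}
```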
@@ -732,14 +732,14 @@ extension String.UTF16View { ) -> Int where U.Scalar == UInt8, S.Scalar == Int8 { var utf16Count = 0 - while readPtr + MemoryLayout.stride < endPtr { + while unsafe readPtr + MemoryLayout.stride < endPtr { //Find the number of continuations (0b10xxxxxx) - let sValue = readPtr.loadUnaligned(as: S.self) + let sValue = unsafe readPtr.loadUnaligned(as: S.self) let continuations = S.zero.replacing(with: S.one, where: sValue .< -65 + 1) //Find the number of 4 byte code points (0b11110xxx) - let uValue = readPtr.loadUnaligned(as: U.self) - let fourBytes = S.zero.replacing( + let uValue = unsafe readPtr.loadUnaligned(as: U.self) + let fourBytes = unsafe S.zero.replacing( with: S.one, where: unsafeBitCast( uValue .>= 0b11110000, @@ -749,7 +749,7 @@ extension String.UTF16View { utf16Count &+= U.scalarCount + Int((fourBytes &- continuations).wrappedSum()) - readPtr += MemoryLayout.stride + unsafe readPtr += MemoryLayout.stride } return utf16Count @@ -759,31 +759,31 @@ extension String.UTF16View { internal func _utf16Distance(from start: Index, to end: Index) -> Int { _internalInvariant(end.transcodedOffset == 0 || end.transcodedOffset == 1) - return (end.transcodedOffset - start.transcodedOffset) + _guts.withFastUTF8( + return unsafe (end.transcodedOffset - start.transcodedOffset) + _guts.withFastUTF8( range: start._encodedOffset ..< end._encodedOffset ) { utf8 in let rawBuffer = UnsafeRawBufferPointer(utf8) guard rawBuffer.count > 0 else { return 0 } var utf16Count = 0 - var readPtr = rawBuffer.baseAddress.unsafelyUnwrapped - let initialReadPtr = readPtr - let endPtr = readPtr + rawBuffer.count + var readPtr = unsafe rawBuffer.baseAddress.unsafelyUnwrapped + let initialReadPtr = unsafe readPtr + let endPtr = unsafe readPtr + rawBuffer.count //eat leading continuations - while readPtr < endPtr { - let byte = readPtr.load(as: UInt8.self) + while unsafe readPtr < endPtr { + let byte = unsafe readPtr.load(as: UInt8.self) if !UTF8.isContinuation(byte) { break } - readPtr += 1 + unsafe readPtr += 1 } #if SWIFT_STDLIB_ENABLE_VECTOR_TYPES // TODO: Currently, using SIMD sizes above SIMD8 is slower // Once that's fixed we should go up to SIMD64 here - utf16Count &+= _utf16Length( + unsafe utf16Count &+= _utf16Length( readPtr: &readPtr, endPtr: endPtr, unsignedSIMDType: SIMD8.self, @@ -794,16 +794,16 @@ extension String.UTF16View { //back up to the start of the current scalar if we may have a trailing //incomplete scalar - if utf16Count > 0 && UTF8.isContinuation(readPtr.load(as: UInt8.self)) { - while readPtr > initialReadPtr && UTF8.isContinuation(readPtr.load(as: UInt8.self)) { - readPtr -= 1 + if unsafe utf16Count > 0 && UTF8.isContinuation(readPtr.load(as: UInt8.self)) { + while unsafe readPtr > initialReadPtr && UTF8.isContinuation(readPtr.load(as: UInt8.self)) { + unsafe readPtr -= 1 } //The trailing scalar may be incomplete, subtract it out and check below - let byte = readPtr.load(as: UInt8.self) + let byte = unsafe readPtr.load(as: UInt8.self) let len = _utf8ScalarLength(byte) utf16Count &-= len == 4 ? 2 : 1 - if readPtr == initialReadPtr { + if unsafe readPtr == initialReadPtr { //if we backed up all the way and didn't hit a non-continuation, then //we don't have any complete scalars, and we should bail. 
return 0 @@ -812,15 +812,15 @@ extension String.UTF16View { #endif //trailing bytes - while readPtr < endPtr { - let byte = readPtr.load(as: UInt8.self) + while unsafe readPtr < endPtr { + let byte = unsafe readPtr.load(as: UInt8.self) let len = _utf8ScalarLength(byte) // if we don't have enough bytes left, we don't have a complete scalar, // so don't add it to the count. - if readPtr + len <= endPtr { + if unsafe readPtr + len <= endPtr { utf16Count &+= len == 4 ? 2 : 1 } - readPtr += len + unsafe readPtr += len } return utf16Count @@ -852,14 +852,14 @@ extension String.UTF16View { return _utf16Distance(from: startIndex, to: idx) } - let breadcrumbs = _guts.loadUnmanagedBreadcrumbs() + let breadcrumbs = unsafe _guts.loadUnmanagedBreadcrumbs() // Simple and common: endIndex aka `length`. if idx == endIndex { - return breadcrumbs._withUnsafeGuaranteedRef { $0.utf16Length } + return unsafe breadcrumbs._withUnsafeGuaranteedRef { $0.utf16Length } } - return breadcrumbs._withUnsafeGuaranteedRef { crumbs in + return unsafe breadcrumbs._withUnsafeGuaranteedRef { crumbs in // Otherwise, find the nearest lower-bound breadcrumb and count from there // FIXME: Starting from the upper-bound crumb when that is closer would // cut the average cost of the subsequent iteration by 50%. @@ -896,20 +896,20 @@ extension String.UTF16View { } // Simple and common: endIndex aka `length`. - let breadcrumbs = _guts.loadUnmanagedBreadcrumbs() - let utf16Count = breadcrumbs._withUnsafeGuaranteedRef { $0.utf16Length } + let breadcrumbs = unsafe _guts.loadUnmanagedBreadcrumbs() + let utf16Count = unsafe breadcrumbs._withUnsafeGuaranteedRef { $0.utf16Length } if offset == utf16Count { return endIndex } // Otherwise, find the nearest lower-bound breadcrumb and advance that // FIXME: Starting from the upper-bound crumb when that is closer would cut // the average cost of the subsequent iteration by 50%. - let (crumb, remaining) = breadcrumbs._withUnsafeGuaranteedRef { + let (crumb, remaining) = unsafe breadcrumbs._withUnsafeGuaranteedRef { $0.getBreadcrumb(forOffset: offset) } _internalInvariant(crumb._canBeUTF8 && crumb._encodedOffset <= _guts.count) if remaining == 0 { return crumb } - return _guts.withFastUTF8 { utf8 in + return unsafe _guts.withFastUTF8 { utf8 in var readIdx = crumb._encodedOffset let readEnd = utf8.count _internalInvariant(readIdx < readEnd) @@ -926,7 +926,7 @@ extension String.UTF16View { while true { _precondition(readIdx < readEnd, "String index is out of bounds") - let len = _utf8ScalarLength(utf8[_unchecked: readIdx]) + let len = unsafe _utf8ScalarLength(utf8[_unchecked: readIdx]) let utf16Len = len == 4 ? 
2 : 1 utf16I &+= utf16Len @@ -965,7 +965,7 @@ extension String.UTF16View { if _slowPath(range.isEmpty) { return } let isASCII = _guts.isASCII - return _guts.withFastUTF8 { utf8 in + return unsafe _guts.withFastUTF8 { utf8 in var writeIdx = 0 let writeEnd = buffer.count var readIdx = range.lowerBound._encodedOffset @@ -975,8 +975,8 @@ extension String.UTF16View { _internalInvariant(range.lowerBound.transcodedOffset == 0) _internalInvariant(range.upperBound.transcodedOffset == 0) while readIdx < readEnd { - _internalInvariant(utf8[readIdx] < 0x80) - buffer[_unchecked: writeIdx] = UInt16( + unsafe _internalInvariant(utf8[readIdx] < 0x80) + unsafe buffer[_unchecked: writeIdx] = unsafe UInt16( truncatingIfNeeded: utf8[_unchecked: readIdx]) readIdx &+= 1 writeIdx &+= 1 @@ -987,24 +987,24 @@ extension String.UTF16View { // Handle mid-transcoded-scalar initial index if _slowPath(range.lowerBound.transcodedOffset != 0) { _internalInvariant(range.lowerBound.transcodedOffset == 1) - let (scalar, len) = _decodeScalar(utf8, startingAt: readIdx) + let (scalar, len) = unsafe _decodeScalar(utf8, startingAt: readIdx) // Note: this is intentionally not using the _unchecked subscript. // (We rely on debug assertions to catch out of bounds access.) - buffer[writeIdx] = scalar.utf16[1] + unsafe buffer[writeIdx] = scalar.utf16[1] readIdx &+= len writeIdx &+= 1 } // Transcode middle while readIdx < readEnd { - let (scalar, len) = _decodeScalar(utf8, startingAt: readIdx) - buffer[writeIdx] = scalar.utf16[0] + let (scalar, len) = unsafe _decodeScalar(utf8, startingAt: readIdx) + unsafe buffer[writeIdx] = scalar.utf16[0] readIdx &+= len writeIdx &+= 1 if _slowPath(scalar.utf16.count == 2) { // Note: this is intentionally not using the _unchecked subscript. // (We rely on debug assertions to catch out of bounds access.) - buffer[writeIdx] = scalar.utf16[1] + unsafe buffer[writeIdx] = scalar.utf16[1] writeIdx &+= 1 } } @@ -1012,12 +1012,12 @@ extension String.UTF16View { // Handle mid-transcoded-scalar final index if _slowPath(range.upperBound.transcodedOffset == 1) { _internalInvariant(writeIdx < writeEnd) - let (scalar, _) = _decodeScalar(utf8, startingAt: readIdx) + let (scalar, _) = unsafe _decodeScalar(utf8, startingAt: readIdx) _internalInvariant(scalar.utf16.count == 2) // Note: this is intentionally not using the _unchecked subscript. // (We rely on debug assertions to catch out of bounds access.) 
- buffer[writeIdx] = scalar.utf16[0] + unsafe buffer[writeIdx] = scalar.utf16[0] writeIdx &+= 1 } _internalInvariant(writeIdx <= writeEnd) diff --git a/stdlib/public/core/StringUTF8Validation.swift b/stdlib/public/core/StringUTF8Validation.swift index b648edd27ceb4..cd7003e0a0896 100644 --- a/stdlib/public/core/StringUTF8Validation.swift +++ b/stdlib/public/core/StringUTF8Validation.swift @@ -44,15 +44,15 @@ extension UTF8ValidationResult: Equatable {} private struct UTF8ValidationError: Error {} internal func validateUTF8(_ buf: UnsafeBufferPointer) -> UTF8ValidationResult { - if _allASCII(buf) { + if unsafe _allASCII(buf) { return .success(UTF8ExtraInfo(isASCII: true)) } - var iter = buf.makeIterator() + var iter = unsafe buf.makeIterator() var lastValidIndex = buf.startIndex @inline(__always) func guaranteeIn(_ f: (UInt8) -> Bool) throws(UTF8ValidationError) { - guard let cu = iter.next() else { throw UTF8ValidationError() } + guard let cu = unsafe iter.next() else { throw UTF8ValidationError() } guard f(cu) else { throw UTF8ValidationError() } } @inline(__always) func guaranteeContinuation() throws(UTF8ValidationError) { @@ -84,42 +84,42 @@ internal func validateUTF8(_ buf: UnsafeBufferPointer) -> UTF8ValidationR func _legacyNarrowIllegalRange(buf: Slice>) -> Range { var reversePacked: UInt32 = 0 - if let third = buf.dropFirst(2).first { - reversePacked |= UInt32(third) + if let third = unsafe buf.dropFirst(2).first { + unsafe reversePacked |= UInt32(third) reversePacked <<= 8 } - if let second = buf.dropFirst().first { - reversePacked |= UInt32(second) + if let second = unsafe buf.dropFirst().first { + unsafe reversePacked |= UInt32(second) reversePacked <<= 8 } - reversePacked |= UInt32(buf.first!) + unsafe reversePacked |= UInt32(buf.first!) let _buffer: (_storage: UInt32, x: ()) = (reversePacked, ()) let invalids = _legacyInvalidLengthCalculation(_buffer) - return buf.startIndex ..< buf.startIndex + invalids + return unsafe buf.startIndex ..< buf.startIndex + invalids } func findInvalidRange(_ buf: Slice>) -> Range { - var endIndex = buf.startIndex - var iter = buf.makeIterator() - _ = iter.next() - while let cu = iter.next(), UTF8.isContinuation(cu) { - endIndex += 1 + var endIndex = unsafe buf.startIndex + var iter = unsafe buf.makeIterator() + _ = unsafe iter.next() + while let cu = unsafe iter.next(), unsafe UTF8.isContinuation(cu) { + unsafe endIndex += 1 // Unicode's Maximal subpart of an ill-formed subsequence will yield // at most 3 bytes of error. - if buf.distance(from: buf.startIndex, to: endIndex) >= 3 { + if unsafe buf.distance(from: buf.startIndex, to: endIndex) >= 3 { break } } - let illegalRange = Range(buf.startIndex...endIndex) - _internalInvariant(illegalRange.clamped(to: (buf.startIndex..) -> UTF8ValidationR } return .success(UTF8ExtraInfo(isASCII: isASCII)) } catch { - return .error(toBeReplaced: findInvalidRange(buf[lastValidIndex...])) + return unsafe .error(toBeReplaced: findInvalidRange(buf[lastValidIndex...])) } } @@ -192,27 +192,27 @@ internal func repairUTF8(_ input: UnsafeBufferPointer, firstKnownBrokenRa result.reserveCapacity(input.count + 5 * replacementCharacterCount) // extra space for some replacement characters var brokenRange: Range = firstKnownBrokenRange - var remainingInput = input + var remainingInput = unsafe input repeat { _internalInvariant(!brokenRange.isEmpty, "broken range empty") _internalInvariant(!remainingInput.isEmpty, "empty remaining input doesn't need to be repaired") - let goodChunk = remainingInput[.. 
UTF8.CodeUnit { if _fastPath(_guts.isFastUTF8) { - return _guts.withFastUTF8 { utf8 in utf8[_unchecked: i._encodedOffset] } + return unsafe _guts.withFastUTF8 { utf8 in unsafe utf8[_unchecked: i._encodedOffset] } } return _foreignSubscript(position: i) @@ -278,7 +278,7 @@ extension String { @_effects(readonly) @_semantics("string.getUTF8CString") get { if _fastPath(_guts.isFastUTF8) { - var result = _guts.withFastCChar { ContiguousArray($0) } + var result = unsafe _guts.withFastCChar { ContiguousArray($0) } result.append(0) return result } @@ -422,17 +422,17 @@ extension String.UTF8View { public func _copyContents( initializing buffer: UnsafeMutableBufferPointer ) -> (Iterator, UnsafeMutableBufferPointer.Index) { - guard buffer.baseAddress != nil else { + guard unsafe buffer.baseAddress != nil else { _preconditionFailure( "Attempt to copy string contents into nil buffer pointer") } - guard let written = _guts.copyUTF8(into: buffer) else { + guard let written = unsafe _guts.copyUTF8(into: buffer) else { _preconditionFailure( "Insufficient space allocated to copy string contents") } let it = String().utf8.makeIterator() - return (it, buffer.index(buffer.startIndex, offsetBy: written)) + return (it, unsafe buffer.index(buffer.startIndex, offsetBy: written)) } } @@ -580,6 +580,6 @@ extension String.UTF8View { _ body: (UnsafeBufferPointer) throws -> R ) rethrows -> R? { guard _guts.isFastUTF8 else { return nil } - return try _guts.withFastUTF8(body) + return unsafe try _guts.withFastUTF8(body) } } diff --git a/stdlib/public/core/StringUnicodeScalarView.swift b/stdlib/public/core/StringUnicodeScalarView.swift index e6a7a70e257ce..271cd9b1ece77 100644 --- a/stdlib/public/core/StringUnicodeScalarView.swift +++ b/stdlib/public/core/StringUnicodeScalarView.swift @@ -139,8 +139,8 @@ extension String.UnicodeScalarView: BidirectionalCollection { internal func _uncheckedIndex(before i: Index) -> Index { // TODO(String performance): isASCII fast-path if _fastPath(_guts.isFastUTF8) { - let len = _guts.withFastUTF8 { utf8 in - _utf8ScalarLength(utf8, endingAt: i._encodedOffset) + let len = unsafe _guts.withFastUTF8 { utf8 in + unsafe _utf8ScalarLength(utf8, endingAt: i._encodedOffset) } _internalInvariant(len <= 4, "invalid UTF8") return i.encoded(offsetBy: 0 &- len)._scalarAligned._knownUTF8 diff --git a/stdlib/public/core/StringWordBreaking.swift b/stdlib/public/core/StringWordBreaking.swift index 9ee6aca7a3413..bc98e590e5d76 100644 --- a/stdlib/public/core/StringWordBreaking.swift +++ b/stdlib/public/core/StringWordBreaking.swift @@ -39,7 +39,7 @@ extension _StringGuts { return _foreignNextWordIndex(startingAt: i) } - return withFastUTF8 { utf8 in + return unsafe withFastUTF8 { utf8 in nextWordBoundary(startingAt: i) { _internalInvariant($0 >= 0) @@ -47,7 +47,7 @@ extension _StringGuts { return nil } - let (scalar, len) = _decodeScalar(utf8, startingAt: $0) + let (scalar, len) = unsafe _decodeScalar(utf8, startingAt: $0) return (scalar, $0 &+ len) } } @@ -80,7 +80,7 @@ extension _StringGuts { return _foreignPreviousWordIndex(endingAt: i) } - return withFastUTF8 { utf8 in + return unsafe withFastUTF8 { utf8 in previousWordBoundary(endingAt: i) { _internalInvariant($0 <= count) @@ -88,7 +88,7 @@ extension _StringGuts { return nil } - let (scalar, len) = _decodeScalar(utf8, endingAt: $0) + let (scalar, len) = unsafe _decodeScalar(utf8, endingAt: $0) return (scalar, $0 &- len) } } diff --git a/stdlib/public/core/Substring.swift b/stdlib/public/core/Substring.swift index 1ccdcc2e7b0c2..f3b9271527479 100644 
--- a/stdlib/public/core/Substring.swift +++ b/stdlib/public/core/Substring.swift @@ -483,7 +483,7 @@ extension Substring: StringProtocol { /// - Parameter nullTerminatedUTF8: A pointer to a sequence of contiguous, /// UTF-8 encoded bytes ending just before the first zero byte. public init(cString nullTerminatedUTF8: UnsafePointer) { - self.init(String(cString: nullTerminatedUTF8)) + unsafe self.init(String(cString: nullTerminatedUTF8)) } /// Creates a string from the null-terminated sequence of bytes at the given @@ -500,7 +500,7 @@ extension Substring: StringProtocol { decodingCString nullTerminatedCodeUnits: UnsafePointer, as sourceEncoding: Encoding.Type ) { - self.init( + unsafe self.init( String(decodingCString: nullTerminatedCodeUnits, as: sourceEncoding)) } @@ -522,7 +522,7 @@ extension Substring: StringProtocol { _ body: (UnsafePointer) throws -> Result) rethrows -> Result { // TODO(String performance): Detect when we cover the rest of a nul- // terminated String, and thus can avoid a copy. - return try String(self).withCString(body) + return try unsafe String(self).withCString(body) } /// Calls the given closure with a pointer to the contents of the string, @@ -548,7 +548,7 @@ extension Substring: StringProtocol { ) rethrows -> Result { // TODO(String performance): Detect when we cover the rest of a nul- // terminated String, and thus can avoid a copy. - return try String(self).withCString(encodedAs: targetEncoding, body) + return try unsafe String(self).withCString(encodedAs: targetEncoding, body) } } @@ -708,7 +708,7 @@ extension Substring.UTF8View: BidirectionalCollection { public func withContiguousStorageIfAvailable( _ body: (UnsafeBufferPointer) throws -> R ) rethrows -> R? { - return try _slice.withContiguousStorageIfAvailable(body) + return try unsafe _slice.withContiguousStorageIfAvailable(body) } @inlinable diff --git a/stdlib/public/core/SwiftNativeNSArray.swift b/stdlib/public/core/SwiftNativeNSArray.swift index 3c4f93b2a0034..ffbc557cda7ce 100644 --- a/stdlib/public/core/SwiftNativeNSArray.swift +++ b/stdlib/public/core/SwiftNativeNSArray.swift @@ -66,37 +66,37 @@ private let NSNotFound: Int = .max // Implement the APIs required by NSArray extension __SwiftNativeNSArrayWithContiguousStorage { @objc internal var count: Int { - return withUnsafeBufferOfObjects { $0.count } + return unsafe withUnsafeBufferOfObjects { $0.count } } @inline(__always) @_effects(readonly) @nonobjc private func _objectAt(_ index: Int) -> Unmanaged { - return withUnsafeBufferOfObjects { + return unsafe withUnsafeBufferOfObjects { objects in _precondition( _isValidArraySubscript(index, count: objects.count), "Array index out of range") - return Unmanaged.passUnretained(objects[index]) + return unsafe Unmanaged.passUnretained(objects[index]) } } @objc(objectAtIndexedSubscript:) @_effects(readonly) dynamic internal func objectAtSubscript(_ index: Int) -> Unmanaged { - return _objectAt(index) + return unsafe _objectAt(index) } @objc(objectAtIndex:) @_effects(readonly) dynamic internal func objectAt(_ index: Int) -> Unmanaged { - return _objectAt(index) + return unsafe _objectAt(index) } @objc internal func getObjects( _ aBuffer: UnsafeMutablePointer, range: _SwiftNSRange ) { - return withUnsafeBufferOfObjects { + return unsafe withUnsafeBufferOfObjects { objects in _precondition( _isValidArrayIndex(range.location, count: objects.count), @@ -112,7 +112,7 @@ extension __SwiftNativeNSArrayWithContiguousStorage { // These objects are "returned" at +0, so treat them as pointer values to // avoid 
retains. Copy bytes via a raw pointer to circumvent reference // counting while correctly aliasing with all other pointer types. - UnsafeMutableRawPointer(aBuffer).copyMemory( + unsafe UnsafeMutableRawPointer(aBuffer).copyMemory( from: objects.baseAddress! + range.location, byteCount: range.length * MemoryLayout.stride) } @@ -123,19 +123,19 @@ extension __SwiftNativeNSArrayWithContiguousStorage { with state: UnsafeMutablePointer<_SwiftNSFastEnumerationState>, objects: UnsafeMutablePointer?, count: Int ) -> Int { - var enumerationState = state.pointee + var enumerationState = unsafe state.pointee - if enumerationState.state != 0 { + if unsafe enumerationState.state != 0 { return 0 } - return withUnsafeBufferOfObjects { + return unsafe withUnsafeBufferOfObjects { objects in - enumerationState.mutationsPtr = _fastEnumerationStorageMutationsPtr - enumerationState.itemsPtr = - AutoreleasingUnsafeMutablePointer(objects.baseAddress) - enumerationState.state = 1 - state.pointee = enumerationState + unsafe enumerationState.mutationsPtr = _fastEnumerationStorageMutationsPtr + unsafe enumerationState.itemsPtr = + unsafe AutoreleasingUnsafeMutablePointer(objects.baseAddress) + unsafe enumerationState.state = 1 + unsafe state.pointee = enumerationState return objects.count } } @@ -166,20 +166,20 @@ extension __SwiftNativeNSArrayWithContiguousStorage { @_effects(readonly) dynamic internal func objectAtSubscript(_ index: Int) -> Unmanaged { //TODO: exception instead of precondition, once that's possible - return Unmanaged.passUnretained(contents[index]) + return unsafe Unmanaged.passUnretained(contents[index]) } @objc(objectAtIndex:) @_effects(readonly) dynamic internal func objectAt(_ index: Int) -> Unmanaged { //TODO: exception instead of precondition, once that's possible - return Unmanaged.passUnretained(contents[index]) + return unsafe Unmanaged.passUnretained(contents[index]) } @objc internal func getObjects( _ aBuffer: UnsafeMutablePointer, range: _SwiftNSRange ) { - return contents.withContiguousStorageIfAvailable { objects in + return unsafe contents.withContiguousStorageIfAvailable { objects in //TODO: exceptions instead of preconditions, once that's possible _precondition( @@ -196,7 +196,7 @@ extension __SwiftNativeNSArrayWithContiguousStorage { // These objects are "returned" at +0, so treat them as pointer values to // avoid retains. Copy bytes via a raw pointer to circumvent reference // counting while correctly aliasing with all other pointer types. - UnsafeMutableRawPointer(aBuffer).copyMemory( + unsafe UnsafeMutableRawPointer(aBuffer).copyMemory( from: objects.baseAddress! + range.location, byteCount: range.length * MemoryLayout.stride) }! 
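Editor's note: a recurring case in these bridging hunks is `Unmanaged`, itself an unsafe type — constructing it from an opaque address and extracting the object each get an `unsafe` expression. A self-contained sketch of that round trip, using a hypothetical `Box` class rather than the stdlib storage classes:

```swift
// Sketch only; `Box` and `roundTrip` are hypothetical, not stdlib API.
final class Box { var value = 0 }

func roundTrip(_ box: Box) -> Box {
  // Both the conversion to an opaque pointer and the reconstruction from it
  // involve `Unmanaged`, so each step is acknowledged with `unsafe`.
  let raw = unsafe Unmanaged.passUnretained(box).toOpaque()
  return unsafe Unmanaged<Box>.fromOpaque(raw).takeUnretainedValue()
}
```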
@@ -207,19 +207,19 @@ extension __SwiftNativeNSArrayWithContiguousStorage { with state: UnsafeMutablePointer<_SwiftNSFastEnumerationState>, objects: UnsafeMutablePointer?, count: Int ) -> Int { - var enumerationState = state.pointee + var enumerationState = unsafe state.pointee - if enumerationState.state != 0 { + if unsafe enumerationState.state != 0 { return 0 } - return contents.withContiguousStorageIfAvailable { + return unsafe contents.withContiguousStorageIfAvailable { objects in - enumerationState.mutationsPtr = _fastEnumerationStorageMutationsPtr - enumerationState.itemsPtr = - AutoreleasingUnsafeMutablePointer(objects.baseAddress) - enumerationState.state = 1 - state.pointee = enumerationState + unsafe enumerationState.mutationsPtr = _fastEnumerationStorageMutationsPtr + unsafe enumerationState.itemsPtr = + unsafe AutoreleasingUnsafeMutablePointer(objects.baseAddress) + unsafe enumerationState.state = 1 + unsafe state.pointee = enumerationState return objects.count }! } @@ -269,13 +269,13 @@ extension __SwiftNativeNSArrayWithContiguousStorage { with objects: UnsafePointer, count: Int) { let range = range.location ..< range.location + range.length - let buf = UnsafeBufferPointer(start: objects, count: count) + let buf = unsafe UnsafeBufferPointer(start: objects, count: count) if range == contents.startIndex.., count: Int, at index: Int) { - let buf = UnsafeBufferPointer(start: objects, count: count) - contents.insert(contentsOf: buf, at: index) + let buf = unsafe UnsafeBufferPointer(start: objects, count: count) + unsafe contents.insert(contentsOf: buf, at: index) } @objc(indexOfObjectIdenticalTo:) @@ -347,14 +347,14 @@ extension __SwiftNativeNSArrayWithContiguousStorage { @nonobjc internal final var _heapBufferBridgedPtr: UnsafeMutablePointer { - return _getUnsafePointerToStoredProperties(self).assumingMemoryBound( + return unsafe _getUnsafePointerToStoredProperties(self).assumingMemoryBound( to: Optional.self) } internal final var _heapBufferBridged: __BridgingBufferStorage? { if let ref = - _stdlib_atomicLoadARCRef(object: _heapBufferBridgedPtr) { - return unsafeBitCast(ref, to: __BridgingBufferStorage.self) + unsafe _stdlib_atomicLoadARCRef(object: _heapBufferBridgedPtr) { + return unsafe unsafeBitCast(ref, to: __BridgingBufferStorage.self) } return nil } @@ -370,7 +370,7 @@ extension __SwiftNativeNSArrayWithContiguousStorage { withExtendedLifetime(bridgedStorage) { let buffer = _BridgingBuffer(bridgedStorage) let count = buffer.count - buffer.baseAddress.deinitialize(count: count) + unsafe buffer.baseAddress.deinitialize(count: count) } } } @@ -388,34 +388,34 @@ extension __SwiftNativeNSArrayWithContiguousStorage { // If we've already got a buffer of bridged objects, just use it if let bridgedStorage = _heapBufferBridged { let bridgingBuffer = _BridgingBuffer(bridgedStorage) - buffer = UnsafeBufferPointer( + unsafe buffer = unsafe UnsafeBufferPointer( start: bridgingBuffer.baseAddress, count: bridgingBuffer.count) } // If elements are bridged verbatim, the native buffer is all we // need, so return that. - else if let buf = _nativeStorage._withVerbatimBridgedUnsafeBuffer( - { $0 } + else if let buf = unsafe _nativeStorage._withVerbatimBridgedUnsafeBuffer( + { unsafe $0 } ) { - buffer = buf + unsafe buffer = unsafe buf } else { // Create buffer of bridged objects. let objects = _nativeStorage._getNonVerbatimBridgingBuffer() // Atomically store a reference to that buffer in self. 
- if !_stdlib_atomicInitializeARCRef( + if unsafe !_stdlib_atomicInitializeARCRef( object: _heapBufferBridgedPtr, desired: objects.storage!) { // Another thread won the race. Throw out our buffer. - _destroyBridgedStorage( + unsafe _destroyBridgedStorage( unsafeDowncast(objects.storage!, to: __BridgingBufferStorage.self)) } continue // Try again } defer { _fixLifetime(self) } - return try body(buffer) + return try unsafe body(buffer) } } @@ -447,7 +447,7 @@ internal final class __SwiftDeferredStaticNSArray // If we've already got a buffer of bridged objects, just use it if let bridgedStorage = _heapBufferBridged { let bridgingBuffer = _BridgingBuffer(bridgedStorage) - buffer = UnsafeBufferPointer( + unsafe buffer = unsafe UnsafeBufferPointer( start: bridgingBuffer.baseAddress, count: bridgingBuffer.count) } else { @@ -458,18 +458,18 @@ internal final class __SwiftDeferredStaticNSArray let objects = getNonVerbatimBridgingBuffer() // Atomically store a reference to that buffer in self. - if !_stdlib_atomicInitializeARCRef( + if unsafe !_stdlib_atomicInitializeARCRef( object: _heapBufferBridgedPtr, desired: objects.storage!) { // Another thread won the race. Throw out our buffer. - _destroyBridgedStorage( + unsafe _destroyBridgedStorage( unsafeDowncast(objects.storage!, to: __BridgingBufferStorage.self)) } continue // Try again } defer { _fixLifetime(self) } - return try body(buffer) + return try unsafe body(buffer) } } @@ -479,10 +479,10 @@ internal final class __SwiftDeferredStaticNSArray "Verbatim bridging should be handled separately") let count = _nativeStorage.countAndCapacity.count let result = _BridgingBuffer(count) - let resultPtr = result.baseAddress - let p = UnsafeMutablePointer(Builtin.projectTailElems(_nativeStorage, Element.self)) + let resultPtr = unsafe result.baseAddress + let p = unsafe UnsafeMutablePointer(Builtin.projectTailElems(_nativeStorage, Element.self)) for i in 0..( _ body: (UnsafeBufferPointer) throws -> R ) rethrows -> R { - if let result = try _withVerbatimBridgedUnsafeBuffer(body) { + if let result = try unsafe _withVerbatimBridgedUnsafeBuffer(body) { return result } _internalInvariantFailure( diff --git a/stdlib/public/core/TemporaryAllocation.swift b/stdlib/public/core/TemporaryAllocation.swift index 7ebbcf97b61b2..fa2cf2df2d217 100644 --- a/stdlib/public/core/TemporaryAllocation.swift +++ b/stdlib/public/core/TemporaryAllocation.swift @@ -220,7 +220,7 @@ internal func _fallBackToHeapAllocation( alignment: alignment ) defer { - buffer.deallocate() + unsafe buffer.deallocate() } return try body(buffer._rawValue) } @@ -269,11 +269,11 @@ public func withUnsafeTemporaryAllocation( capacity: byteCount, alignment: alignment ) { pointer in - let buffer = UnsafeMutableRawBufferPointer( + let buffer = unsafe UnsafeMutableRawBufferPointer( start: .init(pointer), count: byteCount ) - return try body(buffer) + return try unsafe body(buffer) } } @@ -293,11 +293,11 @@ public func _withUnprotectedUnsafeTemporaryAllocation( capacity: byteCount, alignment: alignment ) { pointer in - let buffer = UnsafeMutableRawBufferPointer( + let buffer = unsafe UnsafeMutableRawBufferPointer( start: .init(pointer), count: byteCount ) - return try body(buffer) + return try unsafe body(buffer) } } @@ -346,11 +346,11 @@ public func withUnsafeTemporaryAllocation< alignment: MemoryLayout.alignment ) { pointer in Builtin.bindMemory(pointer, capacity._builtinWordValue, type) - let buffer = UnsafeMutableBufferPointer( + let buffer = unsafe UnsafeMutableBufferPointer( start: .init(pointer), count: 
capacity ) - return try body(buffer) + return try unsafe body(buffer) } } @@ -373,10 +373,10 @@ public func _withUnprotectedUnsafeTemporaryAllocation< alignment: MemoryLayout.alignment ) { pointer in Builtin.bindMemory(pointer, capacity._builtinWordValue, type) - let buffer = UnsafeMutableBufferPointer( + let buffer = unsafe UnsafeMutableBufferPointer( start: .init(pointer), count: capacity ) - return try body(buffer) + return try unsafe body(buffer) } } diff --git a/stdlib/public/core/ThreadLocalStorage.swift b/stdlib/public/core/ThreadLocalStorage.swift index 9b6bb008734f1..189365a8b7814 100644 --- a/stdlib/public/core/ThreadLocalStorage.swift +++ b/stdlib/public/core/ThreadLocalStorage.swift @@ -20,18 +20,18 @@ internal class _TLSAtomicInt { internal init() { self.value = 0 } internal var valuePtr: UnsafeMutablePointer { - return _getUnsafePointerToStoredProperties(self).assumingMemoryBound( + return unsafe _getUnsafePointerToStoredProperties(self).assumingMemoryBound( to: Int.self) } internal func increment() { - _ = _swift_stdlib_atomicFetchAddInt( + _ = unsafe _swift_stdlib_atomicFetchAddInt( object: valuePtr, operand: 1) } internal func load() -> Int { - return _swift_stdlib_atomicLoadInt(object: valuePtr) + return unsafe _swift_stdlib_atomicLoadInt(object: valuePtr) } } @@ -59,7 +59,7 @@ internal struct _ThreadLocalStorage { internal static func getPointer() -> UnsafeMutablePointer<_ThreadLocalStorage> { - return _swift_stdlib_threadLocalStorageGet().assumingMemoryBound( + return unsafe _swift_stdlib_threadLocalStorageGet().assumingMemoryBound( to: _ThreadLocalStorage.self) } } @@ -68,11 +68,11 @@ internal struct _ThreadLocalStorage { // owned. @_silgen_name("_stdlib_destroyTLS") internal func _destroyTLS(_ ptr: UnsafeMutableRawPointer?) { - _internalInvariant(ptr != nil, + unsafe _internalInvariant(ptr != nil, "_destroyTLS was called, but with nil...") - let tlsPtr = ptr!.assumingMemoryBound(to: _ThreadLocalStorage.self) - tlsPtr.deinitialize(count: 1) - tlsPtr.deallocate() + let tlsPtr = unsafe ptr!.assumingMemoryBound(to: _ThreadLocalStorage.self) + unsafe tlsPtr.deinitialize(count: 1) + unsafe tlsPtr.deallocate() #if INTERNAL_CHECKS_ENABLED // Log the fact we've destroyed our storage @@ -88,7 +88,7 @@ internal func _createThreadLocalStorage() = UnsafeMutablePointer<_ThreadLocalStorage>.allocate( capacity: 1 ) - tlsPtr.initialize(to: _ThreadLocalStorage()) + unsafe tlsPtr.initialize(to: _ThreadLocalStorage()) - return tlsPtr + return unsafe tlsPtr } diff --git a/stdlib/public/core/UInt128.swift b/stdlib/public/core/UInt128.swift index 6ff263d7fb02f..3e222daf60b05 100644 --- a/stdlib/public/core/UInt128.swift +++ b/stdlib/public/core/UInt128.swift @@ -45,7 +45,7 @@ public struct UInt128: Sendable { @_transparent public init(_low: UInt64, _high: UInt64) { #if _endian(little) - self = unsafeBitCast((_low, _high), to: Self.self) + self = unsafe unsafeBitCast((_low, _high), to: Self.self) #else self = unsafeBitCast((_high, _low), to: Self.self) #endif @@ -532,8 +532,8 @@ extension UInt128.Words: RandomAccessCollection { #else let index = count - 1 - position #endif - return _withUnprotectedUnsafePointer(to: &value) { - $0.withMemoryRebound(to: UInt.self, capacity: count) { $0[index] } + return unsafe _withUnprotectedUnsafePointer(to: &value) { + unsafe $0.withMemoryRebound(to: UInt.self, capacity: count) { unsafe $0[index] } } } } diff --git a/stdlib/public/core/UTF16.swift b/stdlib/public/core/UTF16.swift index c2f263f82e961..d14575282c1d1 100644 --- a/stdlib/public/core/UTF16.swift 
+++ b/stdlib/public/core/UTF16.swift @@ -161,15 +161,15 @@ extension Unicode.UTF16 { count: Int ) { if MemoryLayout.stride == MemoryLayout.stride { - _memcpy( + unsafe _memcpy( dest: UnsafeMutablePointer(destination), src: UnsafeMutablePointer(source), size: UInt(count) * UInt(MemoryLayout.stride)) } else { for i in 0.. ) -> Int { - return Int(_swift_stdlib_strlen_unsigned(input)) + return unsafe Int(_swift_stdlib_strlen_unsigned(input)) } // Support parsing C strings as-if they are UTF8 strings. @inlinable public static func _nullCodeUnitOffset( in input: UnsafePointer ) -> Int { - return Int(_swift_stdlib_strlen(input)) + return unsafe Int(_swift_stdlib_strlen(input)) } } @@ -649,7 +649,7 @@ extension UnicodeCodec { in input: UnsafePointer ) -> Int { var length = 0 - while input[length] != 0 { + while unsafe input[length] != 0 { length += 1 } return length diff --git a/stdlib/public/core/UnicodeData.swift b/stdlib/public/core/UnicodeData.swift index 7973a1c828471..58243bb75748c 100644 --- a/stdlib/public/core/UnicodeData.swift +++ b/stdlib/public/core/UnicodeData.swift @@ -174,12 +174,12 @@ extension Unicode { // A buffer pointer to the UTF8 decomposition string. var utf8: UnsafeBufferPointer { - let decompPtr = _swift_stdlib_nfd_decompositions._unsafelyUnwrappedUnchecked + let decompPtr = unsafe _swift_stdlib_nfd_decompositions._unsafelyUnwrappedUnchecked // This size is the utf8 length of the decomposition. - let size = Int(truncatingIfNeeded: decompPtr[index]) + let size = unsafe Int(truncatingIfNeeded: decompPtr[index]) - return UnsafeBufferPointer( + return unsafe UnsafeBufferPointer( // We add 1 here to skip the size byte. start: decompPtr + index + 1, count: size diff --git a/stdlib/public/core/UnicodeHelpers.swift b/stdlib/public/core/UnicodeHelpers.swift index 39ce3667f6c67..79f8af7c7f83a 100644 --- a/stdlib/public/core/UnicodeHelpers.swift +++ b/stdlib/public/core/UnicodeHelpers.swift @@ -65,15 +65,15 @@ internal func _decodeUTF8( internal func _decodeScalar( _ utf8: UnsafeBufferPointer, startingAt i: Int ) -> (Unicode.Scalar, scalarLength: Int) { - let cu0 = utf8[_unchecked: i] + let cu0 = unsafe utf8[_unchecked: i] let len = _utf8ScalarLength(cu0) switch len { case 1: return (_decodeUTF8(cu0), len) - case 2: return (_decodeUTF8(cu0, utf8[_unchecked: i &+ 1]), len) - case 3: return (_decodeUTF8( + case 2: return unsafe (_decodeUTF8(cu0, utf8[_unchecked: i &+ 1]), len) + case 3: return unsafe (_decodeUTF8( cu0, utf8[_unchecked: i &+ 1], utf8[_unchecked: i &+ 2]), len) case 4: - return (_decodeUTF8( + return unsafe (_decodeUTF8( cu0, utf8[_unchecked: i &+ 1], utf8[_unchecked: i &+ 2], @@ -87,8 +87,8 @@ internal func _decodeScalar( internal func _decodeScalar( _ utf8: UnsafeBufferPointer, endingAt i: Int ) -> (Unicode.Scalar, scalarLength: Int) { - let len = _utf8ScalarLength(utf8, endingAt: i) - let (scalar, scalarLen) = _decodeScalar(utf8, startingAt: i &- len) + let len = unsafe _utf8ScalarLength(utf8, endingAt: i) + let (scalar, scalarLen) = unsafe _decodeScalar(utf8, startingAt: i &- len) _internalInvariant(len == scalarLen) return (scalar, len) } @@ -106,10 +106,10 @@ internal func _utf8ScalarLength( _ utf8: UnsafeBufferPointer, endingAt i: Int ) -> Int { var len = 1 - while UTF8.isContinuation(utf8[_unchecked: i &- len]) { + while unsafe UTF8.isContinuation(utf8[_unchecked: i &- len]) { len &+= 1 } - _internalInvariant(len == _utf8ScalarLength(utf8[i &- len])) + unsafe _internalInvariant(len == _utf8ScalarLength(utf8[i &- len])) return len } @@ -126,7 +126,7 @@ internal 
func _scalarAlign( guard _fastPath(idx != utf8.count) else { return idx } var i = idx - while _slowPath(UTF8.isContinuation(utf8[_unchecked: i])) { + while unsafe _slowPath(UTF8.isContinuation(utf8[_unchecked: i])) { i &-= 1 _internalInvariant(i >= 0, "Malformed contents: starts with continuation byte") @@ -178,15 +178,15 @@ extension _StringGuts { return foreignIdx } - return String.Index(_encodedOffset: - self.withFastUTF8 { _scalarAlign($0, idx._encodedOffset) } + return unsafe String.Index(_encodedOffset: + self.withFastUTF8 { unsafe _scalarAlign($0, idx._encodedOffset) } ) } @inlinable internal func fastUTF8ScalarLength(startingAt i: Int) -> Int { _internalInvariant(isFastUTF8) - let len = _utf8ScalarLength(self.withFastUTF8 { $0[_unchecked: i] }) + let len = unsafe _utf8ScalarLength(self.withFastUTF8 { unsafe $0[_unchecked: i] }) _internalInvariant((1...4) ~= len) return len } @@ -195,10 +195,10 @@ extension _StringGuts { internal func fastUTF8ScalarLength(endingAt i: Int) -> Int { _internalInvariant(isFastUTF8) - return self.withFastUTF8 { utf8 in - _internalInvariant(i == utf8.count || !UTF8.isContinuation(utf8[i])) + return unsafe self.withFastUTF8 { utf8 in + unsafe _internalInvariant(i == utf8.count || !UTF8.isContinuation(utf8[i])) var len = 1 - while UTF8.isContinuation(utf8[i &- len]) { + while unsafe UTF8.isContinuation(utf8[i &- len]) { _internalInvariant(i &- len > 0) len += 1 } @@ -210,7 +210,7 @@ extension _StringGuts { @inlinable internal func fastUTF8Scalar(startingAt i: Int) -> Unicode.Scalar { _internalInvariant(isFastUTF8) - return self.withFastUTF8 { _decodeScalar($0, startingAt: i).0 } + return unsafe self.withFastUTF8 { unsafe _decodeScalar($0, startingAt: i).0 } } @_alwaysEmitIntoClient @@ -230,8 +230,8 @@ extension _StringGuts { if i == self.startIndex || i == self.endIndex { return true } if _fastPath(isFastUTF8) { - return self.withFastUTF8 { - return !UTF8.isContinuation($0[_unchecked: i._encodedOffset]) + return unsafe self.withFastUTF8 { + return unsafe !UTF8.isContinuation($0[_unchecked: i._encodedOffset]) } } @@ -383,17 +383,17 @@ extension _StringGuts { ).0)) } - return withUnsafeTemporaryAllocation( + return unsafe withUnsafeTemporaryAllocation( of: UInt16.self, capacity: count ) { buffer in self._object.withCocoaObject { - _cocoaStringCopyCharacters( + unsafe _cocoaStringCopyCharacters( from: $0, range: start.. (Unicode.Scalar, scalarLength: Int) { if _fastPath(isFastUTF8) { - return withFastUTF8 { _decodeScalar($0, startingAt: i) } + return unsafe withFastUTF8 { unsafe _decodeScalar($0, startingAt: i) } } return foreignErrorCorrectedScalar( startingAt: String.Index(_encodedOffset: i)) @@ -420,8 +420,8 @@ extension _StringGuts { startingAt start: Int, endingAt end: Int ) -> Character { if _fastPath(isFastUTF8) { - return withFastUTF8(range: start.. 
UTF8.CodeUnit { _precondition(position >= startIndex && position < endIndex, "Unicode.Scalar.UTF8View index is out of bounds") - return value.withUTF8CodeUnits { $0[position] } + return value.withUTF8CodeUnits { unsafe $0[position] } } } @@ -502,6 +502,7 @@ extension Unicode.Scalar { // Access the underlying code units extension Unicode.Scalar { // Access the scalar as encoded in UTF-16 + @safe internal func withUTF16CodeUnits( _ body: (UnsafeBufferPointer) throws -> Result ) rethrows -> Result { @@ -511,15 +512,16 @@ extension Unicode.Scalar { _internalInvariant(utf16Count == 2) codeUnits.1 = self.utf16[1] } - return try Swift.withUnsafePointer(to: &codeUnits) { - return try $0.withMemoryRebound(to: UInt16.self, capacity: 2) { - return try body(UnsafeBufferPointer(start: $0, count: utf16Count)) + return try unsafe Swift.withUnsafePointer(to: &codeUnits) { + return try unsafe $0.withMemoryRebound(to: UInt16.self, capacity: 2) { + return try unsafe body(UnsafeBufferPointer(start: $0, count: utf16Count)) } } } // Access the scalar as encoded in UTF-8 @inlinable + @safe internal func withUTF8CodeUnits( _ body: (UnsafeBufferPointer) throws -> Result ) rethrows -> Result { @@ -528,9 +530,9 @@ extension Unicode.Scalar { // The first code unit is in the least significant byte of codeUnits. codeUnits = codeUnits.littleEndian - return try Swift._withUnprotectedUnsafePointer(to: &codeUnits) { - return try $0.withMemoryRebound(to: UInt8.self, capacity: 4) { - return try body(UnsafeBufferPointer(start: $0, count: utf8Count)) + return try unsafe Swift._withUnprotectedUnsafePointer(to: &codeUnits) { + return try unsafe $0.withMemoryRebound(to: UInt8.self, capacity: 4) { + return try unsafe body(UnsafeBufferPointer(start: $0, count: utf8Count)) } } } diff --git a/stdlib/public/core/UnicodeScalarProperties.swift b/stdlib/public/core/UnicodeScalarProperties.swift index 81cfc4488b2d4..8f3add43aabf1 100644 --- a/stdlib/public/core/UnicodeScalarProperties.swift +++ b/stdlib/public/core/UnicodeScalarProperties.swift @@ -756,19 +756,19 @@ extension Unicode.Scalar.Properties { // more than 1 scalar. var specialMappingLength = 0 - let specialMappingPtr = _swift_stdlib_getSpecialMapping( + let specialMappingPtr = unsafe _swift_stdlib_getSpecialMapping( _scalar.value, mapping.rawValue, &specialMappingLength ) - if let specialMapping = specialMappingPtr, specialMappingLength != 0 { - let buffer = UnsafeBufferPointer( + if let specialMapping = unsafe specialMappingPtr, specialMappingLength != 0 { + let buffer = unsafe UnsafeBufferPointer( start: specialMapping, count: specialMappingLength ) - return String._uncheckedFromUTF8(buffer, isASCII: false) + return unsafe String._uncheckedFromUTF8(buffer, isASCII: false) } // If we did not have a special mapping, check if we have a direct scalar @@ -1252,8 +1252,8 @@ extension Unicode.Scalar.Properties { // The longest name that Unicode defines is 88 characters long. let largestCount = Int(SWIFT_STDLIB_LARGEST_NAME_COUNT) - let name = String(_uninitializedCapacity: largestCount) { buffer in - _swift_stdlib_getScalarName( + let name = unsafe String(_uninitializedCapacity: largestCount) { buffer in + unsafe _swift_stdlib_getScalarName( _scalar.value, buffer.baseAddress, buffer.count @@ -1277,11 +1277,11 @@ extension Unicode.Scalar.Properties { /// This property corresponds to the "Name_Alias" property in the /// [Unicode Standard](http://www.unicode.org/versions/latest/). public var nameAlias: String? 
{ - guard let nameAliasPtr = _swift_stdlib_getNameAlias(_scalar.value) else { + guard let nameAliasPtr = unsafe _swift_stdlib_getNameAlias(_scalar.value) else { return nil } - return String(cString: nameAliasPtr) + return unsafe String(cString: nameAliasPtr) } } diff --git a/stdlib/public/core/Unmanaged.swift b/stdlib/public/core/Unmanaged.swift index 58b6d6af7fc9c..a6d3e198dceb5 100644 --- a/stdlib/public/core/Unmanaged.swift +++ b/stdlib/public/core/Unmanaged.swift @@ -21,7 +21,7 @@ public struct Unmanaged { internal unowned(unsafe) var _value: Instance @usableFromInline @_transparent - internal init(_private: Instance) { _value = _private } + internal init(_private: Instance) { unsafe _value = _private } /// Unsafely turns an opaque C pointer into an unmanaged class reference. /// @@ -43,7 +43,7 @@ public struct Unmanaged { // retain/releases which must be avoided for the opaque pointer functions. // 'Unmanaged' is layout compatible with 'UnsafeRawPointer' and // casting to that will not attempt to retain the reference held at 'value'. - unsafeBitCast(value, to: Unmanaged.self) + unsafe unsafeBitCast(value, to: Unmanaged.self) } /// Unsafely converts an unmanaged class reference to a pointer. @@ -63,7 +63,7 @@ public struct Unmanaged { // try to retain/release. Use 'self' to avoid this. 'Unmanaged' is // layout compatible with 'UnsafeRawPointer' and casting from that will not // attempt to retain the reference held at '_value'. - unsafeBitCast(self, to: UnsafeMutableRawPointer.self) + unsafe unsafeBitCast(self, to: UnsafeMutableRawPointer.self) } /// Creates an unmanaged reference with an unbalanced retain. @@ -81,7 +81,7 @@ public struct Unmanaged { public static func passRetained(_ value: Instance) -> Unmanaged { // Retain 'value' before it becomes unmanaged. This may be its last use. Builtin.retain(value) - return Unmanaged(_private: value) + return unsafe Unmanaged(_private: value) } /// Creates an unmanaged reference without performing an unbalanced @@ -98,7 +98,7 @@ public struct Unmanaged { /// - Returns: An unmanaged reference to the object passed as `value`. @_transparent public static func passUnretained(_ value: Instance) -> Unmanaged { - return Unmanaged(_private: value) + return unsafe Unmanaged(_private: value) } /// Gets the value of this unmanaged reference as a managed @@ -110,7 +110,7 @@ public struct Unmanaged { /// - Returns: The object referenced by this `Unmanaged` instance. @_transparent // unsafe-performance public func takeUnretainedValue() -> Instance { - return _value + return unsafe _value } /// Gets the value of this unmanaged reference as a managed @@ -122,8 +122,8 @@ public struct Unmanaged { /// - Returns: The object referenced by this `Unmanaged` instance. @_transparent // unsafe-performance public func takeRetainedValue() -> Instance { - let result = _value - release() + let result = unsafe _value + unsafe release() return result } @@ -224,36 +224,36 @@ public struct Unmanaged { public func _withUnsafeGuaranteedRef( _ body: (Instance) throws -> Result ) rethrows -> Result { - var tmp = self + var tmp = unsafe self // Builtin.convertUnownedUnsafeToGuaranteed expects to have a base value // that the +0 value depends on. In this case, we are assuming that is done // for us opaquely already. So, the builtin will emit a mark_dependence on a // trivial object. The optimizer knows to eliminate that so we do not have // any overhead from this. let fakeBase: Int? 
= nil - return try body(Builtin.convertUnownedUnsafeToGuaranteed(fakeBase, + return try unsafe body(Builtin.convertUnownedUnsafeToGuaranteed(fakeBase, &tmp._value)) } /// Performs an unbalanced retain of the object. @_transparent public func retain() -> Unmanaged { - Builtin.retain(_value) - return self + unsafe Builtin.retain(_value) + return unsafe self } /// Performs an unbalanced release of the object. @_transparent public func release() { - Builtin.release(_value) + unsafe Builtin.release(_value) } #if _runtime(_ObjC) /// Performs an unbalanced autorelease of the object. @_transparent public func autorelease() -> Unmanaged { - Builtin.autorelease(_value) - return self + unsafe Builtin.autorelease(_value) + return unsafe self } #endif } diff --git a/stdlib/public/core/UnsafeBufferPointer.swift.gyb b/stdlib/public/core/UnsafeBufferPointer.swift.gyb index 43c4ada627a03..8ae405b8e2fc8 100644 --- a/stdlib/public/core/UnsafeBufferPointer.swift.gyb +++ b/stdlib/public/core/UnsafeBufferPointer.swift.gyb @@ -35,6 +35,7 @@ public struct Unsafe${Mutable}BufferPointer: Copyable { @usableFromInline @_preInverseGenerics + @safe let _position: Unsafe${Mutable}Pointer? /// The number of elements in the buffer. @@ -42,6 +43,7 @@ public struct Unsafe${Mutable}BufferPointer: Copyable { /// If the `baseAddress` of this buffer is `nil`, the count is zero. However, /// a buffer can have a `count` of zero even with a non-`nil` base address. @_preInverseGenerics + @safe public let count: Int // This works around _debugPrecondition() impacting the performance of @@ -51,7 +53,7 @@ public struct Unsafe${Mutable}BufferPointer: Copyable { @_nonEphemeral _uncheckedStart start: Unsafe${Mutable}Pointer?, count: Int ) { - _position = start + _position = unsafe start self.count = count } @@ -72,14 +74,15 @@ public struct Unsafe${Mutable}BufferPointer: Copyable { ) { _debugPrecondition( count >= 0, "Unsafe${Mutable}BufferPointer with negative count") - _debugPrecondition( + unsafe _debugPrecondition( count == 0 || start != nil, "Unsafe${Mutable}BufferPointer has a nil start and nonzero count") - self.init(_uncheckedStart: start, count: _assumeNonNegative(count)) + unsafe self.init(_uncheckedStart: start, count: _assumeNonNegative(count)) } @inlinable // unsafe-performance @_preInverseGenerics + @safe public init(_empty: ()) { _position = nil count = 0 @@ -93,7 +96,7 @@ public struct Unsafe${Mutable}BufferPointer: Copyable { @inlinable // unsafe-performance @_preInverseGenerics public init(mutating other: UnsafeBufferPointer) { - _position = UnsafeMutablePointer(mutating: other._position) + _position = unsafe UnsafeMutablePointer(mutating: other._position) count = other.count } % else: @@ -103,8 +106,9 @@ public struct Unsafe${Mutable}BufferPointer: Copyable { /// - Parameter other: The mutable buffer pointer to convert. @inlinable // unsafe-performance @_preInverseGenerics + @safe public init(_ other: UnsafeMutableBufferPointer) { - _position = UnsafePointer(other._position) + _position = unsafe UnsafePointer(other._position) count = other.count } % end @@ -124,6 +128,7 @@ extension Unsafe${Mutable}BufferPointer where Element: ~Copyable { /// a buffer can have a `count` of zero even with a non-`nil` base address. @inlinable @_preInverseGenerics + @safe public var baseAddress: Unsafe${Mutable}Pointer? { _position } @@ -134,6 +139,7 @@ extension UnsafeBufferPointer { /// An iterator for the elements in the buffer referenced by an /// `UnsafeBufferPointer` or `UnsafeMutableBufferPointer` instance. 
@frozen // unsafe-performance + @unsafe public struct Iterator { @usableFromInline internal var _position, _end: UnsafePointer? @@ -143,8 +149,8 @@ extension UnsafeBufferPointer { _position: UnsafePointer?, _end: UnsafePointer? ) { - self._position = _position - self._end = _end + unsafe self._position = _position + unsafe self._end = _end } } } @@ -152,22 +158,22 @@ extension UnsafeBufferPointer { @available(*, unavailable) extension UnsafeBufferPointer.Iterator: Sendable {} -extension UnsafeBufferPointer.Iterator: IteratorProtocol { +extension UnsafeBufferPointer.Iterator: @unsafe IteratorProtocol { /// Advances to the next element and returns it, or `nil` if no next element /// exists. /// /// Once `nil` has been returned, all subsequent calls return `nil`. @inlinable // unsafe-performance public mutating func next() -> Element? { - guard let start = _position else { + guard let start = unsafe _position else { return nil } - _internalInvariant(_end != nil, "inconsistent _position, _end pointers") + _internalInvariant(unsafe _end != nil, "inconsistent _position, _end pointers") - if start == _end._unsafelyUnwrappedUnchecked { return nil } + if unsafe start == _end._unsafelyUnwrappedUnchecked { return nil } - let result = start.pointee - _position = start + 1 + let result = unsafe start.pointee + unsafe _position = start + 1 return result } } @@ -177,16 +183,16 @@ extension UnsafeMutableBufferPointer { } %end -extension Unsafe${Mutable}BufferPointer: Sequence { +extension Unsafe${Mutable}BufferPointer: @unsafe Sequence { /// Returns an iterator over the elements of this buffer. /// /// - Returns: An iterator over the elements of this buffer. @inlinable // unsafe-performance public func makeIterator() -> Iterator { guard let start = _position else { - return Iterator(_position: nil, _end: nil) + return unsafe Iterator(_position: nil, _end: nil) } - return Iterator(_position: start, _end: start + count) + return unsafe Iterator(_position: start, _end: start + count) } /// Initializes the memory at `destination.baseAddress` with elements of @@ -198,22 +204,23 @@ extension Unsafe${Mutable}BufferPointer: Sequence { public func _copyContents( initializing destination: UnsafeMutableBufferPointer ) -> (Iterator, UnsafeMutableBufferPointer.Index) { - guard !isEmpty && !destination.isEmpty else { return (makeIterator(), 0) } - let s = self.baseAddress._unsafelyUnwrappedUnchecked - let d = destination.baseAddress._unsafelyUnwrappedUnchecked + guard !isEmpty && !destination.isEmpty else { return (unsafe makeIterator(), 0) } + let s = unsafe self.baseAddress._unsafelyUnwrappedUnchecked + let d = unsafe destination.baseAddress._unsafelyUnwrappedUnchecked let n = Swift.min(destination.count, self.count) - d.initialize(from: s, count: n) - return (Iterator(_position: s + n, _end: s + count), n) + unsafe d.initialize(from: s, count: n) + return (unsafe Iterator(_position: s + n, _end: s + count), n) } @inlinable + @safe public func withContiguousStorageIfAvailable( _ body: (UnsafeBufferPointer) throws -> R ) rethrows -> R? { %if Mutable: - return try body(UnsafeBufferPointer(self)) + return try unsafe body(UnsafeBufferPointer(self)) %else: - return try body(self) + return try unsafe body(self) %end } } @@ -226,6 +233,7 @@ extension Unsafe${Mutable}BufferPointer where Element: ~Copyable { /// - Complexity: O(1) @_alwaysEmitIntoClient @_preInverseGenerics + @safe public var isEmpty: Bool { count == 0 } /// The index of the first element in a nonempty buffer. 
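The annotations introduced in the buffer-pointer hunks split a type's surface into unsafe and safe parts: the type (and its `Iterator`) is `@unsafe`, while members that cannot themselves violate memory safety carry `@safe`. A rough sketch of the same split on a hypothetical wrapper (not from this patch; the exact attribute semantics follow the strict-memory-safety design adopted above):

```swift
// Hypothetical wrapper type, for illustration only.
@unsafe
struct WordBox {
  // Stored unsafe pointer; the enclosing type is @unsafe because of it.
  var address: UnsafeMutableRawPointer

  // Reporting a size never touches the pointer, so it can be marked @safe.
  @safe
  var byteCount: Int { MemoryLayout<UInt>.stride }

  // Dereferencing the pointer is unsafe and is acknowledged explicitly.
  func load() -> UInt {
    unsafe address.load(as: UInt.self)
  }
}
```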
@@ -234,6 +242,7 @@ extension Unsafe${Mutable}BufferPointer where Element: ~Copyable { /// is always zero. @inlinable @_preInverseGenerics + @safe public var startIndex: Int { 0 } /// The "past the end" position---that is, the position one greater than the @@ -243,6 +252,7 @@ extension Unsafe${Mutable}BufferPointer where Element: ~Copyable { /// always identical to `count`. @inlinable @_preInverseGenerics + @safe public var endIndex: Int { count } @inlinable @@ -405,14 +415,14 @@ extension Unsafe${Mutable}BufferPointer where Element: ~Copyable { unsafeAddress { _debugPrecondition(i >= 0) _debugPrecondition(i < endIndex) - return UnsafePointer(_position._unsafelyUnwrappedUnchecked) + i + return unsafe UnsafePointer(_position._unsafelyUnwrappedUnchecked) + i } %if Mutable: @_transparent nonmutating unsafeMutableAddress { _debugPrecondition(i >= 0) _debugPrecondition(i < endIndex) - return _position._unsafelyUnwrappedUnchecked + i + return unsafe _position._unsafelyUnwrappedUnchecked + i } %end } @@ -424,13 +434,13 @@ extension Unsafe${Mutable}BufferPointer where Element: ~Copyable { unsafeAddress { _internalInvariant(i >= 0) _internalInvariant(i < endIndex) - return UnsafePointer(_position._unsafelyUnwrappedUnchecked) + i + return unsafe UnsafePointer(_position._unsafelyUnwrappedUnchecked) + i } %if Mutable: nonmutating unsafeMutableAddress { _internalInvariant(i >= 0) _internalInvariant(i < endIndex) - return _position._unsafelyUnwrappedUnchecked + i + return unsafe _position._unsafelyUnwrappedUnchecked + i } %end } @@ -451,11 +461,11 @@ extension Unsafe${Mutable}BufferPointer where Element: ~Copyable { guard i != j else { return } _debugPrecondition(i >= 0 && j >= 0) _debugPrecondition(i < endIndex && j < endIndex) - let pi = (_position! + i) - let pj = (_position! + j) - let tmp = pi.move() - pi.moveInitialize(from: pj, count: 1) - pj.initialize(to: tmp) + let pi = unsafe (_position! + i) + let pj = unsafe (_position! + j) + let tmp = unsafe pi.move() + unsafe pi.moveInitialize(from: pj, count: 1) + unsafe pj.initialize(to: tmp) } % end # mutable @@ -491,13 +501,14 @@ extension Unsafe${Mutable}BufferPointer where Element: ~Copyable { /// - Parameter bounds: A valid range of indices within this buffer. /// - Returns: A new buffer pointer over the items at `bounds`. @_alwaysEmitIntoClient + @safe public func extracting(_ bounds: Range) -> Self { _precondition(bounds.lowerBound >= 0 && bounds.upperBound <= count, "Index out of range") guard let start = self.baseAddress else { - return Self(start: nil, count: 0) + return unsafe Self(start: nil, count: 0) } - return Self(start: start + bounds.lowerBound, count: bounds.count) + return unsafe Self(start: start + bounds.lowerBound, count: bounds.count) } /// Constructs a standalone buffer pointer over the items within the supplied @@ -529,8 +540,9 @@ extension Unsafe${Mutable}BufferPointer where Element: ~Copyable { /// - Parameter bounds: A valid range of indices within this buffer. /// - Returns: A new buffer pointer over the items at `bounds`. @_alwaysEmitIntoClient + @safe public func extracting(_ bounds: some RangeExpression) -> Self { - extracting(bounds.relative(to: Range(uncheckedBounds: (0, count)))) + extracting(bounds.relative(to: unsafe Range(uncheckedBounds: (0, count)))) } /// Extracts and returns a copy of the entire buffer. @@ -548,8 +560,9 @@ extension Unsafe${Mutable}BufferPointer where Element: ~Copyable { // /// - Returns: The same buffer as `self`. 
@_alwaysEmitIntoClient + @safe public func extracting(_ bounds: UnboundedRange) -> Self { - self + unsafe self } } @@ -603,14 +616,14 @@ extension Unsafe${Mutable}BufferPointer { get { _debugPrecondition(i >= 0) _debugPrecondition(i < endIndex) - return _position._unsafelyUnwrappedUnchecked[i] + return unsafe _position._unsafelyUnwrappedUnchecked[i] } %if Mutable: @_transparent nonmutating _modify { _debugPrecondition(i >= 0) _debugPrecondition(i < endIndex) - yield &_position._unsafelyUnwrappedUnchecked[i] + yield unsafe &_position._unsafelyUnwrappedUnchecked[i] } %end } @@ -621,30 +634,31 @@ extension Unsafe${Mutable}BufferPointer { get { _internalInvariant(i >= 0) _internalInvariant(i < endIndex) - return _position._unsafelyUnwrappedUnchecked[i] + return unsafe _position._unsafelyUnwrappedUnchecked[i] } %if Mutable: nonmutating _modify { _internalInvariant(i >= 0) _internalInvariant(i < endIndex) - yield &_position._unsafelyUnwrappedUnchecked[i] + yield unsafe &_position._unsafelyUnwrappedUnchecked[i] } %end } } extension Unsafe${Mutable}BufferPointer: - Collection, + @unsafe Collection, %if Mutable: - MutableCollection, + @unsafe MutableCollection, %end - BidirectionalCollection, - RandomAccessCollection + @unsafe BidirectionalCollection, + @unsafe RandomAccessCollection { public typealias Indices = Range public typealias SubSequence = Slice> @inlinable // unsafe-performance + @safe public func _failEarlyRangeCheck(_ index: Int, bounds: Range) { // NOTE: In release mode, this method is a no-op for performance reasons. _debugPrecondition(index >= bounds.lowerBound) @@ -652,6 +666,7 @@ extension Unsafe${Mutable}BufferPointer: } @inlinable // unsafe-performance + @safe public func _failEarlyRangeCheck(_ range: Range, bounds: Range) { // NOTE: In release mode, this method is a no-op for performance reasons. _debugPrecondition(range.lowerBound >= bounds.lowerBound) @@ -659,9 +674,10 @@ extension Unsafe${Mutable}BufferPointer: } @inlinable // unsafe-performance + @safe public var indices: Indices { // Not checked because init forbids negative count. - return Indices(uncheckedBounds: (startIndex, endIndex)) + return unsafe Indices(uncheckedBounds: (startIndex, endIndex)) } /// Accesses a contiguous subrange of the buffer's elements. @@ -708,18 +724,18 @@ extension Unsafe${Mutable}BufferPointer: get { _debugPrecondition(bounds.lowerBound >= startIndex) _debugPrecondition(bounds.upperBound <= endIndex) - return Slice( + return unsafe Slice( base: self, bounds: bounds) } % if Mutable: nonmutating set { _debugPrecondition(bounds.lowerBound >= startIndex) _debugPrecondition(bounds.upperBound <= endIndex) - _debugPrecondition(bounds.count == newValue.count) + unsafe _debugPrecondition(bounds.count == newValue.count) // FIXME: swift-3-indexing-model: tests. - if !newValue.isEmpty { - (_position! + bounds.lowerBound).update( + if unsafe !newValue.isEmpty { + unsafe (_position! + bounds.lowerBound).update( from: newValue.base._position! + newValue.startIndex, count: newValue.count) } @@ -730,22 +746,24 @@ extension Unsafe${Mutable}BufferPointer: % if Mutable: @inlinable @available(*, deprecated, renamed: "withContiguousMutableStorageIfAvailable") + @safe public mutating func _withUnsafeMutableBufferPointerIfSupported( _ body: (inout UnsafeMutableBufferPointer) throws -> R ) rethrows -> R? { - return try body(&self) + return try unsafe body(&self) } @inlinable + @safe public mutating func withContiguousMutableStorageIfAvailable( _ body: (inout UnsafeMutableBufferPointer) throws -> R ) rethrows -> R? 
{ let (oldBase, oldCount) = (self.baseAddress, self.count) defer { - _debugPrecondition((oldBase, oldCount) == (self.baseAddress, self.count), + _debugPrecondition(unsafe (oldBase, oldCount) == (self.baseAddress, self.count), "UnsafeMutableBufferPointer.withContiguousMutableStorageIfAvailable: replacing the buffer is not allowed") } - return try body(&self) + return try unsafe body(&self) } % end } @@ -782,11 +800,11 @@ extension Unsafe${Mutable}BufferPointer { // degradation wrt passing around pointers not wrapped in a BufferPointer // construct. _debugPrecondition( - slice.startIndex >= 0 && slice.endIndex <= slice.base.count, + unsafe slice.startIndex >= 0 && slice.endIndex <= slice.base.count, "Invalid slice") - let base = slice.base.baseAddress?.advanced(by: slice.startIndex) - let count = slice.endIndex &- slice.startIndex - self.init(start: base, count: count) + let base = unsafe slice.base.baseAddress?.advanced(by: slice.startIndex) + let count = unsafe slice.endIndex &- slice.startIndex + unsafe self.init(start: base, count: count) } % end @@ -812,9 +830,9 @@ extension Unsafe${Mutable}BufferPointer { /// - Parameter slice: The buffer slice to rebase. @inlinable // unsafe-performance public init(rebasing slice: Slice>) { - let base = slice.base.baseAddress?.advanced(by: slice.startIndex) - let count = slice.endIndex &- slice.startIndex - self.init(start: base, count: count) + let base = unsafe slice.base.baseAddress?.advanced(by: slice.startIndex) + let count = unsafe slice.endIndex &- slice.startIndex + unsafe self.init(start: base, count: count) } } @@ -831,7 +849,7 @@ extension Unsafe${Mutable}BufferPointer where Element: ~Copyable { @inlinable @_preInverseGenerics public func deallocate() { - _position?.deallocate() + unsafe _position?.deallocate() } } @@ -860,11 +878,12 @@ extension UnsafeMutableBufferPointer where Element: ~Copyable { /// of `Element`. @inlinable @_preInverseGenerics + @safe public static func allocate( capacity count: Int ) -> UnsafeMutableBufferPointer { let base = UnsafeMutablePointer.allocate(capacity: count) - return UnsafeMutableBufferPointer(start: base, count: count) + return unsafe UnsafeMutableBufferPointer(start: base, count: count) } } @@ -884,7 +903,7 @@ extension UnsafeMutableBufferPointer { return } - dstBase.initialize(repeating: repeatedValue, count: count) + unsafe dstBase.initialize(repeating: repeatedValue, count: count) } /// Initializes the buffer's memory with the given elements. @@ -911,7 +930,7 @@ extension UnsafeMutableBufferPointer { public func initialize( from source: S ) -> (unwritten: S.Iterator, index: Index) where S.Element == Element { - return source._copyContents(initializing: self) + return unsafe source._copyContents(initializing: self) } /// Initializes the buffer's memory with every element of the source. @@ -950,7 +969,7 @@ extension UnsafeMutableBufferPointer { $0.count <= self.count, "buffer cannot contain every element from source." ) - baseAddress.unsafelyUnwrapped.initialize( + unsafe baseAddress.unsafelyUnwrapped.initialize( from: sourceAddress, count: $0.count ) return $0.count @@ -959,7 +978,7 @@ extension UnsafeMutableBufferPointer { return startIndex.advanced(by: count) } - var (iterator, copied) = source._copyContents(initializing: self) + var (iterator, copied) = unsafe source._copyContents(initializing: self) _precondition( iterator.next() == nil, "buffer cannot contain every element from source." 
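For client code, the effect of the annotations above is that allocation (marked `@safe`) should compile without an acknowledgment, while element accesses and deallocation still need one. An illustrative usage sketch, not part of this patch, built with `-strict-memory-safety` (the exact set of required `unsafe` markers depends on the annotations adopted above):

```swift
// Hypothetical example: allocate a temporary buffer of squares and sum it,
// acknowledging each flagged buffer operation with an `unsafe` expression.
func sumOfSquares(upTo n: Int) -> Int {
  let buffer = UnsafeMutableBufferPointer<Int>.allocate(capacity: n)
  defer { unsafe buffer.deallocate() }
  for i in 0..<n {
    unsafe buffer.initializeElement(at: i, to: i * i)
  }
  var total = 0
  for i in 0..<n {
    total += unsafe buffer[i]
  }
  return total
}
```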
@@ -985,14 +1004,14 @@ extension UnsafeMutableBufferPointer { return } - dstBase.update(repeating: repeatedValue, count: count) + unsafe dstBase.update(repeating: repeatedValue, count: count) } @_alwaysEmitIntoClient @available(*, deprecated, renamed: "update(repeating:)") @_silgen_name("_swift_se0370_UnsafeMutableBufferPointer_assign_repeating") public func assign(repeating repeatedValue: Element) { - update(repeating: repeatedValue) + unsafe update(repeating: repeatedValue) } } @@ -1012,12 +1031,12 @@ extension UnsafeMutableBufferPointer { ) -> (unwritten: S.Iterator, index: Index) where S.Element == Element { var iterator = source.makeIterator() guard !self.isEmpty else { return (iterator, startIndex) } - _internalInvariant(_position != nil) + _internalInvariant(unsafe _position != nil) var index = startIndex while index < endIndex { guard let element = iterator.next() else { break } - _position._unsafelyUnwrappedUnchecked[index] = element - formIndex(after: &index) + unsafe _position._unsafelyUnwrappedUnchecked[index] = element + unsafe formIndex(after: &index) } return (iterator, index) } @@ -1055,7 +1074,7 @@ extension UnsafeMutableBufferPointer { $0.count <= self.count, "buffer cannot contain every element from source." ) - baseAddress.unsafelyUnwrapped.update(from: sourceAddress, count: $0.count) + unsafe baseAddress.unsafelyUnwrapped.update(from: sourceAddress, count: $0.count) return $0.count } if let count { @@ -1069,7 +1088,7 @@ extension UnsafeMutableBufferPointer { ) return startIndex } - _internalInvariant(_position != nil) + _internalInvariant(unsafe _position != nil) var iterator = source.makeIterator() var index = startIndex while let value = iterator.next() { @@ -1079,8 +1098,8 @@ extension UnsafeMutableBufferPointer { ) break } - _position._unsafelyUnwrappedUnchecked[index] = value - formIndex(after: &index) + unsafe _position._unsafelyUnwrappedUnchecked[index] = value + unsafe formIndex(after: &index) } return index } @@ -1123,7 +1142,7 @@ extension UnsafeMutableBufferPointer where Element: ~Copyable { source.count <= self.count, "buffer cannot contain every element from source." ) - baseAddress.unsafelyUnwrapped.moveInitialize( + unsafe baseAddress.unsafelyUnwrapped.moveInitialize( from: sourceAddress, count: source.count ) return startIndex.advanced(by: source.count) @@ -1161,7 +1180,7 @@ extension UnsafeMutableBufferPointer { /// by this function. @_alwaysEmitIntoClient public func moveInitialize(fromContentsOf source: Slice) -> Index { - return moveInitialize(fromContentsOf: Self(rebasing: source)) + return unsafe moveInitialize(fromContentsOf: Self(rebasing: source)) } } @@ -1199,7 +1218,7 @@ extension UnsafeMutableBufferPointer where Element: ~Copyable { source.count <= self.count, "buffer cannot contain every element from source." ) - baseAddress.unsafelyUnwrapped.moveUpdate( + unsafe baseAddress.unsafelyUnwrapped.moveUpdate( from: sourceAddress, count: source.count ) return startIndex.advanced(by: source.count) @@ -1233,7 +1252,7 @@ extension UnsafeMutableBufferPointer { /// - Returns: An index one past the index of the last element updated. 
@_alwaysEmitIntoClient public func moveUpdate(fromContentsOf source: Slice) -> Index { - return moveUpdate(fromContentsOf: Self(rebasing: source)) + return unsafe moveUpdate(fromContentsOf: Self(rebasing: source)) } } @@ -1252,10 +1271,10 @@ extension UnsafeMutableBufferPointer where Element: ~Copyable { @_alwaysEmitIntoClient public func deinitialize() -> UnsafeMutableRawBufferPointer { guard let rawValue = baseAddress?._rawValue - else { return .init(start: nil, count: 0) } + else { return unsafe .init(start: nil, count: 0) } Builtin.destroyArray(Element.self, rawValue, count._builtinWordValue) - return .init(start: UnsafeMutableRawPointer(rawValue), - count: count*MemoryLayout.stride) + return unsafe .init(start: UnsafeMutableRawPointer(rawValue), + count: count*MemoryLayout.stride) } /// Initializes the element at `index` to the given value. @@ -1269,9 +1288,9 @@ extension UnsafeMutableBufferPointer where Element: ~Copyable { /// - index: The index of the element to initialize @_alwaysEmitIntoClient public func initializeElement(at index: Index, to value: consuming Element) { - _debugPrecondition(startIndex <= index && index < endIndex) - let p = baseAddress._unsafelyUnwrappedUnchecked.advanced(by: index) - p.initialize(to: value) + _debugPrecondition(unsafe startIndex <= index && index < endIndex) + let p = unsafe baseAddress._unsafelyUnwrappedUnchecked.advanced(by: index) + unsafe p.initialize(to: value) } /// Retrieves and returns the element at `index`, leaving that element's @@ -1286,8 +1305,8 @@ extension UnsafeMutableBufferPointer where Element: ~Copyable { /// - Returns: The instance referenced by this index in this buffer. @_alwaysEmitIntoClient public func moveElement(from index: Index) -> Element { - _debugPrecondition(startIndex <= index && index < endIndex) - return baseAddress._unsafelyUnwrappedUnchecked.advanced(by: index).move() + _debugPrecondition(unsafe startIndex <= index && index < endIndex) + return unsafe baseAddress._unsafelyUnwrappedUnchecked.advanced(by: index).move() } /// Deinitializes the memory underlying the element at `index`. @@ -1300,9 +1319,9 @@ extension UnsafeMutableBufferPointer where Element: ~Copyable { /// - index: The index of the buffer element to deinitialize. 
@_alwaysEmitIntoClient public func deinitializeElement(at index: Index) { - _debugPrecondition(startIndex <= index && index < endIndex) - let p = baseAddress._unsafelyUnwrappedUnchecked.advanced(by: index) - p.deinitialize(count: 1) + _debugPrecondition(unsafe startIndex <= index && index < endIndex) + let p = unsafe baseAddress._unsafelyUnwrappedUnchecked.advanced(by: index) + unsafe p.deinitialize(count: 1) } } % end @@ -1369,11 +1388,11 @@ extension Unsafe${Mutable}BufferPointer where Element: ~Copyable { _ body: (_ buffer: ${Self}) throws(E) -> Result ) throws(E) -> Result { guard let base = _position?._rawValue else { - return try body(.init(start: nil, count: 0)) + return try unsafe body(.init(start: nil, count: 0)) } _debugPrecondition( - Int(bitPattern: .init(base)) & (MemoryLayout.alignment-1) == 0, + unsafe Int(bitPattern: .init(base)) & (MemoryLayout.alignment-1) == 0, "baseAddress must be a properly aligned pointer for types Element and T" ) @@ -1391,7 +1410,7 @@ extension Unsafe${Mutable}BufferPointer where Element: ~Copyable { } let binding = Builtin.bindMemory(base, newCount._builtinWordValue, T.self) defer { Builtin.rebindMemory(base, binding) } - return try body(.init(start: .init(base), count: newCount)) + return try unsafe body(unsafe .init(start: .init(base), count: newCount)) } } @@ -1411,7 +1430,7 @@ extension Unsafe${Mutable}BufferPointer { to type: T.Type, _ body: (${Self}) throws -> Result ) rethrows -> Result { - return try withMemoryRebound(to: T.self, body) + return try unsafe withMemoryRebound(to: T.self, body) } } @@ -1421,8 +1440,9 @@ extension Unsafe${Mutable}BufferPointer: CustomDebugStringConvertible where Element: ~Copyable { /// A textual representation of the buffer, suitable for debugging. @_preInverseGenerics + @safe public var debugDescription: String { - return "Unsafe${Mutable}BufferPointer" + return unsafe "Unsafe${Mutable}BufferPointer" + "(start: \(_position.map(String.init(describing:)) ?? 
"nil"), count: \(count))" } } diff --git a/stdlib/public/core/UnsafeBufferPointerSlice.swift b/stdlib/public/core/UnsafeBufferPointerSlice.swift index 5b86ed1708cb2..b00afb48b58e7 100644 --- a/stdlib/public/core/UnsafeBufferPointerSlice.swift +++ b/stdlib/public/core/UnsafeBufferPointerSlice.swift @@ -29,8 +29,8 @@ extension Slice where Base == UnsafeMutableRawBufferPointer { public func copyBytes( from source: C ) where C.Element == UInt8 { - let buffer = Base(rebasing: self) - buffer.copyBytes(from: source) + let buffer = unsafe Base(rebasing: self) + unsafe buffer.copyBytes(from: source) } /// Initializes the memory referenced by this buffer slice with the given @@ -62,8 +62,8 @@ extension Slice where Base == UnsafeMutableRawBufferPointer { public func initializeMemory( as type: T.Type, repeating repeatedValue: T ) -> UnsafeMutableBufferPointer { - let buffer = Base(rebasing: self) - return buffer.initializeMemory(as: T.self, repeating: repeatedValue) + let buffer = unsafe Base(rebasing: self) + return unsafe buffer.initializeMemory(as: T.self, repeating: repeatedValue) } /// Initializes the buffer's memory with the given elements, binding the @@ -94,8 +94,8 @@ extension Slice where Base == UnsafeMutableRawBufferPointer { public func initializeMemory( as type: S.Element.Type, from source: S ) -> (unwritten: S.Iterator, initialized: UnsafeMutableBufferPointer) { - let buffer = Base(rebasing: self) - return buffer.initializeMemory(as: S.Element.self, from: source) + let buffer = unsafe Base(rebasing: self) + return unsafe buffer.initializeMemory(as: S.Element.self, from: source) } /// Initializes the buffer slice's memory with every element of the source, @@ -127,8 +127,8 @@ extension Slice where Base == UnsafeMutableRawBufferPointer { as type: C.Element.Type, fromContentsOf source: C ) -> UnsafeMutableBufferPointer { - let buffer = Base(rebasing: self) - return buffer.initializeMemory(as: C.Element.self, fromContentsOf: source) + let buffer = unsafe Base(rebasing: self) + return unsafe buffer.initializeMemory(as: C.Element.self, fromContentsOf: source) } /// Moves every element of an initialized source buffer into the @@ -165,8 +165,8 @@ extension Slice where Base == UnsafeMutableRawBufferPointer { as type: T.Type, fromContentsOf source: UnsafeMutableBufferPointer ) -> UnsafeMutableBufferPointer { - let buffer = Base(rebasing: self) - return buffer.moveInitializeMemory(as: T.self, fromContentsOf: source) + let buffer = unsafe Base(rebasing: self) + return unsafe buffer.moveInitializeMemory(as: T.self, fromContentsOf: source) } /// Moves every element from an initialized source buffer slice into the @@ -203,8 +203,8 @@ extension Slice where Base == UnsafeMutableRawBufferPointer { as type: T.Type, fromContentsOf source: Slice> ) -> UnsafeMutableBufferPointer { - let buffer = Base(rebasing: self) - return buffer.moveInitializeMemory(as: T.self, fromContentsOf: source) + let buffer = unsafe Base(rebasing: self) + return unsafe buffer.moveInitializeMemory(as: T.self, fromContentsOf: source) } /// Binds this buffer slice’s memory to the specified type and returns @@ -229,8 +229,8 @@ extension Slice where Base == UnsafeMutableRawBufferPointer { @inlinable @_alwaysEmitIntoClient public func bindMemory(to type: T.Type) -> UnsafeMutableBufferPointer { - let buffer = Base(rebasing: self) - return buffer.bindMemory(to: T.self) + let buffer = unsafe Base(rebasing: self) + return unsafe buffer.bindMemory(to: T.self) } /// Executes the given closure while temporarily binding the buffer slice to @@ 
-282,8 +282,8 @@ extension Slice where Base == UnsafeMutableRawBufferPointer { public func withMemoryRebound( to type: T.Type, _ body: (UnsafeMutableBufferPointer) throws(E) -> Result ) throws(E) -> Result { - let buffer = Base(rebasing: self) - return try buffer.withMemoryRebound(to: T.self, body) + let buffer = unsafe Base(rebasing: self) + return try unsafe buffer.withMemoryRebound(to: T.self, body) } /// Returns a typed buffer to the memory referenced by this buffer slice, @@ -307,8 +307,8 @@ extension Slice where Base == UnsafeMutableRawBufferPointer { public func assumingMemoryBound( to type: T.Type ) -> UnsafeMutableBufferPointer { - let buffer = Base(rebasing: self) - return buffer.assumingMemoryBound(to: T.self) + let buffer = unsafe Base(rebasing: self) + return unsafe buffer.assumingMemoryBound(to: T.self) } /// Returns a new instance of the given type, read from the @@ -343,8 +343,8 @@ extension Slice where Base == UnsafeMutableRawBufferPointer { @inlinable @_alwaysEmitIntoClient public func load(fromByteOffset offset: Int = 0, as type: T.Type) -> T { - let buffer = Base(rebasing: self) - return buffer.load(fromByteOffset: offset, as: T.self) + let buffer = unsafe Base(rebasing: self) + return unsafe buffer.load(fromByteOffset: offset, as: T.self) } /// Returns a new instance of the given type, read from the @@ -384,8 +384,8 @@ extension Slice where Base == UnsafeMutableRawBufferPointer { fromByteOffset offset: Int = 0, as type: T.Type ) -> T { - let buffer = Base(rebasing: self) - return buffer.loadUnaligned(fromByteOffset: offset, as: T.self) + let buffer = unsafe Base(rebasing: self) + return unsafe buffer.loadUnaligned(fromByteOffset: offset, as: T.self) } @inlinable @_alwaysEmitIntoClient @@ -393,8 +393,8 @@ extension Slice where Base == UnsafeMutableRawBufferPointer { fromByteOffset offset: Int = 0, as type: T.Type ) -> T { - let buffer = Base(rebasing: self) - return buffer.loadUnaligned(fromByteOffset: offset, as: T.self) + let buffer = unsafe Base(rebasing: self) + return unsafe buffer.loadUnaligned(fromByteOffset: offset, as: T.self) } /// Stores a value's bytes into the buffer pointer slice's raw memory at the @@ -437,8 +437,8 @@ extension Slice where Base == UnsafeMutableRawBufferPointer { public func storeBytes( of value: T, toByteOffset offset: Int = 0, as type: T.Type ) { - let buffer = Base(rebasing: self) - buffer.storeBytes(of: value, toByteOffset: offset, as: T.self) + let buffer = unsafe Base(rebasing: self) + unsafe buffer.storeBytes(of: value, toByteOffset: offset, as: T.self) } } @@ -466,8 +466,8 @@ extension Slice where Base == UnsafeRawBufferPointer { @inlinable @_alwaysEmitIntoClient public func bindMemory(to type: T.Type) -> UnsafeBufferPointer { - let buffer = Base(rebasing: self) - return buffer.bindMemory(to: T.self) + let buffer = unsafe Base(rebasing: self) + return unsafe buffer.bindMemory(to: T.self) } /// Executes the given closure while temporarily binding the buffer slice to @@ -519,8 +519,8 @@ extension Slice where Base == UnsafeRawBufferPointer { public func withMemoryRebound( to type: T.Type, _ body: (UnsafeBufferPointer) throws(E) -> Result ) throws(E) -> Result { - let buffer = Base(rebasing: self) - return try buffer.withMemoryRebound(to: T.self, body) + let buffer = unsafe Base(rebasing: self) + return try unsafe buffer.withMemoryRebound(to: T.self, body) } /// Returns a typed buffer to the memory referenced by this buffer slice, @@ -544,8 +544,8 @@ extension Slice where Base == UnsafeRawBufferPointer { public func 
assumingMemoryBound( to type: T.Type ) -> UnsafeBufferPointer { - let buffer = Base(rebasing: self) - return buffer.assumingMemoryBound(to: T.self) + let buffer = unsafe Base(rebasing: self) + return unsafe buffer.assumingMemoryBound(to: T.self) } /// Returns a new instance of the given type, read from the @@ -580,8 +580,8 @@ extension Slice where Base == UnsafeRawBufferPointer { @inlinable @_alwaysEmitIntoClient public func load(fromByteOffset offset: Int = 0, as type: T.Type) -> T { - let buffer = Base(rebasing: self) - return buffer.load(fromByteOffset: offset, as: T.self) + let buffer = unsafe Base(rebasing: self) + return unsafe buffer.load(fromByteOffset: offset, as: T.self) } /// Returns a new instance of the given type, read from the @@ -621,8 +621,8 @@ extension Slice where Base == UnsafeRawBufferPointer { fromByteOffset offset: Int = 0, as type: T.Type ) -> T { - let buffer = Base(rebasing: self) - return buffer.loadUnaligned(fromByteOffset: offset, as: T.self) + let buffer = unsafe Base(rebasing: self) + return unsafe buffer.loadUnaligned(fromByteOffset: offset, as: T.self) } @inlinable @_alwaysEmitIntoClient @@ -630,8 +630,8 @@ extension Slice where Base == UnsafeRawBufferPointer { fromByteOffset offset: Int = 0, as type: T.Type ) -> T { - let buffer = Base(rebasing: self) - return buffer.loadUnaligned(fromByteOffset: offset, as: T.self) + let buffer = unsafe Base(rebasing: self) + return unsafe buffer.loadUnaligned(fromByteOffset: offset, as: T.self) } } @@ -696,8 +696,8 @@ extension Slice { public func withMemoryRebound( to type: T.Type, _ body: (UnsafeBufferPointer) throws -> Result ) rethrows -> Result where Base == UnsafeBufferPointer { - let rebased = UnsafeBufferPointer(rebasing: self) - return try rebased.withMemoryRebound(to: T.self, body) + let rebased = unsafe UnsafeBufferPointer(rebasing: self) + return try unsafe rebased.withMemoryRebound(to: T.self, body) } } @@ -716,7 +716,7 @@ extension Slice { @_alwaysEmitIntoClient public func initialize(repeating repeatedValue: Element) where Base == UnsafeMutableBufferPointer { - Base(rebasing: self).initialize(repeating: repeatedValue) + unsafe Base(rebasing: self).initialize(repeating: repeatedValue) } /// Initializes the buffer slice's memory with the given elements. 
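The `Slice` extensions in this file all follow one shape: rebase the slice onto a standalone buffer, forward to the buffer API, and acknowledge the rebasing under strict memory safety. A short sketch of the same shape from the caller's side, assuming a hypothetical `zeroTail` function (not part of this patch) whose argument tail is already initialized:

```swift
// Hypothetical example: zero out the tail of an initialized byte buffer by
// rebasing a slice, acknowledging the unsafe steps with `unsafe` expressions.
@unsafe
func zeroTail(of buffer: UnsafeMutableBufferPointer<UInt8>, from index: Int) {
  let tail = unsafe buffer[index...]
  unsafe UnsafeMutableBufferPointer(rebasing: tail).update(repeating: 0)
}
```

With the slice extensions above, the same operation could also go through `Slice.update(repeating:)` directly, which performs the rebasing internally.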
@@ -746,10 +746,10 @@ extension Slice { from source: S ) -> (unwritten: S.Iterator, index: Index) where S: Sequence, Base == UnsafeMutableBufferPointer { - let buffer = Base(rebasing: self) - let (iterator, index) = buffer.initialize(from: source) - let distance = buffer.distance(from: buffer.startIndex, to: index) - return (iterator, startIndex.advanced(by: distance)) + let buffer = unsafe Base(rebasing: self) + let (iterator, index) = unsafe buffer.initialize(from: source) + let distance = unsafe buffer.distance(from: buffer.startIndex, to: index) + return unsafe (iterator, startIndex.advanced(by: distance)) } /// Initializes the buffer slice's memory with with @@ -784,10 +784,10 @@ extension Slice { public func initialize( fromContentsOf source: some Collection ) -> Index where Base == UnsafeMutableBufferPointer { - let buffer = Base(rebasing: self) - let index = buffer.initialize(fromContentsOf: source) - let distance = buffer.distance(from: buffer.startIndex, to: index) - return startIndex.advanced(by: distance) + let buffer = unsafe Base(rebasing: self) + let index = unsafe buffer.initialize(fromContentsOf: source) + let distance = unsafe buffer.distance(from: buffer.startIndex, to: index) + return unsafe startIndex.advanced(by: distance) } /// Updates every element of this buffer slice's initialized memory. @@ -803,7 +803,7 @@ extension Slice { @_alwaysEmitIntoClient public func update(repeating repeatedValue: Element) where Base == UnsafeMutableBufferPointer { - Base(rebasing: self).update(repeating: repeatedValue) + unsafe Base(rebasing: self).update(repeating: repeatedValue) } /// Updates the buffer slice's initialized memory with the given elements. @@ -821,10 +821,10 @@ extension Slice { from source: S ) -> (unwritten: S.Iterator, index: Index) where S: Sequence, Base == UnsafeMutableBufferPointer { - let buffer = Base(rebasing: self) - let (iterator, index) = buffer.update(from: source) - let distance = buffer.distance(from: buffer.startIndex, to: index) - return (iterator, startIndex.advanced(by: distance)) + let buffer = unsafe Base(rebasing: self) + let (iterator, index) = unsafe buffer.update(from: source) + let distance = unsafe buffer.distance(from: buffer.startIndex, to: index) + return unsafe (iterator, startIndex.advanced(by: distance)) } /// Updates the buffer slice's initialized memory with @@ -854,10 +854,10 @@ extension Slice { public func update( fromContentsOf source: some Collection ) -> Index where Base == UnsafeMutableBufferPointer { - let buffer = Base(rebasing: self) - let index = buffer.update(fromContentsOf: source) - let distance = buffer.distance(from: buffer.startIndex, to: index) - return startIndex.advanced(by: distance) + let buffer = unsafe Base(rebasing: self) + let index = unsafe buffer.update(fromContentsOf: source) + let distance = unsafe buffer.distance(from: buffer.startIndex, to: index) + return unsafe startIndex.advanced(by: distance) } /// Moves every element of an initialized source buffer into the @@ -893,10 +893,10 @@ extension Slice { public func moveInitialize( fromContentsOf source: UnsafeMutableBufferPointer ) -> Index where Base == UnsafeMutableBufferPointer { - let buffer = Base(rebasing: self) - let index = buffer.moveInitialize(fromContentsOf: source) - let distance = buffer.distance(from: buffer.startIndex, to: index) - return startIndex.advanced(by: distance) + let buffer = unsafe Base(rebasing: self) + let index = unsafe buffer.moveInitialize(fromContentsOf: source) + let distance = unsafe buffer.distance(from: 
buffer.startIndex, to: index) + return unsafe startIndex.advanced(by: distance) } /// Moves every element of an initialized source buffer slice into the @@ -932,10 +932,10 @@ extension Slice { public func moveInitialize( fromContentsOf source: Slice> ) -> Index where Base == UnsafeMutableBufferPointer { - let buffer = Base(rebasing: self) - let index = buffer.moveInitialize(fromContentsOf: source) - let distance = buffer.distance(from: buffer.startIndex, to: index) - return startIndex.advanced(by: distance) + let buffer = unsafe Base(rebasing: self) + let index = unsafe buffer.moveInitialize(fromContentsOf: source) + let distance = unsafe buffer.distance(from: buffer.startIndex, to: index) + return unsafe startIndex.advanced(by: distance) } /// Updates this buffer slice's initialized memory initialized memory by @@ -968,10 +968,10 @@ extension Slice { public func moveUpdate( fromContentsOf source: UnsafeMutableBufferPointer ) -> Index where Base == UnsafeMutableBufferPointer { - let buffer = Base(rebasing: self) - let index = buffer.moveUpdate(fromContentsOf: source) - let distance = buffer.distance(from: buffer.startIndex, to: index) - return startIndex.advanced(by: distance) + let buffer = unsafe Base(rebasing: self) + let index = unsafe buffer.moveUpdate(fromContentsOf: source) + let distance = unsafe buffer.distance(from: buffer.startIndex, to: index) + return unsafe startIndex.advanced(by: distance) } /// Updates this buffer slice's initialized memory initialized memory by @@ -1004,10 +1004,10 @@ extension Slice { public func moveUpdate( fromContentsOf source: Slice> ) -> Index where Base == UnsafeMutableBufferPointer { - let buffer = Base(rebasing: self) - let index = buffer.moveUpdate(fromContentsOf: source) - let distance = buffer.distance(from: buffer.startIndex, to: index) - return startIndex.advanced(by: distance) + let buffer = unsafe Base(rebasing: self) + let index = unsafe buffer.moveUpdate(fromContentsOf: source) + let distance = unsafe buffer.distance(from: buffer.startIndex, to: index) + return unsafe startIndex.advanced(by: distance) } /// Deinitializes every instance in this buffer slice. @@ -1025,7 +1025,7 @@ extension Slice { @_alwaysEmitIntoClient public func deinitialize() -> UnsafeMutableRawBufferPointer where Base == UnsafeMutableBufferPointer { - Base(rebasing: self).deinitialize() + unsafe Base(rebasing: self).deinitialize() } /// Initializes the element at `index` to the given value. @@ -1041,8 +1041,8 @@ extension Slice { @_alwaysEmitIntoClient public func initializeElement(at index: Int, to value: Element) where Base == UnsafeMutableBufferPointer { - assert(startIndex <= index && index < endIndex) - base.baseAddress.unsafelyUnwrapped.advanced(by: index).initialize(to: value) + unsafe assert(startIndex <= index && index < endIndex) + unsafe base.baseAddress.unsafelyUnwrapped.advanced(by: index).initialize(to: value) } /// Retrieves and returns the element at `index`, @@ -1059,8 +1059,8 @@ extension Slice { @_alwaysEmitIntoClient public func moveElement(from index: Index) -> Element where Base == UnsafeMutableBufferPointer { - assert(startIndex <= index && index < endIndex) - return base.baseAddress.unsafelyUnwrapped.advanced(by: index).move() + unsafe assert(startIndex <= index && index < endIndex) + return unsafe base.baseAddress.unsafelyUnwrapped.advanced(by: index).move() } /// Deinitializes the memory underlying the element at `index`. 
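A hypothetical example of the slice-based initialization entry points annotated above; only the API shapes come from this diff, the buffer size and element values are invented for the sketch.

```swift
// Allocate a typed buffer and initialize just its first half through a slice.
let buffer = unsafe UnsafeMutableBufferPointer<Int>.allocate(capacity: 8)

// The returned index is slice-relative: one past the last element written.
let next = unsafe buffer[0..<4].initialize(fromContentsOf: [10, 20, 30, 40])
assert(next == 4)

unsafe buffer.deallocate()   // Int is trivial, so no deinitialization is needed.
```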
@@ -1075,8 +1075,8 @@ extension Slice { @_alwaysEmitIntoClient public func deinitializeElement(at index: Base.Index) where Base == UnsafeMutableBufferPointer { - assert(startIndex <= index && index < endIndex) - base.baseAddress.unsafelyUnwrapped.advanced(by: index).deinitialize(count: 1) + unsafe assert(startIndex <= index && index < endIndex) + unsafe base.baseAddress.unsafelyUnwrapped.advanced(by: index).deinitialize(count: 1) } /// Executes the given closure while temporarily binding the memory referenced @@ -1139,7 +1139,7 @@ extension Slice { public func withMemoryRebound( to type: T.Type, _ body: (UnsafeMutableBufferPointer) throws -> Result ) rethrows -> Result where Base == UnsafeMutableBufferPointer { - try Base(rebasing: self).withMemoryRebound(to: T.self, body) + try unsafe Base(rebasing: self).withMemoryRebound(to: T.self, body) } @inlinable @@ -1147,16 +1147,16 @@ extension Slice { public func withContiguousMutableStorageIfAvailable( _ body: (_ buffer: inout UnsafeMutableBufferPointer) throws -> R ) rethrows -> R? where Base == UnsafeMutableBufferPointer { - try base.withContiguousStorageIfAvailable { buffer in - let start = base.baseAddress?.advanced(by: startIndex) - var slice = UnsafeMutableBufferPointer(start: start, count: count) + try unsafe base.withContiguousStorageIfAvailable { buffer in + let start = unsafe base.baseAddress?.advanced(by: startIndex) + var slice = unsafe UnsafeMutableBufferPointer(start: start, count: count) let (b,c) = (slice.baseAddress, slice.count) defer { - _precondition( + unsafe _precondition( slice.baseAddress == b && slice.count == c, "withContiguousMutableStorageIfAvailable: replacing the buffer is not allowed") } - return try body(&slice) + return try unsafe body(&slice) } } } diff --git a/stdlib/public/core/UnsafePointer.swift b/stdlib/public/core/UnsafePointer.swift index 5755c1ab55254..55411e1436cdc 100644 --- a/stdlib/public/core/UnsafePointer.swift +++ b/stdlib/public/core/UnsafePointer.swift @@ -209,6 +209,7 @@ public struct UnsafePointer: Copyable { /// The underlying raw (untyped) pointer. @_preInverseGenerics + @safe public let _rawValue: Builtin.RawPointer /// Creates an `UnsafePointer` from a builtin raw pointer. @@ -236,8 +237,9 @@ extension UnsafePointer: Hashable where Pointee: ~Copyable { // Note: This explicit `hashValue` applies @_preInverseGenerics to emulate the // original (pre-6.0) compiler-synthesized version. 
@_preInverseGenerics + @safe public var hashValue: Int { - _hashValue(for: self) + unsafe _hashValue(for: self) } } @_preInverseGenerics @@ -282,7 +284,7 @@ extension UnsafePointer where Pointee: ~Copyable { @_alwaysEmitIntoClient public var pointee: Pointee { @_transparent unsafeAddress { - return self + return unsafe self } } } @@ -295,7 +297,7 @@ extension UnsafePointer { @usableFromInline internal var pointee: Pointee { @_transparent unsafeAddress { - return self + return unsafe self } } } @@ -311,7 +313,7 @@ extension UnsafePointer where Pointee: ~Copyable { public subscript(i: Int) -> Pointee { @_transparent unsafeAddress { - return self + i + return unsafe self + i } } } @@ -325,7 +327,7 @@ extension UnsafePointer { internal subscript(i: Int) -> Pointee { @_transparent unsafeAddress { - return self + i + return unsafe self + i } } } @@ -398,7 +400,7 @@ extension UnsafePointer where Pointee: ~Copyable { capacity count: Int, _ body: (_ pointer: UnsafePointer) throws(E) -> Result ) throws(E) -> Result { - _debugPrecondition( + unsafe _debugPrecondition( Int(bitPattern: .init(_rawValue)) & (MemoryLayout.alignment-1) == 0 && ( count == 1 || ( MemoryLayout.stride > MemoryLayout.stride @@ -410,7 +412,7 @@ extension UnsafePointer where Pointee: ~Copyable { ) let binding = Builtin.bindMemory(_rawValue, count._builtinWordValue, T.self) defer { Builtin.rebindMemory(_rawValue, binding) } - return try body(.init(_rawValue)) + return try unsafe body(.init(_rawValue)) } } @@ -429,7 +431,7 @@ extension UnsafePointer { ) rethrows -> Result { let binding = Builtin.bindMemory(_rawValue, count._builtinWordValue, T.self) defer { Builtin.rebindMemory(_rawValue, binding) } - return try body(.init(_rawValue)) + return try unsafe body(.init(_rawValue)) } } @@ -449,11 +451,11 @@ extension UnsafePointer { ) -> UnsafePointer? { guard let o = property._storedInlineOffset else { return nil } _internalInvariant(o >= 0) - _debugPrecondition( + unsafe _debugPrecondition( !UInt(bitPattern: self).addingReportingOverflow(UInt(bitPattern: o)).overflow, "Overflow in pointer arithmetic" ) - return .init(Builtin.gepRaw_Word(_rawValue, o._builtinWordValue)) + return unsafe .init(Builtin.gepRaw_Word(_rawValue, o._builtinWordValue)) } } @@ -461,7 +463,7 @@ extension UnsafePointer where Pointee: ~Copyable { @inlinable // unsafe-performance @_preInverseGenerics internal static var _max: UnsafePointer { - return UnsafePointer( + return unsafe UnsafePointer( bitPattern: 0 as Int &- MemoryLayout.stride )._unsafelyUnwrappedUnchecked } @@ -658,6 +660,7 @@ extension UnsafePointer where Pointee: ~Copyable { public struct UnsafeMutablePointer: Copyable { /// The underlying raw (untyped) pointer. @_preInverseGenerics + @safe public let _rawValue: Builtin.RawPointer /// Creates an `UnsafeMutablePointer` from a builtin raw pointer. @@ -685,8 +688,9 @@ extension UnsafeMutablePointer: Hashable where Pointee: ~Copyable { // Note: This explicit `hashValue` applies @_preInverseGenerics to emulate the // original (pre-6.0) compiler-synthesized version. @_preInverseGenerics + @safe public var hashValue: Int { - _hashValue(for: self) + unsafe _hashValue(for: self) } } @@ -726,8 +730,8 @@ extension UnsafeMutablePointer where Pointee: ~Copyable { @_transparent @_preInverseGenerics public init?(@_nonEphemeral mutating other: UnsafePointer?) 
{ - guard let unwrapped = other else { return nil } - self.init(mutating: unwrapped) + guard let unwrapped = unsafe other else { return nil } + unsafe self.init(mutating: unwrapped) } /// Creates a mutable typed pointer referencing the same memory as the @@ -736,6 +740,7 @@ extension UnsafeMutablePointer where Pointee: ~Copyable { /// - Parameter other: The pointer to convert. @_transparent @_preInverseGenerics + @safe public init(@_nonEphemeral _ other: UnsafeMutablePointer) { self._rawValue = other._rawValue } @@ -747,8 +752,9 @@ extension UnsafeMutablePointer where Pointee: ~Copyable { /// result is `nil`. @_transparent @_preInverseGenerics + @safe public init?(@_nonEphemeral _ other: UnsafeMutablePointer?) { - guard let unwrapped = other else { return nil } + guard let unwrapped = unsafe other else { return nil } self.init(unwrapped) } } @@ -779,6 +785,7 @@ extension UnsafeMutablePointer where Pointee: ~Copyable { /// of `Pointee`. @inlinable @_preInverseGenerics + @safe public static func allocate( capacity count: Int ) -> UnsafeMutablePointer { @@ -800,7 +807,7 @@ extension UnsafeMutablePointer where Pointee: ~Copyable { } let rawPtr = Builtin.allocRaw(size._builtinWordValue, align) Builtin.bindMemory(rawPtr, count._builtinWordValue, Pointee.self) - return UnsafeMutablePointer(rawPtr) + return unsafe UnsafeMutablePointer(rawPtr) } } @@ -835,10 +842,10 @@ extension UnsafeMutablePointer where Pointee: ~Copyable { @_alwaysEmitIntoClient public var pointee: Pointee { @_transparent unsafeAddress { - return UnsafePointer(self) + return unsafe UnsafePointer(self) } @_transparent nonmutating unsafeMutableAddress { - return self + return unsafe self } } } @@ -851,10 +858,10 @@ extension UnsafeMutablePointer { @usableFromInline internal var pointee: Pointee { @_transparent unsafeAddress { - return UnsafePointer(self) + return unsafe UnsafePointer(self) } @_transparent nonmutating unsafeMutableAddress { - return self + return unsafe self } } } @@ -880,7 +887,7 @@ extension UnsafeMutablePointer { // Must not use `initializeFrom` with a `Collection` as that will introduce // a cycle. for offset in 0..= 0, "UnsafeMutablePointer.update(repeating:count:) with negative count") for i in 0.., count: Int) { _debugPrecondition( count >= 0, "UnsafeMutablePointer.update with negative count") - if UnsafePointer(self) < source || UnsafePointer(self) >= source + count { + if unsafe UnsafePointer(self) < source || UnsafePointer(self) >= source + count { // assign forward from a disjoint or following overlapping range. Builtin.assignCopyArrayFrontToBack( Pointee.self, self._rawValue, source._rawValue, count._builtinWordValue) @@ -995,7 +1002,7 @@ extension UnsafeMutablePointer { // self[i] = source[i] // } } - else if UnsafePointer(self) != source { + else if unsafe UnsafePointer(self) != source { // assign backward from a non-following overlapping range. 
Builtin.assignCopyArrayBackToFront( Pointee.self, self._rawValue, source._rawValue, count._builtinWordValue) @@ -1011,8 +1018,9 @@ extension UnsafeMutablePointer { @_alwaysEmitIntoClient @available(*, deprecated, renamed: "update(from:count:)") @_silgen_name("_swift_se0370_UnsafeMutablePointer_assign_from_count") + @unsafe public func assign(from source: UnsafePointer, count: Int) { - update(from: source, count: count) + unsafe update(from: source, count: count) } } @@ -1042,7 +1050,7 @@ extension UnsafeMutablePointer where Pointee: ~Copyable { ) { _debugPrecondition( count >= 0, "UnsafeMutablePointer.moveInitialize with negative count") - if self < source || self >= source + count { + if unsafe self < source || self >= source + count { // initialize forward from a disjoint or following overlapping range. Builtin.takeArrayFrontToBack( Pointee.self, self._rawValue, source._rawValue, count._builtinWordValue) @@ -1051,7 +1059,7 @@ extension UnsafeMutablePointer where Pointee: ~Copyable { // (self + i).initialize(to: (source + i).move()) // } } - else if self != source { + else if unsafe self != source { // initialize backward from a non-following overlapping range. Builtin.takeArrayBackToFront( Pointee.self, self._rawValue, source._rawValue, count._builtinWordValue) @@ -1086,7 +1094,7 @@ extension UnsafeMutablePointer { public func initialize(from source: UnsafePointer, count: Int) { _debugPrecondition( count >= 0, "UnsafeMutablePointer.initialize with negative count") - _debugPrecondition( + unsafe _debugPrecondition( UnsafePointer(self) + count <= source || source + count <= UnsafePointer(self), "UnsafeMutablePointer.initialize overlapping range") @@ -1126,7 +1134,7 @@ extension UnsafeMutablePointer where Pointee: ~Copyable { ) { _debugPrecondition( count >= 0, "UnsafeMutablePointer.moveUpdate(from:) with negative count") - _debugPrecondition( + unsafe _debugPrecondition( self + count <= source || source + count <= self, "moveUpdate overlapping range") Builtin.assignTakeArray( @@ -1145,7 +1153,7 @@ extension UnsafeMutablePointer { public func moveAssign( @_nonEphemeral from source: UnsafeMutablePointer, count: Int ) { - moveUpdate(from: source, count: count) + unsafe moveUpdate(from: source, count: count) } } @@ -1234,12 +1242,13 @@ extension UnsafeMutablePointer where Pointee: ~Copyable { /// - pointer: The pointer temporarily bound to `T`. /// - Returns: The return value, if any, of the `body` closure parameter. 
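As a minimal sketch (not part of this change), `withMemoryRebound(to:capacity:_:)`, which gains the `@unsafe` attribute and `unsafe` expressions here, can be exercised with two layout-compatible types such as `Int` and `UInt`; that layout compatibility is what the `_debugPrecondition` below checks.

```swift
// Hypothetical usage: temporarily view memory bound to Int as UInt.
var value = -1
unsafe withUnsafeMutablePointer(to: &value) { pointer in
    unsafe pointer.withMemoryRebound(to: UInt.self, capacity: 1) { rebound in
        // Same bytes reinterpreted: the all-ones pattern of -1 reads as UInt.max.
        assert(unsafe rebound.pointee == UInt.max)
    }
}
```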
@_alwaysEmitIntoClient + @unsafe public func withMemoryRebound( to type: T.Type, capacity count: Int, _ body: (_ pointer: UnsafeMutablePointer) throws(E) -> Result ) throws(E) -> Result { - _debugPrecondition( + unsafe _debugPrecondition( Int(bitPattern: .init(_rawValue)) & (MemoryLayout.alignment-1) == 0 && ( count == 1 || ( MemoryLayout.stride > MemoryLayout.stride @@ -1251,7 +1260,7 @@ extension UnsafeMutablePointer where Pointee: ~Copyable { ) let binding = Builtin.bindMemory(_rawValue, count._builtinWordValue, T.self) defer { Builtin.rebindMemory(_rawValue, binding) } - return try body(.init(_rawValue)) + return try unsafe body(.init(_rawValue)) } } @@ -1269,7 +1278,7 @@ extension UnsafeMutablePointer { ) rethrows -> Result { let binding = Builtin.bindMemory(_rawValue, count._builtinWordValue, T.self) defer { Builtin.rebindMemory(_rawValue, binding) } - return try body(.init(_rawValue)) + return try unsafe body(.init(_rawValue)) } } @@ -1291,11 +1300,11 @@ extension UnsafeMutablePointer where Pointee: ~Copyable { public subscript(i: Int) -> Pointee { @_transparent unsafeAddress { - return UnsafePointer(self + i) + return unsafe UnsafePointer(self + i) } @_transparent nonmutating unsafeMutableAddress { - return self + i + return unsafe self + i } } } @@ -1309,11 +1318,11 @@ extension UnsafeMutablePointer { internal subscript(i: Int) -> Pointee { @_transparent unsafeAddress { - return UnsafePointer(self + i) + return unsafe UnsafePointer(self + i) } @_transparent nonmutating unsafeMutableAddress { - return self + i + return unsafe self + i } } } @@ -1334,11 +1343,11 @@ extension UnsafeMutablePointer { ) -> UnsafePointer? { guard let o = property._storedInlineOffset else { return nil } _internalInvariant(o >= 0) - _debugPrecondition( + unsafe _debugPrecondition( !UInt(bitPattern: self).addingReportingOverflow(UInt(bitPattern: o)).overflow, "Overflow in pointer arithmetic" ) - return .init(Builtin.gepRaw_Word(_rawValue, o._builtinWordValue)) + return unsafe .init(Builtin.gepRaw_Word(_rawValue, o._builtinWordValue)) } /// Obtain a mutable pointer to the stored property referred to by a key path. @@ -1356,11 +1365,11 @@ extension UnsafeMutablePointer { ) -> UnsafeMutablePointer? 
{ guard let o = property._storedInlineOffset else { return nil } _internalInvariant(o >= 0) - _debugPrecondition( + unsafe _debugPrecondition( !UInt(bitPattern: self).addingReportingOverflow(UInt(bitPattern: o)).overflow, "Overflow in pointer arithmetic" ) - return .init(Builtin.gepRaw_Word(_rawValue, o._builtinWordValue)) + return unsafe .init(Builtin.gepRaw_Word(_rawValue, o._builtinWordValue)) } } @@ -1368,7 +1377,7 @@ extension UnsafeMutablePointer where Pointee: ~Copyable { @inlinable // unsafe-performance @_preInverseGenerics internal static var _max: UnsafeMutablePointer { - return UnsafeMutablePointer( + return unsafe UnsafeMutablePointer( bitPattern: 0 as Int &- MemoryLayout.stride )._unsafelyUnwrappedUnchecked } diff --git a/stdlib/public/core/UnsafeRawBufferPointer.swift.gyb b/stdlib/public/core/UnsafeRawBufferPointer.swift.gyb index af288923dd101..5cb2002fa357f 100644 --- a/stdlib/public/core/UnsafeRawBufferPointer.swift.gyb +++ b/stdlib/public/core/UnsafeRawBufferPointer.swift.gyb @@ -114,11 +114,11 @@ public struct Unsafe${Mutable}RawBufferPointer { @_nonEphemeral start: Unsafe${Mutable}RawPointer?, count: Int ) { _debugPrecondition(count >= 0, "${Self} with negative count") - _debugPrecondition(count == 0 || start != nil, + _debugPrecondition(unsafe count == 0 || start != nil, "${Self} has a nil start and nonzero count") - _position = start - _end = start.map { $0 + _assumeNonNegative(count) } + unsafe _position = start + unsafe _end = start.map { unsafe $0 + _assumeNonNegative(count) } } } @@ -129,14 +129,15 @@ extension Unsafe${Mutable}RawBufferPointer: Sendable {} extension UnsafeRawBufferPointer { /// An iterator over the bytes viewed by a raw buffer pointer. @frozen + @unsafe public struct Iterator { @usableFromInline internal var _position, _end: UnsafeRawPointer? @inlinable internal init(_position: UnsafeRawPointer?, _end: UnsafeRawPointer?) { - self._position = _position - self._end = _end + unsafe self._position = _position + unsafe self._end = _end } } } @@ -144,7 +145,7 @@ extension UnsafeRawBufferPointer { @available(*, unavailable) extension UnsafeRawBufferPointer.Iterator: Sendable { } -extension UnsafeRawBufferPointer.Iterator: IteratorProtocol, Sequence { +extension UnsafeRawBufferPointer.Iterator: @unsafe IteratorProtocol, @unsafe Sequence { /// Advances to the next byte and returns it, or `nil` if no next byte /// exists. /// @@ -154,7 +155,7 @@ extension UnsafeRawBufferPointer.Iterator: IteratorProtocol, Sequence { /// exists; otherwise, `nil`. @inlinable public mutating func next() -> UInt8? { - guard let position = _position else { + guard let position = unsafe _position else { return nil } // We can do an unchecked unwrap here by borrowing invariants from the pointer. @@ -163,15 +164,15 @@ extension UnsafeRawBufferPointer.Iterator: IteratorProtocol, Sequence { // Thus, we can safely do an unchecked unwrap here. // We check these invariants in debug builds to defend against invalidly constructed // pointers. - _debugPrecondition(_end != nil) - let end = _end._unsafelyUnwrappedUnchecked - if position == end { return nil } - _debugPrecondition(position < end) - let result = position.load(as: UInt8.self) + _debugPrecondition(unsafe _end != nil) + let end = unsafe _end._unsafelyUnwrappedUnchecked + if unsafe position == end { return nil } + _debugPrecondition(unsafe position < end) + let result = unsafe position.load(as: UInt8.self) // Validly constructed buffer pointers also have an _end that is strictly // greater than or equal to _position. 
// So we do not need to do checked arithmetic here as we cannot possibly overflow. - _position = position + 1 + unsafe _position = position + 1 return result } } @@ -181,13 +182,13 @@ extension UnsafeMutableRawBufferPointer { } %end -extension Unsafe${Mutable}RawBufferPointer: Sequence { +extension Unsafe${Mutable}RawBufferPointer: @unsafe Sequence { public typealias SubSequence = Slice<${Self}> /// Returns an iterator over the bytes of this sequence. @inlinable public func makeIterator() -> Iterator { - return Iterator(_position: _position, _end: _end) + return unsafe Iterator(_position: _position, _end: _end) } /// Copies the elements of `self` to the memory at `destination.baseAddress`, @@ -199,18 +200,18 @@ extension Unsafe${Mutable}RawBufferPointer: Sequence { public func _copyContents( initializing destination: UnsafeMutableBufferPointer ) -> (Iterator, UnsafeMutableBufferPointer.Index) { - guard let s = _position, let e = _end, e > s, !destination.isEmpty else { - return (makeIterator(), 0) + guard let s = unsafe _position, let e = unsafe _end, unsafe e > s, !destination.isEmpty else { + return (unsafe makeIterator(), 0) } - let destinationAddress = destination.baseAddress._unsafelyUnwrappedUnchecked + let destinationAddress = unsafe destination.baseAddress._unsafelyUnwrappedUnchecked let d = UnsafeMutableRawPointer(destinationAddress) - let n = Swift.min(destination.count, s.distance(to: e)) - d.copyMemory(from: s, byteCount: n) - return (Iterator(_position: s.advanced(by: n), _end: e), n) + let n = Swift.min(destination.count, unsafe s.distance(to: e)) + unsafe d.copyMemory(from: s, byteCount: n) + return (unsafe Iterator(_position: unsafe s.advanced(by: n), _end: e), n) } } -extension Unsafe${Mutable}RawBufferPointer: ${Mutable}Collection { +extension Unsafe${Mutable}RawBufferPointer: @unsafe ${Mutable}Collection { // TODO: Specialize `index` and `formIndex` and // `_failEarlyRangeCheck` as in `UnsafeBufferPointer`. public typealias Element = UInt8 @@ -219,6 +220,7 @@ extension Unsafe${Mutable}RawBufferPointer: ${Mutable}Collection { /// Always zero, which is the index of the first byte in a nonempty buffer. @inlinable + @safe public var startIndex: Index { return 0 } @@ -229,14 +231,16 @@ extension Unsafe${Mutable}RawBufferPointer: ${Mutable}Collection { /// The `endIndex` property of an `Unsafe${Mutable}RawBufferPointer` /// instance is always identical to `count`. @inlinable + @safe public var endIndex: Index { return count } @inlinable + @safe public var indices: Indices { // Not checked because init forbids negative count. 
- return Indices(uncheckedBounds: (startIndex, endIndex)) + return unsafe Indices(uncheckedBounds: (startIndex, endIndex)) } /// Accesses the byte at the given offset in the memory region as a `UInt8` @@ -249,13 +253,13 @@ extension Unsafe${Mutable}RawBufferPointer: ${Mutable}Collection { get { _debugPrecondition(i >= 0) _debugPrecondition(i < endIndex) - return _position._unsafelyUnwrappedUnchecked.load(fromByteOffset: i, as: UInt8.self) + return unsafe _position._unsafelyUnwrappedUnchecked.load(fromByteOffset: i, as: UInt8.self) } % if mutable: nonmutating set { _debugPrecondition(i >= 0) _debugPrecondition(i < endIndex) - _position._unsafelyUnwrappedUnchecked.storeBytes(of: newValue, toByteOffset: i, as: UInt8.self) + unsafe _position._unsafelyUnwrappedUnchecked.storeBytes(of: newValue, toByteOffset: i, as: UInt8.self) } % end # mutable } @@ -269,16 +273,16 @@ extension Unsafe${Mutable}RawBufferPointer: ${Mutable}Collection { get { _debugPrecondition(bounds.lowerBound >= startIndex) _debugPrecondition(bounds.upperBound <= endIndex) - return Slice(base: self, bounds: bounds) + return unsafe Slice(base: self, bounds: bounds) } % if mutable: nonmutating set { _debugPrecondition(bounds.lowerBound >= startIndex) _debugPrecondition(bounds.upperBound <= endIndex) - _debugPrecondition(bounds.count == newValue.count) + _debugPrecondition(unsafe bounds.count == newValue.count) - if !newValue.isEmpty { - (baseAddress! + bounds.lowerBound).copyMemory( + if unsafe !newValue.isEmpty { + unsafe (baseAddress! + bounds.lowerBound).copyMemory( from: newValue.base.baseAddress! + newValue.startIndex, byteCount: newValue.count) } @@ -302,11 +306,11 @@ extension Unsafe${Mutable}RawBufferPointer: ${Mutable}Collection { guard i != j else { return } _debugPrecondition(i >= 0 && j >= 0) _debugPrecondition(i < endIndex && j < endIndex) - let pi = (_position! + i) - let pj = (_position! + j) - let tmp = pi.load(fromByteOffset: 0, as: UInt8.self) - pi.copyMemory(from: pj, byteCount: MemoryLayout.size) - pj.storeBytes(of: tmp, toByteOffset: 0, as: UInt8.self) + let pi = unsafe (_position! + i) + let pj = unsafe (_position! + j) + let tmp = unsafe pi.load(fromByteOffset: 0, as: UInt8.self) + unsafe pi.copyMemory(from: pj, byteCount: MemoryLayout.size) + unsafe pj.storeBytes(of: tmp, toByteOffset: 0, as: UInt8.self) } % end # mutable @@ -315,18 +319,19 @@ extension Unsafe${Mutable}RawBufferPointer: ${Mutable}Collection { /// If the `baseAddress` of this buffer is `nil`, the count is zero. However, /// a buffer can have a `count` of zero even with a non-`nil` base address. @inlinable + @safe public var count: Int { - if let pos = _position { + if let pos = unsafe _position { // Unsafely unwrapped because init forbids end being nil if _position // isn't. - _internalInvariant(_end != nil) - return _assumeNonNegative(_end._unsafelyUnwrappedUnchecked - pos) + _internalInvariant(unsafe _end != nil) + return _assumeNonNegative(unsafe _end._unsafelyUnwrappedUnchecked - pos) } return 0 } } -extension Unsafe${Mutable}RawBufferPointer: RandomAccessCollection { } +extension Unsafe${Mutable}RawBufferPointer: @unsafe RandomAccessCollection { } extension Unsafe${Mutable}RawBufferPointer { % if mutable: @@ -348,12 +353,13 @@ extension Unsafe${Mutable}RawBufferPointer { /// - Returns: A buffer pointer to a newly allocated region of memory aligned /// to `alignment`. 
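Illustrative sketch only: the raw buffer `allocate` entry point is annotated `@safe` in this change (allocation by itself cannot violate memory safety), while loads and stores through the resulting buffer still carry `unsafe` acknowledgements. The byte count and stored value below are arbitrary.

```swift
// Allocation is @safe; reading and writing the allocated bytes is not.
let raw = UnsafeMutableRawBufferPointer.allocate(
  byteCount: 16,
  alignment: MemoryLayout<UInt64>.alignment
)
unsafe raw.storeBytes(of: 0xDEAD_BEEF as UInt32, toByteOffset: 0, as: UInt32.self)
let word = unsafe raw.load(fromByteOffset: 0, as: UInt32.self)
assert(word == 0xDEAD_BEEF)
unsafe raw.deallocate()
```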
@inlinable + @safe public static func allocate( byteCount: Int, alignment: Int ) -> UnsafeMutableRawBufferPointer { let base = UnsafeMutableRawPointer.allocate( byteCount: byteCount, alignment: alignment) - return UnsafeMutableRawBufferPointer(start: base, count: byteCount) + return unsafe UnsafeMutableRawBufferPointer(start: base, count: byteCount) } % end # mutable @@ -367,7 +373,7 @@ extension Unsafe${Mutable}RawBufferPointer { /// be equal to the originally allocated size of the memory block. @inlinable public func deallocate() { - _position?.deallocate() + unsafe _position?.deallocate() } /// Returns a new instance of the given type, read from the buffer pointer's @@ -405,7 +411,7 @@ extension Unsafe${Mutable}RawBufferPointer { _debugPrecondition(offset >= 0, "${Self}.load with negative offset") _debugPrecondition(offset + MemoryLayout.size <= self.count, "${Self}.load out of bounds") - return baseAddress!.load(fromByteOffset: offset, as: T.self) + return unsafe baseAddress!.load(fromByteOffset: offset, as: T.self) } // FIXME(NCG): Add a consuming analogue of `load`, like `move(fromByteOffset:as:_:)` (in the mutable variant) @@ -449,7 +455,7 @@ extension Unsafe${Mutable}RawBufferPointer { _debugPrecondition(offset >= 0, "${Self}.load with negative offset") _debugPrecondition(offset + MemoryLayout.size <= self.count, "${Self}.load out of bounds") - return baseAddress!.loadUnaligned(fromByteOffset: offset, as: T.self) + return unsafe baseAddress!.loadUnaligned(fromByteOffset: offset, as: T.self) } @_alwaysEmitIntoClient @@ -460,7 +466,7 @@ extension Unsafe${Mutable}RawBufferPointer { _debugPrecondition(offset >= 0, "${Self}.load with negative offset") _debugPrecondition(offset + MemoryLayout.size <= self.count, "${Self}.load out of bounds") - return baseAddress!.loadUnaligned(fromByteOffset: offset, as: T.self) + return unsafe baseAddress!.loadUnaligned(fromByteOffset: offset, as: T.self) } % if mutable: @@ -508,8 +514,8 @@ extension Unsafe${Mutable}RawBufferPointer { _debugPrecondition(offset + MemoryLayout.size <= self.count, "${Self}.storeBytes out of bounds") - let pointer = baseAddress._unsafelyUnwrappedUnchecked - pointer.storeBytes(of: value, toByteOffset: offset, as: T.self) + let pointer = unsafe baseAddress._unsafelyUnwrappedUnchecked + unsafe pointer.storeBytes(of: value, toByteOffset: offset, as: T.self) } // This unavailable implementation uses the expected mangled name @@ -517,14 +523,15 @@ extension Unsafe${Mutable}RawBufferPointer { // any binary linked against the stdlib binary for Swift 5.6 and older. 
@_spi(SwiftStdlibLegacyABI) @available(swift, obsoleted: 1) @_silgen_name("$sSw10storeBytes2of12toByteOffset2asyx_SixmtlF") - @usableFromInline func _legacy_se0349_storeBytes( + @usableFromInline + func _legacy_se0349_storeBytes( of value: T, toByteOffset offset: Int = 0, as type: T.Type ) { _debugPrecondition(offset >= 0, "${Self}.storeBytes with negative offset") _debugPrecondition(offset + MemoryLayout.size <= self.count, "${Self}.storeBytes out of bounds") - baseAddress!._legacy_se0349_storeBytes_internal( + unsafe baseAddress!._legacy_se0349_storeBytes_internal( of: value, toByteOffset: offset, as: T.self ) } @@ -550,7 +557,7 @@ extension Unsafe${Mutable}RawBufferPointer { _debugPrecondition(source.count <= self.count, "${Self}.copyMemory source has too many elements") if let baseAddress = baseAddress, let sourceAddress = source.baseAddress { - baseAddress.copyMemory(from: sourceAddress, byteCount: source.count) + unsafe baseAddress.copyMemory(from: sourceAddress, byteCount: source.count) } } @@ -571,7 +578,7 @@ extension Unsafe${Mutable}RawBufferPointer { public func copyBytes( from source: C ) where C.Element == UInt8 { - guard let position = _position else { + guard let position = unsafe _position else { return } @@ -580,7 +587,7 @@ extension Unsafe${Mutable}RawBufferPointer { _debugPrecondition(source.count <= self.count, "${Self}.copyBytes source has too many elements") if let base = buffer.baseAddress { - position.copyMemory(from: base, byteCount: buffer.count) + unsafe position.copyMemory(from: base, byteCount: buffer.count) } }) != nil { return @@ -589,7 +596,7 @@ extension Unsafe${Mutable}RawBufferPointer { for (index, byteValue) in source.enumerated() { _debugPrecondition(index < self.count, "${Self}.copyBytes source has too many elements") - position.storeBytes( + unsafe position.storeBytes( of: byteValue, toByteOffset: index, as: UInt8.self) } } @@ -599,8 +606,9 @@ extension Unsafe${Mutable}RawBufferPointer { /// /// - Parameter bytes: The buffer to convert. @inlinable + @safe public init(_ bytes: UnsafeMutableRawBufferPointer) { - self.init(start: bytes.baseAddress, count: bytes.count) + unsafe self.init(start: bytes.baseAddress, count: bytes.count) } % if mutable: @@ -609,7 +617,7 @@ extension Unsafe${Mutable}RawBufferPointer { /// - Parameter bytes: The buffer to convert. @inlinable public init(mutating bytes: UnsafeRawBufferPointer) { - self.init(start: UnsafeMutableRawPointer(mutating: bytes.baseAddress), + unsafe self.init(start: UnsafeMutableRawPointer(mutating: bytes.baseAddress), count: bytes.count) } % else: @@ -617,8 +625,9 @@ extension Unsafe${Mutable}RawBufferPointer { /// /// - Parameter bytes: The buffer to convert. @inlinable + @safe public init(_ bytes: UnsafeRawBufferPointer) { - self.init(start: bytes.baseAddress, count: bytes.count) + unsafe self.init(start: bytes.baseAddress, count: bytes.count) } % end # !mutable @@ -628,8 +637,9 @@ extension Unsafe${Mutable}RawBufferPointer { /// buffer's type `T` must be a trivial type. @inlinable @_preInverseGenerics + @safe public init(_ buffer: UnsafeMutableBufferPointer) { - self.init(start: buffer.baseAddress, + unsafe self.init(start: buffer.baseAddress, count: buffer.count * MemoryLayout.stride) } @@ -640,8 +650,9 @@ extension Unsafe${Mutable}RawBufferPointer { /// buffer's type `T` must be a trivial type. 
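A small sketch of the typed-to-raw buffer conversions marked `@safe` above; the array contents and the byte-count check are illustrative only, and the `unsafe` markers are approximate.

```swift
// Viewing a typed buffer as raw bytes; the conversion adds no new unsafety.
let values: [UInt16] = [1, 2, 3]
let byteCount = unsafe values.withUnsafeBufferPointer { typed -> Int in
    let raw = UnsafeRawBufferPointer(typed)   // typed -> raw view of the same memory
    return unsafe raw.count                   // counted in bytes, not elements
}
assert(byteCount == 3 * MemoryLayout<UInt16>.stride)
```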
@inlinable @_preInverseGenerics + @safe public init(_ buffer: UnsafeBufferPointer) { - self.init(start: buffer.baseAddress, + unsafe self.init(start: buffer.baseAddress, count: buffer.count * MemoryLayout.stride) } % end # !mutable @@ -678,11 +689,11 @@ extension Unsafe${Mutable}RawBufferPointer { // degradation wrt passing around pointers not wrapped in a BufferPointer // construct. _debugPrecondition( - slice.startIndex >= 0 && slice.endIndex <= slice.base.count, + unsafe slice.startIndex >= 0 && slice.endIndex <= slice.base.count, "Invalid slice") - let base = slice.base.baseAddress?.advanced(by: slice.startIndex) - let count = slice.endIndex &- slice.startIndex - self.init(start: base, count: count) + let base = unsafe slice.base.baseAddress?.advanced(by: slice.startIndex) + let count = unsafe slice.endIndex &- slice.startIndex + unsafe self.init(start: base, count: count) } % end # !mutable @@ -708,9 +719,9 @@ extension Unsafe${Mutable}RawBufferPointer { /// - Parameter slice: The raw buffer slice to rebase. @inlinable public init(rebasing slice: Slice) { - let base = slice.base.baseAddress?.advanced(by: slice.startIndex) - let count = slice.endIndex &- slice.startIndex - self.init(start: base, count: count) + let base = unsafe slice.base.baseAddress?.advanced(by: slice.startIndex) + let count = unsafe slice.endIndex &- slice.startIndex + unsafe self.init(start: base, count: count) } /// A pointer to the first byte of the buffer. @@ -718,8 +729,9 @@ extension Unsafe${Mutable}RawBufferPointer { /// If the `baseAddress` of this buffer is `nil`, the count is zero. However, /// a buffer can have a `count` of zero even with a non-`nil` base address. @inlinable + @safe public var baseAddress: Unsafe${Mutable}RawPointer? { - return _position + return unsafe _position } % if mutable: @@ -750,15 +762,15 @@ extension Unsafe${Mutable}RawBufferPointer { @discardableResult public func initializeMemory(as type: T.Type, repeating repeatedValue: T) -> UnsafeMutableBufferPointer { - guard let base = _position else { - return .init(start: nil, count: 0) + guard let base = unsafe _position else { + return unsafe .init(start: nil, count: 0) } - let count = (_end._unsafelyUnwrappedUnchecked-base) / MemoryLayout.stride - let initialized = base.initializeMemory( + let count = (unsafe _end._unsafelyUnwrappedUnchecked-base) / MemoryLayout.stride + let initialized = unsafe base.initializeMemory( as: type, repeating: repeatedValue, count: count ) - return .init(start: initialized, count: count) + return unsafe .init(start: initialized, count: count) } /// Initializes the buffer's memory with the given elements, binding the @@ -800,16 +812,16 @@ extension Unsafe${Mutable}RawBufferPointer { // this can be a precondition since only an invalid argument should be costly _precondition(source.underestimatedCount == 0, "no memory available to initialize from source") - return (it, UnsafeMutableBufferPointer(start: nil, count: 0)) + return (it, unsafe UnsafeMutableBufferPointer(start: nil, count: 0)) } _debugPrecondition( - Int(bitPattern: base) & (MemoryLayout.alignment-1) == 0, + unsafe Int(bitPattern: base) & (MemoryLayout.alignment-1) == 0, "buffer base address must be properly aligned to access S.Element" ) - _internalInvariant(_end != nil) - for p in stride(from: base, + _internalInvariant(unsafe _end != nil) + for unsafe p in unsafe stride(from: base, // only advance to as far as the last element that will fit to: _end._unsafelyUnwrappedUnchecked - elementStride + 1, by: elementStride @@ -817,11 +829,11 @@ 
extension Unsafe${Mutable}RawBufferPointer { // underflow is permitted -- e.g. a sequence into // the spare capacity of an Array buffer guard let x = it.next() else { break } - p.initializeMemory(as: S.Element.self, repeating: x, count: 1) - formIndex(&idx, offsetBy: elementStride) + unsafe p.initializeMemory(as: S.Element.self, repeating: x, count: 1) + unsafe formIndex(&idx, offsetBy: elementStride) } - return (it, UnsafeMutableBufferPointer( + return (it, unsafe UnsafeMutableBufferPointer( start: base.assumingMemoryBound(to: S.Element.self), count: idx / elementStride)) } @@ -858,25 +870,25 @@ extension Unsafe${Mutable}RawBufferPointer { fromContentsOf source: C ) -> UnsafeMutableBufferPointer { let buffer: UnsafeMutableBufferPointer? - buffer = source.withContiguousStorageIfAvailable { + unsafe buffer = source.withContiguousStorageIfAvailable { guard let sourceAddress = $0.baseAddress, !$0.isEmpty else { - return .init(start: nil, count: 0) + return unsafe .init(start: nil, count: 0) } _debugPrecondition( - Int(bitPattern: baseAddress) & (MemoryLayout.alignment-1) == 0, + unsafe Int(bitPattern: baseAddress) & (MemoryLayout.alignment-1) == 0, "buffer base address must be properly aligned to access C.Element" ) _precondition( $0.count * MemoryLayout.stride <= self.count, "buffer cannot contain every element from source collection." ) - let start = baseAddress.unsafelyUnwrapped.initializeMemory( + let start = unsafe baseAddress.unsafelyUnwrapped.initializeMemory( as: C.Element.self, from: sourceAddress, count: $0.count ) - return .init(start: start, count: $0.count) + return unsafe .init(start: start, count: $0.count) } - if let buffer { - return buffer + if let buffer = unsafe buffer { + return unsafe buffer } guard let base = baseAddress else { @@ -884,30 +896,30 @@ extension Unsafe${Mutable}RawBufferPointer { source.isEmpty, "buffer cannot contain every element from source collection." ) - return .init(start: nil, count: 0) + return unsafe .init(start: nil, count: 0) } - _internalInvariant(_end != nil) + _internalInvariant(unsafe _end != nil) _debugPrecondition( - Int(bitPattern: baseAddress) & (MemoryLayout.alignment-1) == 0, + unsafe Int(bitPattern: baseAddress) & (MemoryLayout.alignment-1) == 0, "buffer base address must be properly aligned to access C.Element" ) var iterator = source.makeIterator() - var element = base + var element = unsafe base var initialized = 0 - let end = _end._unsafelyUnwrappedUnchecked - MemoryLayout.stride - while element <= end { + let end = unsafe _end._unsafelyUnwrappedUnchecked - MemoryLayout.stride + while unsafe element <= end { guard let value = iterator.next() else { - return .init(start: .init(base._rawValue), count: initialized) + return unsafe .init(start: .init(base._rawValue), count: initialized) } - element.initializeMemory(as: C.Element.self, to: value) - element = element.advanced(by: MemoryLayout.stride) + unsafe element.initializeMemory(as: C.Element.self, to: value) + unsafe element = element.advanced(by: MemoryLayout.stride) initialized += 1 } _precondition( iterator.next() == nil, "buffer cannot contain every element from source collection." 
) - return .init(start: .init(base._rawValue), count: initialized) + return unsafe .init(start: .init(base._rawValue), count: initialized) } /// Moves every element of an initialized source buffer into the @@ -946,20 +958,20 @@ extension Unsafe${Mutable}RawBufferPointer { fromContentsOf source: UnsafeMutableBufferPointer ) -> UnsafeMutableBufferPointer { guard let sourceAddress = source.baseAddress, !source.isEmpty else { - return .init(start: nil, count: 0) + return unsafe .init(start: nil, count: 0) } _debugPrecondition( - Int(bitPattern: baseAddress) & (MemoryLayout.alignment-1) == 0, + unsafe Int(bitPattern: baseAddress) & (MemoryLayout.alignment-1) == 0, "buffer base address must be properly aligned to access T" ) _precondition( source.count * MemoryLayout.stride <= self.count, "buffer cannot contain every element from source." ) - let initialized = baseAddress.unsafelyUnwrapped.moveInitializeMemory( + let initialized = unsafe baseAddress.unsafelyUnwrapped.moveInitializeMemory( as: T.self, from: sourceAddress, count: source.count ) - return .init(start: initialized, count: source.count) + return unsafe .init(start: initialized, count: source.count) } /// Moves every element of an initialized source buffer slice into the @@ -997,8 +1009,8 @@ extension Unsafe${Mutable}RawBufferPointer { as type: T.Type, fromContentsOf source: Slice> ) -> UnsafeMutableBufferPointer { - let rebased = UnsafeMutableBufferPointer(rebasing: source) - return moveInitializeMemory(as: T.self, fromContentsOf: rebased) + let rebased = unsafe UnsafeMutableBufferPointer(rebasing: source) + return unsafe moveInitializeMemory(as: T.self, fromContentsOf: rebased) } % end # mutable @@ -1028,13 +1040,13 @@ extension Unsafe${Mutable}RawBufferPointer { public func bindMemory( to type: T.Type ) -> Unsafe${Mutable}BufferPointer { - guard let base = _position else { - return Unsafe${Mutable}BufferPointer(start: nil, count: 0) + guard let base = unsafe _position else { + return unsafe Unsafe${Mutable}BufferPointer(start: nil, count: 0) } let capacity = count / MemoryLayout.stride Builtin.bindMemory(base._rawValue, capacity._builtinWordValue, type) - return Unsafe${Mutable}BufferPointer( + return unsafe Unsafe${Mutable}BufferPointer( start: Unsafe${Mutable}Pointer(base._rawValue), count: capacity) } @@ -1087,20 +1099,20 @@ extension Unsafe${Mutable}RawBufferPointer { to type: T.Type, _ body: (_ buffer: Unsafe${Mutable}BufferPointer) throws(E) -> Result ) throws(E) -> Result { - guard let s = _position else { - return try body(.init(start: nil, count: 0)) + guard let s = unsafe _position else { + return try unsafe body(.init(start: nil, count: 0)) } _debugPrecondition( - Int(bitPattern: s) & (MemoryLayout.alignment-1) == 0, + unsafe Int(bitPattern: s) & (MemoryLayout.alignment-1) == 0, "baseAddress must be a properly aligned pointer for type T" ) // initializer ensures _end is nil only when _position is nil. 
- _internalInvariant(_end != nil) - let c = _assumeNonNegative(s.distance(to: _end._unsafelyUnwrappedUnchecked)) + _internalInvariant(unsafe _end != nil) + let c = _assumeNonNegative(unsafe s.distance(to: _end._unsafelyUnwrappedUnchecked)) let n = c / MemoryLayout.stride let binding = Builtin.bindMemory(s._rawValue, n._builtinWordValue, T.self) defer { Builtin.rebindMemory(s._rawValue, binding) } - return try body(.init(start: .init(s._rawValue), count: n)) + return try unsafe body(.init(start: .init(s._rawValue), count: n)) } /// Returns a typed buffer to the memory referenced by this buffer, @@ -1123,14 +1135,14 @@ extension Unsafe${Mutable}RawBufferPointer { public func assumingMemoryBound( to: T.Type ) -> Unsafe${Mutable}BufferPointer { - guard let s = _position else { - return .init(start: nil, count: 0) + guard let s = unsafe _position else { + return unsafe .init(start: nil, count: 0) } // initializer ensures _end is nil only when _position is nil. - _internalInvariant(_end != nil) - let c = _assumeNonNegative(s.distance(to: _end._unsafelyUnwrappedUnchecked)) + _internalInvariant(unsafe _end != nil) + let c = _assumeNonNegative(unsafe s.distance(to: _end._unsafelyUnwrappedUnchecked)) let n = c / MemoryLayout.stride - return .init(start: .init(s._rawValue), count: n) + return unsafe .init(start: .init(s._rawValue), count: n) } % if Mutable: @@ -1138,15 +1150,15 @@ extension Unsafe${Mutable}RawBufferPointer { public func withContiguousMutableStorageIfAvailable( _ body: (inout UnsafeMutableBufferPointer) throws -> R ) rethrows -> R? { - try withMemoryRebound(to: Element.self) { b in - var buffer = b + try unsafe withMemoryRebound(to: Element.self) { b in + var buffer = unsafe b defer { _debugPrecondition( - (b.baseAddress, b.count) == (buffer.baseAddress, buffer.count), + unsafe (b.baseAddress, b.count) == (buffer.baseAddress, buffer.count), "UnsafeMutableRawBufferPointer.withContiguousMutableStorageIfAvailable: replacing the buffer is not allowed" ) } - return try body(&buffer) + return try unsafe body(&buffer) } } @@ -1155,8 +1167,8 @@ extension Unsafe${Mutable}RawBufferPointer { public func withContiguousStorageIfAvailable( _ body: (UnsafeBufferPointer) throws -> R ) rethrows -> R? { - try withMemoryRebound(to: Element.self) { - try body(${ 'UnsafeBufferPointer($0)' if Mutable else '$0' }) + try unsafe withMemoryRebound(to: Element.self) { + try unsafe body(${ 'UnsafeBufferPointer($0)' if Mutable else '$0' }) } } } @@ -1164,9 +1176,10 @@ extension Unsafe${Mutable}RawBufferPointer { @_unavailableInEmbedded extension Unsafe${Mutable}RawBufferPointer: CustomDebugStringConvertible { /// A textual representation of the buffer, suitable for debugging. + @safe public var debugDescription: String { return "${Self}" - + "(start: \(_position.map(String.init(describing:)) ?? "nil"), count: \(count))" + + "(start: \(unsafe _position.map(String.init(describing:)) ?? 
"nil"), count: \(count))" } } @@ -1174,7 +1187,7 @@ extension ${Self} { @available(*, unavailable, message: "use 'Unsafe${Mutable}RawBufferPointer(rebasing:)' to convert a slice into a zero-based raw buffer.") public subscript(bounds: Range) -> ${Self} { - get { return ${Self}(start: nil, count: 0) } + get { return unsafe ${Self}(start: nil, count: 0) } % if mutable: nonmutating set {} % end # mutable @@ -1184,7 +1197,7 @@ extension ${Self} { @available(*, unavailable, message: "use 'UnsafeRawBufferPointer(rebasing:)' to convert a slice into a zero-based raw buffer.") public subscript(bounds: Range) -> UnsafeRawBufferPointer { - get { return UnsafeRawBufferPointer(start: nil, count: 0) } + get { return unsafe UnsafeRawBufferPointer(start: nil, count: 0) } nonmutating set {} } % end # mutable @@ -1218,7 +1231,7 @@ public func withUnsafeMutableBytes( _ body: (UnsafeMutableRawBufferPointer) throws(E) -> Result ) throws(E) -> Result { let pointer = UnsafeMutableRawPointer(Builtin.addressof(&value)) - return try body(.init(start: pointer, count: MemoryLayout.size)) + return try unsafe body(unsafe .init(start: pointer, count: MemoryLayout.size)) } /// ABI: Historical withUnsafeMutableBytes(of:_:) rethrows, @@ -1230,8 +1243,8 @@ func __abi_se0413_withUnsafeMutableBytes( of value: inout T, _ body: (UnsafeMutableRawBufferPointer) throws -> Result ) throws -> Result { - return try withUnsafeMutablePointer(to: &value) { - return try body(UnsafeMutableRawBufferPointer( + return try unsafe withUnsafeMutablePointer(to: &value) { + return try unsafe body(unsafe UnsafeMutableRawBufferPointer( start: $0, count: MemoryLayout.size)) } } @@ -1253,7 +1266,7 @@ public func _withUnprotectedUnsafeMutableBytes< #else let pointer = UnsafeMutableRawPointer(Builtin.addressof(&value)) #endif - return try body(.init(start: pointer, count: MemoryLayout.size)) + return try unsafe body(unsafe .init(start: pointer, count: MemoryLayout.size)) } /// Invokes the given closure with a buffer pointer covering the raw bytes of @@ -1285,7 +1298,7 @@ public func withUnsafeBytes( _ body: (UnsafeRawBufferPointer) throws(E) -> Result ) throws(E) -> Result { let address = UnsafeRawPointer(Builtin.addressof(&value)) - return try body(.init(start: address, count: MemoryLayout.size)) + return try unsafe body(unsafe .init(start: address, count: MemoryLayout.size)) } /// ABI: Historical withUnsafeBytes(of:_:) rethrows, @@ -1297,8 +1310,8 @@ func __abi_se0413_withUnsafeBytes( of value: inout T, _ body: (UnsafeRawBufferPointer) throws -> Result ) throws -> Result { - return try withUnsafePointer(to: &value) { - try body(UnsafeRawBufferPointer(start: $0, count: MemoryLayout.size)) + return try unsafe withUnsafePointer(to: &value) { + try unsafe body(unsafe UnsafeRawBufferPointer(start: $0, count: MemoryLayout.size)) } } @@ -1319,7 +1332,7 @@ public func _withUnprotectedUnsafeBytes< #else let p = UnsafeRawPointer(Builtin.addressof(&value)) #endif - return try body(.init(start: p, count: MemoryLayout.size)) + return try unsafe body(unsafe .init(start: p, count: MemoryLayout.size)) } /// Invokes the given closure with a buffer pointer covering the raw bytes of @@ -1349,7 +1362,7 @@ public func withUnsafeBytes< _ body: (UnsafeRawBufferPointer) throws(E) -> Result ) throws(E) -> Result { let addr = UnsafeRawPointer(Builtin.addressOfBorrow(value)) - return try body(.init(start: addr, count: MemoryLayout.size)) + return try unsafe body(unsafe .init(start: addr, count: MemoryLayout.size)) } /// ABI: Historical withUnsafeBytes(of:_:) rethrows, @@ 
-1362,8 +1375,8 @@ func __abi_se0413_withUnsafeBytes( _ body: (UnsafeRawBufferPointer) throws -> Result ) throws -> Result { let addr = UnsafeRawPointer(Builtin.addressOfBorrow(value)) - let buffer = UnsafeRawBufferPointer(start: addr, count: MemoryLayout.size) - return try body(buffer) + let buffer = unsafe UnsafeRawBufferPointer(start: addr, count: MemoryLayout.size) + return try unsafe body(buffer) } /// Invokes the given closure with a buffer pointer covering the raw bytes of @@ -1383,8 +1396,8 @@ public func _withUnprotectedUnsafeBytes< #else let addr = UnsafeRawPointer(Builtin.addressOfBorrow(value)) #endif - let buffer = UnsafeRawBufferPointer(start: addr, count: MemoryLayout.size) - return try body(buffer) + let buffer = unsafe UnsafeRawBufferPointer(start: addr, count: MemoryLayout.size) + return try unsafe body(buffer) } // ${'Local Variables'}: diff --git a/stdlib/public/core/UnsafeRawPointer.swift b/stdlib/public/core/UnsafeRawPointer.swift index ad6556d7bcae6..e26e13999976b 100644 --- a/stdlib/public/core/UnsafeRawPointer.swift +++ b/stdlib/public/core/UnsafeRawPointer.swift @@ -169,16 +169,18 @@ /// // Accessing 'numberPointer' is undefined behavior. @frozen @unsafe -public struct UnsafeRawPointer: _Pointer { +public struct UnsafeRawPointer: @unsafe _Pointer { public typealias Pointee = UInt8 /// The underlying raw pointer. /// Implements conformance to the public protocol `_Pointer`. + @safe public let _rawValue: Builtin.RawPointer /// Creates a new raw pointer from a builtin raw pointer. @_transparent + @safe public init(_ _rawValue: Builtin.RawPointer) { self._rawValue = _rawValue } @@ -197,6 +199,7 @@ extension UnsafeRawPointer { /// - Parameter other: The typed pointer to convert. @_transparent @_preInverseGenerics + @safe public init(@_nonEphemeral _ other: UnsafePointer) { _rawValue = other._rawValue } @@ -211,8 +214,9 @@ extension UnsafeRawPointer { /// result is `nil`. @_transparent @_preInverseGenerics + @safe public init?(@_nonEphemeral _ other: UnsafePointer?) { - guard let unwrapped = other else { return nil } + guard let unwrapped = unsafe other else { return nil } _rawValue = unwrapped._rawValue } } @@ -226,6 +230,7 @@ extension UnsafeRawPointer { /// /// - Parameter other: The mutable raw pointer to convert. @_transparent + @safe public init(@_nonEphemeral _ other: UnsafeMutableRawPointer) { _rawValue = other._rawValue } @@ -239,8 +244,9 @@ extension UnsafeRawPointer { /// - Parameter other: The mutable raw pointer to convert. If `other` is /// `nil`, the result is `nil`. @_transparent + @safe public init?(@_nonEphemeral _ other: UnsafeMutableRawPointer?) { - guard let unwrapped = other else { return nil } + guard let unwrapped = unsafe other else { return nil } _rawValue = unwrapped._rawValue } } @@ -255,6 +261,7 @@ extension UnsafeRawPointer { /// - Parameter other: The typed pointer to convert. @_transparent @_preInverseGenerics + @safe public init(@_nonEphemeral _ other: UnsafeMutablePointer) { _rawValue = other._rawValue } @@ -269,8 +276,9 @@ extension UnsafeRawPointer { /// result is `nil`. @_transparent @_preInverseGenerics + @safe public init?(@_nonEphemeral _ other: UnsafeMutablePointer?) 
{ - guard let unwrapped = other else { return nil } + guard let unwrapped = unsafe other else { return nil } _rawValue = unwrapped._rawValue } @@ -331,7 +339,7 @@ extension UnsafeRawPointer { to type: T.Type, capacity count: Int ) -> UnsafePointer { Builtin.bindMemory(_rawValue, count._builtinWordValue, type) - return UnsafePointer(_rawValue) + return unsafe UnsafePointer(_rawValue) } /// Executes the given closure while temporarily binding memory to @@ -391,13 +399,13 @@ extension UnsafeRawPointer { capacity count: Int, _ body: (_ pointer: UnsafePointer) throws(E) -> Result ) throws(E) -> Result { - _debugPrecondition( + unsafe _debugPrecondition( Int(bitPattern: self) & (MemoryLayout.alignment-1) == 0, "self must be a properly aligned pointer for type T" ) let binding = Builtin.bindMemory(_rawValue, count._builtinWordValue, T.self) defer { Builtin.rebindMemory(_rawValue, binding) } - return try body(.init(_rawValue)) + return try unsafe body(.init(_rawValue)) } /// Returns a typed pointer to the memory referenced by this pointer, @@ -416,7 +424,7 @@ extension UnsafeRawPointer { public func assumingMemoryBound( to: T.Type ) -> UnsafePointer { - return UnsafePointer(_rawValue) + return unsafe UnsafePointer(_rawValue) } /// Returns a new instance of the given type, constructed from the raw memory @@ -438,11 +446,11 @@ extension UnsafeRawPointer { fromByteOffset offset: Int = 0, as type: T.Type ) -> T { - _debugPrecondition(0 == (UInt(bitPattern: self + offset) + unsafe _debugPrecondition(0 == (UInt(bitPattern: self + offset) & (UInt(MemoryLayout.alignment) - 1)), "load from misaligned raw pointer") - let rawPointer = (self + offset)._rawValue + let rawPointer = unsafe (self + offset)._rawValue #if compiler(>=5.5) && $BuiltinAssumeAlignment let alignedPointer = @@ -484,7 +492,7 @@ extension UnsafeRawPointer { fromByteOffset offset: Int = 0, as type: T.Type ) -> T { - return Builtin.loadRaw((self + offset)._rawValue) + return unsafe Builtin.loadRaw((self + offset)._rawValue) } /// Returns a new instance of the given type, constructed from the raw memory @@ -519,20 +527,20 @@ extension UnsafeRawPointer { _isPOD(T.self), "loadUnaligned only supports loading BitwiseCopyable types." ) - return _withUnprotectedUnsafeTemporaryAllocation(of: T.self, capacity: 1) { - let temporary = $0.baseAddress._unsafelyUnwrappedUnchecked - Builtin.int_memcpy_RawPointer_RawPointer_Int64( + return unsafe _withUnprotectedUnsafeTemporaryAllocation(of: T.self, capacity: 1) { + let temporary = unsafe $0.baseAddress._unsafelyUnwrappedUnchecked + unsafe Builtin.int_memcpy_RawPointer_RawPointer_Int64( temporary._rawValue, (self + offset)._rawValue, UInt64(MemoryLayout.size)._value, /*volatile:*/ false._value ) - return temporary.pointee + return unsafe temporary.pointee } } } -extension UnsafeRawPointer: Strideable { +extension UnsafeRawPointer: @unsafe Strideable { // custom version for raw pointers @_transparent public func advanced(by n: Int) -> UnsafeRawPointer { @@ -775,16 +783,18 @@ extension UnsafeRawPointer { /// // Accessing 'numberPointer' is undefined behavior. @frozen @unsafe -public struct UnsafeMutableRawPointer: _Pointer { +public struct UnsafeMutableRawPointer: @unsafe _Pointer { public typealias Pointee = UInt8 /// The underlying raw pointer. /// Implements conformance to the public protocol `_Pointer`. + @safe public let _rawValue: Builtin.RawPointer /// Creates a new raw pointer from a builtin raw pointer. 
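For context, a hedged sketch of the typed-to-raw pointer conversion initializers annotated `@safe` above, followed by a `load`, which remains an unsafe operation; the variable and its value are placeholders.

```swift
// Convert a typed pointer to a raw pointer, then load the value back out.
var x: Int32 = 42
unsafe withUnsafePointer(to: &x) { typed in
    let raw = UnsafeRawPointer(typed)         // @safe conversion in this patch
    let loaded = unsafe raw.load(as: Int32.self)  // loading stays unsafe
    assert(loaded == 42)
}
```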
@_transparent + @safe public init(_ _rawValue: Builtin.RawPointer) { self._rawValue = _rawValue } @@ -803,6 +813,7 @@ extension UnsafeMutableRawPointer { /// - Parameter other: The typed pointer to convert. @_transparent @_preInverseGenerics + @safe public init(@_nonEphemeral _ other: UnsafeMutablePointer) { _rawValue = other._rawValue } @@ -817,8 +828,9 @@ extension UnsafeMutableRawPointer { /// result is `nil`. @_transparent @_preInverseGenerics + @safe public init?(@_nonEphemeral _ other: UnsafeMutablePointer?) { - guard let unwrapped = other else { return nil } + guard let unwrapped = unsafe other else { return nil } _rawValue = unwrapped._rawValue } @@ -844,7 +856,7 @@ extension UnsafeMutableRawPointer { /// `nil`, the result is `nil`. @_transparent public init?(@_nonEphemeral mutating other: UnsafeRawPointer?) { - guard let unwrapped = other else { return nil } + guard let unwrapped = unsafe other else { return nil } _rawValue = unwrapped._rawValue } @@ -866,6 +878,7 @@ extension UnsafeMutableRawPointer { /// - Returns: A pointer to a newly allocated region of memory. The memory is /// allocated, but not initialized. @inlinable + @safe public static func allocate( byteCount: Int, alignment: Int ) -> UnsafeMutableRawPointer { @@ -945,7 +958,7 @@ extension UnsafeMutableRawPointer { to type: T.Type, capacity count: Int ) -> UnsafeMutablePointer { Builtin.bindMemory(_rawValue, count._builtinWordValue, type) - return UnsafeMutablePointer(_rawValue) + return unsafe UnsafeMutablePointer(_rawValue) } /// Executes the given closure while temporarily binding memory to @@ -1003,13 +1016,13 @@ extension UnsafeMutableRawPointer { capacity count: Int, _ body: (_ pointer: UnsafeMutablePointer) throws(E) -> Result ) throws(E) -> Result { - _debugPrecondition( + unsafe _debugPrecondition( Int(bitPattern: self) & (MemoryLayout.alignment-1) == 0, "self must be a properly aligned pointer for type T" ) let binding = Builtin.bindMemory(_rawValue, count._builtinWordValue, T.self) defer { Builtin.rebindMemory(_rawValue, binding) } - return try body(.init(_rawValue)) + return try unsafe body(.init(_rawValue)) } /// Returns a typed pointer to the memory referenced by this pointer, @@ -1028,7 +1041,7 @@ extension UnsafeMutableRawPointer { public func assumingMemoryBound( to: T.Type ) -> UnsafeMutablePointer { - return UnsafeMutablePointer(_rawValue) + return unsafe UnsafeMutablePointer(_rawValue) } /// Initializes the memory referenced by this pointer with the given value, @@ -1067,7 +1080,7 @@ extension UnsafeMutableRawPointer { ) -> UnsafeMutablePointer { Builtin.bindMemory(_rawValue, (1)._builtinWordValue, type) Builtin.initialize(consume value, _rawValue) - return UnsafeMutablePointer(_rawValue) + return unsafe UnsafeMutablePointer(_rawValue) } /// Initializes the memory referenced by this pointer with the given value, @@ -1113,12 +1126,12 @@ extension UnsafeMutableRawPointer { "UnsafeMutableRawPointer.initializeMemory: negative count") Builtin.bindMemory(_rawValue, count._builtinWordValue, type) - var nextPtr = self + var nextPtr = unsafe self for _ in 0...stride + unsafe nextPtr += MemoryLayout.stride } - return UnsafeMutablePointer(_rawValue) + return unsafe UnsafeMutablePointer(_rawValue) } /// Initializes the memory referenced by this pointer with the values @@ -1172,7 +1185,7 @@ extension UnsafeMutableRawPointer { _debugPrecondition( count >= 0, "UnsafeMutableRawPointer.initializeMemory with negative count") - _debugPrecondition( + unsafe _debugPrecondition( (UnsafeRawPointer(self + count * 
MemoryLayout.stride) <= UnsafeRawPointer(source)) || UnsafeRawPointer(source + count) <= UnsafeRawPointer(self), @@ -1185,7 +1198,7 @@ extension UnsafeMutableRawPointer { // for i in 0..= UnsafeMutableRawPointer(source + count) { // initialize forward from a disjoint or following overlapping range. Builtin.takeArrayFrontToBack( @@ -1250,7 +1263,7 @@ extension UnsafeMutableRawPointer { // (--dst).initialize(to: (--src).move()) // } } - return UnsafeMutablePointer(_rawValue) + return unsafe UnsafeMutablePointer(_rawValue) } /// Returns a new instance of the given type, constructed from the raw memory @@ -1272,11 +1285,11 @@ extension UnsafeMutableRawPointer { fromByteOffset offset: Int = 0, as type: T.Type ) -> T { - _debugPrecondition(0 == (UInt(bitPattern: self + offset) + unsafe _debugPrecondition(0 == (UInt(bitPattern: self + offset) & (UInt(MemoryLayout.alignment) - 1)), "load from misaligned raw pointer") - let rawPointer = (self + offset)._rawValue + let rawPointer = unsafe (self + offset)._rawValue #if compiler(>=5.5) && $BuiltinAssumeAlignment let alignedPointer = @@ -1319,7 +1332,7 @@ extension UnsafeMutableRawPointer { fromByteOffset offset: Int = 0, as type: T.Type ) -> T { - return Builtin.loadRaw((self + offset)._rawValue) + return unsafe Builtin.loadRaw((self + offset)._rawValue) } /// Returns a new instance of the given type, constructed from the raw memory @@ -1354,15 +1367,15 @@ extension UnsafeMutableRawPointer { _isPOD(T.self), "loadUnaligned only supports loading BitwiseCopyable types." ) - return _withUnprotectedUnsafeTemporaryAllocation(of: T.self, capacity: 1) { - let temporary = $0.baseAddress._unsafelyUnwrappedUnchecked - Builtin.int_memcpy_RawPointer_RawPointer_Int64( + return unsafe _withUnprotectedUnsafeTemporaryAllocation(of: T.self, capacity: 1) { + let temporary = unsafe $0.baseAddress._unsafelyUnwrappedUnchecked + unsafe Builtin.int_memcpy_RawPointer_RawPointer_Int64( temporary._rawValue, (self + offset)._rawValue, UInt64(MemoryLayout.size)._value, /*volatile:*/ false._value ) - return temporary.pointee + return unsafe temporary.pointee } } @@ -1406,7 +1419,7 @@ extension UnsafeMutableRawPointer { public func storeBytes( of value: T, toByteOffset offset: Int = 0, as type: T.Type ) { - Builtin.storeRaw(value, (self + offset)._rawValue) + unsafe Builtin.storeRaw(value, (self + offset)._rawValue) } /// Stores the given value's bytes into raw memory at the specified offset. @@ -1456,9 +1469,9 @@ extension UnsafeMutableRawPointer { "storeBytes only supports storing the bytes of BitwiseCopyable types." ) - withUnsafePointer(to: value) { source in + unsafe withUnsafePointer(to: value) { source in // FIXME: to be replaced by _memcpy when conversions are implemented. - Builtin.int_memcpy_RawPointer_RawPointer_Int64( + unsafe Builtin.int_memcpy_RawPointer_RawPointer_Int64( (self + offset)._rawValue, source._rawValue, UInt64(MemoryLayout.size)._value, @@ -1472,10 +1485,11 @@ extension UnsafeMutableRawPointer { // any binary compiled against the stdlib binary for Swift 5.6 and older. 
@_spi(SwiftStdlibLegacyABI) @available(swift, obsoleted: 1) @_silgen_name("$sSv10storeBytes2of12toByteOffset2asyx_SixmtlF") - @usableFromInline func _legacy_se0349_storeBytes( + @usableFromInline + func _legacy_se0349_storeBytes( of value: T, toByteOffset offset: Int = 0, as type: T.Type ) { - _legacy_se0349_storeBytes_internal( + unsafe _legacy_se0349_storeBytes_internal( of: value, toByteOffset: offset, as: T.self ) } @@ -1485,15 +1499,15 @@ extension UnsafeMutableRawPointer { internal func _legacy_se0349_storeBytes_internal( of value: T, toByteOffset offset: Int = 0, as type: T.Type ) { - _debugPrecondition(0 == (UInt(bitPattern: self + offset) + unsafe _debugPrecondition(0 == (UInt(bitPattern: self + offset) & (UInt(MemoryLayout.alignment) - 1)), "storeBytes to misaligned raw pointer") var temp = value - withUnsafeMutablePointer(to: &temp) { source in + unsafe withUnsafeMutablePointer(to: &temp) { source in let rawSrc = UnsafeMutableRawPointer(source)._rawValue // FIXME: to be replaced by _memcpy when conversions are implemented. - Builtin.int_memcpy_RawPointer_RawPointer_Int64( + unsafe Builtin.int_memcpy_RawPointer_RawPointer_Int64( (self + offset)._rawValue, rawSrc, UInt64(MemoryLayout.size)._value, /*volatile:*/ false._value) } @@ -1525,11 +1539,11 @@ extension UnsafeMutableRawPointer { _debugPrecondition( byteCount >= 0, "UnsafeMutableRawPointer.copyMemory with negative count") - _memmove(dest: self, src: source, size: UInt(byteCount)) + unsafe _memmove(dest: self, src: source, size: UInt(byteCount)) } } -extension UnsafeMutableRawPointer: Strideable { +extension UnsafeMutableRawPointer: @unsafe Strideable { // custom version for raw pointers @_transparent public func advanced(by n: Int) -> UnsafeMutableRawPointer { @@ -1622,24 +1636,28 @@ extension UnsafeMutableRawPointer { extension OpaquePointer { @_transparent + @safe public init(@_nonEphemeral _ from: UnsafeMutableRawPointer) { - self._rawValue = from._rawValue + unsafe self._rawValue = from._rawValue } @_transparent + @safe public init?(@_nonEphemeral _ from: UnsafeMutableRawPointer?) { - guard let unwrapped = from else { return nil } - self._rawValue = unwrapped._rawValue + guard let unwrapped = unsafe from else { return nil } + unsafe self._rawValue = unwrapped._rawValue } @_transparent + @safe public init(@_nonEphemeral _ from: UnsafeRawPointer) { - self._rawValue = from._rawValue + unsafe self._rawValue = from._rawValue } @_transparent + @safe public init?(@_nonEphemeral _ from: UnsafeRawPointer?) 
{ - guard let unwrapped = from else { return nil } - self._rawValue = unwrapped._rawValue + guard let unwrapped = unsafe from else { return nil } + unsafe self._rawValue = unwrapped._rawValue } } diff --git a/stdlib/public/core/VarArgs.swift b/stdlib/public/core/VarArgs.swift index bc0df83c91a8c..82b8f5847d185 100644 --- a/stdlib/public/core/VarArgs.swift +++ b/stdlib/public/core/VarArgs.swift @@ -155,11 +155,11 @@ internal typealias _VAInt = Int32 @inlinable // c-abi public func withVaList(_ args: [CVarArg], _ body: (CVaListPointer) -> R) -> R { - let builder = __VaListBuilder() + let builder = unsafe __VaListBuilder() for a in args { - builder.append(a) + unsafe builder.append(a) } - let result = _withVaList(builder, body) + let result = unsafe _withVaList(builder, body) _fixLifetime(args) return result } @@ -170,8 +170,8 @@ internal func _withVaList( _ builder: __VaListBuilder, _ body: (CVaListPointer) -> R ) -> R { - let result = body(builder.va_list()) - _fixLifetime(builder) + let result = unsafe body(builder.va_list()) + unsafe _fixLifetime(builder) return result } @@ -198,14 +198,14 @@ internal func _withVaList( /// `va_list` argument. @inlinable // c-abi public func getVaList(_ args: [CVarArg]) -> CVaListPointer { - let builder = __VaListBuilder() + let builder = unsafe __VaListBuilder() for a in args { - builder.append(a) + unsafe builder.append(a) } // FIXME: Use some Swift equivalent of NS_RETURNS_INNER_POINTER if we get one. - Builtin.retain(builder) - Builtin.autorelease(builder) - return builder.va_list() + unsafe Builtin.retain(builder) + unsafe Builtin.autorelease(builder) + return unsafe builder.va_list() } #endif @@ -217,8 +217,8 @@ public func _encodeBitsAsWords(_ x: T) -> [Int] { _internalInvariant(!result.isEmpty) var tmp = x // FIXME: use UnsafeMutablePointer.assign(from:) instead of memcpy. - _withUnprotectedUnsafeMutablePointer(to: &tmp) { - _memcpy(dest: UnsafeMutablePointer(result._baseAddressIfContiguous!), + unsafe _withUnprotectedUnsafeMutablePointer(to: &tmp) { + unsafe _memcpy(dest: UnsafeMutablePointer(result._baseAddressIfContiguous!), src: $0, size: UInt(MemoryLayout.size)) } @@ -343,44 +343,44 @@ extension UInt8: CVarArg { } } -extension OpaquePointer: CVarArg { +extension OpaquePointer: @unsafe CVarArg { /// Transform `self` into a series of machine words that can be /// appropriately interpreted by C varargs. @inlinable // c-abi public var _cVarArgEncoding: [Int] { - return _encodeBitsAsWords(self) + return unsafe _encodeBitsAsWords(self) } } @_preInverseGenerics -extension UnsafePointer: CVarArg where Pointee: ~Copyable { +extension UnsafePointer: @unsafe CVarArg where Pointee: ~Copyable { /// Transform `self` into a series of machine words that can be /// appropriately interpreted by C varargs. @inlinable // c-abi @_preInverseGenerics public var _cVarArgEncoding: [Int] { - return _encodeBitsAsWords(self) + return unsafe _encodeBitsAsWords(self) } } @_preInverseGenerics -extension UnsafeMutablePointer: CVarArg where Pointee: ~Copyable { +extension UnsafeMutablePointer: @unsafe CVarArg where Pointee: ~Copyable { /// Transform `self` into a series of machine words that can be /// appropriately interpreted by C varargs. 
@inlinable // c-abi @_preInverseGenerics public var _cVarArgEncoding: [Int] { - return _encodeBitsAsWords(self) + return unsafe _encodeBitsAsWords(self) } } #if _runtime(_ObjC) -extension AutoreleasingUnsafeMutablePointer: CVarArg { +extension AutoreleasingUnsafeMutablePointer: @unsafe CVarArg { /// Transform `self` into a series of machine words that can be /// appropriately interpreted by C varargs. @inlinable public var _cVarArgEncoding: [Int] { - return _encodeBitsAsWords(self) + return unsafe _encodeBitsAsWords(self) } } #endif @@ -587,6 +587,7 @@ final internal class __VaListBuilder { // runtime. @_fixed_layout @usableFromInline // c-abi +@unsafe final internal class __VaListBuilder { @inlinable // c-abi @@ -621,7 +622,7 @@ final internal class __VaListBuilder { #endif // Write the argument's value itself. - appendWords(arg._cVarArgEncoding) + unsafe appendWords(arg._cVarArgEncoding) } // NB: This function *cannot* be @inlinable because it expects to project @@ -633,9 +634,9 @@ final internal class __VaListBuilder { internal func va_list() -> CVaListPointer { // Use Builtin.addressof to emphasize that we are deliberately escaping this // pointer and assuming it is safe to do so. - let emptyAddr = UnsafeMutablePointer( + let emptyAddr = unsafe UnsafeMutablePointer( Builtin.addressof(&__VaListBuilder.alignedStorageForEmptyVaLists)) - return CVaListPointer(_fromUnsafeMutablePointer: storage ?? emptyAddr) + return unsafe CVaListPointer(_fromUnsafeMutablePointer: storage ?? emptyAddr) } // Manage storage that is accessed as Words @@ -644,27 +645,27 @@ final internal class __VaListBuilder { @inlinable // c-abi internal func appendWords(_ words: [Int]) { - let newCount = count + words.count - if newCount > allocated { - let oldAllocated = allocated - let oldStorage = storage - let oldCount = count - - allocated = max(newCount, allocated * 2) - let newStorage = allocStorage(wordCount: allocated) - storage = newStorage + let newCount = unsafe count + words.count + if unsafe newCount > allocated { + let oldAllocated = unsafe allocated + let oldStorage = unsafe storage + let oldCount = unsafe count + + unsafe allocated = unsafe max(newCount, allocated * 2) + let newStorage = unsafe allocStorage(wordCount: allocated) + unsafe storage = unsafe newStorage // count is updated below - if let allocatedOldStorage = oldStorage { - newStorage.moveInitialize(from: allocatedOldStorage, count: oldCount) - deallocStorage(wordCount: oldAllocated, storage: allocatedOldStorage) + if let allocatedOldStorage = unsafe oldStorage { + unsafe newStorage.moveInitialize(from: allocatedOldStorage, count: oldCount) + unsafe deallocStorage(wordCount: oldAllocated, storage: allocatedOldStorage) } } - let allocatedStorage = storage! + let allocatedStorage = unsafe storage! 
for word in words { - allocatedStorage[count] = word - count += 1 + unsafe allocatedStorage[count] = word + unsafe count += 1 } } @@ -672,15 +673,15 @@ final internal class __VaListBuilder { internal func rawSizeAndAlignment( _ wordCount: Int ) -> (Builtin.Word, Builtin.Word) { - return ((wordCount * MemoryLayout.stride)._builtinWordValue, + return unsafe ((wordCount * MemoryLayout.stride)._builtinWordValue, requiredAlignmentInBytes._builtinWordValue) } @inlinable // c-abi internal func allocStorage(wordCount: Int) -> UnsafeMutablePointer { - let (rawSize, rawAlignment) = rawSizeAndAlignment(wordCount) + let (rawSize, rawAlignment) = unsafe rawSizeAndAlignment(wordCount) let rawStorage = Builtin.allocRaw(rawSize, rawAlignment) - return UnsafeMutablePointer(rawStorage) + return unsafe UnsafeMutablePointer(rawStorage) } @usableFromInline // c-abi @@ -688,14 +689,14 @@ final internal class __VaListBuilder { wordCount: Int, storage: UnsafeMutablePointer ) { - let (rawSize, rawAlignment) = rawSizeAndAlignment(wordCount) + let (rawSize, rawAlignment) = unsafe rawSizeAndAlignment(wordCount) Builtin.deallocRaw(storage._rawValue, rawSize, rawAlignment) } @inlinable // c-abi deinit { - if let allocatedStorage = storage { - deallocStorage(wordCount: allocated, storage: allocatedStorage) + if let allocatedStorage = unsafe storage { + unsafe deallocStorage(wordCount: allocated, storage: allocatedStorage) } } diff --git a/test/Concurrency/isolated_conformance.swift b/test/Concurrency/isolated_conformance.swift new file mode 100644 index 0000000000000..0cea3701f3035 --- /dev/null +++ b/test/Concurrency/isolated_conformance.swift @@ -0,0 +1,80 @@ +// RUN: %target-swift-frontend -typecheck -verify -target %target-swift-5.1-abi-triple -swift-version 6 -enable-experimental-feature IsolatedConformances %s + +// REQUIRES: swift_feature_IsolatedConformances + +protocol P { + func f() // expected-note 2{{mark the protocol requirement 'f()' 'async' to allow actor-isolated conformances}} +} + +// expected-note@+3{{add '@preconcurrency' to the 'P' conformance to defer isolation checking to run time}}{{25-25=@preconcurrency }} +// expected-note@+2{{add 'isolated' to the 'P' conformance to restrict it to main actor-isolated code}}{{25-25=isolated }} +@MainActor +class CWithNonIsolated: P { + func f() { } // expected-error{{main actor-isolated instance method 'f()' cannot be used to satisfy nonisolated requirement from protocol 'P'}} + // expected-note@-1{{add 'nonisolated' to 'f()' to make this instance method not isolated to the actor}} +} + +actor SomeActor { } + +// Isolated conformances need a global-actor-constrained type. +class CNonIsolated: isolated P { // expected-error{{isolated conformance is only permitted on global-actor-isolated types}} + func f() { } +} + +extension SomeActor: isolated P { // expected-error{{isolated conformance is only permitted on global-actor-isolated types}} + nonisolated func f() { } +} + +@globalActor +struct SomeGlobalActor { + static let shared = SomeActor() +} + +// Isolation of the function needs to match that of the enclosing type. 
+@MainActor +class CMismatchedIsolation: isolated P { + @SomeGlobalActor func f() { } // expected-error{{global actor 'SomeGlobalActor'-isolated instance method 'f()' cannot be used to satisfy nonisolated requirement from protocol 'P'}} +} + +@MainActor +class C: isolated P { + func f() { } // okay +} + +// Associated conformances with isolation + +protocol Q { + associatedtype A: P +} + +// expected-error@+2{{conformance of 'SMissingIsolation' to 'Q' depends on main actor-isolated conformance of 'C' to 'P'; mark it as 'isolated'}}{{27-27=isolated }} +@MainActor +struct SMissingIsolation: Q { + typealias A = C +} + +struct PWrapper: P { + func f() { } +} + +// expected-error@+2{{conformance of 'SMissingIsolationViaWrapper' to 'Q' depends on main actor-isolated conformance of 'C' to 'P'; mark it as 'isolated'}} +@MainActor +struct SMissingIsolationViaWrapper: Q { + typealias A = PWrapper +} + +@SomeGlobalActor +class C2: isolated P { + func f() { } +} + +@MainActor +struct S: isolated Q { + typealias A = C +} + +// expected-error@+2{{main actor-isolated conformance of 'SMismatchedActors' to 'Q' cannot depend on global actor 'SomeGlobalActor'-isolated conformance of 'C2' to 'P'}} +@MainActor +struct SMismatchedActors: isolated Q { + typealias A = C2 +} diff --git a/test/Driver/loaded_module_trace.swift b/test/Driver/loaded_module_trace.swift index 3c50b864040e9..d38d90a09c82d 100644 --- a/test/Driver/loaded_module_trace.swift +++ b/test/Driver/loaded_module_trace.swift @@ -25,8 +25,8 @@ // CHECK: "swiftmodulesDetailedInfo":[ // CHECK-DAG: {"name":"Module2","path":"{{[^"]*\\[/\\]}}Module2.swiftmodule","isImportedDirectly":true,"supportsLibraryEvolution":false,"strictMemorySafety":false} -// CHECK-DAG: {"name":"Swift","path":"{{[^"]*\\[/\\]}}Swift.swiftmodule{{(\\[/\\][^"]+[.]swiftmodule)?}}","isImportedDirectly":true,"supportsLibraryEvolution":true,"strictMemorySafety":false} -// CHECK-DAG: {"name":"SwiftOnoneSupport","path":"{{[^"]*\\[/\\]}}SwiftOnoneSupport.swiftmodule{{(\\[/\\][^"]+[.]swiftmodule)?}}","isImportedDirectly":true,"supportsLibraryEvolution":true,"strictMemorySafety":false} +// CHECK-DAG: {"name":"Swift","path":"{{[^"]*\\[/\\]}}Swift.swiftmodule{{(\\[/\\][^"]+[.]swiftmodule)?}}","isImportedDirectly":true,"supportsLibraryEvolution":true,"strictMemorySafety":true} +// CHECK-DAG: {"name":"SwiftOnoneSupport","path":"{{[^"]*\\[/\\]}}SwiftOnoneSupport.swiftmodule{{(\\[/\\][^"]+[.]swiftmodule)?}}","isImportedDirectly":true,"supportsLibraryEvolution":true,"strictMemorySafety":true} // CHECK-DAG: {"name":"Module","path":"{{[^"]*\\[/\\]}}Module.swiftmodule","isImportedDirectly":false,"supportsLibraryEvolution":false,"strictMemorySafety":false} // CHECK: ], // CHECK: "swiftmacros":[ diff --git a/test/Driver/loaded_module_trace_multifile.swift b/test/Driver/loaded_module_trace_multifile.swift index 4943aaa499677..9c5fb94e56306 100644 --- a/test/Driver/loaded_module_trace_multifile.swift +++ b/test/Driver/loaded_module_trace_multifile.swift @@ -22,8 +22,8 @@ // CHECK: ] // CHECK: "swiftmodulesDetailedInfo":[ // CHECK-DAG: {"name":"Module2","path":"{{[^"]*\\[/\\]}}Module2.swiftmodule","isImportedDirectly":true,"supportsLibraryEvolution":false,"strictMemorySafety":false} -// CHECK-DAG: {"name":"Swift","path":"{{[^"]*\\[/\\]}}Swift.swiftmodule{{(\\[/\\][^"]+[.]swiftmodule)?}}","isImportedDirectly":true,"supportsLibraryEvolution":true,"strictMemorySafety":false} -// CHECK-DAG: 
{"name":"SwiftOnoneSupport","path":"{{[^"]*\\[/\\]}}SwiftOnoneSupport.swiftmodule{{(\\[/\\][^"]+[.]swiftmodule)?}}","isImportedDirectly":true,"supportsLibraryEvolution":true,"strictMemorySafety":false} +// CHECK-DAG: {"name":"Swift","path":"{{[^"]*\\[/\\]}}Swift.swiftmodule{{(\\[/\\][^"]+[.]swiftmodule)?}}","isImportedDirectly":true,"supportsLibraryEvolution":true,"strictMemorySafety":true} +// CHECK-DAG: {"name":"SwiftOnoneSupport","path":"{{[^"]*\\[/\\]}}SwiftOnoneSupport.swiftmodule{{(\\[/\\][^"]+[.]swiftmodule)?}}","isImportedDirectly":true,"supportsLibraryEvolution":true,"strictMemorySafety":true} // CHECK-DAG: {"name":"Module","path":"{{[^"]*\\[/\\]}}Module.swiftmodule","isImportedDirectly":true,"supportsLibraryEvolution":false,"strictMemorySafety":false} // CHECK: ] // CHECK: } diff --git a/test/Interop/Cxx/class/safe-interop-mode.swift b/test/Interop/Cxx/class/safe-interop-mode.swift index 4b3a47af0b762..b8ffb5ba68053 100644 --- a/test/Interop/Cxx/class/safe-interop-mode.swift +++ b/test/Interop/Cxx/class/safe-interop-mode.swift @@ -1,11 +1,9 @@ // RUN: rm -rf %t // RUN: split-file %s %t -// RUN: %target-swift-frontend -typecheck -verify -I %swift_src_root/lib/ClangImporter/SwiftBridging -Xcc -std=c++20 -I %t/Inputs %t/test.swift -enable-experimental-feature AllowUnsafeAttribute -enable-experimental-feature WarnUnsafe -enable-experimental-feature LifetimeDependence -cxx-interoperability-mode=default -diagnostic-style llvm 2>&1 +// RUN: %target-swift-frontend -typecheck -verify -I %swift_src_root/lib/ClangImporter/SwiftBridging -Xcc -std=c++20 -I %t/Inputs %t/test.swift -strict-memory-safety -enable-experimental-feature LifetimeDependence -cxx-interoperability-mode=default -diagnostic-style llvm 2>&1 // REQUIRES: objc_interop -// REQUIRES: swift_feature_AllowUnsafeAttribute -// REQUIRES: swift_feature_WarnUnsafe // REQUIRES: swift_feature_LifetimeDependence //--- Inputs/module.modulemap diff --git a/test/SILOptimizer/mutable_span_bounds_check_tests.swift b/test/SILOptimizer/mutable_span_bounds_check_tests.swift index 1f1d85f1fcd56..41dc480bc1b58 100644 --- a/test/SILOptimizer/mutable_span_bounds_check_tests.swift +++ b/test/SILOptimizer/mutable_span_bounds_check_tests.swift @@ -1,11 +1,10 @@ // RUN: %empty-directory(%t) -// RUN: %target-swift-frontend -emit-module-path %t/SpanExtras.swiftmodule %S/Inputs/SpanExtras.swift -enable-builtin-module -enable-experimental-feature LifetimeDependence -enable-experimental-feature AllowUnsafeAttribute -O +// RUN: %target-swift-frontend -emit-module-path %t/SpanExtras.swiftmodule %S/Inputs/SpanExtras.swift -enable-builtin-module -enable-experimental-feature LifetimeDependence -O // RUN: %target-swift-frontend -I %t -O -emit-sil %s -disable-availability-checking | %FileCheck %s --check-prefix=CHECK-SIL // RUN: %target-swift-frontend -I %t -O -emit-ir %s -disable-availability-checking | %FileCheck %s --check-prefix=CHECK-IR // REQUIRES: swift_in_compiler // REQUIRES: swift_feature_LifetimeDependence -// REQUIRES: swift_feature_AllowUnsafeAttribute // REQUIRES: swift_stdlib_no_asserts, optimized_stdlib diff --git a/test/Unsafe/interface_printing.swift b/test/Unsafe/interface_printing.swift index bdba93169260f..0fd0e8c4de578 100644 --- a/test/Unsafe/interface_printing.swift +++ b/test/Unsafe/interface_printing.swift @@ -1,10 +1,8 @@ // RUN: %empty-directory(%t) -// RUN: %target-swift-frontend -swift-version 5 -enable-library-evolution -module-name unsafe -emit-module -o %t/unsafe.swiftmodule -emit-module-interface-path - %s 
-enable-experimental-feature AllowUnsafeAttribute | %FileCheck %s +// RUN: %target-swift-frontend -swift-version 5 -enable-library-evolution -module-name unsafe -emit-module -o %t/unsafe.swiftmodule -emit-module-interface-path - %s | %FileCheck %s -// REQUIRES: swift_feature_AllowUnsafeAttribute - -// CHECK: #if compiler(>=5.3) && $AllowUnsafeAttribute +// CHECK: #if compiler(>=5.3) && $MemorySafetyAttributes // CHECK: @unsafe public func testFunction() // CHECK: #else // CHECK: public func testFunction() diff --git a/test/Unsafe/module-interface.swift b/test/Unsafe/module-interface.swift index 138526a71d517..fe7a78cf7699b 100644 --- a/test/Unsafe/module-interface.swift +++ b/test/Unsafe/module-interface.swift @@ -1,11 +1,8 @@ -// RUN: %target-swift-emit-module-interface(%t.swiftinterface) %s -module-name UserModule -enable-experimental-feature AllowUnsafeAttribute -enable-experimental-feature WarnUnsafe +// RUN: %target-swift-emit-module-interface(%t.swiftinterface) %s -module-name UserModule -strict-memory-safety // RUN: %target-swift-typecheck-module-from-interface(%t.swiftinterface) -module-name UserModule // RUN: %FileCheck %s < %t.swiftinterface -// REQUIRES: swift_feature_AllowUnsafeAttribute -// REQUIRES: swift_feature_WarnUnsafe - -// CHECK: #if compiler(>=5.3) && $AllowUnsafeAttribute +// CHECK: #if compiler(>=5.3) && $MemorySafetyAttributes // CHECK: @unsafe public func getIntUnsafely() -> Swift.Int // CHECK: #else // CHECK: public func getIntUnsafely() -> Swift.Int @@ -25,7 +22,7 @@ public protocol P { // CHECK: public struct X : @unsafe UserModule.P public struct X: @unsafe P { -// CHECK: #if compiler(>=5.3) && $AllowUnsafeAttribute +// CHECK: #if compiler(>=5.3) && $MemorySafetyAttributes // CHECK: @unsafe public func f() // CHECK: #else // CHECK: public func f() diff --git a/test/Unsafe/module-trace.swift b/test/Unsafe/module-trace.swift index 7beea9c766657..43bafb56fa8e1 100644 --- a/test/Unsafe/module-trace.swift +++ b/test/Unsafe/module-trace.swift @@ -1,14 +1,11 @@ // RUN: %empty-directory(%t) -// RUN: %target-swift-frontend -emit-module-path %t/unsafe_swift_decls.swiftmodule %S/Inputs/unsafe_swift_decls.swift -enable-experimental-feature AllowUnsafeAttribute -// RUN: %target-swift-frontend -emit-module-path %t/safe_swift_decls.swiftmodule %S/Inputs/safe_swift_decls.swift -enable-experimental-feature AllowUnsafeAttribute -enable-experimental-feature WarnUnsafe +// RUN: %target-swift-frontend -emit-module-path %t/unsafe_swift_decls.swiftmodule %S/Inputs/unsafe_swift_decls.swift +// RUN: %target-swift-frontend -emit-module-path %t/safe_swift_decls.swiftmodule %S/Inputs/safe_swift_decls.swift -strict-memory-safety -// RUN: %target-typecheck-verify-swift -enable-experimental-feature AllowUnsafeAttribute -enable-experimental-feature WarnUnsafe -I %S/Inputs -I %t -emit-loaded-module-trace-path %t/unsafe.trace +// RUN: %target-typecheck-verify-swift -strict-memory-safety -I %S/Inputs -I %t -emit-loaded-module-trace-path %t/unsafe.trace // RUN: %FileCheck -check-prefix TRACE %s < %t/unsafe.trace -// REQUIRES: swift_feature_AllowUnsafeAttribute -// REQUIRES: swift_feature_WarnUnsafe - import unsafe_decls import unsafe_swift_decls import safe_swift_decls @@ -17,5 +14,7 @@ import safe_swift_decls // TRACE: "strictMemorySafety":true // Dependencies -// TRACE: "safe_swift_decls"{{.*}}"strictMemorySafety":true -// TRACE: "unsafe_swift_decls"{{.*}}"strictMemorySafety":false +// TRACE-SAME: "safe_swift_decls" +// TRACE-SAME: "strictMemorySafety":true +// TRACE-SAME: "unsafe_swift_decls" 
+// TRACE-SAME: "strictMemorySafety":false diff --git a/test/Unsafe/safe.swift b/test/Unsafe/safe.swift index 6a96154cdcbac..f1d0633399cfd 100644 --- a/test/Unsafe/safe.swift +++ b/test/Unsafe/safe.swift @@ -1,7 +1,10 @@ -// RUN: %target-typecheck-verify-swift -enable-experimental-feature AllowUnsafeAttribute -enable-experimental-feature WarnUnsafe -print-diagnostic-groups +// RUN: %target-typecheck-verify-swift -strict-memory-safety -print-diagnostic-groups + +// The feature flag should be enabled. +#if !hasFeature(StrictMemorySafety) +#error("Strict memory safety is not enabled!") +#endif -// REQUIRES: swift_feature_AllowUnsafeAttribute -// REQUIRES: swift_feature_WarnUnsafe @unsafe func unsafeFunction() { } @@ -95,6 +98,15 @@ func testUnsafeAsSequenceForEach() { for unsafe _ in unsafe uas { } // okay } +func testForInUnsafeAmbiguity(_ integers: [Int]) { + for unsafe in integers { + _ = unsafe + } + for unsafe: Int in integers { + _ = unsafe + } +} + struct UnsafeIterator: @unsafe IteratorProtocol { @unsafe mutating func next() -> Int? { nil } } @@ -173,7 +185,8 @@ struct MyArray { } extension UnsafeBufferPointer { - @safe var safeCount: Int { unsafe count } + @unsafe var unsafeCount: Int { 17 } + @safe var safeCount: Int { unsafe unsafeCount } } func testMyArray(ints: MyArray) { @@ -182,6 +195,6 @@ func testMyArray(ints: MyArray) { _ = unsafe bufferCopy print(buffer.safeCount) - unsafe print(buffer.baseAddress!) + unsafe print(buffer.unsafeCount) } } diff --git a/test/Unsafe/safe_argument_suppression.swift b/test/Unsafe/safe_argument_suppression.swift index 6f3a9b5aef1d7..1a571607a95ef 100644 --- a/test/Unsafe/safe_argument_suppression.swift +++ b/test/Unsafe/safe_argument_suppression.swift @@ -1,7 +1,4 @@ -// RUN: %target-typecheck-verify-swift -enable-experimental-feature AllowUnsafeAttribute -enable-experimental-feature WarnUnsafe -print-diagnostic-groups - -// REQUIRES: swift_feature_AllowUnsafeAttribute -// REQUIRES: swift_feature_WarnUnsafe +// RUN: %target-typecheck-verify-swift -strict-memory-safety -print-diagnostic-groups @unsafe class NotSafe { diff --git a/test/Unsafe/unsafe-suppression.swift b/test/Unsafe/unsafe-suppression.swift index c47dfa2853e4f..eb3c108cb7c1d 100644 --- a/test/Unsafe/unsafe-suppression.swift +++ b/test/Unsafe/unsafe-suppression.swift @@ -1,7 +1,4 @@ -// RUN: %target-typecheck-verify-swift -enable-experimental-feature AllowUnsafeAttribute -enable-experimental-feature WarnUnsafe -print-diagnostic-groups - -// REQUIRES: swift_feature_AllowUnsafeAttribute -// REQUIRES: swift_feature_WarnUnsafe +// RUN: %target-typecheck-verify-swift -strict-memory-safety -print-diagnostic-groups @unsafe func iAmUnsafe() { } diff --git a/test/Unsafe/unsafe.swift b/test/Unsafe/unsafe.swift index 74f503bca0f23..18c644da82366 100644 --- a/test/Unsafe/unsafe.swift +++ b/test/Unsafe/unsafe.swift @@ -1,13 +1,10 @@ // RUN: %empty-directory(%t) -// RUN: %target-swift-frontend -emit-module-path %t/unsafe_swift_decls.swiftmodule %S/Inputs/unsafe_swift_decls.swift -enable-experimental-feature AllowUnsafeAttribute +// RUN: %target-swift-frontend -emit-module-path %t/unsafe_swift_decls.swiftmodule %S/Inputs/unsafe_swift_decls.swift -// RUN: %target-typecheck-verify-swift -enable-experimental-feature AllowUnsafeAttribute -enable-experimental-feature WarnUnsafe -I %t -print-diagnostic-groups +// RUN: %target-typecheck-verify-swift -strict-memory-safety -I %t -print-diagnostic-groups // Make sure everything compiles without error when unsafe code is allowed. 
-// RUN: %target-swift-frontend -typecheck -enable-experimental-feature AllowUnsafeAttribute %s -I %t - -// REQUIRES: swift_feature_AllowUnsafeAttribute -// REQUIRES: swift_feature_WarnUnsafe +// RUN: %target-swift-frontend -typecheck %s -I %t import unsafe_swift_decls diff --git a/test/Unsafe/unsafe_c_imports.swift b/test/Unsafe/unsafe_c_imports.swift index abcec88068140..4ac9fe64ca452 100644 --- a/test/Unsafe/unsafe_c_imports.swift +++ b/test/Unsafe/unsafe_c_imports.swift @@ -1,7 +1,4 @@ -// RUN: %target-typecheck-verify-swift -enable-experimental-feature AllowUnsafeAttribute -enable-experimental-feature WarnUnsafe -I %S/Inputs - -// REQUIRES: swift_feature_AllowUnsafeAttribute -// REQUIRES: swift_feature_WarnUnsafe +// RUN: %target-typecheck-verify-swift -strict-memory-safety -I %S/Inputs import unsafe_decls diff --git a/test/Unsafe/unsafe_command_line.swift b/test/Unsafe/unsafe_command_line.swift index 881da443ac505..fdb9d2651db76 100644 --- a/test/Unsafe/unsafe_command_line.swift +++ b/test/Unsafe/unsafe_command_line.swift @@ -1,6 +1,4 @@ -// RUN: %target-swift-frontend -typecheck -enable-experimental-feature WarnUnsafe -Ounchecked -disable-access-control %s 2>&1 | %FileCheck %s - -// REQUIRES: swift_feature_WarnUnsafe +// RUN: %target-swift-frontend -typecheck -strict-memory-safety -Ounchecked -disable-access-control %s 2>&1 | %FileCheck %s // CHECK: warning: '-Ounchecked' is not memory-safe // CHECK: warning: '-disable-access-control' is not memory-safe diff --git a/test/Unsafe/unsafe_concurrency.swift b/test/Unsafe/unsafe_concurrency.swift index 64952c6c848b0..0820ab11c5ce3 100644 --- a/test/Unsafe/unsafe_concurrency.swift +++ b/test/Unsafe/unsafe_concurrency.swift @@ -1,12 +1,10 @@ // RUN: %empty-directory(%t) -// RUN: %target-swift-frontend -emit-module-path %t/unsafe_swift_decls.swiftmodule %S/Inputs/unsafe_swift_decls.swift -enable-experimental-feature AllowUnsafeAttribute -enable-experimental-feature AllowUnsafeAttribute +// RUN: %target-swift-frontend -emit-module-path %t/unsafe_swift_decls.swiftmodule %S/Inputs/unsafe_swift_decls.swift -// RUN: %target-typecheck-verify-swift -enable-experimental-feature WarnUnsafe -enable-experimental-feature StrictConcurrency -enable-experimental-feature AllowUnsafeAttribute -I %t +// RUN: %target-typecheck-verify-swift -strict-memory-safety -enable-experimental-feature StrictConcurrency -I %t // REQUIRES: concurrency // REQUIRES: swift_feature_StrictConcurrency -// REQUIRES: swift_feature_WarnUnsafe -// REQUIRES: swift_feature_AllowUnsafeAttribute @preconcurrency import unsafe_swift_decls // expected-warning{{@preconcurrency import is not memory-safe because it can silently introduce data races}} diff --git a/test/Unsafe/unsafe_disallowed.swift b/test/Unsafe/unsafe_disallowed.swift deleted file mode 100644 index 3874de05a11e0..0000000000000 --- a/test/Unsafe/unsafe_disallowed.swift +++ /dev/null @@ -1,5 +0,0 @@ -// RUN: %target-typecheck-verify-swift - -@unsafe func f() { } -// expected-error@-1{{attribute requires '-enable-experimental-feature AllowUnsafeAttribute'}} - diff --git a/test/Unsafe/unsafe_feature.swift b/test/Unsafe/unsafe_feature.swift new file mode 100644 index 0000000000000..1ec94a5fffe51 --- /dev/null +++ b/test/Unsafe/unsafe_feature.swift @@ -0,0 +1,19 @@ +// RUN: %target-typecheck-verify-swift + + +// Can use @unsafe and @safe without strict memory safety being enabled. 
+@unsafe func f() { } +@safe func g(_: UnsafeRawPointer) { } + +protocol P { + func f() +} + +struct X: @unsafe P { + @unsafe func f() { } +} + +// The feature flag is not enabled, though. +#if hasFeature(StrictMemorySafety) +#error("Strict memory safety should not be enabled!") +#endif diff --git a/test/Unsafe/unsafe_imports.swift b/test/Unsafe/unsafe_imports.swift index e113e847529b2..f02db8b56e505 100644 --- a/test/Unsafe/unsafe_imports.swift +++ b/test/Unsafe/unsafe_imports.swift @@ -1,10 +1,7 @@ // RUN: %empty-directory(%t) -// RUN: %target-swift-frontend -emit-module-path %t/unsafe_swift_decls.swiftmodule %S/Inputs/unsafe_swift_decls.swift -enable-experimental-feature AllowUnsafeAttribute +// RUN: %target-swift-frontend -emit-module-path %t/unsafe_swift_decls.swiftmodule %S/Inputs/unsafe_swift_decls.swift -// RUN: %target-typecheck-verify-swift -enable-experimental-feature AllowUnsafeAttribute -enable-experimental-feature WarnUnsafe -I %S/Inputs -I %t - -// REQUIRES: swift_feature_AllowUnsafeAttribute -// REQUIRES: swift_feature_WarnUnsafe +// RUN: %target-typecheck-verify-swift -strict-memory-safety -I %S/Inputs -I %t import unsafe_decls import unsafe_swift_decls diff --git a/test/Unsafe/unsafe_in_unsafe.swift b/test/Unsafe/unsafe_in_unsafe.swift index 63c5f69f151f2..c0e1420082d6d 100644 --- a/test/Unsafe/unsafe_in_unsafe.swift +++ b/test/Unsafe/unsafe_in_unsafe.swift @@ -1,7 +1,4 @@ -// RUN: %target-typecheck-verify-swift -enable-experimental-feature AllowUnsafeAttribute -print-diagnostic-groups - -// REQUIRES: swift_feature_AllowUnsafeAttribute - +// RUN: %target-typecheck-verify-swift -print-diagnostic-groups protocol P { } diff --git a/test/Unsafe/unsafe_stdlib.swift b/test/Unsafe/unsafe_stdlib.swift index da7fe3d7d087a..08bd0e26f859e 100644 --- a/test/Unsafe/unsafe_stdlib.swift +++ b/test/Unsafe/unsafe_stdlib.swift @@ -1,10 +1,7 @@ -// RUN: %target-typecheck-verify-swift -enable-experimental-feature WarnUnsafe +// RUN: %target-typecheck-verify-swift -strict-memory-safety // Make sure everything compiles without error when unsafe code is allowed.
-// RUN: %target-swift-frontend -typecheck -enable-experimental-feature AllowUnsafeAttribute -warnings-as-errors %s - -// REQUIRES: swift_feature_AllowUnsafeAttribute -// REQUIRES: swift_feature_WarnUnsafe +// RUN: %target-swift-frontend -typecheck -warnings-as-errors %s func test( x: OpaquePointer, diff --git a/test/lit.swift-features.cfg.inc b/test/lit.swift-features.cfg.inc index 01ca97e1fb87e..44457af0e0368 100644 --- a/test/lit.swift-features.cfg.inc +++ b/test/lit.swift-features.cfg.inc @@ -31,5 +31,6 @@ def language_feature(feature_name, enabled): #define UPCOMING_FEATURE(FeatureName, SENumber, Version) language_feature(#FeatureName, True) #define EXPERIMENTAL_FEATURE(FeatureName, AvailableInProd) language_feature(#FeatureName, #AvailableInProd == "true") #define LANGUAGE_FEATURE(FeatureName, SENumber, Description) language_feature(#FeatureName, True) +#define OPTIONAL_LANGUAGE_FEATURE(FeatureName, SENumber, Description) language_feature(#FeatureName, True) #include diff --git a/userdocs/diagnostic_groups/Unsafe.md b/userdocs/diagnostic_groups/Unsafe.md index e9ac00f817717..b22e5b7833b82 100644 --- a/userdocs/diagnostic_groups/Unsafe.md +++ b/userdocs/diagnostic_groups/Unsafe.md @@ -23,3 +23,14 @@ This diagnostic group includes warnings that identify the use of unsafe language return Int(bitPattern: malloc(size)) } ``` + +These warnings can be suppressed using an `unsafe` expression, which acknowledges the presence of memory-unsafe code. For example: + +```swift +func evilMalloc(size: Int) -> Int { + return unsafe Int(bitPattern: malloc(size)) +} +``` + +The warnings produced by this diagnostic group can be enabled with the compiler +flag `-strict-memory-safety`. \ No newline at end of file
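To illustrate the pattern that the `Unsafe.md` documentation above describes, here is a minimal sketch, assuming a compiler that accepts `-strict-memory-safety`; `copyBytes(of:)` is a hypothetical helper written for this example, not a standard library API:

```swift
// Build sketch: swiftc -strict-memory-safety main.swift
// 'copyBytes(of:)' is a hypothetical helper used only for illustration.
func copyBytes(of value: Int) -> [UInt8] {
  // Both the call to withUnsafeBytes(of:) and the use of the raw buffer are
  // memory-unsafe; each is acknowledged with an 'unsafe' expression, so no
  // 'Unsafe' group warning is emitted here.
  return unsafe withUnsafeBytes(of: value) { buffer in
    unsafe Array(buffer)
  }
}

// Callers see an ordinary safe signature and need no 'unsafe' of their own.
print(copyBytes(of: 42).count) // 8 on a 64-bit platform
```

Because the unsafety is acknowledged inside the helper, code that calls it compiles without warnings under `-strict-memory-safety`, mirroring how the standard library changes in this diff wrap their own unsafe internals in `unsafe` expressions.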