
Commit 4da94a9

shoumikhin authored and facebook-github-bot committed
Further Module improvements. (#1622)
Summary:
Pull Request resolved: #1622

- Merge Runner and Module
- Init runtime in constructor
- Allow injecting event tracer
- Allow providing verification mode at load
- Utilize check and unwrap macros for brevity

Reviewed By: dbort

Differential Revision: D52846711

fbshipit-source-id: fc02beb18ddf7740f6329b093e7704a3a4e353aa
1 parent b46cc57 commit 4da94a9
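To see what the merged API looks like from a caller's perspective, here is a minimal usage sketch assembled from the diffs below (extension/module/module.cpp and MobileNetClassifier.mm). The runForward helper, the input shape, and the InternalConsistency verification value are illustrative assumptions, not part of this commit.

// Sketch only: the helper name, input shape, and verification value are assumed.
#include <executorch/extension/module/module.h>

#include <cstdint>
#include <iterator>

using namespace ::torch::executor;

Error runForward(const char* modelPath, float* input) {
  Module module(modelPath); // runtime_init() now happens in the constructor.

  // New in this commit: a verification mode can be supplied at load time.
  const auto loadError =
      module.load(Program::Verification::InternalConsistency);
  if (loadError != Error::Ok) {
    return loadError;
  }

  int32_t sizes[] = {1, 3, 224, 224}; // assumed input shape
  TensorImpl inputTensor(ScalarType::Float, std::size(sizes), sizes, input);

  // forward() now returns a Result instead of filling an output parameter.
  const auto result = module.forward({EValue(Tensor(&inputTensor))});
  if (!result.ok()) {
    return result.error();
  }
  const auto outputTensor = result->at(0).toTensor(); // first output
  (void)outputTensor;
  return Error::Ok;
}

For callers that need more control, the second Module constructor in module.cpp accepts a DataLoader, a MemoryAllocator, and an EventTracer, which is how the new event-tracer injection is exposed.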

File tree

21 files changed (+457 -547 lines)


CMakeLists.txt (+4 -4)

@@ -130,8 +130,8 @@ if(NOT BUCK2)
 endif()
 
 # Build dataloader library
-option(EXECUTORCH_BUILD_EXTENSION_RUNNER_MODULE
-       "Build the extension/runner/module directory" OFF)
+option(EXECUTORCH_BUILD_EXTENSION_MODULE
+       "Build the extension/module directory" OFF)
 if(NOT BUCK2)
   set(BUCK2 buck2)
 endif()
@@ -319,8 +319,8 @@ if(EXECUTORCH_BUILD_EXTENSION_DATA_LOADER)
   add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/extension/data_loader)
 endif()
 
-if(EXECUTORCH_BUILD_EXTENSION_RUNNER_MODULE)
-  add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/extension/runner/module)
+if(EXECUTORCH_BUILD_EXTENSION_MODULE)
+  add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/extension/module)
 endif()
 
 option(EXECUTORCH_BUILD_XNNPACK "Build the backends/xnnpack directory" OFF)

build/build_apple_frameworks.sh (+3 -6)

@@ -21,7 +21,7 @@ COREML=OFF
 MPS=OFF
 XNNPACK=OFF
 HEADERS_PATH="include"
-EXECUTORCH_FRAMEWORK="executorch:libexecutorch.a,libextension_data_loader.a,libextension_runner_module.a:$HEADERS_PATH"
+EXECUTORCH_FRAMEWORK="executorch:libexecutorch.a,libextension_data_loader.a,libextension_module.a:$HEADERS_PATH"
 PORTABLE_FRAMEWORK="portable_backend:libportable_kernels.a,libportable_ops_lib.a:"
 COREML_FRAMEWORK="coreml_backend:libcoremldelegate.a:"
 MPS_FRAMEWORK="mps_backend:libmpsdelegate.a:"
@@ -112,7 +112,7 @@ cmake_build() {
     -DPYTHON_EXECUTABLE="$PYTHON" \
     -DFLATC_EXECUTABLE="$FLATC" \
     -DEXECUTORCH_BUILD_EXTENSION_DATA_LOADER=ON \
-    -DEXECUTORCH_BUILD_EXTENSION_RUNNER_MODULE=ON \
+    -DEXECUTORCH_BUILD_EXTENSION_MODULE=ON \
     -DCMAKE_ARCHIVE_OUTPUT_DIRECTORY="$(pwd)" \
     -DIOS_DEPLOYMENT_TARGET="$IOS_DEPLOYMENT_TARGET" \
     -DEXECUTORCH_BUILD_COREML=$COREML \
@@ -132,10 +132,7 @@ echo "Exporting headers"
 mkdir -p "$HEADERS_PATH"
 
 "$SOURCE_ROOT_DIR"/build/print_exported_headers.py --buck2="$BUCK2" --targets \
-  //runtime/executor:program \
-  //extension/data_loader: \
-  //extension/memory_allocator: \
-  //extension/runner/module: \
+  //extension/module: \
 | rsync -av --files-from=- "$SOURCE_ROOT_DIR" "$HEADERS_PATH/executorch"
 
 echo "Creating frameworks"

build/cmake_deps.toml (+2 -2)

@@ -52,9 +52,9 @@ deps = [
   "executorch",
 ]
 
-[targets.extension_runner_module]
+[targets.extension_module]
 buck_targets = [
-  "//extension/runner/module:module",
+  "//extension/module:module",
 ]
 filters = [
   ".cpp$",

examples/demo-apps/apple_ios/ExecuTorchDemo/ExecuTorchDemo/Sources/MobileNet/MobileNetClassifier.h (+1 -2)

@@ -14,8 +14,7 @@ FOUNDATION_EXPORT NSErrorDomain const ETMobileNetClassifierErrorDomain;
 
 @interface ETMobileNetClassifier : NSObject
 
-- (nullable instancetype)initWithFilePath:(NSString*)filePath
-                                    error:(NSError**)error;
+- (instancetype)initWithFilePath:(NSString*)filePath;
 - (BOOL)classifyWithInput:(float*)input
                    output:(float*)output
                outputSize:(NSInteger)predictionBufferSize

examples/demo-apps/apple_ios/ExecuTorchDemo/ExecuTorchDemo/Sources/MobileNet/MobileNetClassifier.mm (+10 -29)

@@ -8,7 +8,7 @@
 
 #import "MobileNetClassifier.h"
 
-#import <executorch/extension/runner/module/module.h>
+#import <executorch/extension/module/module.h>
 
 using namespace ::torch::executor;
 
@@ -21,26 +21,10 @@ @implementation ETMobileNetClassifier {
   std::unique_ptr<Module> _module;
 }
 
-- (nullable instancetype)initWithFilePath:(NSString*)filePath
-                                    error:(NSError**)error {
+- (instancetype)initWithFilePath:(NSString*)filePath {
   self = [super init];
   if (self) {
-    try {
-      _module = std::make_unique<Module>(filePath.UTF8String);
-    } catch (const std::exception& exception) {
-      if (error) {
-        *error = [NSError
-            errorWithDomain:ETMobileNetClassifierErrorDomain
-                       code:-1
-                   userInfo:@{
-                     NSLocalizedDescriptionKey : [NSString
-                         stringWithFormat:
-                             @"Failed to initialize the torch module: %s",
-                             exception.what()]
-                   }];
-      }
-      return nil;
-    }
+    _module = std::make_unique<Module>(filePath.UTF8String);
   }
   return self;
 }
@@ -50,28 +34,25 @@ - (BOOL)classifyWithInput:(float*)input
                outputSize:(NSInteger)outputSize
                     error:(NSError**)error {
   int32_t sizes[] = {1, kChannels, kSize, kSize};
-  TensorImpl tensorImpl(ScalarType::Float, std::size(sizes), sizes, input);
-  std::vector<EValue> inputs{EValue(Tensor(&tensorImpl))};
-  std::vector<EValue> outputs;
+  TensorImpl inputTensor(ScalarType::Float, std::size(sizes), sizes, input);
+  const auto result = _module->forward({EValue(Tensor(&inputTensor))});
 
-  const auto torchError = _module->forward(inputs, outputs);
-  if (torchError != Error::Ok) {
+  if (!result.ok()) {
     if (error) {
       *error = [NSError
           errorWithDomain:ETMobileNetClassifierErrorDomain
-                     code:NSInteger(torchError)
+                     code:NSInteger(result.error())
                  userInfo:@{
                    NSLocalizedDescriptionKey : [NSString
                        stringWithFormat:
                            @"Failed to run forward on the torch module, error code: %i",
-                           torchError]
+                           result.error()]
                  }];
     }
     return NO;
   }
-  const auto outputTensor = outputs[0].toTensor();
-  const auto data = outputTensor.const_data_ptr<float>();
-  std::copy(data, data + outputSize, output);
+  const auto outputData = result->at(0).toTensor().const_data_ptr<float>();
+  std::copy(outputData, outputData + outputSize, output);
 
   return YES;
 }

examples/demo-apps/apple_ios/ExecuTorchDemo/ExecuTorchDemo/Sources/MobileNet/MobileNetClassifier.swift (+1 -1)

@@ -40,7 +40,7 @@ public class MobileNetClassifier: ImageClassification {
   public init?(modelFilePath: String, labelsFilePath: String) throws {
     labels = try String(contentsOfFile: labelsFilePath, encoding: .utf8)
       .components(separatedBy: .newlines)
-    mobileNetClassifier = try ETMobileNetClassifier(filePath: modelFilePath)
+    mobileNetClassifier = ETMobileNetClassifier(filePath: modelFilePath)
     rawDataBuffer = [UInt8](repeating: 0, count: Int(Self.cropSize * Self.cropSize) * 4)
     normalizedBuffer = [Float](repeating: 0, count: rawDataBuffer.count / 4 * 3)
   }

extension/runner/module/CMakeLists.txt renamed to extension/module/CMakeLists.txt (+5 -5)

@@ -13,17 +13,17 @@ cmake_minimum_required(VERSION 3.19)
 
 # Source root directory for executorch.
 if(NOT EXECUTORCH_ROOT)
-  set(EXECUTORCH_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/../../..)
+  set(EXECUTORCH_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/../..)
 endif()
 
-list(TRANSFORM _extension_runner_module__srcs PREPEND "${EXECUTORCH_ROOT}/")
-add_library(extension_runner_module ${_extension_runner_module__srcs})
-target_include_directories(extension_runner_module PUBLIC ${EXECUTORCH_ROOT}/..)
+list(TRANSFORM _extension_module__srcs PREPEND "${EXECUTORCH_ROOT}/")
+add_library(extension_module ${_extension_module__srcs})
+target_include_directories(extension_module PUBLIC ${EXECUTORCH_ROOT}/..)
 
 
 # Install libraries
 install(
-  TARGETS extension_runner_module
+  TARGETS extension_module
   DESTINATION ${CMAKE_BINARY_DIR}/lib
   INCLUDES
   DESTINATION ${_common_include_directories})
File renamed without changes.

extension/module/module.cpp (new file, +154 lines)

/*
 * Copyright (c) Meta Platforms, Inc. and affiliates.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree.
 */

#include <executorch/extension/module/module.h>

#include <executorch/extension/data_loader/mmap_data_loader.h>
#include <executorch/extension/memory_allocator/malloc_memory_allocator.h>
#include <executorch/runtime/platform/runtime.h>

/**
 * Unwrap a Result to obtain its value (direct object, not a pointer).
 * If the Result contains an error, propagate the error via trivial function
 * return. The macro wraps the object into a unique_ptr.
 *
 * Note: A function using ET_UNWRAP_UNIQUE should itself return a Result or
 * Error.
 *
 * @param[in] result__ Expression yielding the result to unwrap.
 */
#define ET_UNWRAP_UNIQUE(result__)                                     \
  ({                                                                   \
    auto et_result__ = (result__);                                     \
    if (!et_result__.ok()) {                                           \
      return et_result__.error();                                      \
    }                                                                  \
    std::make_unique<std::remove_reference_t<decltype(*et_result__)>>( \
        std::move(*et_result__));                                      \
  })

namespace torch::executor {

Module::Module(
    const std::string& filePath,
    const Module::MlockConfig mlockConfig)
    : filePath_(filePath),
      mlockConfig_(mlockConfig),
      memoryAllocator_(std::make_unique<util::MallocMemoryAllocator>()) {
  runtime_init();
}

Module::Module(
    std::unique_ptr<DataLoader> dataLoader,
    std::unique_ptr<MemoryAllocator> memoryAllocator,
    std::unique_ptr<EventTracer> eventTracer)
    : dataLoader_(std::move(dataLoader)),
      memoryAllocator_(
          std::move(memoryAllocator)
              ?: std::make_unique<util::MallocMemoryAllocator>()),
      eventTracer_(std::move(eventTracer)) {
  runtime_init();
}

Error Module::load(const Program::Verification verification) {
  if (!isLoaded()) {
    if (!dataLoader_) {
      dataLoader_ = ET_UNWRAP_UNIQUE(
          util::MmapDataLoader::from(filePath_.c_str(), [this] {
            switch (mlockConfig_) {
              case MlockConfig::NoMlock:
                return util::MmapDataLoader::MlockConfig::NoMlock;
              case MlockConfig::UseMlock:
                return util::MmapDataLoader::MlockConfig::UseMlock;
              case MlockConfig::UseMlockIgnoreErrors:
                return util::MmapDataLoader::MlockConfig::UseMlockIgnoreErrors;
            }
            ET_ASSERT_UNREACHABLE();
          }()));
    };
    program_ = ET_UNWRAP_UNIQUE(Program::load(dataLoader_.get(), verification));
  }
  return Error::Ok;
}

bool Module::isLoaded() const {
  return program_ != nullptr;
}

Result<std::vector<std::string>> Module::methodNames() {
  ET_CHECK_OK_OR_RETURN_ERROR(load());
  const auto methodCount = program_->num_methods();
  std::vector<std::string> result;
  result.reserve(methodCount);

  for (auto index = 0; index < methodCount; ++index) {
    result.emplace_back(program_->get_method_name(index).get());
  }
  return result;
}

Error Module::loadMethod(const std::string& methodName) {
  if (!isMethodLoaded(methodName)) {
    ET_CHECK_OK_OR_RETURN_ERROR(load());

    MethodHolder methodHolder;
    const auto methodMetadata =
        ET_UNWRAP(program_->method_meta(methodName.c_str()));
    const auto plannedBuffersCount =
        methodMetadata.num_memory_planned_buffers();
    methodHolder.plannedBuffers.reserve(plannedBuffersCount);
    methodHolder.plannedSpans.reserve(plannedBuffersCount);

    for (auto index = 0; index < plannedBuffersCount; ++index) {
      const auto bufferSize =
          methodMetadata.memory_planned_buffer_size(index).get();
      methodHolder.plannedBuffers.emplace_back(bufferSize);
      methodHolder.plannedSpans.emplace_back(
          methodHolder.plannedBuffers.back().data(), bufferSize);
    }
    methodHolder.plannedMemory = std::make_unique<HierarchicalAllocator>(Span(
        methodHolder.plannedSpans.data(), methodHolder.plannedSpans.size()));
    methodHolder.memoryManager = std::make_unique<MemoryManager>(
        memoryAllocator_.get(), methodHolder.plannedMemory.get());
    methodHolder.method = ET_UNWRAP_UNIQUE(program_->load_method(
        methodName.c_str(),
        methodHolder.memoryManager.get(),
        eventTracer_.get()));
    methods_.emplace(methodName, std::move(methodHolder));
  }
  return Error::Ok;
}

bool Module::isMethodLoaded(const std::string& methodName) const {
  return methods_.count(methodName);
}

Result<MethodMeta> Module::methodMeta(const std::string& methodName) {
  ET_CHECK_OK_OR_RETURN_ERROR(loadMethod(methodName));
  return methods_.at(methodName).method->method_meta();
}

Result<std::vector<EValue>> Module::execute(
    const std::string& methodName,
    const std::vector<EValue>& input) {
  ET_CHECK_OK_OR_RETURN_ERROR(loadMethod(methodName));
  auto& method = methods_.at(methodName).method;

  for (auto index = 0; index < input.size(); ++index) {
    ET_CHECK_OK_OR_RETURN_ERROR(method->set_input(input[index], index));
  }
  ET_CHECK_OK_OR_RETURN_ERROR(method->execute());

  const auto outputsSize = method->outputs_size();
  std::vector<EValue> outputs(outputsSize);
  ET_CHECK_OK_OR_RETURN_ERROR(method->get_outputs(outputs.data(), outputsSize));

  return outputs;
}

} // namespace torch::executor
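The ET_UNWRAP_UNIQUE macro above, like the ET_UNWRAP and ET_CHECK_OK_OR_RETURN_ERROR macros it is used alongside, only works inside functions that themselves return a Result or an Error, since a failed Result short-circuits out via an early return. Below is a small sketch of that pattern, assuming the same includes and torch::executor namespace as module.cpp; the countMethods helper is hypothetical and not part of this commit.

// Hypothetical helper illustrating the unwrap-and-propagate pattern.
Result<size_t> countMethods(const std::string& filePath) {
  // Each ET_UNWRAP_UNIQUE returns early with the error if the Result failed,
  // otherwise it yields a unique_ptr to the unwrapped value.
  auto loader = ET_UNWRAP_UNIQUE(util::MmapDataLoader::from(
      filePath.c_str(), util::MmapDataLoader::MlockConfig::NoMlock));
  auto program = ET_UNWRAP_UNIQUE(
      Program::load(loader.get(), Program::Verification::Minimal));
  return program->num_methods();
}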
