Further Runner and Module improvements. #1622

Closed · wants to merge 2 commits
8 changes: 4 additions & 4 deletions CMakeLists.txt
@@ -130,8 +130,8 @@ if(NOT BUCK2)
endif()

# Build dataloader library
-option(EXECUTORCH_BUILD_EXTENSION_RUNNER_MODULE
-"Build the extension/runner/module directory" OFF)
+option(EXECUTORCH_BUILD_EXTENSION_MODULE
+"Build the extension/module directory" OFF)
if(NOT BUCK2)
set(BUCK2 buck2)
endif()
@@ -319,8 +319,8 @@ if(EXECUTORCH_BUILD_EXTENSION_DATA_LOADER)
add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/extension/data_loader)
endif()

-if(EXECUTORCH_BUILD_EXTENSION_RUNNER_MODULE)
-add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/extension/runner/module)
+if(EXECUTORCH_BUILD_EXTENSION_MODULE)
+add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/extension/module)
endif()

option(EXECUTORCH_BUILD_XNNPACK "Build the backends/xnnpack directory" OFF)
9 changes: 3 additions & 6 deletions build/build_apple_frameworks.sh
@@ -21,7 +21,7 @@ COREML=OFF
MPS=OFF
XNNPACK=OFF
HEADERS_PATH="include"
-EXECUTORCH_FRAMEWORK="executorch:libexecutorch.a,libextension_data_loader.a,libextension_runner_module.a:$HEADERS_PATH"
+EXECUTORCH_FRAMEWORK="executorch:libexecutorch.a,libextension_data_loader.a,libextension_module.a:$HEADERS_PATH"
PORTABLE_FRAMEWORK="portable_backend:libportable_kernels.a,libportable_ops_lib.a:"
COREML_FRAMEWORK="coreml_backend:libcoremldelegate.a:"
MPS_FRAMEWORK="mps_backend:libmpsdelegate.a:"
@@ -112,7 +112,7 @@ cmake_build() {
-DPYTHON_EXECUTABLE="$PYTHON" \
-DFLATC_EXECUTABLE="$FLATC" \
-DEXECUTORCH_BUILD_EXTENSION_DATA_LOADER=ON \
--DEXECUTORCH_BUILD_EXTENSION_RUNNER_MODULE=ON \
+-DEXECUTORCH_BUILD_EXTENSION_MODULE=ON \
-DCMAKE_ARCHIVE_OUTPUT_DIRECTORY="$(pwd)" \
-DIOS_DEPLOYMENT_TARGET="$IOS_DEPLOYMENT_TARGET" \
-DEXECUTORCH_BUILD_COREML=$COREML \
@@ -132,10 +132,7 @@ echo "Exporting headers"
mkdir -p "$HEADERS_PATH"

"$SOURCE_ROOT_DIR"/build/print_exported_headers.py --buck2="$BUCK2" --targets \
-//runtime/executor:program \
-//extension/data_loader: \
-//extension/memory_allocator: \
-//extension/runner/module: \
+//extension/module: \
| rsync -av --files-from=- "$SOURCE_ROOT_DIR" "$HEADERS_PATH/executorch"

echo "Creating frameworks"
4 changes: 2 additions & 2 deletions build/cmake_deps.toml
@@ -52,9 +52,9 @@ deps = [
"executorch",
]

-[targets.extension_runner_module]
+[targets.extension_module]
buck_targets = [
"//extension/runner/module:module",
"//extension/module:module",
]
filters = [
".cpp$",
MobileNetClassifier.h
@@ -14,8 +14,7 @@ FOUNDATION_EXPORT NSErrorDomain const ETMobileNetClassifierErrorDomain;

@interface ETMobileNetClassifier : NSObject

-- (nullable instancetype)initWithFilePath:(NSString*)filePath
-error:(NSError**)error;
+- (instancetype)initWithFilePath:(NSString*)filePath;
- (BOOL)classifyWithInput:(float*)input
output:(float*)output
outputSize:(NSInteger)predictionBufferSize
MobileNetClassifier.mm
@@ -8,7 +8,7 @@

#import "MobileNetClassifier.h"

-#import <executorch/extension/runner/module/module.h>
+#import <executorch/extension/module/module.h>

using namespace ::torch::executor;

@@ -21,26 +21,10 @@ @implementation ETMobileNetClassifier {
std::unique_ptr<Module> _module;
}

-- (nullable instancetype)initWithFilePath:(NSString*)filePath
-error:(NSError**)error {
+- (instancetype)initWithFilePath:(NSString*)filePath {
self = [super init];
if (self) {
-try {
-_module = std::make_unique<Module>(filePath.UTF8String);
-} catch (const std::exception& exception) {
-if (error) {
-*error = [NSError
-errorWithDomain:ETMobileNetClassifierErrorDomain
-code:-1
-userInfo:@{
-NSLocalizedDescriptionKey : [NSString
-stringWithFormat:
-@"Failed to initialize the torch module: %s",
-exception.what()]
-}];
-}
-return nil;
-}
+_module = std::make_unique<Module>(filePath.UTF8String);
}
return self;
}
@@ -50,28 +34,25 @@ - (BOOL)classifyWithInput:(float*)input
outputSize:(NSInteger)outputSize
error:(NSError**)error {
int32_t sizes[] = {1, kChannels, kSize, kSize};
-TensorImpl tensorImpl(ScalarType::Float, std::size(sizes), sizes, input);
-std::vector<EValue> inputs{EValue(Tensor(&tensorImpl))};
-std::vector<EValue> outputs;
+TensorImpl inputTensor(ScalarType::Float, std::size(sizes), sizes, input);
+const auto result = _module->forward({EValue(Tensor(&inputTensor))});

-const auto torchError = _module->forward(inputs, outputs);
-if (torchError != Error::Ok) {
+if (!result.ok()) {
if (error) {
*error = [NSError
errorWithDomain:ETMobileNetClassifierErrorDomain
-code:NSInteger(torchError)
+code:NSInteger(result.error())
userInfo:@{
NSLocalizedDescriptionKey : [NSString
stringWithFormat:
@"Failed to run forward on the torch module, error code: %i",
-torchError]
+result.error()]
}];
}
return NO;
}
-const auto outputTensor = outputs[0].toTensor();
-const auto data = outputTensor.const_data_ptr<float>();
-std::copy(data, data + outputSize, output);
+const auto outputData = result->at(0).toTensor().const_data_ptr<float>();
+std::copy(outputData, outputData + outputSize, output);

return YES;
}
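For context, the change above swaps the exception-based initializer and the output-vector forward() overload for the Result-returning API of the new Module extension. Below is a minimal standalone sketch of the same flow in plain C++; the function name runForward, the input shape, and the buffer arguments are illustrative and not part of this PR.

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <iterator>
#include <string>

#include <executorch/extension/module/module.h>

using namespace ::torch::executor;

// Sketch only: load a program, run its "forward" method on one float tensor,
// and copy the first output into a caller-provided buffer.
Error runForward(
    const std::string& modelPath,
    float* input,
    float* output,
    size_t outputSize) {
  Module module(modelPath);  // the program is loaded lazily on first use

  int32_t sizes[] = {1, 3, 224, 224};  // illustrative NCHW shape
  TensorImpl inputTensor(ScalarType::Float, std::size(sizes), sizes, input);

  const auto result = module.forward({EValue(Tensor(&inputTensor))});
  if (!result.ok()) {
    return result.error();  // propagate the error code instead of throwing
  }

  const auto data = result->at(0).toTensor().const_data_ptr<float>();
  std::copy(data, data + outputSize, output);
  return Error::Ok;
}

Because errors surface as Error codes through Result rather than C++ exceptions, the Objective-C initializer above can drop its NSError out-parameter and report failures only where work actually happens, in classifyWithInput.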
MobileNetClassifier.swift
@@ -40,7 +40,7 @@ public class MobileNetClassifier: ImageClassification {
public init?(modelFilePath: String, labelsFilePath: String) throws {
labels = try String(contentsOfFile: labelsFilePath, encoding: .utf8)
.components(separatedBy: .newlines)
-mobileNetClassifier = try ETMobileNetClassifier(filePath: modelFilePath)
+mobileNetClassifier = ETMobileNetClassifier(filePath: modelFilePath)
rawDataBuffer = [UInt8](repeating: 0, count: Int(Self.cropSize * Self.cropSize) * 4)
normalizedBuffer = [Float](repeating: 0, count: rawDataBuffer.count / 4 * 3)
}
extension/module/CMakeLists.txt
@@ -13,17 +13,17 @@ cmake_minimum_required(VERSION 3.19)

# Source root directory for executorch.
if(NOT EXECUTORCH_ROOT)
-set(EXECUTORCH_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/../../..)
+set(EXECUTORCH_ROOT ${CMAKE_CURRENT_SOURCE_DIR}/../..)
endif()

-list(TRANSFORM _extension_runner_module__srcs PREPEND "${EXECUTORCH_ROOT}/")
-add_library(extension_runner_module ${_extension_runner_module__srcs})
-target_include_directories(extension_runner_module PUBLIC ${EXECUTORCH_ROOT}/..)
+list(TRANSFORM _extension_module__srcs PREPEND "${EXECUTORCH_ROOT}/")
+add_library(extension_module ${_extension_module__srcs})
+target_include_directories(extension_module PUBLIC ${EXECUTORCH_ROOT}/..)


# Install libraries
install(
-TARGETS extension_runner_module
+TARGETS extension_module
DESTINATION ${CMAKE_BINARY_DIR}/lib
INCLUDES
DESTINATION ${_common_include_directories})
File renamed without changes.
154 changes: 154 additions & 0 deletions extension/module/module.cpp
@@ -0,0 +1,154 @@
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*/

#include <executorch/extension/module/module.h>

#include <executorch/extension/data_loader/mmap_data_loader.h>
#include <executorch/extension/memory_allocator/malloc_memory_allocator.h>
#include <executorch/runtime/platform/runtime.h>

/**
* Unwrap a Result to obtain its value (direct object, not a pointer).
* If the Result contains an error, propagate the error via trivial function
* return. The macro wraps the object into a unique_ptr.
*
* Note: A function using ET_UNWRAP_UNIQUE should itself return a Result or
* Error.
*
* @param[in] result__ Expression yielding the result to unwrap.
*/
#define ET_UNWRAP_UNIQUE(result__) \
({ \
auto et_result__ = (result__); \
if (!et_result__.ok()) { \
return et_result__.error(); \
} \
std::make_unique<std::remove_reference_t<decltype(*et_result__)>>( \
std::move(*et_result__)); \
})
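// Illustration (not part of this diff): Module::load() below uses this as
//
//   program_ = ET_UNWRAP_UNIQUE(Program::load(dataLoader_.get(), verification));
//
// The GCC/Clang statement expression either early-returns the contained Error
// from the enclosing function or moves the unwrapped value into a
// heap-allocated std::unique_ptr.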

namespace torch::executor {

Module::Module(
const std::string& filePath,
const Module::MlockConfig mlockConfig)
: filePath_(filePath),
mlockConfig_(mlockConfig),
memoryAllocator_(std::make_unique<util::MallocMemoryAllocator>()) {
runtime_init();
}

Module::Module(
std::unique_ptr<DataLoader> dataLoader,
std::unique_ptr<MemoryAllocator> memoryAllocator,
std::unique_ptr<EventTracer> eventTracer)
: dataLoader_(std::move(dataLoader)),
memoryAllocator_(
std::move(memoryAllocator)
?: std::make_unique<util::MallocMemoryAllocator>()),
eventTracer_(std::move(eventTracer)) {
runtime_init();
}

Error Module::load(const Program::Verification verification) {
if (!isLoaded()) {
if (!dataLoader_) {
dataLoader_ = ET_UNWRAP_UNIQUE(
util::MmapDataLoader::from(filePath_.c_str(), [this] {
switch (mlockConfig_) {
case MlockConfig::NoMlock:
return util::MmapDataLoader::MlockConfig::NoMlock;
case MlockConfig::UseMlock:
return util::MmapDataLoader::MlockConfig::UseMlock;
case MlockConfig::UseMlockIgnoreErrors:
return util::MmapDataLoader::MlockConfig::UseMlockIgnoreErrors;
}
ET_ASSERT_UNREACHABLE();
}()));
};
program_ = ET_UNWRAP_UNIQUE(Program::load(dataLoader_.get(), verification));
}
return Error::Ok;
}

bool Module::isLoaded() const {
return program_ != nullptr;
}

Result<std::vector<std::string>> Module::methodNames() {
ET_CHECK_OK_OR_RETURN_ERROR(load());
const auto methodCount = program_->num_methods();
std::vector<std::string> result;
result.reserve(methodCount);

for (auto index = 0; index < methodCount; ++index) {
result.emplace_back(program_->get_method_name(index).get());
}
return result;
}

Error Module::loadMethod(const std::string& methodName) {
if (!isMethodLoaded(methodName)) {
ET_CHECK_OK_OR_RETURN_ERROR(load());

MethodHolder methodHolder;
const auto methodMetadata =
ET_UNWRAP(program_->method_meta(methodName.c_str()));
const auto plannedBuffersCount =
methodMetadata.num_memory_planned_buffers();
methodHolder.plannedBuffers.reserve(plannedBuffersCount);
methodHolder.plannedSpans.reserve(plannedBuffersCount);

for (auto index = 0; index < plannedBuffersCount; ++index) {
const auto bufferSize =
methodMetadata.memory_planned_buffer_size(index).get();
methodHolder.plannedBuffers.emplace_back(bufferSize);
methodHolder.plannedSpans.emplace_back(
methodHolder.plannedBuffers.back().data(), bufferSize);
}
methodHolder.plannedMemory = std::make_unique<HierarchicalAllocator>(Span(
methodHolder.plannedSpans.data(), methodHolder.plannedSpans.size()));
methodHolder.memoryManager = std::make_unique<MemoryManager>(
memoryAllocator_.get(), methodHolder.plannedMemory.get());
methodHolder.method = ET_UNWRAP_UNIQUE(program_->load_method(
methodName.c_str(),
methodHolder.memoryManager.get(),
eventTracer_.get()));
methods_.emplace(methodName, std::move(methodHolder));
}
return Error::Ok;
}

bool Module::isMethodLoaded(const std::string& methodName) const {
return methods_.count(methodName);
}

Result<MethodMeta> Module::methodMeta(const std::string& methodName) {
ET_CHECK_OK_OR_RETURN_ERROR(loadMethod(methodName));
return methods_.at(methodName).method->method_meta();
}

Result<std::vector<EValue>> Module::execute(
const std::string& methodName,
const std::vector<EValue>& input) {
ET_CHECK_OK_OR_RETURN_ERROR(loadMethod(methodName));
auto& method = methods_.at(methodName).method;

for (auto index = 0; index < input.size(); ++index) {
ET_CHECK_OK_OR_RETURN_ERROR(method->set_input(input[index], index));
}
ET_CHECK_OK_OR_RETURN_ERROR(method->execute());

const auto outputsSize = method->outputs_size();
std::vector<EValue> outputs(outputsSize);
ET_CHECK_OK_OR_RETURN_ERROR(method->get_outputs(outputs.data(), outputsSize));

return outputs;
}

} // namespace torch::executor
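To tie the pieces of module.cpp together, here is a minimal sketch of how its entry points compose; the function name describeModel and the model path are placeholders, everything else follows the code above.

#include <cstddef>
#include <cstdio>
#include <string>

#include <executorch/extension/module/module.h>

using namespace ::torch::executor;

// Sketch only: enumerate the methods of a program and inspect each one.
Error describeModel(const std::string& modelPath) {
  Module module(modelPath);  // nothing is loaded yet; load() runs on demand

  const auto names = module.methodNames();  // triggers load() internally
  if (!names.ok()) {
    return names.error();
  }
  for (const auto& name : *names) {
    const auto meta = module.methodMeta(name);  // loads the method on demand
    if (!meta.ok()) {
      return meta.error();
    }
    std::printf(
        "%s: %zu memory-planned buffer(s)\n",
        name.c_str(),
        (size_t)meta->num_memory_planned_buffers());
  }
  return Error::Ok;
}

Execution then goes through execute(methodName, inputs), which loads the method on demand, sets the inputs, runs it, and returns the outputs as EValues, or through the forward() convenience used in the Objective-C changes earlier in this diff.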