Skip to content

Commit e73b0e8

Browse files
YUNQIUGUOrachguorachguo
authored
[Mobile] Add super resolution ios sample app with ort-extensions support for ppp (#185)
* initial wip impl ios superes * update sample and upload example screenshots * add readme and minor update * upload model * update pbxproj etc. * minor update * update Co-authored-by: rachguo <[email protected]> Co-authored-by: rachguo <[email protected]>
1 parent a9c13af commit e73b0e8

File tree

20 files changed

+728
-0
lines changed

20 files changed

+728
-0
lines changed

Diff for: mobile/examples/super_resolution/ios/ORTSuperResolution/ORTSuperResolution.xcodeproj/project.pbxproj

+382
Large diffs are not rendered by default.

Diff for: mobile/examples/super_resolution/ios/ORTSuperResolution/ORTSuperResolution.xcodeproj/project.xcworkspace/contents.xcworkspacedata

+7
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,8 @@
1+
<?xml version="1.0" encoding="UTF-8"?>
2+
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
3+
<plist version="1.0">
4+
<dict>
5+
<key>IDEDidComputeMac32BitWarning</key>
6+
<true/>
7+
</dict>
8+
</plist>
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,11 @@
1+
{
2+
"colors" : [
3+
{
4+
"idiom" : "universal"
5+
}
6+
],
7+
"info" : {
8+
"author" : "xcode",
9+
"version" : 1
10+
}
11+
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
1+
{
2+
"images" : [
3+
{
4+
"idiom" : "universal",
5+
"platform" : "ios",
6+
"size" : "1024x1024"
7+
}
8+
],
9+
"info" : {
10+
"author" : "xcode",
11+
"version" : 1
12+
}
13+
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
{
2+
"info" : {
3+
"author" : "xcode",
4+
"version" : 1
5+
}
6+
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
1+
{
2+
"images" : [
3+
{
4+
"filename" : "cat_224x224.png",
5+
"idiom" : "universal",
6+
"scale" : "1x"
7+
},
8+
{
9+
"idiom" : "universal",
10+
"scale" : "2x"
11+
},
12+
{
13+
"idiom" : "universal",
14+
"scale" : "3x"
15+
}
16+
],
17+
"info" : {
18+
"author" : "xcode",
19+
"version" : 1
20+
}
21+
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,58 @@
1+
// Copyright (c) Microsoft Corporation. All rights reserved.
2+
// Licensed under the MIT License.
3+
4+
5+
import SwiftUI
6+
7+
/// Main screen: shows the bundled low-resolution input image and, on demand,
/// the super-resolution output produced by the ONNX Runtime model.
struct ContentView: View {
    /// Whether the user has requested super resolution (controls the output section).
    @State private var performSuperRes = false
    /// Cached result of the last inference run; nil until the button is tapped
    /// or when inference failed.
    @State private var outputImage: UIImage?

    /// Runs the model via the Objective-C++ performer bridged to Swift.
    /// - Returns: The upscaled image, or nil (after logging) when inference throws.
    func runOrtSuperResolution() -> UIImage? {
        do {
            return try ORTSuperResolutionPerformer.performSuperResolution()
        } catch let error as NSError {
            print("Error: \(error.localizedDescription)")
            return nil
        }
    }

    var body: some View {
        ScrollView {
            VStack {
                VStack {
                    Text("ORTSuperResolution").font(.title).bold()
                        .frame(width: 400, height: 80)
                        .border(Color.purple, width: 4)
                        .background(Color.purple)

                    Text("Input low resolution image: ").frame(width: 350, height: 40, alignment: .leading)

                    Image("cat_224x224").frame(width: 250, height: 250)

                    Button("Perform Super Resolution") {
                        performSuperRes.toggle()
                        // Run inference once per tap and cache the result.
                        // (Previously the model was invoked from inside `body`,
                        // re-running the whole ORT session on every re-render.)
                        outputImage = performSuperRes ? runOrtSuperResolution() : nil
                    }

                    if performSuperRes {
                        Text("Output high resolution image: ").frame(width: 350, height: 40, alignment: .leading)

                        if let outputImage = outputImage {
                            Image(uiImage: outputImage)
                        } else {
                            Text("Unable to perform super resolution. ").frame(width: 350, height: 40, alignment: .leading)
                        }
                    }
                    Spacer()
                }
            }
            .padding()
        }
    }
}

/// Xcode canvas preview.
struct ContentView_Previews: PreviewProvider {
    static var previews: some View {
        ContentView()
    }
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
//
2+
// Use this file to import your target's public headers that you would like to expose to Swift.
3+
//
4+
5+
#import "ORTSuperResolutionPerformer.h"
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,14 @@
1+
// Copyright (c) Microsoft Corporation. All rights reserved.
2+
// Licensed under the MIT License.
3+
4+
5+
import SwiftUI
6+
7+
/// Application entry point: hosts `ContentView` in a single window group.
@main
struct ORTSuperResolutionApp: App {
    var body: some Scene {
        WindowGroup {
            ContentView()
        }
    }
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
// Copyright (c) Microsoft Corporation. All rights reserved.
2+
// Licensed under the MIT License.
3+
4+
#ifndef ORTSuperResolutionPerformer_h
#define ORTSuperResolutionPerformer_h

#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>

NS_ASSUME_NONNULL_BEGIN

/// Runs the bundled super-resolution ONNX model via the ONNX Runtime C++ API.
/// Exposed to Swift through the bridging header; Swift sees the class method
/// below as the throwing function `performSuperResolution()`.
@interface ORTSuperResolutionPerformer : NSObject

/// Runs the super-resolution model on the bundled low-resolution sample image.
/// Returns the upscaled image, or nil (populating `error`) on failure.
+ (nullable UIImage*)performSuperResolutionWithError:(NSError**)error;

@end

NS_ASSUME_NONNULL_END

#endif
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,123 @@
1+
// Copyright (c) Microsoft Corporation. All rights reserved.
2+
// Licensed under the MIT License.
3+
4+
#import "ORTSuperResolutionPerformer.h"
5+
#import <Foundation/Foundation.h>
6+
#import <UIKit/UIKit.h>
7+
8+
#include <array>
9+
#include <cstdint>
10+
#include <stdexcept>
11+
#include <string>
12+
#include <vector>
13+
14+
#include <onnxruntime_cxx_api.h>
15+
#include <onnxruntime_extensions.h>
16+
17+
18+
@implementation ORTSuperResolutionPerformer

// Runs the bundled super-resolution model end to end: loads the model and the
// sample PNG from the app bundle, feeds the raw PNG bytes to the model (whose
// ort-extensions pre/post-processing ops handle decode/encode), and wraps the
// resulting encoded image bytes in a UIImage. On failure, populates `error`
// and returns nil.
+ (nullable UIImage*)performSuperResolutionWithError:(NSError **)error {

    UIImage* output_image = nil;

    try {

        // Register the onnxruntime-extensions custom ops (image decode/encode
        // pre/post-processing) before creating the session.
        const auto ort_log_level = ORT_LOGGING_LEVEL_INFO;
        auto ort_env = Ort::Env(ort_log_level, "ORTSuperResolution");
        auto session_options = Ort::SessionOptions();

        if (RegisterCustomOps(session_options, OrtGetApiBase()) != nullptr) {
            throw std::runtime_error("RegisterCustomOps failed");
        }

        // Step 1: Locate the bundled model file.
        NSString *model_path = [NSBundle.mainBundle pathForResource:@"pt_super_resolution_with_pre_post_processing_opset16"
                                                             ofType:@"onnx"];
        if (model_path == nullptr) {
            throw std::runtime_error("Failed to get model path");
        }

        // Step 2: Create the Ort inference session.
        auto sess = Ort::Session(ort_env, [model_path UTF8String], session_options);

        // Read input image.
        // note: need to set Xcode settings to prevent it from messing with PNG files:
        // in "Build Settings":
        //   - set "Compress PNG Files" to "No"
        //   - set "Remove Text Metadata From PNG Files" to "No"
        NSString *input_image_path =
            [NSBundle.mainBundle pathForResource:@"cat_224x224" ofType:@"png"];
        if (input_image_path == nullptr) {
            throw std::runtime_error("Failed to get image path");
        }

        // Step 3: Prepare input tensors and input/output names.
        // The input is a 1-D uint8 tensor over the raw (still-encoded) PNG
        // bytes; the model's pre-processing ops decode it.
        NSMutableData *input_data =
            [NSMutableData dataWithContentsOfFile:input_image_path];
        // dataWithContentsOfFile: returns nil on read failure; catch it here
        // rather than dereferencing a null buffer below.
        if (input_data == nil) {
            throw std::runtime_error("Failed to read input image data");
        }
        const int64_t input_data_length = input_data.length;
        const auto memoryInfo =
            Ort::MemoryInfo::CreateCpu(OrtDeviceAllocator, OrtMemTypeCPU);

        const auto input_tensor = Ort::Value::CreateTensor(memoryInfo, [input_data mutableBytes], input_data_length,
                                                           &input_data_length, 1, ONNX_TENSOR_ELEMENT_DATA_TYPE_UINT8);

        constexpr auto input_names = std::array{"image"};
        constexpr auto output_names = std::array{"image_out"};

        // Step 4: Call inference session run.
        const auto outputs = sess.Run(Ort::RunOptions(), input_names.data(),
                                      &input_tensor, 1, output_names.data(), 1);
        if (outputs.size() != 1) {
            throw std::runtime_error("Unexpected number of outputs");
        }

        // Step 5: Analyze model outputs. Expected: a rank-1 uint8 tensor
        // holding the encoded output image bytes.
        const auto &output_tensor = outputs.front();
        const auto output_type_and_shape_info = output_tensor.GetTensorTypeAndShapeInfo();
        const auto output_shape = output_type_and_shape_info.GetShape();

        if (const auto output_element_type =
                output_type_and_shape_info.GetElementType();
            output_element_type != ONNX_TENSOR_ELEMENT_DATA_TYPE_UINT8) {
            throw std::runtime_error("Unexpected output element type");
        }
        // Validate rank before using output_shape[0] as a byte count.
        if (output_shape.size() != 1) {
            throw std::runtime_error("Unexpected output shape");
        }

        const uint8_t *output_data_raw = output_tensor.GetTensorData<uint8_t>();

        // Step 6: Convert raw bytes into NSData and return as displayable UIImage.
        NSData *output_data = [NSData dataWithBytes:output_data_raw
                                             length:(NSUInteger)output_shape[0]];
        output_image = [UIImage imageWithData:output_data];

    } catch (const std::exception &e) {
        NSLog(@"%s error: %s", __FUNCTION__, e.what());

        static NSString *const kErrorDomain = @"ORTSuperResolution";
        constexpr NSInteger kErrorCode = 0;
        if (error) {
            // Use UTF-8 with a fallback rather than NSASCIIStringEncoding:
            // stringWithCString: returns nil for non-ASCII bytes, and a nil
            // value in the userInfo dictionary literal would itself raise
            // NSInvalidArgumentException inside the error path.
            NSString *description =
                [NSString stringWithUTF8String:e.what()] ?: @"unknown error";
            *error =
                [NSError errorWithDomain:kErrorDomain
                                    code:kErrorCode
                                userInfo:@{NSLocalizedDescriptionKey : description}];
        }
        return nullptr;
    }

    if (error) {
        *error = nullptr;
    }
    return output_image;
}

@end
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
{
2+
"info" : {
3+
"author" : "xcode",
4+
"version" : 1
5+
}
6+
}
Loading
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
# Minimum iOS deployment target for the sample app.
platform :ios, '11.0'

target 'ORTSuperResolution' do
  # Comment the next line if you don't want to use dynamic frameworks
  use_frameworks!

  # Pods for OrtSuperResolution
  # ONNX Runtime C/C++ API.
  pod 'onnxruntime-c'

  # Pre-release version pods
  # onnxruntime-extensions supplies the custom pre/post-processing ops
  # registered via RegisterCustomOps in ORTSuperResolutionPerformer.mm.
  pod 'onnxruntime-extensions-c', '0.5.0-dev+261962.e3663fb'

end

post_install do |installer|
  # Disable code signing for pod targets so the sample builds without a
  # development team configured for the generated Pods project.
  installer.pods_project.targets.each do |target|
    target.build_configurations.each do |config|
      config.build_settings['CODE_SIGNING_ALLOWED'] = 'NO'
    end
  end
end
22+

Diff for: mobile/examples/super_resolution/ios/README.md

+32
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,32 @@
1+
# ONNX Runtime Mobile Super Resolution iOS sample application with Ort-Extensions support for pre/post processing
2+
3+
## Overview
4+
5+
This is a basic Super Resolution example application for [ONNX Runtime](https://github.com/microsoft/onnxruntime) on iOS with [Ort-Extensions](https://github.com/microsoft/onnxruntime-extensions) support for pre/post processing. The demo app accomplishes the task of recovering a high resolution (HR) image from its low resolution counterpart.
6+
7+
The model used here comes from [PyTorch Super Resolution](https://pytorch.org/tutorials/advanced/super_resolution_with_onnxruntime.html) and was converted to [ONNX](https://github.com/onnx/onnx) format with pre/post processing support added.
8+
9+
## Requirements
10+
- Install Xcode 13.0 or later (preferably the latest version)
11+
- A valid Apple Developer ID
12+
- An iOS device or iOS simulator
13+
- Xcode command line tools `xcode-select --install`
14+
- Clone the `onnxruntime-inference-examples` source code repo
15+
16+
## Build And Run
17+
18+
1. Install CocoaPods. `sudo gem install cocoapods`
19+
20+
2. In terminal, run `pod install` under `<ONNXRuntime-inference-example-root>/mobile/examples/super_resolution/ios/` to generate the workspace file and install required pod files.
21+
22+
Note: At the end of this step, you should get a file called `ORTSuperResolution.xcworkspace`.
23+
24+
3. Open `<ONNXRuntime-inference-example-root>/mobile/examples/super_resolution/ios/ORTSuperResolution.xcworkspace` in Xcode and make sure to select your corresponding development team under `Target-General-Signing` for a proper codesign procedure to run the app (only on device required, if running on iOS simulator can skip this step.)
25+
26+
4. Connect your iOS device or start the simulator, then build and run the app. Click the `Perform Super Resolution` button to see the super-resolution result for the displayed sample image.
27+
28+
#
29+
Here's an example screenshot of the app:
30+
31+
<img width=25% src="images/Screenshot1.png" alt="App Screenshot" />
32+
<img width=25% src="images/Screenshot2.png" alt="App Screenshot" />
1.77 MB
Loading
2.38 MB
Loading

0 commit comments

Comments
 (0)