Add support for Lambda streaming response #628

Merged (6 commits, Apr 9, 2023)
18 changes: 18 additions & 0 deletions examples/basic-streaming-response/Cargo.toml
@@ -0,0 +1,18 @@
[package]
name = "basic-streaming-response"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
hyper = { version = "0.14", features = [
    "http1",
    "client",
    "stream",
] }
lambda_runtime = { path = "../../lambda-runtime" }
tokio = { version = "1", features = ["macros", "time"] }
tracing = { version = "0.1", features = ["log"] }
tracing-subscriber = { version = "0.3", default-features = false, features = ["ansi", "fmt"] }
serde_json = "1.0"
13 changes: 13 additions & 0 deletions examples/basic-streaming-response/README.md
@@ -0,0 +1,13 @@
# AWS Lambda Function example

## Build & Deploy

1. Install [cargo-lambda](https://github.com/cargo-lambda/cargo-lambda#installation)
2. Build the function with `cargo lambda build --release`
3. Deploy the function to AWS Lambda with `cargo lambda deploy --enable-function-url --iam-role YOUR_ROLE`
4. Enable Lambda streaming response in the Lambda console: change the function URL's invoke mode to `RESPONSE_STREAM`
5. Verify the function works: `curl <function-url>`. The results should be streamed back with a 0.5-second pause between each word; the expected output is shown below.
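
With the example's hard-coded message list, the streamed body should look like this, one line arriving roughly every 0.5 seconds (`curl -N`/`--no-buffer` makes the incremental arrival easier to observe):

```
Hello
world
from
Lambda!
```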

## Build for ARM 64

Build the function with `cargo lambda build --release --arm64`
42 changes: 42 additions & 0 deletions examples/basic-streaming-response/src/main.rs
@@ -0,0 +1,42 @@
use hyper::{body::Body, Response};
use lambda_runtime::{service_fn, Error, LambdaEvent};
use serde_json::Value;
use std::time::Duration;

async fn func(_event: LambdaEvent<Value>) -> Result<Response<Body>, Error> {
    let messages = vec!["Hello", "world", "from", "Lambda!"];

    let (mut tx, rx) = Body::channel();

    tokio::spawn(async move {
        for message in messages.iter() {
            tx.send_data((message.to_string() + "\n").into()).await.unwrap();
            tokio::time::sleep(Duration::from_millis(500)).await; // non-blocking pause between chunks
        }
    });

    let resp = Response::builder()
        .header("content-type", "text/html")
        .header("CustomHeader", "outerspace")
        .body(rx)?;

    Ok(resp)
}

#[tokio::main]
async fn main() -> Result<(), Error> {
    // required to enable CloudWatch error logging by the runtime
    tracing_subscriber::fmt()
        .with_max_level(tracing::Level::INFO)
        // disable printing the name of the module in every log line.
        .with_target(false)
        // this needs to be set to false, otherwise ANSI color codes will
        // show up in a confusing manner in CloudWatch logs.
        .with_ansi(false)
        // disabling time is handy because CloudWatch will add the ingestion time.
        .without_time()
        .init();

    lambda_runtime::run_with_streaming_response(service_fn(func)).await?;
    Ok(())
}
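
The channel-based body above is one way to stream. As an illustrative alternative (not part of this PR), the body can also be built from an async `Stream`. This sketch assumes `futures = "0.3"` is added to the example's dependencies; `Body::wrap_stream` is available because the example already enables hyper's `stream` feature:

```rust
use futures::stream::{self, StreamExt};
use hyper::{body::Body, Response};
use lambda_runtime::{Error, LambdaEvent};
use serde_json::Value;
use std::{convert::Infallible, time::Duration};

// Illustrative alternative, not part of this PR: wrap an async `Stream` of
// chunks instead of driving a Body channel from a spawned task.
async fn func_from_stream(_event: LambdaEvent<Value>) -> Result<Response<Body>, Error> {
    let words = ["Hello\n", "world\n", "from\n", "Lambda!\n"];
    // Emit one chunk every 500 ms; the delay lives inside the stream, so the
    // Tokio runtime is never blocked.
    let chunks = stream::iter(words).then(|word| async move {
        tokio::time::sleep(Duration::from_millis(500)).await;
        Ok::<_, Infallible>(word)
    });
    Ok(Response::builder()
        .header("content-type", "text/plain")
        .body(Body::wrap_stream(chunks))?)
}
```

Either form yields a body whose chunks `run_with_streaming_response` forwards to the client as they become available.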
21 changes: 11 additions & 10 deletions lambda-http/Cargo.toml
@@ -23,19 +23,20 @@ apigw_websockets = []
alb = []

[dependencies]
-base64 = "0.13.0"
-bytes = "1"
+base64 = "0.21"
+bytes = "1.4"
futures = "0.3"
http = "0.2"
http-body = "0.4"
-hyper = "0.14.20"
+hyper = "0.14"
lambda_runtime = { path = "../lambda-runtime", version = "0.7" }
-serde = { version = "^1", features = ["derive"] }
-serde_json = "^1"
-serde_urlencoded = "0.7.0"
-mime = "0.3.16"
-encoding_rs = "0.8.31"
-url = "2.2.2"
-percent-encoding = "2.2.0"
+serde = { version = "1.0", features = ["derive"] }
+serde_json = "1.0"
+serde_urlencoded = "0.7"
+mime = "0.3"
+encoding_rs = "0.8"
+url = "2.2"
+percent-encoding = "2.2"

[dependencies.aws_lambda_events]
version = "^0.7.2"
3 changes: 3 additions & 0 deletions lambda-http/src/lib.rs
@@ -92,6 +92,9 @@ use std::{
    task::{Context as TaskContext, Poll},
};

+mod streaming;
+pub use streaming::run_with_streaming_response;
+
/// Type alias for `http::Request`s with a fixed [`Body`](enum.Body.html) type
pub type Request = http::Request<Body>;

34 changes: 34 additions & 0 deletions lambda-http/src/streaming.rs
@@ -0,0 +1,34 @@
use crate::request::LambdaRequest;
use crate::tower::ServiceBuilder;
use crate::{Request, RequestExt};
pub use aws_lambda_events::encodings::Body as LambdaEventBody;
use bytes::Bytes;
pub use http::{self, Response};
use http_body::Body;
use lambda_runtime::LambdaEvent;
pub use lambda_runtime::{self, service_fn, tower, Context, Error, Service};
use std::fmt::{Debug, Display};

/// Starts the Lambda Rust runtime and streams the response back. See [Configure Lambda
/// Streaming Response](https://docs.aws.amazon.com/lambda/latest/dg/configuration-response-streaming.html).
///
/// This takes care of transforming the [`LambdaEvent`] into a [`Request`] and
/// accepts an [`http::Response<http_body::Body>`] as the response.
pub async fn run_with_streaming_response<'a, S, B, E>(handler: S) -> Result<(), Error>
where
    S: Service<Request, Response = Response<B>, Error = E>,
    S::Future: Send + 'a,
    E: Debug + Display,
    B: Body + Unpin + Send + 'static,
    B::Data: Into<Bytes> + Send,
    B::Error: Into<Error> + Send + Debug,
{
    let svc = ServiceBuilder::new()
        .map_request(|req: LambdaEvent<LambdaRequest>| {
            let event: Request = req.payload.into();
            event.with_lambda_context(req.context)
        })
        .service(handler);

    lambda_runtime::run_with_streaming_response(svc).await
}
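
A hypothetical usage sketch for this new `lambda_http` entry point (the handler name and headers are illustrative, not taken from this PR); it assumes `hyper` and `tokio` (with the `macros` and `rt-multi-thread` features) are available alongside `lambda_http`:

```rust
use hyper::{body::Body, Response};
use lambda_http::{run_with_streaming_response, service_fn, Error, Request};

// Hypothetical handler: streams three chunks back through a hyper Body channel.
async fn handler(_req: Request) -> Result<Response<Body>, Error> {
    let (mut tx, rx) = Body::channel();
    tokio::spawn(async move {
        for chunk in ["streamed ", "over ", "lambda-http\n"] {
            // Stop if the client has gone away and the channel is closed.
            if tx.send_data(chunk.into()).await.is_err() {
                break;
            }
        }
    });
    Ok(Response::builder()
        .header("content-type", "text/plain")
        .body(rx)?)
}

#[tokio::main]
async fn main() -> Result<(), Error> {
    run_with_streaming_response(service_fn(handler)).await
}
```

The `ServiceBuilder` layer above converts each incoming `LambdaEvent<LambdaRequest>` into a `Request` (with the Lambda `Context` attached) before it reaches the handler.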
4 changes: 3 additions & 1 deletion lambda-runtime-api-client/src/lib.rs
@@ -53,7 +53,9 @@ where

    /// Create a new client with a given base URI and HTTP connector.
    pub fn with(base: Uri, connector: C) -> Self {
-        let client = hyper::Client::builder().build(connector);
+        let client = hyper::Client::builder()
+            .http1_max_buf_size(1024 * 1024)
+            .build(connector);
        Self { base, client }
    }

3 changes: 3 additions & 0 deletions lambda-runtime/src/lib.rs
@@ -34,6 +34,9 @@ mod simulated;
/// Types available to a Lambda function.
mod types;

+mod streaming;
+pub use streaming::run_with_streaming_response;
+
use requests::{EventCompletionRequest, EventErrorRequest, IntoRequest, NextEventRequest};
pub use types::{Context, LambdaEvent};
