diff --git a/apis/v1alpha1/ack-generate-metadata.yaml b/apis/v1alpha1/ack-generate-metadata.yaml
index b0111ff7..84528365 100755
--- a/apis/v1alpha1/ack-generate-metadata.yaml
+++ b/apis/v1alpha1/ack-generate-metadata.yaml
@@ -7,7 +7,7 @@ api_directory_checksum: c67645b15db39980ba51ff6303c34c5aafc55a9e
 api_version: v1alpha1
 aws_sdk_go_version: v1.44.181
 generator_config_info:
-  file_checksum: f0fa26f0d3c577f5800eb183e0ec1cea865a75ee
+  file_checksum: 4038e6b669d1d6966f8e729a3ed323bf03593680
   original_file_name: generator.yaml
 last_modification:
   reason: API generation
diff --git a/apis/v1alpha1/generator.yaml b/apis/v1alpha1/generator.yaml
index a816db8d..e3559558 100644
--- a/apis/v1alpha1/generator.yaml
+++ b/apis/v1alpha1/generator.yaml
@@ -38,6 +38,13 @@ resources:
       - path: Status.State
         in: [ "Active" ]
     fields:
+      Code.SHA256:
+        type: string
+        compare:
+          is_ignored: true
+        set:
+          - ignore: "to"
+            method: Create
      Code.S3Bucket:
        references:
          resource: Bucket
@@ -72,7 +79,7 @@ resources:
          path: ReservedConcurrentExecutions
      Code:
        compare:
-          is_ignored: true
+          is_ignored: false
        set:
          - ignore: true
            operation: ReadOne
diff --git a/apis/v1alpha1/types.go b/apis/v1alpha1/types.go
index 43a87004..b1f328f2 100644
--- a/apis/v1alpha1/types.go
+++ b/apis/v1alpha1/types.go
@@ -215,6 +215,7 @@ type FunctionCode struct {
 	S3BucketRef     *ackv1alpha1.AWSResourceReferenceWrapper `json:"s3BucketRef,omitempty"`
 	S3Key           *string                                  `json:"s3Key,omitempty"`
 	S3ObjectVersion *string                                  `json:"s3ObjectVersion,omitempty"`
+	SHA256          *string                                  `json:"sha256,omitempty"`
 	ZipFile         []byte                                   `json:"zipFile,omitempty"`
 }
diff --git a/apis/v1alpha1/zz_generated.deepcopy.go b/apis/v1alpha1/zz_generated.deepcopy.go
index b819b77a..a677b2c1 100644
--- a/apis/v1alpha1/zz_generated.deepcopy.go
+++ b/apis/v1alpha1/zz_generated.deepcopy.go
@@ -1353,6 +1353,11 @@ func (in *FunctionCode) DeepCopyInto(out *FunctionCode) {
 		*out = new(string)
 		**out = **in
 	}
+	if in.SHA256 != nil {
+		in, out := &in.SHA256, &out.SHA256
+		*out = new(string)
+		**out = **in
+	}
 	if in.ZipFile != nil {
 		in, out := &in.ZipFile, &out.ZipFile
 		*out = make([]byte, len(*in))
diff --git a/config/crd/bases/lambda.services.k8s.aws_functions.yaml b/config/crd/bases/lambda.services.k8s.aws_functions.yaml
index e108d53a..c866c409 100644
--- a/config/crd/bases/lambda.services.k8s.aws_functions.yaml
+++ b/config/crd/bases/lambda.services.k8s.aws_functions.yaml
@@ -70,6 +70,8 @@ spec:
                 type: string
               s3ObjectVersion:
                 type: string
+              sha256:
+                type: string
               zipFile:
                 format: byte
                 type: string
diff --git a/generator.yaml b/generator.yaml
index a816db8d..e3559558 100644
--- a/generator.yaml
+++ b/generator.yaml
@@ -38,6 +38,13 @@ resources:
       - path: Status.State
         in: [ "Active" ]
     fields:
+      Code.SHA256:
+        type: string
+        compare:
+          is_ignored: true
+        set:
+          - ignore: "to"
+            method: Create
      Code.S3Bucket:
        references:
          resource: Bucket
@@ -72,7 +79,7 @@ resources:
          path: ReservedConcurrentExecutions
      Code:
        compare:
-          is_ignored: true
+          is_ignored: false
        set:
          - ignore: true
            operation: ReadOne
diff --git a/helm/crds/lambda.services.k8s.aws_functions.yaml b/helm/crds/lambda.services.k8s.aws_functions.yaml
index 052ae74a..6fed3bb6 100644
--- a/helm/crds/lambda.services.k8s.aws_functions.yaml
+++ b/helm/crds/lambda.services.k8s.aws_functions.yaml
@@ -70,6 +70,8 @@ spec:
                 type: string
               s3ObjectVersion:
                 type: string
+              sha256:
+                type: string
               zipFile:
                 format: byte
                 type: string
diff --git a/pkg/resource/function/delta.go b/pkg/resource/function/delta.go
index 5a0d6166..4ea81af3 100644
--- a/pkg/resource/function/delta.go
+++ b/pkg/resource/function/delta.go
@@ -51,6 +51,41 @@ func newResourceDelta(
 			delta.Add("Spec.Architectures", a.ko.Spec.Architectures, b.ko.Spec.Architectures)
 		}
 	}
+	if ackcompare.HasNilDifference(a.ko.Spec.Code, b.ko.Spec.Code) {
+		delta.Add("Spec.Code", a.ko.Spec.Code, b.ko.Spec.Code)
+	} else if a.ko.Spec.Code != nil && b.ko.Spec.Code != nil {
+		if ackcompare.HasNilDifference(a.ko.Spec.Code.ImageURI, b.ko.Spec.Code.ImageURI) {
+			delta.Add("Spec.Code.ImageURI", a.ko.Spec.Code.ImageURI, b.ko.Spec.Code.ImageURI)
+		} else if a.ko.Spec.Code.ImageURI != nil && b.ko.Spec.Code.ImageURI != nil {
+			if *a.ko.Spec.Code.ImageURI != *b.ko.Spec.Code.ImageURI {
+				delta.Add("Spec.Code.ImageURI", a.ko.Spec.Code.ImageURI, b.ko.Spec.Code.ImageURI)
+			}
+		}
+		if ackcompare.HasNilDifference(a.ko.Spec.Code.S3Bucket, b.ko.Spec.Code.S3Bucket) {
+			delta.Add("Spec.Code.S3Bucket", a.ko.Spec.Code.S3Bucket, b.ko.Spec.Code.S3Bucket)
+		} else if a.ko.Spec.Code.S3Bucket != nil && b.ko.Spec.Code.S3Bucket != nil {
+			if *a.ko.Spec.Code.S3Bucket != *b.ko.Spec.Code.S3Bucket {
+				delta.Add("Spec.Code.S3Bucket", a.ko.Spec.Code.S3Bucket, b.ko.Spec.Code.S3Bucket)
+			}
+		}
+		if ackcompare.HasNilDifference(a.ko.Spec.Code.S3Key, b.ko.Spec.Code.S3Key) {
+			delta.Add("Spec.Code.S3Key", a.ko.Spec.Code.S3Key, b.ko.Spec.Code.S3Key)
+		} else if a.ko.Spec.Code.S3Key != nil && b.ko.Spec.Code.S3Key != nil {
+			if *a.ko.Spec.Code.S3Key != *b.ko.Spec.Code.S3Key {
+				delta.Add("Spec.Code.S3Key", a.ko.Spec.Code.S3Key, b.ko.Spec.Code.S3Key)
+			}
+		}
+		if ackcompare.HasNilDifference(a.ko.Spec.Code.S3ObjectVersion, b.ko.Spec.Code.S3ObjectVersion) {
+			delta.Add("Spec.Code.S3ObjectVersion", a.ko.Spec.Code.S3ObjectVersion, b.ko.Spec.Code.S3ObjectVersion)
+		} else if a.ko.Spec.Code.S3ObjectVersion != nil && b.ko.Spec.Code.S3ObjectVersion != nil {
+			if *a.ko.Spec.Code.S3ObjectVersion != *b.ko.Spec.Code.S3ObjectVersion {
+				delta.Add("Spec.Code.S3ObjectVersion", a.ko.Spec.Code.S3ObjectVersion, b.ko.Spec.Code.S3ObjectVersion)
+			}
+		}
+		if !bytes.Equal(a.ko.Spec.Code.ZipFile, b.ko.Spec.Code.ZipFile) {
+			delta.Add("Spec.Code.ZipFile", a.ko.Spec.Code.ZipFile, b.ko.Spec.Code.ZipFile)
+		}
+	}
 	if ackcompare.HasNilDifference(a.ko.Spec.CodeSigningConfigARN, b.ko.Spec.CodeSigningConfigARN) {
 		delta.Add("Spec.CodeSigningConfigARN", a.ko.Spec.CodeSigningConfigARN, b.ko.Spec.CodeSigningConfigARN)
 	} else if a.ko.Spec.CodeSigningConfigARN != nil && b.ko.Spec.CodeSigningConfigARN != nil {
diff --git a/pkg/resource/function/hooks.go b/pkg/resource/function/hooks.go
index 68221374..6b65318d 100644
--- a/pkg/resource/function/hooks.go
+++ b/pkg/resource/function/hooks.go
@@ -102,25 +102,15 @@ func (rm *resourceManager) customUpdateFunction(
 			}
 		}
 	}
-	if delta.DifferentAt("Spec.Architectures") {
-		err = rm.updateFunctionArchitectures(ctx, desired, latest)
-		if err != nil {
-			return nil, err
-		}
-	}
 	// Only try to update Spec.Code or Spec.Configuration at once. It is
 	// not correct to sequentially call UpdateFunctionConfiguration and
 	// UpdateFunctionCode because both of them can put the function in a
 	// Pending state.
 	switch {
-	case delta.DifferentAt("Spec.Code"):
-		err = rm.updateFunctionCode(ctx, desired, delta)
+	case delta.DifferentAt("Spec.Code.ImageURI") || delta.DifferentAt("Spec.Code.SHA256") || delta.DifferentAt("Spec.Architectures"):
+		err = rm.updateFunctionCode(ctx, desired, delta, latest)
 		if err != nil {
-			// If the source image is not available, we get an error like this:
-			// "InvalidParameterValueException: Source image 1234567890.dkr.ecr.us-east-2.amazonaws.com/my-lambda:my-tag does not exist. Provide a valid source image."
-			// Because this may be recoverable (i.e. the image may be pushed once a build completes),
-			// we requeue the function for reconciliation after one minute.
 			if strings.Contains(err.Error(), "Provide a valid source image.") {
 				return nil, requeueWaitWhileSourceImageDoesNotExist
 			} else {
@@ -131,6 +121,7 @@ func (rm *resourceManager) customUpdateFunction(
 		"Spec.Code",
 		"Spec.Tags",
 		"Spec.ReservedConcurrentExecutions",
+		"Spec.FunctionEventInvokeConfig",
 		"Spec.CodeSigningConfigARN"):
 		err = rm.updateFunctionConfiguration(ctx, desired, delta)
 		if err != nil {
@@ -377,16 +368,17 @@ func (rm *resourceManager) updateFunctionTags(
 	return nil
 }
 
-// updateFunctionArchitectures calls UpdateFunctionCode to update architecture for lambda
+// updateFunctionCode calls UpdateFunctionCode to update a specific lambda
 // function code.
-func (rm *resourceManager) updateFunctionArchitectures(
+func (rm *resourceManager) updateFunctionCode(
 	ctx context.Context,
 	desired *resource,
+	delta *ackcompare.Delta,
 	latest *resource,
 ) error {
 	var err error
 	rlog := ackrtlog.FromContext(ctx)
-	exit := rlog.Trace("rm.updateFunctionArchitectures")
+	exit := rlog.Trace("rm.updateFunctionCode")
 	defer exit(err)
 
 	dspec := desired.ko.Spec
@@ -400,60 +392,30 @@ func (rm *resourceManager) updateFunctionArchitectures(
 		input.Architectures = nil
 	}
 
-	if latest.ko.Spec.Code != nil {
-		if latest.ko.Spec.PackageType != nil && *latest.ko.Spec.PackageType == "Image" {
-			input.ImageUri = latest.ko.Spec.Code.ImageURI
-		} else if latest.ko.Spec.PackageType != nil && *latest.ko.Spec.PackageType == "Zip" {
-			input.S3Bucket = latest.ko.Spec.Code.S3Bucket
-			input.S3Key = latest.ko.Spec.Code.S3Key
-		}
-	}
-
-	_, err = rm.sdkapi.UpdateFunctionCodeWithContext(ctx, input)
-	rm.metrics.RecordAPICall("UPDATE", "UpdateFunctionArchitectures", err)
-	if err != nil {
-		return err
-	}
-
-	return nil
-}
-
-// updateFunctionsCode calls UpdateFunctionCode to update a specific lambda
-// function code.
-func (rm *resourceManager) updateFunctionCode(
-	ctx context.Context,
-	desired *resource,
-	delta *ackcompare.Delta,
-) error {
-	var err error
-	rlog := ackrtlog.FromContext(ctx)
-	exit := rlog.Trace("rm.updateFunctionCode")
-	defer exit(err)
-
-	if delta.DifferentAt("Spec.Code.S3Key") &&
-		!delta.DifferentAt("Spec.Code.S3Bucket") &&
-		!delta.DifferentAt("Spec.Code.S3ObjectVersion") &&
-		!delta.DifferentAt("Spec.Code.ImageURI") {
-		log := ackrtlog.FromContext(ctx)
-		log.Info("updating code.s3Key field is not currently supported.")
-		return nil
-	}
-
-	dspec := desired.ko.Spec
-	input := &svcsdk.UpdateFunctionCodeInput{
-		FunctionName: aws.String(*dspec.Name),
-	}
-	if dspec.Code != nil {
-		switch {
-		case dspec.Code.ImageURI != nil:
-			input.ImageUri = dspec.Code.ImageURI
-		case dspec.Code.S3Bucket != nil,
-			dspec.Code.S3Key != nil,
-			dspec.Code.S3ObjectVersion != nil:
-			input.S3Bucket = dspec.Code.S3Bucket
-			input.S3Key = dspec.Code.S3Key
-			input.S3ObjectVersion = dspec.Code.S3ObjectVersion
+	if delta.DifferentAt("Spec.Code.SHA256") && dspec.Code.SHA256 != nil {
+		if dspec.Code.S3Key != nil {
+			input.S3Key = aws.String(*dspec.Code.S3Key)
+		}
+		if dspec.Code.S3Bucket != nil {
+			input.S3Bucket = aws.String(*dspec.Code.S3Bucket)
+		}
+		if dspec.Code.S3ObjectVersion != nil {
+			input.S3ObjectVersion = aws.String(*dspec.Code.S3ObjectVersion)
+		}
+	} else if delta.DifferentAt("Spec.Code.ImageURI") && dspec.Code.ImageURI != nil {
+		if dspec.Code.ImageURI != nil {
+			input.ImageUri = aws.String(*dspec.Code.ImageURI)
+		}
+
+	} else { // We need to pass the latest code to the Update API call
+		// if the architecture changed but the code itself did not.
+		if latest.ko.Spec.PackageType != nil && *latest.ko.Spec.PackageType == "Image" {
+			input.ImageUri = latest.ko.Spec.Code.ImageURI
+		} else if latest.ko.Spec.PackageType != nil && *latest.ko.Spec.PackageType == "Zip" {
+			input.S3Bucket = latest.ko.Spec.Code.S3Bucket
+			input.S3Key = latest.ko.Spec.Code.S3Key
+		}
 	}
 }
@@ -501,36 +463,27 @@ func customPreCompare(
 	a *resource,
 	b *resource,
 ) {
+	// There is no need to compare the S3 Key/Bucket/ObjectVersion fields directly.
+	// sdkFind() works on a copy ('ko := r.ko.DeepCopy()') whose S3 fields are
+	// overwritten with whatever the API returns, and the API does not return the
+	// S3 Key/Bucket/ObjectVersion of the deployment package. The comparison would
+	// therefore never see a difference between desired and latest, and the update
+	// would be missed.
+
+	// To solve this, the new 'Code.SHA256' field stores the hash of the deployment
+	// package. Any change to S3 Key/Bucket/ObjectVersion implies a new hash, so the
+	// controller can compare the desired 'Code.SHA256' against the latest value and
+	// trigger the update.
+
 	if ackcompare.HasNilDifference(a.ko.Spec.Code, b.ko.Spec.Code) {
 		delta.Add("Spec.Code", a.ko.Spec.Code, b.ko.Spec.Code)
 	} else if a.ko.Spec.Code != nil && b.ko.Spec.Code != nil {
-		if ackcompare.HasNilDifference(a.ko.Spec.Code.ImageURI, b.ko.Spec.Code.ImageURI) {
-			delta.Add("Spec.Code.ImageURI", a.ko.Spec.Code.ImageURI, b.ko.Spec.Code.ImageURI)
-		} else if a.ko.Spec.Code.ImageURI != nil && b.ko.Spec.Code.ImageURI != nil {
-			if *a.ko.Spec.Code.ImageURI != *b.ko.Spec.Code.ImageURI {
-				delta.Add("Spec.Code.ImageURI", a.ko.Spec.Code.ImageURI, b.ko.Spec.Code.ImageURI)
-			}
-		}
-		//TODO(hialylmh) handle Spec.Code.S3bucket changes
-		// if ackcompare.HasNilDifference(a.ko.Spec.Code.S3Bucket, b.ko.Spec.Code.S3Bucket) {
-		// 	delta.Add("Spec.Code.S3Bucket", a.ko.Spec.Code.S3Bucket, b.ko.Spec.Code.S3Bucket)
-		// } else if a.ko.Spec.Code.S3Bucket != nil && b.ko.Spec.Code.S3Bucket != nil {
-		// 	if *a.ko.Spec.Code.S3Bucket != *b.ko.Spec.Code.S3Bucket {
-		// 		delta.Add("Spec.Code.S3Bucket", a.ko.Spec.Code.S3Bucket, b.ko.Spec.Code.S3Bucket)
-		// 	}
-		// }
-		if ackcompare.HasNilDifference(a.ko.Spec.Code.S3Key, b.ko.Spec.Code.S3Key) {
-			delta.Add("Spec.Code.S3Key", a.ko.Spec.Code.S3Key, b.ko.Spec.Code.S3Key)
-		} else if a.ko.Spec.Code.S3Key != nil && b.ko.Spec.Code.S3Key != nil {
-			if *a.ko.Spec.Code.S3Key != *b.ko.Spec.Code.S3Key {
-				delta.Add("Spec.Code.S3Key", a.ko.Spec.Code.S3Key, b.ko.Spec.Code.S3Key)
-			}
-		}
-		if ackcompare.HasNilDifference(a.ko.Spec.Code.S3ObjectVersion, b.ko.Spec.Code.S3ObjectVersion) {
-			delta.Add("Spec.Code.S3ObjectVersion", a.ko.Spec.Code.S3ObjectVersion, b.ko.Spec.Code.S3ObjectVersion)
-		} else if a.ko.Spec.Code.S3ObjectVersion != nil && b.ko.Spec.Code.S3ObjectVersion != nil {
-			if *a.ko.Spec.Code.S3ObjectVersion != *b.ko.Spec.Code.S3ObjectVersion {
-				delta.Add("Spec.Code.S3ObjectVersion", a.ko.Spec.Code.S3ObjectVersion, b.ko.Spec.Code.S3ObjectVersion)
+		if a.ko.Spec.PackageType != nil && *a.ko.Spec.PackageType == "Zip" {
+			if a.ko.Spec.Code.SHA256 != nil {
+				if ackcompare.HasNilDifference(a.ko.Spec.Code.SHA256, b.ko.Status.CodeSHA256) {
+					delta.Add("Spec.Code.SHA256", a.ko.Spec.Code.SHA256, b.ko.Status.CodeSHA256)
+				} else if a.ko.Spec.Code.SHA256 != nil && b.ko.Status.CodeSHA256 != nil {
+					if *a.ko.Spec.Code.SHA256 != *b.ko.Status.CodeSHA256 {
+						delta.Add("Spec.Code.SHA256", a.ko.Spec.Code.SHA256, b.ko.Status.CodeSHA256)
+					}
+				}
 			}
 		}
 	}
 }
diff --git a/test/e2e/resources/function_code_s3.yaml b/test/e2e/resources/function_code_s3.yaml
new file mode 100644
index 00000000..d0c1b830
--- /dev/null
+++ b/test/e2e/resources/function_code_s3.yaml
@@ -0,0 +1,19 @@
+apiVersion: lambda.services.k8s.aws/v1alpha1
+kind: Function
+metadata:
+  name: $FUNCTION_NAME
+  annotations:
+    services.k8s.aws/region: $AWS_REGION
+spec:
+  name: $FUNCTION_NAME
+  architectures: [$ARCHITECTURES]
+  code:
+    s3Bucket: $BUCKET_NAME
+    s3Key: $LAMBDA_FILE_NAME
+    sha256: $HASH
+  role: $LAMBDA_ROLE
+  runtime: python3.9
+  handler: main
+  description: function created by ACK lambda-controller e2e tests
+  reservedConcurrentExecutions: $RESERVED_CONCURRENT_EXECUTIONS
+  codeSigningConfigARN: "$CODE_SIGNING_CONFIG_ARN"
\ No newline at end of file
diff --git a/test/e2e/resources/lambda_function/updated_main.py b/test/e2e/resources/lambda_function/updated_main.py
new file mode 100644
index 00000000..3f830187
--- /dev/null
+++ b/test/e2e/resources/lambda_function/updated_main.py
@@ -0,0 +1,2 @@
+if __name__ == "__main__":
+    print("Updated Hello ACK!")
\ No newline at end of file
diff --git a/test/e2e/service_bootstrap.py b/test/e2e/service_bootstrap.py
index 79dc82bf..613c00fb 100644
--- a/test/e2e/service_bootstrap.py
+++ b/test/e2e/service_bootstrap.py
@@ -44,6 +44,11 @@
 LAMBDA_FUNCTION_FILE_PATH = f"./resources/lambda_function/{LAMBDA_FUNCTION_FILE}"
 LAMBDA_FUNCTION_FILE_PATH_ZIP = f"./resources/lambda_function/{LAMBDA_FUNCTION_FILE_ZIP}"
 
+LAMBDA_FUNCTION_UPDATED_FILE = "updated_main.py"
+LAMBDA_FUNCTION_UPDATED_FILE_ZIP = "updated_main.zip"
+LAMBDA_FUNCTION_UPDATED_FILE_PATH = f"./resources/lambda_function/{LAMBDA_FUNCTION_UPDATED_FILE}"
+LAMBDA_FUNCTION_UPDATED_FILE_PATH_ZIP = f"./resources/lambda_function/{LAMBDA_FUNCTION_UPDATED_FILE_ZIP}"
+
 AWS_SIGNING_PLATFORM_ID = "AWSLambda-SHA384-ECDSA"
 
 def zip_function_file(src: str, dst: str):
@@ -139,6 +144,12 @@ def service_bootstrap() -> Resources:
             LAMBDA_FUNCTION_FILE_PATH_ZIP,
             resources.FunctionsBucket.name,
         )
+
+        zip_function_file(LAMBDA_FUNCTION_UPDATED_FILE_PATH, LAMBDA_FUNCTION_UPDATED_FILE_PATH_ZIP)
+        upload_function_to_bucket(
+            LAMBDA_FUNCTION_UPDATED_FILE_PATH_ZIP,
+            resources.FunctionsBucket.name,
+        )
     except BootstrapFailureException as ex:
         exit(254)
     return resources
diff --git a/test/e2e/tests/test_function.py b/test/e2e/tests/test_function.py
index 0bb0edad..92390ab6 100644
--- a/test/e2e/tests/test_function.py
+++ b/test/e2e/tests/test_function.py
@@ -17,6 +17,8 @@
 import pytest
 import time
 import logging
+import hashlib
+import base64
 
 from acktest import tags
 from acktest.resources import random_suffix_name
@@ -26,7 +28,8 @@
 from e2e import service_marker, CRD_GROUP, CRD_VERSION, load_lambda_resource
 from e2e.replacement_values import REPLACEMENT_VALUES
 from e2e.bootstrap_resources import get_bootstrap_resources
-from e2e.service_bootstrap import LAMBDA_FUNCTION_FILE_ZIP
+from e2e.service_bootstrap import LAMBDA_FUNCTION_FILE_ZIP, LAMBDA_FUNCTION_FILE_PATH_ZIP
+from e2e.service_bootstrap import LAMBDA_FUNCTION_UPDATED_FILE_ZIP, LAMBDA_FUNCTION_UPDATED_FILE_PATH_ZIP
 from e2e.tests.helper import LambdaValidator
 
 RESOURCE_PLURAL = "functions"
@@ -864,5 +867,173 @@ def test_function_event_invoke_config(self, lambda_client):
 
         time.sleep(DELETE_WAIT_AFTER_SECONDS)
 
+        # Check Lambda function doesn't exist
+        assert not lambda_validator.function_exists(resource_name)
+
+    def test_function_code_s3(self, lambda_client):
+        resource_name = random_suffix_name("functioncodes3", 24)
+
+        resources = get_bootstrap_resources()
+        logging.debug(resources)
+
+        archive_1 = open(LAMBDA_FUNCTION_FILE_PATH_ZIP, 'rb')
+        readFile_1 = archive_1.read()
+        hash_1 = hashlib.sha256(readFile_1)
+        binary_hash_1 = hash_1.digest()
+        base64_hash_1 = base64.b64encode(binary_hash_1).decode('utf-8')
+
+        archive_2 = open(LAMBDA_FUNCTION_UPDATED_FILE_PATH_ZIP, 'rb')
+        readFile_2 = archive_2.read()
+        hash_2 = hashlib.sha256(readFile_2)
+        binary_hash_2 = hash_2.digest()
+        base64_hash_2 = base64.b64encode(binary_hash_2).decode('utf-8')
+
+        replacements = REPLACEMENT_VALUES.copy()
+        replacements["FUNCTION_NAME"] = resource_name
+        replacements["BUCKET_NAME"] = resources.FunctionsBucket.name
+        replacements["LAMBDA_ROLE"] = resources.BasicRole.arn
+        replacements["LAMBDA_FILE_NAME"] = LAMBDA_FUNCTION_FILE_ZIP
+        replacements["RESERVED_CONCURRENT_EXECUTIONS"] = "0"
+        replacements["CODE_SIGNING_CONFIG_ARN"] = ""
+        replacements["AWS_REGION"] = get_region()
+        replacements["ARCHITECTURES"] = 'x86_64'
+        replacements["HASH"] = base64_hash_1
+
+        # Load Lambda CR
+        resource_data = load_lambda_resource(
+            "function_code_s3",
+            additional_replacements=replacements,
+        )
+        logging.debug(resource_data)
+
+        # Create k8s resource
+        ref = k8s.CustomResourceReference(
+            CRD_GROUP, CRD_VERSION, RESOURCE_PLURAL,
+            resource_name, namespace="default",
+        )
+        k8s.create_custom_resource(ref, resource_data)
+        cr = k8s.wait_resource_consumed_by_controller(ref)
+
+        assert cr is not None
+        assert k8s.get_resource_exists(ref)
+
+        time.sleep(CREATE_WAIT_AFTER_SECONDS)
+
+        cr = k8s.wait_resource_consumed_by_controller(ref)
+
+        lambda_validator = LambdaValidator(lambda_client)
+
+        # Assert that the original code.s3Bucket and code.s3Key are still part of
+        # the function's CR
+        assert cr["spec"]["code"]["s3Bucket"] == resources.FunctionsBucket.name
+        assert cr["spec"]["code"]["s3Key"] == LAMBDA_FUNCTION_FILE_ZIP
+
+        # Check Lambda function exists
+        assert lambda_validator.function_exists(resource_name)
+
+        # Update cr
+        cr["spec"]["code"]["sha256"] = base64_hash_2
+        cr["spec"]["code"]["s3Key"] = LAMBDA_FUNCTION_UPDATED_FILE_ZIP
+
+        # Patch k8s resource
+        k8s.patch_custom_resource(ref, cr)
+        time.sleep(UPDATE_WAIT_AFTER_SECONDS)
+
+        # Check function updated fields
+        function = lambda_validator.get_function(resource_name)
+        assert function is not None
+        assert function["Configuration"]["CodeSha256"] == base64_hash_2
+
+        # Delete k8s resource
+        _, deleted = k8s.delete_custom_resource(ref)
+        assert deleted is True
+
+        time.sleep(DELETE_WAIT_AFTER_SECONDS)
+
+        # Check Lambda function doesn't exist
+        assert not lambda_validator.function_exists(resource_name)
+
+    def test_function_update_code_and_architecture(self, lambda_client):
+        resource_name = random_suffix_name("functionupdatecode", 24)
+
+        resources = get_bootstrap_resources()
+        logging.debug(resources)
+
+        archive_1 = open(LAMBDA_FUNCTION_FILE_PATH_ZIP, 'rb')
+        readFile_1 = archive_1.read()
+        hash_1 = hashlib.sha256(readFile_1)
+        binary_hash_1 = hash_1.digest()
+        base64_hash_1 = base64.b64encode(binary_hash_1).decode('utf-8')
+
+        archive_2 = open(LAMBDA_FUNCTION_UPDATED_FILE_PATH_ZIP, 'rb')
+        readFile_2 = archive_2.read()
+        hash_2 = hashlib.sha256(readFile_2)
+        binary_hash_2 = hash_2.digest()
+        base64_hash_2 = base64.b64encode(binary_hash_2).decode('utf-8')
+
+        replacements = REPLACEMENT_VALUES.copy()
+        replacements["FUNCTION_NAME"] = resource_name
+        replacements["BUCKET_NAME"] = resources.FunctionsBucket.name
+        replacements["LAMBDA_ROLE"] = resources.BasicRole.arn
+        replacements["LAMBDA_FILE_NAME"] = LAMBDA_FUNCTION_FILE_ZIP
+        replacements["RESERVED_CONCURRENT_EXECUTIONS"] = "0"
+        replacements["CODE_SIGNING_CONFIG_ARN"] = ""
+        replacements["AWS_REGION"] = get_region()
+        replacements["ARCHITECTURES"] = 'x86_64'
+        replacements["HASH"] = base64_hash_1
+
+        # Load Lambda CR
+        resource_data = load_lambda_resource(
+            "function_code_s3",
+            additional_replacements=replacements,
+        )
+        logging.debug(resource_data)
+
+        # Create k8s resource
+        ref = k8s.CustomResourceReference(
+            CRD_GROUP, CRD_VERSION, RESOURCE_PLURAL,
+            resource_name, namespace="default",
+        )
+        k8s.create_custom_resource(ref, resource_data)
+        cr = k8s.wait_resource_consumed_by_controller(ref)
+
+        assert cr is not None
+        assert k8s.get_resource_exists(ref)
+
+        time.sleep(CREATE_WAIT_AFTER_SECONDS)
+
+        cr = k8s.wait_resource_consumed_by_controller(ref)
+
+        lambda_validator = LambdaValidator(lambda_client)
+
+        # Assert that the original code.s3Bucket and code.s3Key are still part of
+        # the function's CR
+        assert cr["spec"]["code"]["s3Bucket"] == resources.FunctionsBucket.name
+        assert cr["spec"]["code"]["s3Key"] == LAMBDA_FUNCTION_FILE_ZIP
+
+        # Check Lambda function exists
+        assert lambda_validator.function_exists(resource_name)
+
+        # Update cr
+        cr["spec"]["code"]["sha256"] = base64_hash_2
+        cr["spec"]["code"]["s3Key"] = LAMBDA_FUNCTION_UPDATED_FILE_ZIP
+        cr["spec"]["architectures"] = ['arm64']
+
+        # Patch k8s resource
+        k8s.patch_custom_resource(ref, cr)
+        time.sleep(UPDATE_WAIT_AFTER_SECONDS)
+
+        # Check function updated fields
+        function = lambda_validator.get_function(resource_name)
+        assert function is not None
+        assert function["Configuration"]["CodeSha256"] == base64_hash_2
+        assert function["Configuration"]["Architectures"] == ['arm64']
+
+        # Delete k8s resource
+        _, deleted = k8s.delete_custom_resource(ref)
+        assert deleted is True
+
+        time.sleep(DELETE_WAIT_AFTER_SECONDS)
+
         # Check Lambda function doesn't exist
         assert not lambda_validator.function_exists(resource_name)
\ No newline at end of file
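
Note for reviewers: the new spec.code.sha256 field carries the base64-encoded SHA-256 digest of the deployment package, which is the same value Lambda reports back as Configuration.CodeSha256 and what the e2e tests above compute with hashlib and base64. A minimal sketch of how a user could produce the value before setting it on a Function CR; the file path and helper name are placeholders, not part of this change:

    import base64
    import hashlib

    def package_sha256(zip_path: str) -> str:
        # Return the base64-encoded SHA-256 digest of the deployment package,
        # matching the CodeSha256 value Lambda reports for the function.
        with open(zip_path, "rb") as archive:
            digest = hashlib.sha256(archive.read()).digest()
        return base64.b64encode(digest).decode("utf-8")

    # Example: compute the hash of the artifact uploaded to S3 and put it in
    # spec.code.sha256 (the e2e test YAML templates it in via $HASH).
    print(package_sha256("function.zip"))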