From 55e99d3287cfb1a4f860145dad71a71d0bd5cfd2 Mon Sep 17 00:00:00 2001
From: fred
Date: Tue, 17 Dec 2024 10:46:08 -0800
Subject: [PATCH] Adding ARM Support to build (#81)

* Adding ARM Support (#10)

Adding ARM support to build script

* addressing ci
---
 package.py                | 48 ++++++++++++++++++++++-----------------
 serverless/template.yml   |  7 +++---
 timestream/client.go      | 15 ++++++++----
 timestream/client_test.go | 17 ++++----------
 4 files changed, 45 insertions(+), 42 deletions(-)

diff --git a/package.py b/package.py
index bb72c8a..489eb76 100644
--- a/package.py
+++ b/package.py
@@ -42,30 +42,30 @@ def create_directory(dir_name):
     copy_tree("documentation", dir_name + "/documentation")
 
 
-def run_build(target_bin):
+def run_build(target_bin, arch):
     """
     Compiles a binary for the target OS.
 
     :type target_bin: str
     :param target_bin: The target OS for the binary.
+    :type arch: str
+    :param arch: The target architecture.
     :return: The name of the binary.
     """
-    os.environ['GOOS'] = target_bin
-    arch = "amd64"
-    os.environ['GOARCH'] = arch
-
-    if os.getenv('GOOS') is None or os.getenv('GOARCH') is None:
-        logging.error("Environment variables GOOS or GOARCH are not set.")
+    if not target_bin or not arch:
+        logging.error("Target binary or architecture not specified.")
         return None
 
     # Required for Lambda runtime platform.al2023
     file_name = "bootstrap"
-    build_command = "go build -o {}/{}".format(target_bin, file_name)
+    build_command = f"GOOS={target_bin} GOARCH={arch} go build -o {target_bin}/{file_name}"
    if target_bin == "windows":
         build_command += ".exe"
 
-    logging.debug("Compiling binary for {} with command: {}".format(target_bin, build_command))
-    subprocess.Popen(build_command, shell=True, stdout=subprocess.PIPE)
+    logging.debug("Compiling binary for {}-{} with command: {}".format(target_bin, arch, build_command))
+
+    process = subprocess.Popen(build_command, shell=True, stdout=subprocess.PIPE)
+    process.communicate()
 
     return file_name
 
@@ -111,19 +111,22 @@ def zip_dir(file_name):
     shutil.make_archive(file_name, 'zip', "linux")
 
 
-def package_sam_template(linux_bin_name, source_dir, version):
+def package_sam_template(linux_bin_name, arch, source_dir, version):
     """
     Package all relevant artifacts for serverless deployment in a tarball.
 
     :type linux_bin_name: str
     :param linux_bin_name: The name of the precompiled binary for Linux.
+    :type arch: str
+    :param arch: The target architecture.
     :type source_dir: str
     :param source_dir: The directory containing the SAM template and its documentation.
     :type version: str
     :param version: The artifact version.
     :return: None
     """
-    tarfile_name = "timestream-prometheus-connector-serverless-application-{version}.tar.gz".format(version=version)
+    tarfile_name = "timestream-prometheus-connector-serverless-application-{arch}-{version}.tar.gz".format(
+        arch=arch, version=version)
     linux_zip = "{file_name}.zip".format(file_name=linux_bin_name)
 
     with tarfile.open(tarfile_name, "w:gz") as tar:
@@ -149,24 +152,25 @@ def tar_dir(file_name, dir_name):
     subprocess.Popen(tar_command, shell=True, stdout=subprocess.PIPE)
 
 
-def create_tarball(target_folder, version):
+def create_tarball(target_folder, arch, version):
     """
     Create a tarball containing a precompiled binary and all documentation.
 
     :type target_folder: str
     :param target_folder: The temporary folder containing the precompiled binary and all documentation.
+    :type arch: str
+    :param arch: The target architecture.
     :type version: str
     :param version: The version of the Prometheus Connector.
     :return: The name of the precompiled binary.
""" create_directory(target_folder) - bin_name = run_build(target_folder) + bin_name = run_build(target_folder, arch) if bin_name is None: logging.error("Cannot create binary for packaging.") return check_binary(target_folder, bin_name) - arch = "amd64" archive_name = "timestream-prometheus-connector-{}-{}-{}".format(target_folder, arch, version) tar_dir(archive_name, target_folder) return archive_name @@ -179,15 +183,17 @@ def create_tarball(target_folder, version): connector_version = args.version logging.basicConfig(level=logging.INFO) - targets = ["windows", "darwin"] - linux_file_name = "" + targets = ["windows", "darwin", "linux"] + archs = ["amd64", "arm64"] + try: for target in targets: - create_tarball(target, connector_version) + for arch in archs: + bin_name = create_tarball(target, arch, connector_version) + if target == "linux": + zip_dir(bin_name) + package_sam_template(bin_name, arch, "./serverless", connector_version) - bin_name = create_tarball("linux", connector_version) - zip_dir(bin_name) - package_sam_template(bin_name, "./serverless", connector_version) logging.info("Done running script.") except OSError: diff --git a/serverless/template.yml b/serverless/template.yml index 6bdcd07..f8b1bb7 100644 --- a/serverless/template.yml +++ b/serverless/template.yml @@ -91,7 +91,7 @@ Resources: - "logs:CreateLogStream" - "logs:PutLogEvents" Resource: - Fn::Sub: "arn:aws:logs:${AWS::Region}:${AWS::AccountId}:log-group:/aws/lambda/PrometheusTimestreamConnector-LambdaFunction-*:*" + Fn::Sub: "arn:aws:logs:${AWS::Region}:${AWS::AccountId}:log-group:/aws/lambda/${AWS::StackName}-LambdaFunction-*:*" - Effect: "Allow" Action: - "timestream:DescribeEndpoints" @@ -106,14 +106,15 @@ Resources: Type: "AWS::Serverless::Function" Properties: Role: !GetAtt IAMLambdaRole.Arn - CodeUri: + CodeUri: Bucket: !Sub 'timestreamassets-${AWS::Region}' - Key: "timestream-prometheus-connector/timestream-prometheus-connector-linux-amd64-1.3.0.zip" + Key: "timestream-prometheus-connector/timestream-prometheus-connector-linux-arm64-1.3.0.zip" Description: "Prometheus remote storage connector for Amazon Timestream" Handler: "bootstrap" MemorySize: !Ref MemorySize Timeout: !Ref LambdaTimeoutInSeconds Runtime: "provided.al2023" + Architectures: ["arm64"] Environment: Variables: default_database: !Ref DefaultDatabase diff --git a/timestream/client.go b/timestream/client.go index dc02073..6557638 100644 --- a/timestream/client.go +++ b/timestream/client.go @@ -580,14 +580,14 @@ func (qc *QueryClient) buildCommands(queries []*prompb.Query) ([]*timestreamquer switch matcher.Type { case prompb.LabelMatcher_EQ: - matchers = append(matchers, fmt.Sprintf("%s = '%s'", matcherName, matcher.Value)) + matchers = append(matchers, fmt.Sprintf("\"%s\" = '%s'", matcherName, matcher.Value)) case prompb.LabelMatcher_NEQ: - matchers = append(matchers, fmt.Sprintf("%s != '%s'", matcherName, matcher.Value)) + matchers = append(matchers, fmt.Sprintf("\"%s\" != '%s'", matcherName, matcher.Value)) case prompb.LabelMatcher_RE: - matchers = append(matchers, fmt.Sprintf("REGEXP_LIKE(%s, '%s')", matcherName, matcher.Value)) + matchers = append(matchers, fmt.Sprintf("REGEXP_LIKE(\"%s\", '%s')", matcherName, matcher.Value)) isRelatedToRegex = true case prompb.LabelMatcher_NRE: - matchers = append(matchers, fmt.Sprintf("NOT REGEXP_LIKE(%s, '%s')", matcherName, matcher.Value)) + matchers = append(matchers, fmt.Sprintf("NOT REGEXP_LIKE(\"%s\", '%s')", matcherName, matcher.Value)) isRelatedToRegex = true default: err := 
errors.NewUnknownMatcherError() @@ -615,7 +615,12 @@ func (qc *QueryClient) buildCommands(queries []*prompb.Query) ([]*timestreamquer } timestreamQueries = append(timestreamQueries, ×treamquery.QueryInput{ - QueryString: aws.String(fmt.Sprintf("SELECT * FROM %s.%s WHERE %v", qc.client.defaultDataBase, qc.client.defaultTable, strings.Join(matchers, " AND "))), + QueryString: aws.String(fmt.Sprintf( + "SELECT * FROM \"%s\".\"%s\" WHERE %s", + qc.client.defaultDataBase, + qc.client.defaultTable, + strings.Join(matchers, " AND "), + )), }) } diff --git a/timestream/client_test.go b/timestream/client_test.go index 794e033..cd68521 100644 --- a/timestream/client_test.go +++ b/timestream/client_test.go @@ -167,7 +167,7 @@ func TestQueryClientRead(t *testing.T) { } queryInput := ×treamquery.QueryInput{ - QueryString: aws.String(fmt.Sprintf("SELECT * FROM %s.%s WHERE %s = '%s' AND %s BETWEEN FROM_UNIXTIME(%d) AND FROM_UNIXTIME(%d)", + QueryString: aws.String(fmt.Sprintf("SELECT * FROM \"%s\".\"%s\" WHERE \"%s\" = '%s' AND %s BETWEEN FROM_UNIXTIME(%d) AND FROM_UNIXTIME(%d)", mockDatabaseName, mockTableName, measureNameColumnName, metricName, timeColumnName, startUnixInSeconds, endUnixInSeconds)), } @@ -264,7 +264,7 @@ func TestQueryClientRead(t *testing.T) { expectedBuildCommand := []*timestreamquery.QueryInput{ { - QueryString: aws.String(fmt.Sprintf("SELECT * FROM %s.%s WHERE %s = '%s' AND quantile != '%s' AND REGEXP_LIKE(job, '%s') AND NOT REGEXP_LIKE(instance, '%s') AND %s BETWEEN FROM_UNIXTIME(%d) AND FROM_UNIXTIME(%d)", + QueryString: aws.String(fmt.Sprintf("SELECT * FROM \"%s\".\"%s\" WHERE \"%s\" = '%s' AND \"quantile\" != '%s' AND REGEXP_LIKE(\"job\", '%s') AND NOT REGEXP_LIKE(\"instance\", '%s') AND %s BETWEEN FROM_UNIXTIME(%d) AND FROM_UNIXTIME(%d)", mockDatabaseName, mockTableName, measureNameColumnName, metricName, quantile, jobRegex, instanceRegex, timeColumnName, startUnixInSeconds, endUnixInSeconds)), }, } @@ -297,17 +297,8 @@ func TestQueryClientRead(t *testing.T) { } queryInputWithInvalidRegex := ×treamquery.QueryInput{ - QueryString: aws.String(fmt.Sprintf( - "SELECT * FROM %s.%s WHERE %s = '%s' AND REGEXP_LIKE(job, '%s') AND %s BETWEEN FROM_UNIXTIME(%d) AND FROM_UNIXTIME(%d)", - mockDatabaseName, - mockTableName, - measureNameColumnName, - metricName, - invalidRegex, - timeColumnName, - startUnixInSeconds, - endUnixInSeconds, - )), + QueryString: aws.String(fmt.Sprintf("SELECT * FROM \"%s\".\"%s\" WHERE \"%s\" = '%s' AND REGEXP_LIKE(\"job\", '%s') AND %s BETWEEN FROM_UNIXTIME(%d) AND FROM_UNIXTIME(%d)", + mockDatabaseName, mockTableName, measureNameColumnName, metricName, invalidRegex, timeColumnName, startUnixInSeconds, endUnixInSeconds)), } t.Run("success", func(t *testing.T) {