From 12510ff657b7b9e2ea4a40c24529fad5285fac83 Mon Sep 17 00:00:00 2001 From: Paolo Chila Date: Tue, 9 Jan 2024 08:49:23 +0100 Subject: [PATCH 1/9] map paths when upgrading using .tar.gz packages --- .../agent/application/upgrade/step_unpack.go | 109 +++++++++++++++++- 1 file changed, 108 insertions(+), 1 deletion(-) diff --git a/internal/pkg/agent/application/upgrade/step_unpack.go b/internal/pkg/agent/application/upgrade/step_unpack.go index 4f5b0bd7440..d72b5f9aa1a 100644 --- a/internal/pkg/agent/application/upgrade/step_unpack.go +++ b/internal/pkg/agent/application/upgrade/step_unpack.go @@ -7,6 +7,7 @@ package upgrade import ( "archive/tar" "archive/zip" + "bytes" "compress/gzip" "fmt" "io" @@ -19,6 +20,7 @@ import ( "github.com/hashicorp/go-multierror" "github.com/elastic/elastic-agent/internal/pkg/agent/errors" + v1 "github.com/elastic/elastic-agent/pkg/api/v1" "github.com/elastic/elastic-agent/pkg/core/logger" ) @@ -53,6 +55,22 @@ func unzip(log *logger.Logger, archivePath, dataDir string) (string, error) { fileNamePrefix := strings.TrimSuffix(filepath.Base(archivePath), ".zip") + "/" // omitting `elastic-agent-{version}-{os}-{arch}/` in filename + pm := pathMapper{} + manifestFile, err := r.Open("manifest.yaml") + if err != nil && !errors.Is(err, fs.ErrNotExist) { + // we got a real error looking up for the manifest + return "", fmt.Errorf("looking up manifest in package: %w", err) + } + if err == nil { + // load manifest + defer manifestFile.Close() + manifest, err := v1.ParseManifest(manifestFile) + if err != nil { + return "", fmt.Errorf("parsing package manifest: %w", err) + } + pm.mappings = manifest.Package.PathMappings + } + unpackFile := func(f *zip.File) (err error) { rc, err := f.Open() if err != nil { @@ -127,6 +145,27 @@ func unzip(log *logger.Logger, archivePath, dataDir string) (string, error) { } func untar(log *logger.Logger, version string, archivePath, dataDir string) (string, error) { + + // Look up manifest in the archive and prepare path mappings, if any + pm := pathMapper{} + + // quickly open the archive and look up manifest.yaml file + manifestReader, err := getManifestFromTar(archivePath) + + if err != nil { + return "", fmt.Errorf("looking for package manifest: %w", err) + } + + if manifestReader != nil { + manifest, err := v1.ParseManifest(manifestReader) + if err != nil { + return "", fmt.Errorf("parsing package manifest: %w", err) + } + + // set the path mappings + pm.mappings = manifest.Package.PathMappings + } + r, err := os.Open(archivePath) if err != nil { return "", errors.New(fmt.Sprintf("artifact for 'elastic-agent' version '%s' could not be found at '%s'", version, archivePath), errors.TypeFilesystem, errors.M(errors.MetaKeyPath, archivePath)) @@ -141,7 +180,7 @@ func untar(log *logger.Logger, version string, archivePath, dataDir string) (str tr := tar.NewReader(zr) var rootDir string var hash string - fileNamePrefix := strings.TrimSuffix(filepath.Base(archivePath), ".tar.gz") + "/" // omitting `elastic-agent-{version}-{os}-{arch}/` in filename + fileNamePrefix := getFileNamePrefix(archivePath) // go through all the content of a tar archive // if elastic-agent.active.commit file is found, get commit of the version unpacked @@ -173,6 +212,12 @@ func untar(log *logger.Logger, version string, archivePath, dataDir string) (str continue } + // map the filename + fileName = pm.Map(fileName) + + // we should check that the path is a local one but since we discard anything that is not under "data/" we can + // skip the additional check + // skip 
everything outside data/ if !strings.HasPrefix(fileName, "data/") { continue @@ -236,9 +281,71 @@ func untar(log *logger.Logger, version string, archivePath, dataDir string) (str return hash, nil } +func getFileNamePrefix(archivePath string) string { + return strings.TrimSuffix(filepath.Base(archivePath), ".tar.gz") + "/" // omitting `elastic-agent-{version}-{os}-{arch}/` in filename +} + func validFileName(p string) bool { if p == "" || strings.Contains(p, `\`) || strings.HasPrefix(p, "/") || strings.Contains(p, "../") { return false } return true } + +type pathMapper struct { + mappings []map[string]string +} + +func (pm pathMapper) Map(path string) string { + for _, mapping := range pm.mappings { + for pkgPath, mappedPath := range mapping { + if strings.HasPrefix(path, pkgPath) { + return filepath.Join(mappedPath, path[len(pkgPath):]) + } + } + } + return path +} + +func getManifestFromTar(archivePath string) (io.Reader, error) { + r, err := os.Open(archivePath) + if err != nil { + return nil, fmt.Errorf("opening package %s: %w", archivePath, err) + } + defer r.Close() + + zr, err := gzip.NewReader(r) + if err != nil { + return nil, fmt.Errorf("package %s does not seem to have a valid gzip compression: %w", archivePath, err) + } + + tr := tar.NewReader(zr) + prefix := getFileNamePrefix(archivePath) + + // go through all the content of a tar archive + // if manifest.yaml is found, read the contents and return a bytereader, nil otherwise , + for { + f, err := tr.Next() + if errors.Is(err, io.EOF) { + break + } + + if err != nil { + return nil, fmt.Errorf("reading archive: %w", err) + } + + fileName := strings.TrimPrefix(f.Name, prefix) + if fileName == "manifest.yaml" { + manifestBytes, err := io.ReadAll(tr) + if err != nil { + return nil, fmt.Errorf("reading manifest bytes: %w", err) + } + + reader := bytes.NewReader(manifestBytes) + return reader, nil + } + + } + + return nil, nil +} From 3d8a1ce1e5d4a0287e6dd9b6b30c52542cd1f4fa Mon Sep 17 00:00:00 2001 From: Paolo Chila Date: Wed, 10 Jan 2024 11:16:36 +0100 Subject: [PATCH 2/9] return structured output from unpack step during upgrade --- .../agent/application/upgrade/step_unpack.go | 70 ++++++++++++------- 1 file changed, 45 insertions(+), 25 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/step_unpack.go b/internal/pkg/agent/application/upgrade/step_unpack.go index d72b5f9aa1a..730ce7d7d8c 100644 --- a/internal/pkg/agent/application/upgrade/step_unpack.go +++ b/internal/pkg/agent/application/upgrade/step_unpack.go @@ -24,8 +24,15 @@ import ( "github.com/elastic/elastic-agent/pkg/core/logger" ) +type unpackResult struct { + hash string + // TODO add mapped path of executable + // agentExecutable string + versionedHome string +} + // unpack unpacks archive correctly, skips root (symlink, config...) 
unpacks data/* -func (u *Upgrader) unpack(version, archivePath, dataDir string) (string, error) { +func (u *Upgrader) unpack(version, archivePath, dataDir string) (unpackResult, error) { // unpack must occur in directory that holds the installation directory // or the extraction will be double nested var hash string @@ -38,37 +45,42 @@ func (u *Upgrader) unpack(version, archivePath, dataDir string) (string, error) if err != nil { u.log.Errorw("Failed to unpack upgrade artifact", "error.message", err, "version", version, "file.path", archivePath, "hash", hash) - return "", err + return unpackResult{}, err } u.log.Infow("Unpacked upgrade artifact", "version", version, "file.path", archivePath, "hash", hash) - return hash, nil + return unpackResult{ + hash: hash, + versionedHome: "", + }, nil } -func unzip(log *logger.Logger, archivePath, dataDir string) (string, error) { +func unzip(log *logger.Logger, archivePath, dataDir string) (unpackResult, error) { var hash, rootDir string r, err := zip.OpenReader(archivePath) if err != nil { - return "", err + return unpackResult{}, err } defer r.Close() fileNamePrefix := strings.TrimSuffix(filepath.Base(archivePath), ".zip") + "/" // omitting `elastic-agent-{version}-{os}-{arch}/` in filename pm := pathMapper{} + versionedHome := "" manifestFile, err := r.Open("manifest.yaml") if err != nil && !errors.Is(err, fs.ErrNotExist) { // we got a real error looking up for the manifest - return "", fmt.Errorf("looking up manifest in package: %w", err) + return unpackResult{}, fmt.Errorf("looking up manifest in package: %w", err) } if err == nil { // load manifest defer manifestFile.Close() manifest, err := v1.ParseManifest(manifestFile) if err != nil { - return "", fmt.Errorf("parsing package manifest: %w", err) + return unpackResult{}, fmt.Errorf("parsing package manifest: %w", err) } pm.mappings = manifest.Package.PathMappings + versionedHome = filepath.Clean(manifest.Package.VersionedHome) } unpackFile := func(f *zip.File) (err error) { @@ -137,14 +149,17 @@ func unzip(log *logger.Logger, archivePath, dataDir string) (string, error) { } if err := unpackFile(f); err != nil { - return "", err + return unpackResult{}, err } } - return hash, nil + return unpackResult{ + hash: hash, + versionedHome: "", + }, nil } -func untar(log *logger.Logger, version string, archivePath, dataDir string) (string, error) { +func untar(log *logger.Logger, version string, archivePath, dataDir string) (unpackResult, error) { // Look up manifest in the archive and prepare path mappings, if any pm := pathMapper{} @@ -153,28 +168,30 @@ func untar(log *logger.Logger, version string, archivePath, dataDir string) (str manifestReader, err := getManifestFromTar(archivePath) if err != nil { - return "", fmt.Errorf("looking for package manifest: %w", err) + return unpackResult{}, fmt.Errorf("looking for package manifest: %w", err) } + versionedHome := "" if manifestReader != nil { manifest, err := v1.ParseManifest(manifestReader) if err != nil { - return "", fmt.Errorf("parsing package manifest: %w", err) + return unpackResult{}, fmt.Errorf("parsing package manifest: %w", err) } // set the path mappings pm.mappings = manifest.Package.PathMappings + versionedHome = filepath.Clean(manifest.Package.VersionedHome) } r, err := os.Open(archivePath) if err != nil { - return "", errors.New(fmt.Sprintf("artifact for 'elastic-agent' version '%s' could not be found at '%s'", version, archivePath), errors.TypeFilesystem, errors.M(errors.MetaKeyPath, archivePath)) + return unpackResult{}, 
errors.New(fmt.Sprintf("artifact for 'elastic-agent' version '%s' could not be found at '%s'", version, archivePath), errors.TypeFilesystem, errors.M(errors.MetaKeyPath, archivePath)) } defer r.Close() zr, err := gzip.NewReader(r) if err != nil { - return "", errors.New("requires gzip-compressed body", err, errors.TypeFilesystem) + return unpackResult{}, errors.New("requires gzip-compressed body", err, errors.TypeFilesystem) } tr := tar.NewReader(zr) @@ -192,11 +209,11 @@ func untar(log *logger.Logger, version string, archivePath, dataDir string) (str break } if err != nil { - return "", err + return unpackResult{}, err } if !validFileName(f.Name) { - return "", errors.New("tar contained invalid filename: %q", f.Name, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, f.Name)) + return unpackResult{}, errors.New("tar contained invalid filename: %q", f.Name, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, f.Name)) } //get hash @@ -205,7 +222,7 @@ func untar(log *logger.Logger, version string, archivePath, dataDir string) (str if fileName == agentCommitFile { hashBytes, err := io.ReadAll(tr) if err != nil || len(hashBytes) < hashLen { - return "", err + return unpackResult{}, err } hash = string(hashBytes[:hashLen]) @@ -239,13 +256,13 @@ func untar(log *logger.Logger, version string, archivePath, dataDir string) (str // just to be sure, it should already be created by Dir type // remove any world permissions from the directory if err = os.MkdirAll(filepath.Dir(abs), 0o750); err != nil { - return "", errors.New(err, "TarInstaller: creating directory for file "+abs, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) + return unpackResult{}, errors.New(err, "TarInstaller: creating directory for file "+abs, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) } // remove any world permissions from the file wf, err := os.OpenFile(abs, os.O_RDWR|os.O_CREATE|os.O_TRUNC, mode.Perm()&0770) if err != nil { - return "", errors.New(err, "TarInstaller: creating file "+abs, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) + return unpackResult{}, errors.New(err, "TarInstaller: creating file "+abs, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) } //nolint:gosec // legacy @@ -254,7 +271,7 @@ func untar(log *logger.Logger, version string, archivePath, dataDir string) (str err = closeErr } if err != nil { - return "", fmt.Errorf("TarInstaller: error writing to %s: %w", abs, err) + return unpackResult{}, fmt.Errorf("TarInstaller: error writing to %s: %w", abs, err) } case mode.IsDir(): log.Debugw("Unpacking directory", "archive", "tar", "file.path", abs) @@ -262,23 +279,26 @@ func untar(log *logger.Logger, version string, archivePath, dataDir string) (str _, err = os.Stat(abs) if errors.Is(err, fs.ErrNotExist) { if err := os.MkdirAll(abs, mode.Perm()&0770); err != nil { - return "", errors.New(err, "TarInstaller: creating directory for file "+abs, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) + return unpackResult{}, errors.New(err, "TarInstaller: creating directory for file "+abs, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) } } else if err != nil { - return "", errors.New(err, "TarInstaller: stat() directory for file "+abs, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) + return unpackResult{}, errors.New(err, "TarInstaller: stat() directory for file "+abs, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) } else { // set the appropriate permissions err = os.Chmod(abs, mode.Perm()&0o770) if err != nil { - return "", 
errors.New(err, fmt.Sprintf("TarInstaller: setting permissions %O for directory %q", mode.Perm()&0o770, abs), errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) + return unpackResult{}, errors.New(err, fmt.Sprintf("TarInstaller: setting permissions %O for directory %q", mode.Perm()&0o770, abs), errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) } } default: - return "", errors.New(fmt.Sprintf("tar file entry %s contained unsupported file type %v", fileName, mode), errors.TypeFilesystem, errors.M(errors.MetaKeyPath, fileName)) + return unpackResult{}, errors.New(fmt.Sprintf("tar file entry %s contained unsupported file type %v", fileName, mode), errors.TypeFilesystem, errors.M(errors.MetaKeyPath, fileName)) } } - return hash, nil + return unpackResult{ + hash: hash, + versionedHome: versionedHome, + }, nil } func getFileNamePrefix(archivePath string) string { From 097d0ad5e708e6ea616bce3adefa39400c375fc7 Mon Sep 17 00:00:00 2001 From: Paolo Chila Date: Wed, 10 Jan 2024 18:10:46 +0100 Subject: [PATCH 3/9] use structured output from unpack to identify old and new agents directories --- .../agent/application/upgrade/step_relink.go | 11 ++- .../agent/application/upgrade/step_unpack.go | 77 +++++++++---------- .../pkg/agent/application/upgrade/upgrade.go | 35 ++++++--- 3 files changed, 70 insertions(+), 53 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/step_relink.go b/internal/pkg/agent/application/upgrade/step_relink.go index 13c49693062..f272083bfe9 100644 --- a/internal/pkg/agent/application/upgrade/step_relink.go +++ b/internal/pkg/agent/application/upgrade/step_relink.go @@ -32,21 +32,26 @@ func ChangeSymlink(ctx context.Context, log *logger.Logger, targetHash string) e // paths.BinaryPath properly derives the binary directory depending on the platform. The path to the binary for macOS is inside of the app bundle. newPath := paths.BinaryPath(filepath.Join(paths.Top(), "data", hashedDir), agentName) + return changeSymlinkInternal(log, symlinkPath, newPath) +} + +func changeSymlinkInternal(log *logger.Logger, symlinkPath, newTarget string) error { + // handle windows suffixes if runtime.GOOS == windows { symlinkPath += exe - newPath += exe + newTarget += exe } prevNewPath := prevSymlinkPath() - log.Infow("Changing symlink", "symlink_path", symlinkPath, "new_path", newPath, "prev_path", prevNewPath) + log.Infow("Changing symlink", "symlink_path", symlinkPath, "new_path", newTarget, "prev_path", prevNewPath) // remove symlink to avoid upgrade failures if err := os.Remove(prevNewPath); !os.IsNotExist(err) { return err } - if err := os.Symlink(newPath, prevNewPath); err != nil { + if err := os.Symlink(newTarget, prevNewPath); err != nil { return errors.New(err, errors.TypeFilesystem, "failed to update agent symlink") } diff --git a/internal/pkg/agent/application/upgrade/step_unpack.go b/internal/pkg/agent/application/upgrade/step_unpack.go index 730ce7d7d8c..e7b9b6854a9 100644 --- a/internal/pkg/agent/application/upgrade/step_unpack.go +++ b/internal/pkg/agent/application/upgrade/step_unpack.go @@ -24,42 +24,39 @@ import ( "github.com/elastic/elastic-agent/pkg/core/logger" ) -type unpackResult struct { - hash string +type UnpackResult struct { + Hash string `json:"hash" yaml:"hash"` // TODO add mapped path of executable // agentExecutable string - versionedHome string + versionedHome string `json:"versioned-home" yaml:"versioned-home"` } // unpack unpacks archive correctly, skips root (symlink, config...) 
unpacks data/* -func (u *Upgrader) unpack(version, archivePath, dataDir string) (unpackResult, error) { +func (u *Upgrader) unpack(version, archivePath, dataDir string) (UnpackResult, error) { // unpack must occur in directory that holds the installation directory // or the extraction will be double nested - var hash string + var unpackRes UnpackResult var err error if runtime.GOOS == windows { - hash, err = unzip(u.log, archivePath, dataDir) + unpackRes, err = unzip(u.log, archivePath, dataDir) } else { - hash, err = untar(u.log, version, archivePath, dataDir) + unpackRes, err = untar(u.log, version, archivePath, dataDir) } if err != nil { - u.log.Errorw("Failed to unpack upgrade artifact", "error.message", err, "version", version, "file.path", archivePath, "hash", hash) - return unpackResult{}, err + u.log.Errorw("Failed to unpack upgrade artifact", "error.message", err, "version", version, "file.path", archivePath, "unpackResult", unpackRes) + return UnpackResult{}, err } - u.log.Infow("Unpacked upgrade artifact", "version", version, "file.path", archivePath, "hash", hash) - return unpackResult{ - hash: hash, - versionedHome: "", - }, nil + u.log.Infow("Unpacked upgrade artifact", "version", version, "file.path", archivePath, "unpackResult", unpackRes) + return unpackRes, nil } -func unzip(log *logger.Logger, archivePath, dataDir string) (unpackResult, error) { +func unzip(log *logger.Logger, archivePath, dataDir string) (UnpackResult, error) { var hash, rootDir string r, err := zip.OpenReader(archivePath) if err != nil { - return unpackResult{}, err + return UnpackResult{}, err } defer r.Close() @@ -70,17 +67,17 @@ func unzip(log *logger.Logger, archivePath, dataDir string) (unpackResult, error manifestFile, err := r.Open("manifest.yaml") if err != nil && !errors.Is(err, fs.ErrNotExist) { // we got a real error looking up for the manifest - return unpackResult{}, fmt.Errorf("looking up manifest in package: %w", err) + return UnpackResult{}, fmt.Errorf("looking up manifest in package: %w", err) } if err == nil { // load manifest defer manifestFile.Close() manifest, err := v1.ParseManifest(manifestFile) if err != nil { - return unpackResult{}, fmt.Errorf("parsing package manifest: %w", err) + return UnpackResult{}, fmt.Errorf("parsing package manifest: %w", err) } pm.mappings = manifest.Package.PathMappings - versionedHome = filepath.Clean(manifest.Package.VersionedHome) + versionedHome = filepath.Clean(pm.Map(manifest.Package.VersionedHome)) } unpackFile := func(f *zip.File) (err error) { @@ -149,17 +146,17 @@ func unzip(log *logger.Logger, archivePath, dataDir string) (unpackResult, error } if err := unpackFile(f); err != nil { - return unpackResult{}, err + return UnpackResult{}, err } } - return unpackResult{ - hash: hash, - versionedHome: "", + return UnpackResult{ + Hash: hash, + versionedHome: versionedHome, }, nil } -func untar(log *logger.Logger, version string, archivePath, dataDir string) (unpackResult, error) { +func untar(log *logger.Logger, version string, archivePath, dataDir string) (UnpackResult, error) { // Look up manifest in the archive and prepare path mappings, if any pm := pathMapper{} @@ -168,14 +165,14 @@ func untar(log *logger.Logger, version string, archivePath, dataDir string) (unp manifestReader, err := getManifestFromTar(archivePath) if err != nil { - return unpackResult{}, fmt.Errorf("looking for package manifest: %w", err) + return UnpackResult{}, fmt.Errorf("looking for package manifest: %w", err) } versionedHome := "" if manifestReader != nil { manifest, 
err := v1.ParseManifest(manifestReader) if err != nil { - return unpackResult{}, fmt.Errorf("parsing package manifest: %w", err) + return UnpackResult{}, fmt.Errorf("parsing package manifest: %w", err) } // set the path mappings @@ -185,13 +182,13 @@ func untar(log *logger.Logger, version string, archivePath, dataDir string) (unp r, err := os.Open(archivePath) if err != nil { - return unpackResult{}, errors.New(fmt.Sprintf("artifact for 'elastic-agent' version '%s' could not be found at '%s'", version, archivePath), errors.TypeFilesystem, errors.M(errors.MetaKeyPath, archivePath)) + return UnpackResult{}, errors.New(fmt.Sprintf("artifact for 'elastic-agent' version '%s' could not be found at '%s'", version, archivePath), errors.TypeFilesystem, errors.M(errors.MetaKeyPath, archivePath)) } defer r.Close() zr, err := gzip.NewReader(r) if err != nil { - return unpackResult{}, errors.New("requires gzip-compressed body", err, errors.TypeFilesystem) + return UnpackResult{}, errors.New("requires gzip-compressed body", err, errors.TypeFilesystem) } tr := tar.NewReader(zr) @@ -209,11 +206,11 @@ func untar(log *logger.Logger, version string, archivePath, dataDir string) (unp break } if err != nil { - return unpackResult{}, err + return UnpackResult{}, err } if !validFileName(f.Name) { - return unpackResult{}, errors.New("tar contained invalid filename: %q", f.Name, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, f.Name)) + return UnpackResult{}, errors.New("tar contained invalid filename: %q", f.Name, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, f.Name)) } //get hash @@ -222,7 +219,7 @@ func untar(log *logger.Logger, version string, archivePath, dataDir string) (unp if fileName == agentCommitFile { hashBytes, err := io.ReadAll(tr) if err != nil || len(hashBytes) < hashLen { - return unpackResult{}, err + return UnpackResult{}, err } hash = string(hashBytes[:hashLen]) @@ -256,13 +253,13 @@ func untar(log *logger.Logger, version string, archivePath, dataDir string) (unp // just to be sure, it should already be created by Dir type // remove any world permissions from the directory if err = os.MkdirAll(filepath.Dir(abs), 0o750); err != nil { - return unpackResult{}, errors.New(err, "TarInstaller: creating directory for file "+abs, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) + return UnpackResult{}, errors.New(err, "TarInstaller: creating directory for file "+abs, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) } // remove any world permissions from the file wf, err := os.OpenFile(abs, os.O_RDWR|os.O_CREATE|os.O_TRUNC, mode.Perm()&0770) if err != nil { - return unpackResult{}, errors.New(err, "TarInstaller: creating file "+abs, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) + return UnpackResult{}, errors.New(err, "TarInstaller: creating file "+abs, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) } //nolint:gosec // legacy @@ -271,7 +268,7 @@ func untar(log *logger.Logger, version string, archivePath, dataDir string) (unp err = closeErr } if err != nil { - return unpackResult{}, fmt.Errorf("TarInstaller: error writing to %s: %w", abs, err) + return UnpackResult{}, fmt.Errorf("TarInstaller: error writing to %s: %w", abs, err) } case mode.IsDir(): log.Debugw("Unpacking directory", "archive", "tar", "file.path", abs) @@ -279,24 +276,24 @@ func untar(log *logger.Logger, version string, archivePath, dataDir string) (unp _, err = os.Stat(abs) if errors.Is(err, fs.ErrNotExist) { if err := os.MkdirAll(abs, mode.Perm()&0770); err != nil { - return 
unpackResult{}, errors.New(err, "TarInstaller: creating directory for file "+abs, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) + return UnpackResult{}, errors.New(err, "TarInstaller: creating directory for file "+abs, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) } } else if err != nil { - return unpackResult{}, errors.New(err, "TarInstaller: stat() directory for file "+abs, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) + return UnpackResult{}, errors.New(err, "TarInstaller: stat() directory for file "+abs, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) } else { // set the appropriate permissions err = os.Chmod(abs, mode.Perm()&0o770) if err != nil { - return unpackResult{}, errors.New(err, fmt.Sprintf("TarInstaller: setting permissions %O for directory %q", mode.Perm()&0o770, abs), errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) + return UnpackResult{}, errors.New(err, fmt.Sprintf("TarInstaller: setting permissions %O for directory %q", mode.Perm()&0o770, abs), errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) } } default: - return unpackResult{}, errors.New(fmt.Sprintf("tar file entry %s contained unsupported file type %v", fileName, mode), errors.TypeFilesystem, errors.M(errors.MetaKeyPath, fileName)) + return UnpackResult{}, errors.New(fmt.Sprintf("tar file entry %s contained unsupported file type %v", fileName, mode), errors.TypeFilesystem, errors.M(errors.MetaKeyPath, fileName)) } } - return unpackResult{ - hash: hash, + return UnpackResult{ + Hash: hash, versionedHome: versionedHome, }, nil } diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index 46cf449f963..e898aa9d25d 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -169,32 +169,50 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string } det.SetState(details.StateExtracting) - - newHash, err := u.unpack(version, archivePath, paths.Data()) + // TODO: add check that no archive files end up in the current versioned home + unpackRes, err := u.unpack(version, archivePath, paths.Data()) if err != nil { return nil, err } + newHash := unpackRes.Hash if newHash == "" { return nil, errors.New("unknown hash") } + if unpackRes.versionedHome == "" { + // FIXME this should be treated as an error + return nil, fmt.Errorf("versionedhome is empty: %v", unpackRes) + } if strings.HasPrefix(release.Commit(), newHash) { u.log.Warn("Upgrade action skipped: upgrade did not occur because its the same version") return nil, nil } - if err := copyActionStore(u.log, newHash); err != nil { + newHome := filepath.Join(paths.Top(), unpackRes.versionedHome) + + if err := copyActionStore(u.log, newHome); err != nil { return nil, errors.New(err, "failed to copy action store") } - if err := copyRunDirectory(u.log, newHash); err != nil { + newRunPath := filepath.Join(newHome, "run") + oldRunPath := filepath.Join(paths.Home(), "run") + + if err := copyRunDirectory(u.log, oldRunPath, newRunPath); err != nil { return nil, errors.New(err, "failed to copy run directory") } det.SetState(details.StateReplacing) - if err := ChangeSymlink(ctx, u.log, newHash); err != nil { + // create symlink to the /elastic-agent + hashedDir := unpackRes.versionedHome + + symlinkPath := filepath.Join(paths.Top(), agentName) + + // paths.BinaryPath properly derives the binary directory depending on the platform. 
The path to the binary for macOS is inside of the app bundle. + newPath := paths.BinaryPath(filepath.Join(paths.Top(), hashedDir), agentName) + + if err := changeSymlinkInternal(u.log, symlinkPath, newPath); err != nil { u.log.Errorw("Rolling back: changing symlink failed", "error.message", err) rollbackInstall(ctx, u.log, newHash) return nil, err @@ -274,10 +292,9 @@ func rollbackInstall(ctx context.Context, log *logger.Logger, hash string) { _ = ChangeSymlink(ctx, log, release.ShortCommit()) } -func copyActionStore(log *logger.Logger, newHash string) error { +func copyActionStore(log *logger.Logger, newHome string) error { // copies legacy action_store.yml, state.yml and state.enc encrypted file if exists storePaths := []string{paths.AgentActionStoreFile(), paths.AgentStateStoreYmlFile(), paths.AgentStateStoreFile()} - newHome := filepath.Join(filepath.Dir(paths.Home()), fmt.Sprintf("%s-%s", agentName, newHash)) log.Infow("Copying action store", "new_home_path", newHome) for _, currentActionStorePath := range storePaths { @@ -300,9 +317,7 @@ func copyActionStore(log *logger.Logger, newHash string) error { return nil } -func copyRunDirectory(log *logger.Logger, newHash string) error { - newRunPath := filepath.Join(filepath.Dir(paths.Home()), fmt.Sprintf("%s-%s", agentName, newHash), "run") - oldRunPath := filepath.Join(filepath.Dir(paths.Home()), fmt.Sprintf("%s-%s", agentName, release.ShortCommit()), "run") +func copyRunDirectory(log *logger.Logger, oldRunPath, newRunPath string) error { log.Infow("Copying run directory", "new_run_path", newRunPath, "old_run_path", oldRunPath) From d34895c0c715d96d79fa97f92287ab980c033db5 Mon Sep 17 00:00:00 2001 From: Paolo Chila Date: Thu, 11 Jan 2024 11:12:25 +0100 Subject: [PATCH 4/9] copy state directories and rotate symlink correctly --- .../pkg/agent/application/upgrade/step_unpack.go | 8 ++++---- internal/pkg/agent/application/upgrade/upgrade.go | 15 ++++++++------- 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/step_unpack.go b/internal/pkg/agent/application/upgrade/step_unpack.go index e7b9b6854a9..5400368bc4e 100644 --- a/internal/pkg/agent/application/upgrade/step_unpack.go +++ b/internal/pkg/agent/application/upgrade/step_unpack.go @@ -28,7 +28,7 @@ type UnpackResult struct { Hash string `json:"hash" yaml:"hash"` // TODO add mapped path of executable // agentExecutable string - versionedHome string `json:"versioned-home" yaml:"versioned-home"` + VersionedHome string `json:"versioned-home" yaml:"versioned-home"` } // unpack unpacks archive correctly, skips root (symlink, config...) 
unpacks data/* @@ -152,7 +152,7 @@ func unzip(log *logger.Logger, archivePath, dataDir string) (UnpackResult, error return UnpackResult{ Hash: hash, - versionedHome: versionedHome, + VersionedHome: versionedHome, }, nil } @@ -177,7 +177,7 @@ func untar(log *logger.Logger, version string, archivePath, dataDir string) (Unp // set the path mappings pm.mappings = manifest.Package.PathMappings - versionedHome = filepath.Clean(manifest.Package.VersionedHome) + versionedHome = filepath.Clean(pm.Map(manifest.Package.VersionedHome)) } r, err := os.Open(archivePath) @@ -294,7 +294,7 @@ func untar(log *logger.Logger, version string, archivePath, dataDir string) (Unp return UnpackResult{ Hash: hash, - versionedHome: versionedHome, + VersionedHome: versionedHome, }, nil } diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index e898aa9d25d..47f0b96600e 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -179,17 +179,18 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string if newHash == "" { return nil, errors.New("unknown hash") } - if unpackRes.versionedHome == "" { + + if unpackRes.VersionedHome == "" { // FIXME this should be treated as an error return nil, fmt.Errorf("versionedhome is empty: %v", unpackRes) } - if strings.HasPrefix(release.Commit(), newHash) { - u.log.Warn("Upgrade action skipped: upgrade did not occur because its the same version") - return nil, nil - } + //if strings.HasPrefix(release.Commit(), newHash) { + // u.log.Warn("Upgrade action skipped: upgrade did not occur because its the same version") + // return nil, nil + //} - newHome := filepath.Join(paths.Top(), unpackRes.versionedHome) + newHome := filepath.Join(paths.Top(), unpackRes.VersionedHome) if err := copyActionStore(u.log, newHome); err != nil { return nil, errors.New(err, "failed to copy action store") @@ -205,7 +206,7 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string det.SetState(details.StateReplacing) // create symlink to the /elastic-agent - hashedDir := unpackRes.versionedHome + hashedDir := unpackRes.VersionedHome symlinkPath := filepath.Join(paths.Top(), agentName) From cd2e9deffb3c9f8a8314b25f40cc994e9fc703bc Mon Sep 17 00:00:00 2001 From: Paolo Chila Date: Fri, 12 Jan 2024 10:59:42 +0100 Subject: [PATCH 5/9] set versionedHome in update marker --- .../pkg/agent/application/upgrade/rollback.go | 30 +++++- .../agent/application/upgrade/step_mark.go | 99 ++++++++++++------- .../pkg/agent/application/upgrade/upgrade.go | 18 ++-- internal/pkg/agent/cmd/watch.go | 6 +- 4 files changed, 103 insertions(+), 50 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/rollback.go b/internal/pkg/agent/application/upgrade/rollback.go index bea5c8c4f23..e3f4a4faf90 100644 --- a/internal/pkg/agent/application/upgrade/rollback.go +++ b/internal/pkg/agent/application/upgrade/rollback.go @@ -33,9 +33,20 @@ const ( ) // Rollback rollbacks to previous version which was functioning before upgrade. 
-func Rollback(ctx context.Context, log *logger.Logger, prevHash string, currentHash string) error { +func Rollback(ctx context.Context, log *logger.Logger, prevVersionedHome, prevHash string, currentHash string) error { + symlinkPath := filepath.Join(paths.Top(), agentName) + + var symlinkTarget string + if prevVersionedHome != "" { + symlinkTarget = paths.BinaryPath(filepath.Join(paths.Top(), prevVersionedHome), agentName) + } else { + // fallback for upgrades that didn't use the manifest and path remapping + hashedDir := fmt.Sprintf("%s-%s", agentName, prevHash) + // paths.BinaryPath properly derives the binary directory depending on the platform. The path to the binary for macOS is inside of the app bundle. + symlinkTarget = paths.BinaryPath(filepath.Join(paths.Top(), "data", hashedDir), agentName) + } // change symlink - if err := ChangeSymlink(ctx, log, prevHash); err != nil { + if err := changeSymlinkInternal(log, symlinkPath, symlinkTarget); err != nil { return err } @@ -51,11 +62,11 @@ func Rollback(ctx context.Context, log *logger.Logger, prevHash string, currentH } // cleanup everything except version we're rolling back into - return Cleanup(log, prevHash, true, true) + return Cleanup(log, prevVersionedHome, prevHash, true, true) } // Cleanup removes all artifacts and files related to a specified version. -func Cleanup(log *logger.Logger, currentHash string, removeMarker bool, keepLogs bool) error { +func Cleanup(log *logger.Logger, currentVersionedHome, currentHash string, removeMarker bool, keepLogs bool) error { log.Infow("Cleaning up upgrade", "hash", currentHash, "remove_marker", removeMarker) <-time.After(afterRestartDelay) @@ -83,7 +94,16 @@ func Cleanup(log *logger.Logger, currentHash string, removeMarker bool, keepLogs _ = os.Remove(prevSymlink) dirPrefix := fmt.Sprintf("%s-", agentName) - currentDir := fmt.Sprintf("%s-%s", agentName, currentHash) + var currentDir string + if currentVersionedHome != "" { + currentDir, err = filepath.Rel("data", currentVersionedHome) + if err != nil { + return fmt.Errorf("extracting elastic-agent path relative to data directory from %s: %w", currentVersionedHome, err) + } + } else { + currentDir = fmt.Sprintf("%s-%s", agentName, currentHash) + } + for _, dir := range subdirs { if dir == currentDir { continue diff --git a/internal/pkg/agent/application/upgrade/step_mark.go b/internal/pkg/agent/application/upgrade/step_mark.go index 90bc11dfda6..5fc004fda54 100644 --- a/internal/pkg/agent/application/upgrade/step_mark.go +++ b/internal/pkg/agent/application/upgrade/step_mark.go @@ -6,6 +6,7 @@ package upgrade import ( "context" + "fmt" "os" "path/filepath" "time" @@ -24,8 +25,13 @@ const markerFilename = ".update-marker" // UpdateMarker is a marker holding necessary information about ongoing upgrade. 
type UpdateMarker struct { + // Version represents the version the agent is upgraded to + Version string `json:"version" yaml:"version"` // Hash agent is updated to Hash string `json:"hash" yaml:"hash"` + // VersionedHome represents the path where the new agent is located relative to top path + VersionedHome string `json:"versioned_home" yaml:"versioned_home"` + //UpdatedOn marks a date when update happened UpdatedOn time.Time `json:"updated_on" yaml:"updated_on"` @@ -33,6 +39,8 @@ type UpdateMarker struct { PrevVersion string `json:"prev_version" yaml:"prev_version"` // PrevHash is a hash agent is updated from PrevHash string `json:"prev_hash" yaml:"prev_hash"` + // PrevVersionedHome represents the path where the new agent is located relative to top path + PrevVersionedHome string `json:"prev_versioned_home" yaml:"prev_versioned_home"` // Acked is a flag marking whether or not action was acked Acked bool `json:"acked" yaml:"acked"` @@ -83,42 +91,55 @@ func convertToActionUpgrade(a *MarkerActionUpgrade) *fleetapi.ActionUpgrade { } type updateMarkerSerializer struct { - Hash string `yaml:"hash"` - UpdatedOn time.Time `yaml:"updated_on"` - PrevVersion string `yaml:"prev_version"` - PrevHash string `yaml:"prev_hash"` - Acked bool `yaml:"acked"` - Action *MarkerActionUpgrade `yaml:"action"` - Details *details.Details `yaml:"details"` + Version string `yaml:"version"` + Hash string `yaml:"hash"` + VersionedHome string `yaml:"versioned_home"` + UpdatedOn time.Time `yaml:"updated_on"` + PrevVersion string `yaml:"prev_version"` + PrevHash string `yaml:"prev_hash"` + PrevVersionedHome string `yaml:"prev_versioned_home"` + Acked bool `yaml:"acked"` + Action *MarkerActionUpgrade `yaml:"action"` + Details *details.Details `yaml:"details"` } func newMarkerSerializer(m *UpdateMarker) *updateMarkerSerializer { return &updateMarkerSerializer{ - Hash: m.Hash, - UpdatedOn: m.UpdatedOn, - PrevVersion: m.PrevVersion, - PrevHash: m.PrevHash, - Acked: m.Acked, - Action: convertToMarkerAction(m.Action), - Details: m.Details, + Version: m.Version, + Hash: m.Hash, + VersionedHome: m.VersionedHome, + UpdatedOn: m.UpdatedOn, + PrevVersion: m.PrevVersion, + PrevHash: m.PrevHash, + PrevVersionedHome: m.PrevVersionedHome, + Acked: m.Acked, + Action: convertToMarkerAction(m.Action), + Details: m.Details, } } // markUpgrade marks update happened so we can handle grace period -func (u *Upgrader) markUpgrade(_ context.Context, log *logger.Logger, hash string, action *fleetapi.ActionUpgrade, upgradeDetails *details.Details) error { - prevVersion := release.Version() +func (u *Upgrader) markUpgrade(_ context.Context, log *logger.Logger, version, hash, versionedHome string, action *fleetapi.ActionUpgrade, upgradeDetails *details.Details) error { + prevVersion := release.VersionWithSnapshot() prevHash := release.Commit() + prevVersionedHome, err := filepath.Rel(paths.Top(), paths.Home()) + if err != nil { + return fmt.Errorf("calculating home path relative to top, home: %q top: %q : %w", paths.Home(), paths.Top(), err) + } if len(prevHash) > hashLen { prevHash = prevHash[:hashLen] } marker := &UpdateMarker{ - Hash: hash, - UpdatedOn: time.Now(), - PrevVersion: prevVersion, - PrevHash: prevHash, - Action: action, - Details: upgradeDetails, + Version: version, + Hash: hash, + VersionedHome: versionedHome, + UpdatedOn: time.Now(), + PrevVersion: prevVersion, + PrevHash: prevHash, + PrevVersionedHome: prevVersionedHome, + Action: action, + Details: upgradeDetails, } markerBytes, err := yaml.Marshal(newMarkerSerializer(marker)) @@ 
-183,13 +204,16 @@ func loadMarker(markerFile string) (*UpdateMarker, error) { } return &UpdateMarker{ - Hash: marker.Hash, - UpdatedOn: marker.UpdatedOn, - PrevVersion: marker.PrevVersion, - PrevHash: marker.PrevHash, - Acked: marker.Acked, - Action: convertToActionUpgrade(marker.Action), - Details: marker.Details, + Version: marker.Version, + Hash: marker.Hash, + VersionedHome: marker.VersionedHome, + UpdatedOn: marker.UpdatedOn, + PrevVersion: marker.PrevVersion, + PrevHash: marker.PrevHash, + PrevVersionedHome: marker.PrevVersionedHome, + Acked: marker.Acked, + Action: convertToActionUpgrade(marker.Action), + Details: marker.Details, }, nil } @@ -198,13 +222,16 @@ func loadMarker(markerFile string) (*UpdateMarker, error) { // file is immediately flushed to persistent storage. func SaveMarker(marker *UpdateMarker, shouldFsync bool) error { makerSerializer := &updateMarkerSerializer{ - Hash: marker.Hash, - UpdatedOn: marker.UpdatedOn, - PrevVersion: marker.PrevVersion, - PrevHash: marker.PrevHash, - Acked: marker.Acked, - Action: convertToMarkerAction(marker.Action), - Details: marker.Details, + Version: marker.Version, + Hash: marker.Hash, + VersionedHome: marker.VersionedHome, + UpdatedOn: marker.UpdatedOn, + PrevVersion: marker.PrevVersion, + PrevHash: marker.PrevHash, + PrevVersionedHome: marker.PrevVersionedHome, + Acked: marker.Acked, + Action: convertToMarkerAction(marker.Action), + Details: marker.Details, } markerBytes, err := yaml.Marshal(makerSerializer) if err != nil { diff --git a/internal/pkg/agent/application/upgrade/upgrade.go b/internal/pkg/agent/application/upgrade/upgrade.go index 47f0b96600e..23a7456c0e7 100644 --- a/internal/pkg/agent/application/upgrade/upgrade.go +++ b/internal/pkg/agent/application/upgrade/upgrade.go @@ -7,6 +7,7 @@ package upgrade import ( "context" "fmt" + "io/fs" "os" "path/filepath" "runtime" @@ -215,19 +216,19 @@ func (u *Upgrader) Upgrade(ctx context.Context, version string, sourceURI string if err := changeSymlinkInternal(u.log, symlinkPath, newPath); err != nil { u.log.Errorw("Rolling back: changing symlink failed", "error.message", err) - rollbackInstall(ctx, u.log, newHash) + rollbackInstall(ctx, u.log, hashedDir) return nil, err } - if err := u.markUpgrade(ctx, u.log, newHash, action, det); err != nil { + if err := u.markUpgrade(ctx, u.log, version, unpackRes.Hash, unpackRes.VersionedHome, action, det); err != nil { u.log.Errorw("Rolling back: marking upgrade failed", "error.message", err) - rollbackInstall(ctx, u.log, newHash) + rollbackInstall(ctx, u.log, hashedDir) return nil, err } if err := InvokeWatcher(u.log); err != nil { u.log.Errorw("Rolling back: starting watcher failed", "error.message", err) - rollbackInstall(ctx, u.log, newHash) + rollbackInstall(ctx, u.log, hashedDir) return nil, err } @@ -288,8 +289,13 @@ func (u *Upgrader) sourceURI(retrievedURI string) string { return u.settings.SourceURI } -func rollbackInstall(ctx context.Context, log *logger.Logger, hash string) { - os.RemoveAll(filepath.Join(paths.Data(), fmt.Sprintf("%s-%s", agentName, hash))) +func rollbackInstall(ctx context.Context, log *logger.Logger, versionedHome string) { + err := os.RemoveAll(filepath.Join(paths.Top(), versionedHome)) + if err != nil && !errors.Is(err, fs.ErrNotExist) { + // TODO should this be a warning or an error ? 
+ log.Warnw("error rolling back install", "error.message", err) + } + // FIXME update _ = ChangeSymlink(ctx, log, release.ShortCommit()) } diff --git a/internal/pkg/agent/cmd/watch.go b/internal/pkg/agent/cmd/watch.go index 69f0b0b033f..91425bec5eb 100644 --- a/internal/pkg/agent/cmd/watch.go +++ b/internal/pkg/agent/cmd/watch.go @@ -97,7 +97,7 @@ func watchCmd(log *logp.Logger, cfg *configuration.Configuration) error { // if we're not within grace and marker is still there it might mean // that cleanup was not performed ok, cleanup everything except current version // hash is the same as hash of agent which initiated watcher. - if err := upgrade.Cleanup(log, release.ShortCommit(), true, false); err != nil { + if err := upgrade.Cleanup(log, marker.VersionedHome, release.ShortCommit(), true, false); err != nil { log.Error("clean up of prior watcher run failed", err) } // exit nicely @@ -114,7 +114,7 @@ func watchCmd(log *logp.Logger, cfg *configuration.Configuration) error { log.Error("Error detected, proceeding to rollback: %v", err) upgradeDetails.SetState(details.StateRollback) - err = upgrade.Rollback(ctx, log, marker.PrevHash, marker.Hash) + err = upgrade.Rollback(ctx, log, marker.PrevVersionedHome, marker.PrevHash, marker.Hash) if err != nil { log.Error("rollback failed", err) upgradeDetails.Fail(err) @@ -132,7 +132,7 @@ func watchCmd(log *logp.Logger, cfg *configuration.Configuration) error { // Why is this being skipped on Windows? The comment above is not clear. // issue: https://github.com/elastic/elastic-agent/issues/3027 removeMarker := !isWindows() - err = upgrade.Cleanup(log, marker.Hash, removeMarker, false) + err = upgrade.Cleanup(log, marker.VersionedHome, marker.Hash, removeMarker, false) if err != nil { log.Error("cleanup after successful watch failed", err) } From bf2e849b16f8852a59593e45b8ada9095e190ad1 Mon Sep 17 00:00:00 2001 From: Paolo Chila Date: Mon, 15 Jan 2024 15:35:56 +0100 Subject: [PATCH 6/9] add tests for step_unpack --- .../agent/application/upgrade/step_unpack.go | 181 +++++++++--- .../application/upgrade/step_unpack_test.go | 275 ++++++++++++++++-- 2 files changed, 396 insertions(+), 60 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/step_unpack.go b/internal/pkg/agent/application/upgrade/step_unpack.go index 5400368bc4e..b32253d2607 100644 --- a/internal/pkg/agent/application/upgrade/step_unpack.go +++ b/internal/pkg/agent/application/upgrade/step_unpack.go @@ -13,6 +13,7 @@ import ( "io" "io/fs" "os" + "path" "path/filepath" "runtime" "strings" @@ -24,10 +25,12 @@ import ( "github.com/elastic/elastic-agent/pkg/core/logger" ) +// UnpackResult contains the location and hash of the unpacked agent files type UnpackResult struct { + // Hash contains the unpacked agent commit hash, limited to a length of 6 for backward compatibility Hash string `json:"hash" yaml:"hash"` - // TODO add mapped path of executable - // agentExecutable string + // VersionedHome indicates the path (forward slash separated) where to find the unpacked agent files + // The value depends on the mappings specified in manifest.yaml, if no manifest is found it assumes the legacy data/elastic-agent- format VersionedHome string `json:"versioned-home" yaml:"versioned-home"` } @@ -64,7 +67,9 @@ func unzip(log *logger.Logger, archivePath, dataDir string) (UnpackResult, error pm := pathMapper{} versionedHome := "" - manifestFile, err := r.Open("manifest.yaml") + + // Load manifest, the use of path.Join is intentional since in .zip file paths use slash ('/') as separator + 
manifestFile, err := r.Open(path.Join(fileNamePrefix, "manifest.yaml")) if err != nil && !errors.Is(err, fs.ErrNotExist) { // we got a real error looking up for the manifest return UnpackResult{}, fmt.Errorf("looking up manifest in package: %w", err) @@ -77,7 +82,28 @@ func unzip(log *logger.Logger, archivePath, dataDir string) (UnpackResult, error return UnpackResult{}, fmt.Errorf("parsing package manifest: %w", err) } pm.mappings = manifest.Package.PathMappings - versionedHome = filepath.Clean(pm.Map(manifest.Package.VersionedHome)) + versionedHome = path.Clean(pm.Map(manifest.Package.VersionedHome)) + } + + // Load hash, the use of path.Join is intentional since in .zip file paths use slash ('/') as separator + hashFile, err := r.Open(path.Join(fileNamePrefix, agentCommitFile)) + if err != nil { + // we got a real error looking up for the manifest + return UnpackResult{}, fmt.Errorf("looking up %q in package: %w", agentCommitFile, err) + } + defer hashFile.Close() + + hashBytes, err := io.ReadAll(hashFile) + if err != nil { + return UnpackResult{}, fmt.Errorf("reading elastic-agent hash file content: %w", err) + } + if len(hashBytes) < hashLen { + return UnpackResult{}, fmt.Errorf("elastic-agent hash %q is too short (minimum %d)", string(hashBytes), hashLen) + } + hash = string(hashBytes[:hashLen]) + if versionedHome == "" { + // if at this point we didn't load the manifest et the versioned to the backup value + versionedHome = createVersionedHomeFromHash(hash) } unpackFile := func(f *zip.File) (err error) { @@ -91,34 +117,46 @@ func unzip(log *logger.Logger, archivePath, dataDir string) (UnpackResult, error } }() - //get hash fileName := strings.TrimPrefix(f.Name, fileNamePrefix) if fileName == agentCommitFile { - hashBytes, err := io.ReadAll(rc) - if err != nil || len(hashBytes) < hashLen { - return err - } - - hash = string(hashBytes[:hashLen]) + // we already loaded the hash, skip this one return nil } + mappedPackagePath := pm.Map(fileName) + // skip everything outside data/ - if !strings.HasPrefix(fileName, "data/") { + if !strings.HasPrefix(mappedPackagePath, "data/") { return nil } - path := filepath.Join(dataDir, strings.TrimPrefix(fileName, "data/")) + dstPath := strings.TrimPrefix(mappedPackagePath, "data/") + dstPath = filepath.Join(dataDir, dstPath) if f.FileInfo().IsDir() { - log.Debugw("Unpacking directory", "archive", "zip", "file.path", path) + log.Debugw("Unpacking directory", "archive", "zip", "file.path", dstPath) // remove any world permissions from the directory - _ = os.MkdirAll(path, f.Mode()&0770) + _, err = os.Stat(dstPath) + if errors.Is(err, fs.ErrNotExist) { + if err := os.MkdirAll(dstPath, f.Mode().Perm()&0770); err != nil { + return fmt.Errorf("creating directory %q: %w", dstPath, err) + } + } else if err != nil { + return fmt.Errorf("stat() directory %q: %w", dstPath, err) + } else { + // set the appropriate permissions + err = os.Chmod(dstPath, f.Mode().Perm()&0o770) + if err != nil { + return fmt.Errorf("setting permissions %O for directory %q: %w", f.Mode().Perm()&0o770, dstPath, err) + } + } + + _ = os.MkdirAll(dstPath, f.Mode()&0770) } else { - log.Debugw("Unpacking file", "archive", "zip", "file.path", path) + log.Debugw("Unpacking file", "archive", "zip", "file.path", dstPath) // remove any world permissions from the directory/file - _ = os.MkdirAll(filepath.Dir(path), f.Mode()&0770) - f, err := os.OpenFile(path, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode()&0770) + _ = os.MkdirAll(filepath.Dir(dstPath), 0770) + f, err := os.OpenFile(dstPath, 
os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode()&0770) if err != nil { return err } @@ -158,17 +196,20 @@ func unzip(log *logger.Logger, archivePath, dataDir string) (UnpackResult, error func untar(log *logger.Logger, version string, archivePath, dataDir string) (UnpackResult, error) { + var versionedHome string + var rootDir string + var hash string + // Look up manifest in the archive and prepare path mappings, if any pm := pathMapper{} // quickly open the archive and look up manifest.yaml file - manifestReader, err := getManifestFromTar(archivePath) - + fileContents, err := getFilesContentFromTar(archivePath, "manifest.yaml", agentCommitFile) if err != nil { - return UnpackResult{}, fmt.Errorf("looking for package manifest: %w", err) + return UnpackResult{}, fmt.Errorf("looking for package metadata files: %w", err) } - versionedHome := "" + manifestReader := fileContents["manifest.yaml"] if manifestReader != nil { manifest, err := v1.ParseManifest(manifestReader) if err != nil { @@ -177,7 +218,24 @@ func untar(log *logger.Logger, version string, archivePath, dataDir string) (Unp // set the path mappings pm.mappings = manifest.Package.PathMappings - versionedHome = filepath.Clean(pm.Map(manifest.Package.VersionedHome)) + versionedHome = path.Clean(pm.Map(manifest.Package.VersionedHome)) + } + + if agentCommitReader, ok := fileContents[agentCommitFile]; ok { + commitBytes, err := io.ReadAll(agentCommitReader) + if err != nil { + return UnpackResult{}, fmt.Errorf("reading agent commit hash file: %w", err) + } + if len(commitBytes) < hashLen { + return UnpackResult{}, fmt.Errorf("hash %q is shorter than minimum length %d", string(commitBytes), hashLen) + } + + agentCommitHash := string(commitBytes) + hash = agentCommitHash[:hashLen] + if versionedHome == "" { + // set default value of versioned home if it wasn't set by reading the manifest + versionedHome = createVersionedHomeFromHash(agentCommitHash) + } } r, err := os.Open(archivePath) @@ -192,8 +250,7 @@ func untar(log *logger.Logger, version string, archivePath, dataDir string) (Unp } tr := tar.NewReader(zr) - var rootDir string - var hash string + fileNamePrefix := getFileNamePrefix(archivePath) // go through all the content of a tar archive @@ -213,16 +270,9 @@ func untar(log *logger.Logger, version string, archivePath, dataDir string) (Unp return UnpackResult{}, errors.New("tar contained invalid filename: %q", f.Name, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, f.Name)) } - //get hash fileName := strings.TrimPrefix(f.Name, fileNamePrefix) if fileName == agentCommitFile { - hashBytes, err := io.ReadAll(tr) - if err != nil || len(hashBytes) < hashLen { - return UnpackResult{}, err - } - - hash = string(hashBytes[:hashLen]) continue } @@ -324,23 +374,67 @@ func (pm pathMapper) Map(path string) string { return path } -func getManifestFromTar(archivePath string) (io.Reader, error) { +type tarCloser struct { + tarFile *os.File + gzipReader *gzip.Reader +} + +func (tc *tarCloser) Close() error { + var err error + if tc.gzipReader != nil { + err = multierror.Append(err, tc.gzipReader.Close()) + } + // prevent double Close() call to fzip reader + tc.gzipReader = nil + if tc.tarFile != nil { + err = multierror.Append(err, tc.tarFile.Close()) + } + // prevent double Close() call the underlying file + tc.tarFile = nil + return err +} + +// openTar is a convenience function to open a tar.gz file. 
+// It returns a *tar.Reader, an io.Closer implementation to be called to release resources and an error +// In case of errors the *tar.Reader will be nil, but the io.Closer is always returned and must be called also in case +// of errors to close the underlying readers. +func openTar(archivePath string) (*tar.Reader, io.Closer, error) { + tc := new(tarCloser) r, err := os.Open(archivePath) if err != nil { - return nil, fmt.Errorf("opening package %s: %w", archivePath, err) + return nil, tc, fmt.Errorf("opening package %s: %w", archivePath, err) } - defer r.Close() + tc.tarFile = r zr, err := gzip.NewReader(r) if err != nil { - return nil, fmt.Errorf("package %s does not seem to have a valid gzip compression: %w", archivePath, err) + return nil, tc, fmt.Errorf("package %s does not seem to have a valid gzip compression: %w", archivePath, err) } + tc.gzipReader = zr + + return tar.NewReader(zr), tc, nil +} + +// getFilesContentFromTar is a small utility function which will load in memory the contents of a list of files from the tar archive. +// It's meant to be used to load package information/metadata stored in small files within the .tar.gz archive +func getFilesContentFromTar(archivePath string, files ...string) (map[string]io.Reader, error) { + tr, tc, err := openTar(archivePath) + if err != nil { + return nil, fmt.Errorf("opening tar.gz package %s: %w", archivePath, err) + } + defer tc.Close() - tr := tar.NewReader(zr) prefix := getFileNamePrefix(archivePath) + result := make(map[string]io.Reader, len(files)) + fileset := make(map[string]struct{}, len(files)) + // load the fileset with the names we are looking for + for _, fName := range files { + fileset[fName] = struct{}{} + } + // go through all the content of a tar archive - // if manifest.yaml is found, read the contents and return a bytereader, nil otherwise , + // if one of the listed files is found, read the contents and set a byte reader into the result map for { f, err := tr.Next() if errors.Is(err, io.EOF) { @@ -352,17 +446,22 @@ func getManifestFromTar(archivePath string) (io.Reader, error) { } fileName := strings.TrimPrefix(f.Name, prefix) - if fileName == "manifest.yaml" { + if _, ok := fileset[fileName]; ok { + // it's one of the files we are looking for, retrieve the content and set a reader into the result map manifestBytes, err := io.ReadAll(tr) if err != nil { return nil, fmt.Errorf("reading manifest bytes: %w", err) } reader := bytes.NewReader(manifestBytes) - return reader, nil + result[fileName] = reader } } - return nil, nil + return result, nil +} + +func createVersionedHomeFromHash(hash string) string { + return fmt.Sprintf("data/elastic-agent-%s", hash[:hashLen]) } diff --git a/internal/pkg/agent/application/upgrade/step_unpack_test.go b/internal/pkg/agent/application/upgrade/step_unpack_test.go index c2d5bf0ece7..493190d3454 100644 --- a/internal/pkg/agent/application/upgrade/step_unpack_test.go +++ b/internal/pkg/agent/application/upgrade/step_unpack_test.go @@ -6,6 +6,7 @@ package upgrade import ( "archive/tar" + "archive/zip" "compress/gzip" "fmt" "io" @@ -13,7 +14,6 @@ import ( "os" "path" "path/filepath" - "runtime" "strings" "testing" "time" @@ -24,6 +24,19 @@ import ( "github.com/elastic/elastic-agent/pkg/core/logger" ) +const agentBinaryPlaceholderContent = "Placeholder for the elastic-agent binary" + +const ea_123_manifest = ` +version: co.elastic.agent/v1 +kind: PackageManifest +package: + version: 1.2.3 + snapshot: true + versioned-home: data/elastic-agent-abcdef + path-mappings: + - 
data/elastic-agent-abcdef: data/elastic-agent-1.2.3-SNAPSHOT-abcdef + manifest.yaml: data/elastic-agent-1.2.3-SNAPSHOT-abcdef/manifest.yaml +` const foo_component_spec = ` version: 2 inputs: @@ -87,21 +100,22 @@ func (f files) Sys() any { type createArchiveFunc func(t *testing.T, archiveFiles []files) (string, error) type checkExtractedPath func(t *testing.T, testDataDir string) -func TestUpgrader_unpack(t *testing.T) { +func TestUpgrader_unpackTarGz(t *testing.T) { type args struct { version string archiveGenerator createArchiveFunc archiveFiles []files } + tests := []struct { name string args args - want string + want UnpackResult wantErr assert.ErrorAssertionFunc checkFiles checkExtractedPath }{ { - name: "targz with file before containing folder", + name: "file before containing folder", args: args{ version: "1.2.3", archiveFiles: []files{ @@ -110,7 +124,7 @@ func TestUpgrader_unpack(t *testing.T) { {fType: REGULAR, path: "elastic-agent-1.2.3-SNAPSHOT-linux-x86_64/data/elastic-agent-abcdef/package.version", content: "1.2.3", mode: fs.ModePerm & 0o640}, {fType: DIRECTORY, path: "elastic-agent-1.2.3-SNAPSHOT-linux-x86_64/data", mode: fs.ModeDir | (fs.ModePerm & 0o750)}, {fType: DIRECTORY, path: "elastic-agent-1.2.3-SNAPSHOT-linux-x86_64/data/elastic-agent-abcdef", mode: fs.ModeDir | (fs.ModePerm & 0o700)}, - {fType: REGULAR, path: "elastic-agent-1.2.3-SNAPSHOT-linux-x86_64/data/elastic-agent-abcdef/" + agentName, content: "Placeholder for the elastic-agent binary", mode: fs.ModePerm & 0o750}, + {fType: REGULAR, path: "elastic-agent-1.2.3-SNAPSHOT-linux-x86_64/data/elastic-agent-abcdef/" + agentName, content: agentBinaryPlaceholderContent, mode: fs.ModePerm & 0o750}, {fType: DIRECTORY, path: "elastic-agent-1.2.3-SNAPSHOT-linux-x86_64/data/elastic-agent-abcdef/components", mode: fs.ModeDir | (fs.ModePerm & 0o750)}, {fType: REGULAR, path: "elastic-agent-1.2.3-SNAPSHOT-linux-x86_64/data/elastic-agent-abcdef/components/comp1", content: "Placeholder for component", mode: fs.ModePerm & 0o750}, {fType: REGULAR, path: "elastic-agent-1.2.3-SNAPSHOT-linux-x86_64/data/elastic-agent-abcdef/components/comp1.spec.yml", content: foo_component_spec, mode: fs.ModePerm & 0o640}, @@ -120,10 +134,12 @@ func TestUpgrader_unpack(t *testing.T) { return createTarArchive(t, "elastic-agent-1.2.3-SNAPSHOT-linux-x86_64.tar.gz", i) }, }, - want: "abcdef", + want: UnpackResult{ + Hash: "abcdef", + VersionedHome: filepath.Join("data", "elastic-agent-abcdef"), + }, wantErr: assert.NoError, checkFiles: func(t *testing.T, testDataDir string) { - versionedHome := filepath.Join(testDataDir, "elastic-agent-abcdef") require.DirExists(t, versionedHome, "directory for package.version does not exists") stat, err := os.Stat(versionedHome) @@ -133,30 +149,189 @@ func TestUpgrader_unpack(t *testing.T) { assert.Equalf(t, expectedPermissions, actualPermissions, "Wrong permissions set on versioned home %q: expected %O, got %O", versionedHome, expectedPermissions, actualPermissions) }, }, + { + name: "package with manifest file", + args: args{ + version: "1.2.3", + archiveFiles: []files{ + {fType: DIRECTORY, path: "elastic-agent-1.2.3-SNAPSHOT-linux-x86_64", mode: fs.ModeDir | (fs.ModePerm & 0o750)}, + {fType: REGULAR, path: "elastic-agent-1.2.3-SNAPSHOT-linux-x86_64/manifest.yaml", content: ea_123_manifest, mode: fs.ModePerm & 0o640}, + {fType: REGULAR, path: "elastic-agent-1.2.3-SNAPSHOT-linux-x86_64/" + agentCommitFile, content: "abcdefghijklmnopqrstuvwxyz", mode: fs.ModePerm & 0o640}, + {fType: DIRECTORY, path: 
"elastic-agent-1.2.3-SNAPSHOT-linux-x86_64/data", mode: fs.ModeDir | (fs.ModePerm & 0o750)}, + {fType: DIRECTORY, path: "elastic-agent-1.2.3-SNAPSHOT-linux-x86_64/data/elastic-agent-abcdef", mode: fs.ModeDir | (fs.ModePerm & 0o750)}, + {fType: REGULAR, path: "elastic-agent-1.2.3-SNAPSHOT-linux-x86_64/data/elastic-agent-abcdef/" + agentName, content: agentBinaryPlaceholderContent, mode: fs.ModePerm & 0o750}, + {fType: REGULAR, path: "elastic-agent-1.2.3-SNAPSHOT-linux-x86_64/data/elastic-agent-abcdef/package.version", content: "1.2.3", mode: fs.ModePerm & 0o640}, + {fType: DIRECTORY, path: "elastic-agent-1.2.3-SNAPSHOT-linux-x86_64/data/elastic-agent-abcdef/components", mode: fs.ModeDir | (fs.ModePerm & 0o750)}, + {fType: REGULAR, path: "elastic-agent-1.2.3-SNAPSHOT-linux-x86_64/data/elastic-agent-abcdef/components/comp1", content: "Placeholder for component", mode: fs.ModePerm & 0o750}, + {fType: REGULAR, path: "elastic-agent-1.2.3-SNAPSHOT-linux-x86_64/data/elastic-agent-abcdef/components/comp1.spec.yml", content: foo_component_spec, mode: fs.ModePerm & 0o640}, + {fType: SYMLINK, path: "elastic-agent-1.2.3-SNAPSHOT-linux-x86_64/" + agentName, content: "data/elastic-agent-abcdef/" + agentName, mode: fs.ModeSymlink | (fs.ModePerm & 0o750)}, + }, + archiveGenerator: func(t *testing.T, i []files) (string, error) { + return createTarArchive(t, "elastic-agent-1.2.3-SNAPSHOT-linux-x86_64.tar.gz", i) + }, + }, + want: UnpackResult{ + Hash: "abcdef", + VersionedHome: "data/elastic-agent-1.2.3-SNAPSHOT-abcdef", + }, + wantErr: assert.NoError, + checkFiles: func(t *testing.T, testDataDir string) { + versionedHome := filepath.Join(testDataDir, "elastic-agent-1.2.3-SNAPSHOT-abcdef") + require.DirExists(t, versionedHome, "mapped versioned home directory does not exists") + mappedAgentExecutable := filepath.Join(versionedHome, agentName) + if assert.FileExistsf(t, mappedAgentExecutable, "agent executable %q is not found in mapped versioned home directory %q", mappedAgentExecutable, versionedHome) { + fileBytes, err := os.ReadFile(mappedAgentExecutable) + if assert.NoErrorf(t, err, "error reading elastic-agent executable %q", mappedAgentExecutable) { + assert.Equal(t, agentBinaryPlaceholderContent, string(fileBytes), "agent binary placeholder content does not match") + } + } + mappedPackageManifest := filepath.Join(versionedHome, "manifest.yaml") + if assert.FileExistsf(t, mappedPackageManifest, "package manifest %q is not found in mapped versioned home directory %q", mappedPackageManifest, versionedHome) { + fileBytes, err := os.ReadFile(mappedPackageManifest) + if assert.NoErrorf(t, err, "error reading package manifest %q", mappedPackageManifest) { + assert.Equal(t, ea_123_manifest, string(fileBytes), "package manifest content does not match") + } + } + }, + }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - if runtime.GOOS == "windows" { - t.Skip("tar.gz tests only run on Linux/MacOS") + testTop := t.TempDir() + testDataDir := filepath.Join(testTop, "data") + err := os.MkdirAll(testDataDir, 0o777) + assert.NoErrorf(t, err, "error creating initial structure %q", testDataDir) + log, _ := logger.NewTesting(tt.name) + + archiveFile, err := tt.args.archiveGenerator(t, tt.args.archiveFiles) + require.NoError(t, err, "creation of test archive file failed") + + got, err := untar(log, tt.args.version, archiveFile, testDataDir) + if !tt.wantErr(t, err, fmt.Sprintf("untar(%v, %v, %v)", tt.args.version, archiveFile, testDataDir)) { + return + } + assert.Equalf(t, tt.want, got, "untar(%v, %v, 
%v)", tt.args.version, archiveFile, testDataDir) + if tt.checkFiles != nil { + tt.checkFiles(t, testDataDir) } + }) + } +} + +func TestUpgrader_unpackZip(t *testing.T) { + type args struct { + archiveGenerator createArchiveFunc + archiveFiles []files + } + + tests := []struct { + name string + args args + want UnpackResult + wantErr assert.ErrorAssertionFunc + checkFiles checkExtractedPath + }{ + { + name: "file before containing folder", + args: args{ + archiveFiles: []files{ + {fType: DIRECTORY, path: "elastic-agent-1.2.3-SNAPSHOT-windows-x86_64", mode: fs.ModeDir | (fs.ModePerm & 0o750)}, + {fType: REGULAR, path: "elastic-agent-1.2.3-SNAPSHOT-windows-x86_64/" + agentCommitFile, content: "abcdefghijklmnopqrstuvwxyz", mode: fs.ModePerm & 0o640}, + {fType: REGULAR, path: "elastic-agent-1.2.3-SNAPSHOT-windows-x86_64/data/elastic-agent-abcdef/package.version", content: "1.2.3", mode: fs.ModePerm & 0o640}, + {fType: DIRECTORY, path: "elastic-agent-1.2.3-SNAPSHOT-windows-x86_64/data", mode: fs.ModeDir | (fs.ModePerm & 0o750)}, + {fType: DIRECTORY, path: "elastic-agent-1.2.3-SNAPSHOT-windows-x86_64/data/elastic-agent-abcdef", mode: fs.ModeDir | (fs.ModePerm & 0o700)}, + {fType: REGULAR, path: "elastic-agent-1.2.3-SNAPSHOT-windows-x86_64/data/elastic-agent-abcdef/" + agentName, content: agentBinaryPlaceholderContent, mode: fs.ModePerm & 0o750}, + {fType: DIRECTORY, path: "elastic-agent-1.2.3-SNAPSHOT-windows-x86_64/data/elastic-agent-abcdef/components", mode: fs.ModeDir | (fs.ModePerm & 0o750)}, + {fType: REGULAR, path: "elastic-agent-1.2.3-SNAPSHOT-windows-x86_64/data/elastic-agent-abcdef/components/comp1", content: "Placeholder for component", mode: fs.ModePerm & 0o750}, + {fType: REGULAR, path: "elastic-agent-1.2.3-SNAPSHOT-windows-x86_64/data/elastic-agent-abcdef/components/comp1.spec.yml", content: foo_component_spec, mode: fs.ModePerm & 0o640}, + }, + archiveGenerator: func(t *testing.T, i []files) (string, error) { + return createZipArchive(t, "elastic-agent-1.2.3-SNAPSHOT-windows-x86_64.zip", i) + }, + }, + want: UnpackResult{ + Hash: "abcdef", + VersionedHome: filepath.Join("data", "elastic-agent-abcdef"), + }, + wantErr: assert.NoError, + checkFiles: func(t *testing.T, testDataDir string) { + versionedHome := filepath.Join(testDataDir, "elastic-agent-abcdef") + require.DirExists(t, versionedHome, "directory for package.version does not exists") + stat, err := os.Stat(versionedHome) + require.NoErrorf(t, err, "error calling Stat() for versionedHome %q", versionedHome) + expectedPermissions := fs.ModePerm & 0o700 + actualPermissions := fs.ModePerm & stat.Mode() + assert.Equalf(t, expectedPermissions, actualPermissions, "Wrong permissions set on versioned home %q: expected %O, got %O", versionedHome, expectedPermissions, actualPermissions) + agentExecutable := filepath.Join(versionedHome, agentName) + if assert.FileExistsf(t, agentExecutable, "agent executable %q is not found in versioned home directory %q", agentExecutable, versionedHome) { + fileBytes, err := os.ReadFile(agentExecutable) + if assert.NoErrorf(t, err, "error reading elastic-agent executable %q", agentExecutable) { + assert.Equal(t, agentBinaryPlaceholderContent, string(fileBytes), "agent binary placeholder content does not match") + } + } + }, + }, + { + name: "package with manifest file", + args: args{ + archiveFiles: []files{ + {fType: DIRECTORY, path: "elastic-agent-1.2.3-SNAPSHOT-windows-x86_64", mode: fs.ModeDir | (fs.ModePerm & 0o750)}, + {fType: REGULAR, path: 
"elastic-agent-1.2.3-SNAPSHOT-windows-x86_64/manifest.yaml", content: ea_123_manifest, mode: fs.ModePerm & 0o640}, + {fType: REGULAR, path: "elastic-agent-1.2.3-SNAPSHOT-windows-x86_64/" + agentCommitFile, content: "abcdefghijklmnopqrstuvwxyz", mode: fs.ModePerm & 0o640}, + {fType: DIRECTORY, path: "elastic-agent-1.2.3-SNAPSHOT-windows-x86_64/data", mode: fs.ModeDir | (fs.ModePerm & 0o750)}, + {fType: DIRECTORY, path: "elastic-agent-1.2.3-SNAPSHOT-windows-x86_64/data/elastic-agent-abcdef", mode: fs.ModeDir | (fs.ModePerm & 0o750)}, + {fType: REGULAR, path: "elastic-agent-1.2.3-SNAPSHOT-windows-x86_64/data/elastic-agent-abcdef/" + agentName, content: agentBinaryPlaceholderContent, mode: fs.ModePerm & 0o750}, + {fType: REGULAR, path: "elastic-agent-1.2.3-SNAPSHOT-windows-x86_64/data/elastic-agent-abcdef/package.version", content: "1.2.3", mode: fs.ModePerm & 0o640}, + {fType: DIRECTORY, path: "elastic-agent-1.2.3-SNAPSHOT-windows-x86_64/data/elastic-agent-abcdef/components", mode: fs.ModeDir | (fs.ModePerm & 0o750)}, + {fType: REGULAR, path: "elastic-agent-1.2.3-SNAPSHOT-windows-x86_64/data/elastic-agent-abcdef/components/comp1", content: "Placeholder for component", mode: fs.ModePerm & 0o750}, + {fType: REGULAR, path: "elastic-agent-1.2.3-SNAPSHOT-windows-x86_64/data/elastic-agent-abcdef/components/comp1.spec.yml", content: foo_component_spec, mode: fs.ModePerm & 0o640}, + }, + archiveGenerator: func(t *testing.T, i []files) (string, error) { + return createZipArchive(t, "elastic-agent-1.2.3-SNAPSHOT-windows-x86_64.zip", i) + }, + }, + want: UnpackResult{ + Hash: "abcdef", + VersionedHome: filepath.Join("data", "elastic-agent-1.2.3-SNAPSHOT-abcdef"), + }, + wantErr: assert.NoError, + checkFiles: func(t *testing.T, testDataDir string) { + versionedHome := filepath.Join(testDataDir, "elastic-agent-1.2.3-SNAPSHOT-abcdef") + require.DirExists(t, versionedHome, "mapped versioned home directory does not exists") + mappedAgentExecutable := filepath.Join(versionedHome, agentName) + if assert.FileExistsf(t, mappedAgentExecutable, "agent executable %q is not found in mapped versioned home directory %q", mappedAgentExecutable, versionedHome) { + fileBytes, err := os.ReadFile(mappedAgentExecutable) + if assert.NoErrorf(t, err, "error reading elastic-agent executable %q", mappedAgentExecutable) { + assert.Equal(t, agentBinaryPlaceholderContent, string(fileBytes), "agent binary placeholder content does not match") + } + } + mappedPackageManifest := filepath.Join(versionedHome, "manifest.yaml") + if assert.FileExistsf(t, mappedPackageManifest, "package manifest %q is not found in mapped versioned home directory %q", mappedPackageManifest, versionedHome) { + fileBytes, err := os.ReadFile(mappedPackageManifest) + if assert.NoErrorf(t, err, "error reading package manifest %q", mappedPackageManifest) { + assert.Equal(t, ea_123_manifest, string(fileBytes), "package manifest content does not match") + } + } + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { testTop := t.TempDir() testDataDir := filepath.Join(testTop, "data") err := os.MkdirAll(testDataDir, 0o777) assert.NoErrorf(t, err, "error creating initial structure %q", testDataDir) log, _ := logger.NewTesting(tt.name) - u := &Upgrader{ - log: log, - } archiveFile, err := tt.args.archiveGenerator(t, tt.args.archiveFiles) require.NoError(t, err, "creation of test archive file failed") - got, err := u.unpack(tt.args.version, archiveFile, testDataDir) - if !tt.wantErr(t, err, fmt.Sprintf("unpack(%v, %v, %v)", tt.args.version, 
archiveFile, testDataDir)) { + got, err := unzip(log, archiveFile, testDataDir) + if !tt.wantErr(t, err, fmt.Sprintf("unzip(%v, %v)", archiveFile, testDataDir)) { return } - assert.Equalf(t, tt.want, got, "unpack(%v, %v, %v)", tt.args.version, archiveFile, testDataDir) + assert.Equalf(t, tt.want, got, "unzip(%v, %v)", archiveFile, testDataDir) if tt.checkFiles != nil { tt.checkFiles(t, testDataDir) } @@ -165,20 +340,23 @@ func TestUpgrader_unpack(t *testing.T) { } func createTarArchive(t *testing.T, archiveName string, archiveFiles []files) (string, error) { - + t.Helper() outDir := t.TempDir() outFilePath := filepath.Join(outDir, archiveName) file, err := os.OpenFile(outFilePath, os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0o644) require.NoErrorf(t, err, "error creating output archive %q", outFilePath) - defer file.Close() + defer func(file *os.File) { + err := file.Close() + assert.NoError(t, err, "error closing tar.gz archive file") + }(file) zipWriter := gzip.NewWriter(file) writer := tar.NewWriter(zipWriter) defer func(writer *tar.Writer) { err := writer.Close() - require.NoError(t, err, "error closing tar writer") + assert.NoError(t, err, "error closing tar writer") err = zipWriter.Close() - require.NoError(t, err, "error closing gzip writer") + assert.NoError(t, err, "error closing gzip writer") }(writer) for _, af := range archiveFiles { @@ -210,3 +388,62 @@ func addEntryToTarArchive(af files, writer *tar.Writer) error { } return nil } + +func createZipArchive(t *testing.T, archiveName string, archiveFiles []files) (string, error) { + t.Helper() + outDir := t.TempDir() + + outFilePath := filepath.Join(outDir, archiveName) + file, err := os.OpenFile(outFilePath, os.O_CREATE|os.O_RDWR|os.O_TRUNC, 0o644) + require.NoErrorf(t, err, "error creating output archive %q", outFilePath) + defer func(file *os.File) { + err := file.Close() + assert.NoError(t, err, "error closing zip archive file") + }(file) + + w := zip.NewWriter(file) + defer func(writer *zip.Writer) { + err := writer.Close() + assert.NoError(t, err, "error closing tar writer") + }(w) + + for _, af := range archiveFiles { + if af.fType == SYMLINK { + return "", fmt.Errorf("entry %q is a symlink. 
Not supported in .zip files", af.path) + } + + err = addEntryToZipArchive(af, w) + require.NoErrorf(t, err, "error adding %q to tar archive", af.path) + } + return outFilePath, nil +} + +func addEntryToZipArchive(af files, writer *zip.Writer) error { + header, err := zip.FileInfoHeader(&af) + if err != nil { + return fmt.Errorf("creating header for %q: %w", af.path, err) + } + + header.SetMode(af.Mode() & os.ModePerm) + header.Name = af.path + if af.IsDir() { + header.Name += string(filepath.Separator) + } else { + header.Method = zip.Deflate + } + + w, err := writer.CreateHeader(header) + if err != nil { + return err + } + + if af.IsDir() { + return nil + } + + if _, err = io.Copy(w, strings.NewReader(af.content)); err != nil { + return err + } + + return nil +} From ee9979ca0194eedb5c3d8e04e36389e5754ec287 Mon Sep 17 00:00:00 2001 From: Paolo Chila Date: Fri, 2 Feb 2024 18:23:15 +0100 Subject: [PATCH 7/9] fix typos --- internal/pkg/agent/application/upgrade/step_mark.go | 2 +- internal/pkg/agent/application/upgrade/step_unpack.go | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/step_mark.go b/internal/pkg/agent/application/upgrade/step_mark.go index 5fc004fda54..71834f64a90 100644 --- a/internal/pkg/agent/application/upgrade/step_mark.go +++ b/internal/pkg/agent/application/upgrade/step_mark.go @@ -39,7 +39,7 @@ type UpdateMarker struct { PrevVersion string `json:"prev_version" yaml:"prev_version"` // PrevHash is a hash agent is updated from PrevHash string `json:"prev_hash" yaml:"prev_hash"` - // PrevVersionedHome represents the path where the new agent is located relative to top path + // PrevVersionedHome represents the path where the old agent is located relative to top path PrevVersionedHome string `json:"prev_versioned_home" yaml:"prev_versioned_home"` // Acked is a flag marking whether or not action was acked diff --git a/internal/pkg/agent/application/upgrade/step_unpack.go b/internal/pkg/agent/application/upgrade/step_unpack.go index b32253d2607..9d9bcb99fa8 100644 --- a/internal/pkg/agent/application/upgrade/step_unpack.go +++ b/internal/pkg/agent/application/upgrade/step_unpack.go @@ -102,7 +102,7 @@ func unzip(log *logger.Logger, archivePath, dataDir string) (UnpackResult, error } hash = string(hashBytes[:hashLen]) if versionedHome == "" { - // if at this point we didn't load the manifest et the versioned to the backup value + // if at this point we didn't load the manifest, set the versioned to the backup value versionedHome = createVersionedHomeFromHash(hash) } From 9f2a4cc546d99c58e4e4325a46a97abedbffb874 Mon Sep 17 00:00:00 2001 From: Paolo Chila Date: Sat, 3 Feb 2024 10:14:44 +0100 Subject: [PATCH 8/9] add clarification comments in unzip() and untar() --- .../agent/application/upgrade/step_unpack.go | 28 +++++++++++-------- 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/step_unpack.go b/internal/pkg/agent/application/upgrade/step_unpack.go index 9d9bcb99fa8..c072e22c348 100644 --- a/internal/pkg/agent/application/upgrade/step_unpack.go +++ b/internal/pkg/agent/application/upgrade/step_unpack.go @@ -135,26 +135,28 @@ func unzip(log *logger.Logger, archivePath, dataDir string) (UnpackResult, error if f.FileInfo().IsDir() { log.Debugw("Unpacking directory", "archive", "zip", "file.path", dstPath) - // remove any world permissions from the directory + // check if the directory already exists _, err = os.Stat(dstPath) if errors.Is(err, fs.ErrNotExist) { + // the 
directory does not exist, create it and any non-existing parent directory with the same permissions if err := os.MkdirAll(dstPath, f.Mode().Perm()&0770); err != nil { return fmt.Errorf("creating directory %q: %w", dstPath, err) } } else if err != nil { return fmt.Errorf("stat() directory %q: %w", dstPath, err) } else { - // set the appropriate permissions - err = os.Chmod(dstPath, f.Mode().Perm()&0o770) + // directory already exists, set the appropriate permissions + err = os.Chmod(dstPath, f.Mode().Perm()&0770) if err != nil { - return fmt.Errorf("setting permissions %O for directory %q: %w", f.Mode().Perm()&0o770, dstPath, err) + return fmt.Errorf("setting permissions %O for directory %q: %w", f.Mode().Perm()&0770, dstPath, err) } } _ = os.MkdirAll(dstPath, f.Mode()&0770) } else { log.Debugw("Unpacking file", "archive", "zip", "file.path", dstPath) - // remove any world permissions from the directory/file + // create non-existing containing folders with 0770 permissions right now, we'll fix the permission of each + // directory as we come across them while processing the other package entries _ = os.MkdirAll(filepath.Dir(dstPath), 0770) f, err := os.OpenFile(dstPath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, f.Mode()&0770) if err != nil { @@ -273,6 +275,7 @@ func untar(log *logger.Logger, version string, archivePath, dataDir string) (Unp fileName := strings.TrimPrefix(f.Name, fileNamePrefix) if fileName == agentCommitFile { + // we already loaded the hash, skip this one continue } @@ -300,9 +303,9 @@ func untar(log *logger.Logger, version string, archivePath, dataDir string) (Unp switch { case mode.IsRegular(): log.Debugw("Unpacking file", "archive", "tar", "file.path", abs) - // just to be sure, it should already be created by Dir type - // remove any world permissions from the directory - if err = os.MkdirAll(filepath.Dir(abs), 0o750); err != nil { + // create non-existing containing folders with 0750 permissions right now, we'll fix the permission of each + // directory as we come across them while processing the other package entries + if err = os.MkdirAll(filepath.Dir(abs), 0750); err != nil { return UnpackResult{}, errors.New(err, "TarInstaller: creating directory for file "+abs, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) } @@ -322,19 +325,20 @@ func untar(log *logger.Logger, version string, archivePath, dataDir string) (Unp } case mode.IsDir(): log.Debugw("Unpacking directory", "archive", "tar", "file.path", abs) - // remove any world permissions from the directory + // check if the directory already exists _, err = os.Stat(abs) if errors.Is(err, fs.ErrNotExist) { + // the directory does not exist, create it and any non-existing parent directory with the same permissions if err := os.MkdirAll(abs, mode.Perm()&0770); err != nil { return UnpackResult{}, errors.New(err, "TarInstaller: creating directory for file "+abs, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) } } else if err != nil { return UnpackResult{}, errors.New(err, "TarInstaller: stat() directory for file "+abs, errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) } else { - // set the appropriate permissions - err = os.Chmod(abs, mode.Perm()&0o770) + // directory already exists, set the appropriate permissions + err = os.Chmod(abs, mode.Perm()&0770) if err != nil { - return UnpackResult{}, errors.New(err, fmt.Sprintf("TarInstaller: setting permissions %O for directory %q", mode.Perm()&0o770, abs), errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) + return UnpackResult{}, errors.New(err, 
fmt.Sprintf("TarInstaller: setting permissions %O for directory %q", mode.Perm()&0770, abs), errors.TypeFilesystem, errors.M(errors.MetaKeyPath, abs)) } } default: From 9945a142d817b9ae2c21437e6b5a9cb813d61801 Mon Sep 17 00:00:00 2001 From: Paolo Chila Date: Sat, 3 Feb 2024 10:19:51 +0100 Subject: [PATCH 9/9] remove multierror package from step_unpack.go --- internal/pkg/agent/application/upgrade/step_unpack.go | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/internal/pkg/agent/application/upgrade/step_unpack.go b/internal/pkg/agent/application/upgrade/step_unpack.go index c072e22c348..b9af3ed5620 100644 --- a/internal/pkg/agent/application/upgrade/step_unpack.go +++ b/internal/pkg/agent/application/upgrade/step_unpack.go @@ -9,6 +9,7 @@ import ( "archive/zip" "bytes" "compress/gzip" + goerrors "errors" "fmt" "io" "io/fs" @@ -18,8 +19,6 @@ import ( "runtime" "strings" - "github.com/hashicorp/go-multierror" - "github.com/elastic/elastic-agent/internal/pkg/agent/errors" v1 "github.com/elastic/elastic-agent/pkg/api/v1" "github.com/elastic/elastic-agent/pkg/core/logger" @@ -113,7 +112,7 @@ func unzip(log *logger.Logger, archivePath, dataDir string) (UnpackResult, error } defer func() { if cerr := rc.Close(); cerr != nil { - err = multierror.Append(err, cerr) + err = goerrors.Join(err, cerr) } }() @@ -164,7 +163,7 @@ func unzip(log *logger.Logger, archivePath, dataDir string) (UnpackResult, error } defer func() { if cerr := f.Close(); cerr != nil { - err = multierror.Append(err, cerr) + err = goerrors.Join(err, cerr) } }() @@ -386,12 +385,12 @@ type tarCloser struct { func (tc *tarCloser) Close() error { var err error if tc.gzipReader != nil { - err = multierror.Append(err, tc.gzipReader.Close()) + err = goerrors.Join(err, tc.gzipReader.Close()) } // prevent double Close() call to fzip reader tc.gzipReader = nil if tc.tarFile != nil { - err = multierror.Append(err, tc.tarFile.Close()) + err = goerrors.Join(err, tc.tarFile.Close()) } // prevent double Close() call the underlying file tc.tarFile = nil