From 4c5663c3db6d3666784e2d7926efbb041b9f026e Mon Sep 17 00:00:00 2001 From: mik-dass Date: Tue, 6 Apr 2021 17:16:40 +0530 Subject: [PATCH] Adds dev.odo.push.file attribute support for pushing only mentioned files --- pkg/component/component.go | 2 +- pkg/devfile/adapters/common/types.go | 1 + pkg/devfile/adapters/common/utils.go | 16 + .../adapters/kubernetes/component/adapter.go | 2 + pkg/sync/adapter.go | 25 +- pkg/sync/adapter_test.go | 13 +- pkg/sync/sync.go | 151 +- pkg/sync/sync_test.go | 369 +++++ pkg/util/file_indexer.go | 484 ++++++- pkg/util/file_indexer_test.go | 1254 +++++++++++++++++ .../devfile-with-remote-attributes.yaml | 52 + .../devfile/cmd_devfile_push_test.go | 28 + 12 files changed, 2304 insertions(+), 93 deletions(-) create mode 100644 pkg/sync/sync_test.go create mode 100644 tests/examples/source/devfiles/nodejs/devfile-with-remote-attributes.yaml diff --git a/pkg/component/component.go b/pkg/component/component.go index cec69f203fd..2f76a842e80 100644 --- a/pkg/component/component.go +++ b/pkg/component/component.go @@ -729,7 +729,7 @@ func PushLocal(client *occlient.Client, componentName string, applicationName st compInfo := common.ComponentInfo{ PodName: pod.Name, } - err = sync.CopyFile(adapter, path, compInfo, targetPath, files, globExps) + err = sync.CopyFile(adapter, path, compInfo, targetPath, files, globExps, util.IndexerRet{}) if err != nil { s.End(false) return errors.Wrap(err, "unable push files to pod") diff --git a/pkg/devfile/adapters/common/types.go b/pkg/devfile/adapters/common/types.go index 5c035d93ea9..dc8b0242c27 100644 --- a/pkg/devfile/adapters/common/types.go +++ b/pkg/devfile/adapters/common/types.go @@ -39,6 +39,7 @@ type SyncParameters struct { CompInfo ComponentInfo PodChanged bool ComponentExists bool + Files map[string]string } // ComponentInfo is a struct that holds information about a component i.e.; pod name, container name, and source mount (if applicable) diff --git a/pkg/devfile/adapters/common/utils.go b/pkg/devfile/adapters/common/utils.go index 355f3fd9a88..4f869897af5 100644 --- a/pkg/devfile/adapters/common/utils.go +++ b/pkg/devfile/adapters/common/utils.go @@ -2,6 +2,7 @@ package common import ( "os" + "path/filepath" "strings" "k8s.io/klog" @@ -203,3 +204,18 @@ func GetCommandsMap(commands []devfilev1.Command) map[string]devfilev1.Command { } return commandMap } + +// GetSyncFilesFromAttributes gets the target files and folders along with their respective remote destination from the devfile +// it uses the "dev.odo.push.path" attribute in the run command +func GetSyncFilesFromAttributes(commandsMap PushCommandsMap) map[string]string { + syncMap := make(map[string]string) + if value, ok := commandsMap[devfilev1.RunCommandGroupKind]; ok { + for key, value := range value.Attributes.Strings(nil) { + if strings.HasPrefix(key, "dev.odo.push.path:") { + localValue := strings.ReplaceAll(key, "dev.odo.push.path:", "") + syncMap[filepath.Clean(localValue)] = filepath.ToSlash(filepath.Clean(value)) + } + } + } + return syncMap +} diff --git a/pkg/devfile/adapters/kubernetes/component/adapter.go b/pkg/devfile/adapters/kubernetes/component/adapter.go index 724d8865fba..e35c82939a8 100644 --- a/pkg/devfile/adapters/kubernetes/component/adapter.go +++ b/pkg/devfile/adapters/kubernetes/component/adapter.go @@ -253,7 +253,9 @@ func (a Adapter) Push(parameters common.PushParameters) (err error) { CompInfo: compInfo, ComponentExists: componentExists, PodChanged: podChanged, + Files: common.GetSyncFilesFromAttributes(pushDevfileCommands), } 
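For readers following the new GetSyncFilesFromAttributes helper above, a minimal standalone sketch of the intended key-to-path translation is shown below; the raw attribute map is a hypothetical stand-in for what the run command's Attributes.Strings(nil) call would return, not the devfile library API itself.

package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

func main() {
	// Hypothetical attributes as they might appear on a devfile run command,
	// e.g. attributes: {"dev.odo.push.path:server.js": "remote-src/server.js"}.
	attributes := map[string]string{
		"dev.odo.push.path:server.js":       "remote-src/server.js",
		"dev.odo.push.path:views/view.html": "remote-src/views/view.html",
		"some.other.attribute":              "ignored",
	}

	syncMap := make(map[string]string)
	for key, value := range attributes {
		if !strings.HasPrefix(key, "dev.odo.push.path:") {
			continue
		}
		local := strings.TrimPrefix(key, "dev.odo.push.path:")
		// Local keys are cleaned per-OS; remote values are normalized to forward slashes.
		syncMap[filepath.Clean(local)] = filepath.ToSlash(filepath.Clean(value))
	}

	// map[server.js:remote-src/server.js views/view.html:remote-src/views/view.html]
	fmt.Println(syncMap)
}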
+ execRequired, err := syncAdapter.SyncFiles(syncParams) if err != nil { return errors.Wrapf(err, "Failed to sync to component with name %s", a.ComponentName) diff --git a/pkg/sync/adapter.go b/pkg/sync/adapter.go index 1d262075644..e6dcf6587fb 100644 --- a/pkg/sync/adapter.go +++ b/pkg/sync/adapter.go @@ -123,7 +123,7 @@ func (a Adapter) SyncFiles(syncParameters common.SyncParameters) (isPushRequired } // Run the indexer and find the modified/added/deleted/renamed files - ret, err = util.RunIndexer(pushParameters.Path, absIgnoreRules) + ret, err = util.RunIndexerWithRemote(pushParameters.Path, absIgnoreRules, syncParameters.Files) s.End(true) if err != nil { @@ -138,12 +138,8 @@ func (a Adapter) SyncFiles(syncParameters common.SyncParameters) (isPushRequired // and ignore the files on which the rules apply and filter them out filesChangedFiltered, filesDeletedFiltered := util.FilterIgnores(ret.FilesChanged, ret.FilesDeleted, absIgnoreRules) - // Remove the relative file directory from the list of deleted files - // in order to make the changes correctly within the Kubernetes pod - deletedFiles, err = util.RemoveRelativePathFromFiles(filesDeletedFiltered, pushParameters.Path) - if err != nil { - return false, errors.Wrap(err, "unable to remove relative path from list of changed/deleted files") - } + deletedFiles = append(filesDeletedFiltered, ret.RemoteDeleted...) + deletedFiles = append(deletedFiles, ret.RemoteDeleted...) klog.V(4).Infof("List of files to be deleted: +%v", deletedFiles) changedFiles = filesChangedFiltered klog.V(4).Infof("List of files changed: +%v", changedFiles) @@ -151,6 +147,10 @@ func (a Adapter) SyncFiles(syncParameters common.SyncParameters) (isPushRequired if len(filesChangedFiltered) == 0 && len(filesDeletedFiltered) == 0 && !isForcePush { return false, nil } + + if isForcePush { + deletedFiles = append(deletedFiles, "*") + } } err = a.pushLocal(pushParameters.Path, @@ -159,6 +159,7 @@ func (a Adapter) SyncFiles(syncParameters common.SyncParameters) (isPushRequired isForcePush, util.GetAbsGlobExps(pushParameters.Path, pushParameters.IgnoredFiles), syncParameters.CompInfo, + ret, ) if err != nil { return false, errors.Wrapf(err, "failed to sync to component with name %s", a.ComponentName) @@ -174,7 +175,7 @@ func (a Adapter) SyncFiles(syncParameters common.SyncParameters) (isPushRequired } // pushLocal syncs source code from the user's disk to the component -func (a Adapter) pushLocal(path string, files []string, delFiles []string, isForcePush bool, globExps []string, compInfo common.ComponentInfo) error { +func (a Adapter) pushLocal(path string, files []string, delFiles []string, isForcePush bool, globExps []string, compInfo common.ComponentInfo, ret util.IndexerRet) error { klog.V(4).Infof("Push: componentName: %s, path: %s, files: %s, delFiles: %s, isForcePush: %+v", a.ComponentName, path, files, delFiles, isForcePush) // Edge case: check to see that the path is NOT empty. 
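A condensed, illustrative model of the deletion bookkeeping in SyncFiles (not the real adapter code): locally deleted files and stale remote destinations reported by the indexer are merged into one list, and a force push appends the "*" marker so the whole sync folder is cleared before files are re-copied.

package main

import "fmt"

// indexerResult is a pared-down stand-in for util.IndexerRet.
type indexerResult struct {
	FilesChanged  []string
	FilesDeleted  []string
	RemoteDeleted []string
}

// buildDeletionList mirrors the merge performed in SyncFiles: filtered local
// deletions first, then remote destinations that became stale, then the "*"
// marker when a full (force) push is requested.
func buildDeletionList(ret indexerResult, isForcePush bool) []string {
	deleted := append([]string{}, ret.FilesDeleted...)
	deleted = append(deleted, ret.RemoteDeleted...)
	if isForcePush {
		deleted = append(deleted, "*")
	}
	return deleted
}

func main() {
	ret := indexerResult{
		FilesDeleted:  []string{"old.js"},
		RemoteDeleted: []string{"remote-src/old-location.js"},
	}
	fmt.Println(buildDeletionList(ret, false)) // [old.js remote-src/old-location.js]
	fmt.Println(buildDeletionList(ret, true))  // [old.js remote-src/old-location.js *]
}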
@@ -221,7 +222,7 @@ func (a Adapter) pushLocal(path string, files []string, delFiles []string, isFor if isForcePush || len(files) > 0 { klog.V(4).Infof("Copying files %s to pod", strings.Join(files, " ")) - err = CopyFile(a.Client, path, compInfo, syncFolder, files, globExps) + err = CopyFile(a.Client, path, compInfo, syncFolder, files, globExps, ret) if err != nil { s.End(false) return errors.Wrap(err, "unable push files to pod") @@ -301,5 +302,9 @@ func getCmdToDeleteFiles(delFiles []string, syncFolder string) []string { rmPaths := util.GetRemoteFilesMarkedForDeletion(delFiles, syncFolder) klog.V(4).Infof("remote files marked for deletion are %+v", rmPaths) cmdArr := []string{"rm", "-rf"} - return append(cmdArr, rmPaths...) + + for _, remote := range rmPaths { + cmdArr = append(cmdArr, filepath.ToSlash(remote)) + } + return cmdArr } diff --git a/pkg/sync/adapter_test.go b/pkg/sync/adapter_test.go index 560454fb633..eaff4add4b8 100644 --- a/pkg/sync/adapter_test.go +++ b/pkg/sync/adapter_test.go @@ -90,6 +90,11 @@ func TestSyncFiles(t *testing.T) { t.Errorf("TestSyncFiles error: error creating temporary directory for the indexer: %v", err) } + jsFile, err := os.Create(filepath.Join(directory, "red.js")) + if err != nil { + t.Errorf("TestSyncFiles error: error creating temporary file for the indexer: %v", err) + } + ctrl := gomock.NewController(t) // Assert that Bar() is invoked. @@ -218,10 +223,14 @@ func TestSyncFiles(t *testing.T) { }) } + err = jsFile.Close() + if err != nil { + t.Errorf("TestSyncFiles error: error deleting the temp dir %s, err: %v", directory, err) + } // Remove the temp dir created for the file indexer err = os.RemoveAll(directory) if err != nil { - t.Errorf("TestSyncFiles error: error deleting the temp dir %s", directory) + t.Errorf("TestSyncFiles error: error deleting the temp dir %s, err: %v", directory, err) } } @@ -359,7 +368,7 @@ func TestPushLocal(t *testing.T) { } syncAdapter := New(adapterCtx, syncClient) - err := syncAdapter.pushLocal(tt.path, tt.files, tt.delFiles, tt.isForcePush, []string{}, tt.compInfo) + err := syncAdapter.pushLocal(tt.path, tt.files, tt.delFiles, tt.isForcePush, []string{}, tt.compInfo, util.IndexerRet{}) if !tt.wantErr && err != nil { t.Errorf("TestPushLocal error: error pushing files: %v", err) } diff --git a/pkg/sync/sync.go b/pkg/sync/sync.go index 3d6fc42eac9..fcc96fccca6 100644 --- a/pkg/sync/sync.go +++ b/pkg/sync/sync.go @@ -2,13 +2,14 @@ package sync import ( taro "archive/tar" + "fmt" "io" - "io/ioutil" "os" "path/filepath" "github.com/openshift/odo/pkg/devfile/adapters/common" "github.com/openshift/odo/pkg/log" + "github.com/openshift/odo/pkg/testingutil/filesystem" "github.com/openshift/odo/pkg/util" "k8s.io/klog" @@ -24,7 +25,7 @@ type SyncClient interface { // During copying binary components, localPath represent base directory path to binary and copyFiles contains path of binary // During copying local source components, localPath represent base directory path whereas copyFiles is empty // During `odo watch`, localPath represent base directory path whereas copyFiles contains list of changed Files -func CopyFile(client SyncClient, localPath string, compInfo common.ComponentInfo, targetPath string, copyFiles []string, globExps []string) error { +func CopyFile(client SyncClient, localPath string, compInfo common.ComponentInfo, targetPath string, copyFiles []string, globExps []string, ret util.IndexerRet) error { // Destination is set to "ToSlash" as all containers being ran within OpenShift / 
S2I are all // Linux based and thus: "\opt\app-root\src" would not work correctly. @@ -37,7 +38,7 @@ func CopyFile(client SyncClient, localPath string, compInfo common.ComponentInfo go func() { defer writer.Close() - err := makeTar(localPath, dest, writer, copyFiles, globExps) + err := makeTar(localPath, dest, writer, copyFiles, globExps, ret, filesystem.DefaultFs{}) if err != nil { log.Errorf("Error while creating tar: %#v", err) os.Exit(1) @@ -54,14 +55,14 @@ func CopyFile(client SyncClient, localPath string, compInfo common.ComponentInfo } // checkFileExist check if given file exists or not -func checkFileExist(fileName string) bool { - _, err := os.Stat(fileName) +func checkFileExistWithFS(fileName string, fs filesystem.Filesystem) bool { + _, err := fs.Stat(fileName) return !os.IsNotExist(err) } // makeTar function is copied from https://github.com/kubernetes/kubernetes/blob/master/pkg/kubectl/cmd/cp.go#L309 // srcPath is ignored if files is set -func makeTar(srcPath, destPath string, writer io.Writer, files []string, globExps []string) error { +func makeTar(srcPath, destPath string, writer io.Writer, files []string, globExps []string, ret util.IndexerRet, fs filesystem.Filesystem) error { // TODO: use compression here? tarWriter := taro.NewWriter(writer) defer tarWriter.Close() @@ -71,12 +72,28 @@ func makeTar(srcPath, destPath string, writer io.Writer, files []string, globExp // and thus \opt\app-root\src would be an invalid path. Backward slashes // are converted to forward. destPath = filepath.ToSlash(filepath.Clean(destPath)) - + uniquePaths := make(map[string]bool) klog.V(4).Infof("makeTar arguments: srcPath: %s, destPath: %s, files: %+v", srcPath, destPath, files) if len(files) != 0 { //watchTar for _, fileName := range files { - if checkFileExist(fileName) { + + if _, ok := uniquePaths[fileName]; ok { + continue + } else { + uniquePaths[fileName] = true + } + + if checkFileExistWithFS(fileName, fs) { + + matched, err := util.IsGlobExpMatch(fileName, globExps) + if err != nil { + return err + } + if matched { + continue + } + // Fetch path of source file relative to that of source base path so that it can be passed to recursiveTar // which uses path relative to base path for taro header to correctly identify file location when untarred @@ -99,25 +116,31 @@ func makeTar(srcPath, destPath string, writer io.Writer, files []string, globExp // Now we get the source file and join it to the base directory. 
srcFile := filepath.Join(filepath.Base(srcPath), destFile) + if value, ok := ret.NewFileMap[destFile]; ok && value.RemoteAttribute != "" { + destFile = value.RemoteAttribute + } + klog.V(4).Infof("makeTar srcFile: %s", srcFile) klog.V(4).Infof("makeTar destFile: %s", destFile) // The file could be a regular file or even a folder, so use recursiveTar which handles symlinks, regular files and folders - err = recursiveTar(filepath.Dir(srcPath), srcFile, filepath.Dir(destPath), destFile, tarWriter, globExps) + err = linearTar(filepath.Dir(srcPath), srcFile, filepath.Dir(destPath), destFile, tarWriter, fs) if err != nil { return err } } } - } else { - return recursiveTar(filepath.Dir(srcPath), filepath.Base(srcPath), filepath.Dir(destPath), "", tarWriter, globExps) } return nil } -// recursiveTar function is copied from https://github.com/kubernetes/kubernetes/blob/master/pkg/kubectl/cmd/cp.go#L319 -func recursiveTar(srcBase, srcFile, destBase, destFile string, tw *taro.Writer, globExps []string) error { +// linearTar function is a modified version of https://github.com/kubernetes/kubernetes/blob/master/pkg/kubectl/cmd/cp.go#L319 +func linearTar(srcBase, srcFile, destBase, destFile string, tw *taro.Writer, fs filesystem.Filesystem) error { + if destFile == "" { + return fmt.Errorf("linear Tar error, destFile cannot be empty") + } + klog.V(4).Infof("recursiveTar arguments: srcBase: %s, srcFile: %s, destBase: %s, destFile: %s", srcBase, srcFile, destBase, destFile) // The destination is a LINUX container and thus we *must* use ToSlash in order @@ -127,86 +150,62 @@ func recursiveTar(srcBase, srcFile, destBase, destFile string, tw *taro.Writer, klog.V(4).Infof("Corrected destinations: base: %s file: %s", destBase, destFile) joinedPath := filepath.Join(srcBase, srcFile) - matchedPathsDir, err := filepath.Glob(joinedPath) + + stat, err := fs.Stat(joinedPath) if err != nil { return err } - matchedPaths := []string{} - - // checking the files which are allowed by glob matching - for _, path := range matchedPathsDir { - matched, err := util.IsGlobExpMatch(path, globExps) - if err != nil { - return err - } - if !matched { - matchedPaths = append(matchedPaths, path) - } - } - - // adding the files for taring - for _, matchedPath := range matchedPaths { - stat, err := os.Lstat(matchedPath) + if stat.IsDir() { + files, err := fs.ReadDir(joinedPath) if err != nil { return err } - if stat.IsDir() { - files, err := ioutil.ReadDir(matchedPath) - if err != nil { - return err - } - if len(files) == 0 { - //case empty directory - hdr, _ := taro.FileInfoHeader(stat, matchedPath) - hdr.Name = destFile - if err := tw.WriteHeader(hdr); err != nil { - return err - } - } - for _, f := range files { - if err := recursiveTar(srcBase, filepath.Join(srcFile, f.Name()), destBase, filepath.Join(destFile, f.Name()), tw, globExps); err != nil { - return err - } - } - return nil - } else if stat.Mode()&os.ModeSymlink != 0 { - //case soft link + if len(files) == 0 { + //case empty directory hdr, _ := taro.FileInfoHeader(stat, joinedPath) - target, err := os.Readlink(joinedPath) - if err != nil { - return err - } - - hdr.Linkname = target hdr.Name = destFile if err := tw.WriteHeader(hdr); err != nil { return err } - } else { - //case regular file or other file type like pipe - hdr, err := taro.FileInfoHeader(stat, joinedPath) - if err != nil { - return err - } - hdr.Name = destFile + } + return nil + } else if stat.Mode()&os.ModeSymlink != 0 { + //case soft link + hdr, _ := taro.FileInfoHeader(stat, joinedPath) + 
target, err := os.Readlink(joinedPath) + if err != nil { + return err + } - if err := tw.WriteHeader(hdr); err != nil { - return err - } + hdr.Linkname = target + hdr.Name = destFile + if err := tw.WriteHeader(hdr); err != nil { + return err + } + } else { + //case regular file or other file type like pipe + hdr, err := taro.FileInfoHeader(stat, joinedPath) + if err != nil { + return err + } + hdr.Name = destFile - f, err := os.Open(joinedPath) - if err != nil { - return err - } - defer f.Close() // #nosec G307 + if err := tw.WriteHeader(hdr); err != nil { + return err + } - if _, err := io.Copy(tw, f); err != nil { - return err - } + f, err := fs.Open(joinedPath) + if err != nil { + return err + } + defer f.Close() // #nosec G307 - return f.Close() + if _, err := io.Copy(tw, f); err != nil { + return err } + + return f.Close() } return nil diff --git a/pkg/sync/sync_test.go b/pkg/sync/sync_test.go new file mode 100644 index 00000000000..0f1d3e7f353 --- /dev/null +++ b/pkg/sync/sync_test.go @@ -0,0 +1,369 @@ +package sync + +import ( + taro "archive/tar" + "bytes" + "io" + "path" + "path/filepath" + "testing" + + "github.com/openshift/odo/pkg/testingutil/filesystem" + "github.com/openshift/odo/pkg/util" +) + +func Test_linearTar(t *testing.T) { + // FileType custom type to indicate type of file + type FileType int + + const ( + // RegularFile enum to represent regular file + RegularFile FileType = 0 + // Directory enum to represent directory + Directory FileType = 1 + ) + + fs := filesystem.NewFakeFs() + + type args struct { + srcBase string + srcFile string + destBase string + destFile string + data string + } + tests := []struct { + name string + args args + fileType FileType + notExistError bool + wantErr bool + }{ + { + name: "case 1: write a regular file", + args: args{ + srcBase: filepath.Join("tmp", "dir1"), + srcFile: "red.js", + destBase: filepath.Join("tmp1", "dir2"), + destFile: "red.js", + data: "hi", + }, + fileType: RegularFile, + wantErr: false, + }, + { + name: "case 2: write a folder", + args: args{ + srcBase: filepath.Join("tmp", "dir1"), + srcFile: "dir0", + destBase: filepath.Join("tmp1", "dir2"), + destFile: "dir2", + }, + fileType: Directory, + wantErr: false, + }, + { + name: "case 3: file source doesn't exist", + args: args{ + srcBase: filepath.Join("tmp", "dir1"), + srcFile: "red.js", + destBase: filepath.Join("tmp1", "dir2"), + destFile: "red.js", + data: "hi", + }, + fileType: RegularFile, + notExistError: true, + wantErr: true, + }, + { + name: "case 4: folder source doesn't exist", + args: args{ + srcBase: filepath.Join("tmp", "dir1"), + srcFile: "dir0", + destBase: filepath.Join("tmp1", "dir2"), + destFile: "dir2", + }, + fileType: Directory, + notExistError: true, + wantErr: true, + }, + { + name: "case 5: dest is empty", + args: args{ + srcBase: filepath.Join("tmp", "dir1"), + srcFile: "dir0", + destBase: "", + destFile: "", + }, + fileType: Directory, + wantErr: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + filepath := path.Join(tt.args.srcBase, tt.args.srcFile) + + if tt.fileType == RegularFile { + f, err := fs.Create(filepath) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if _, err := io.Copy(f, bytes.NewBuffer([]byte(tt.args.data))); err != nil { + t.Fatalf("unexpected error: %v", err) + } + defer f.Close() + } else { + if err := fs.MkdirAll(filepath, 0755); err != nil { + t.Errorf("unexpected error: %v", err) + } + } + + if tt.notExistError == true { + tt.args.srcBase += "blah" + } 
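Because glob filtering now happens in makeTar before linearTar is invoked, linearTar only has to stat a single path and write it into the archive under its (possibly remote-attribute-remapped) destination name. The following self-contained sketch, using only the standard library and a hypothetical addRenamed helper, shows the header-renaming idea that lets a local README.txt land at text/README.txt inside the pod.

package main

import (
	"archive/tar"
	"bytes"
	"fmt"
	"io"
	"log"
	"strings"
)

// addRenamed writes content into the tar under destName, which is how a
// local file such as "README.txt" can end up at "text/README.txt" remotely.
func addRenamed(tw *tar.Writer, destName, content string) error {
	hdr := &tar.Header{Name: destName, Mode: 0644, Size: int64(len(content))}
	if err := tw.WriteHeader(hdr); err != nil {
		return err
	}
	_, err := io.Copy(tw, strings.NewReader(content))
	return err
}

func main() {
	var buf bytes.Buffer
	tw := tar.NewWriter(&buf)
	if err := addRenamed(tw, "text/README.txt", "hello"); err != nil {
		log.Fatal(err)
	}
	if err := tw.Close(); err != nil {
		log.Fatal(err)
	}

	tr := tar.NewReader(&buf)
	for {
		hdr, err := tr.Next()
		if err == io.EOF {
			break
		}
		if err != nil {
			log.Fatal(err)
		}
		fmt.Println(hdr.Name) // text/README.txt
	}
}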
+ + reader, writer := io.Pipe() + defer reader.Close() + defer writer.Close() + + tarWriter := taro.NewWriter(writer) + + go func() { + defer tarWriter.Close() + if err := linearTar(tt.args.srcBase, tt.args.srcFile, tt.args.destBase, tt.args.destFile, tarWriter, fs); (err != nil) != tt.wantErr { + t.Errorf("linearTar() error = %v, wantErr %v", err, tt.wantErr) + } + }() + + tarReader := taro.NewReader(reader) + for { + hdr, err := tarReader.Next() + if err == io.EOF { + break + } else if err != nil { + t.Errorf("unexpected error: %v", err) + } + + if hdr.Name != tt.args.destFile { + t.Errorf("expected %q as destination filename, saw: %q", tt.args.destFile, hdr.Name) + } + } + }) + } +} + +func Test_makeTar(t *testing.T) { + fs := filesystem.NewFakeFs() + + dir0, err := fs.TempDir("", "dir0") + if err != nil { + t.Errorf("unexpected error: %v", err) + } + + _, err = fs.Create(filepath.Join(dir0, "red.js")) + if err != nil { + t.Errorf("unexpected error: %v", err) + } + _, err = fs.Create(filepath.Join(dir0, "README.txt")) + if err != nil { + t.Errorf("unexpected error: %v", err) + } + err = fs.MkdirAll(filepath.Join(dir0, "views"), 0644) + if err != nil { + t.Errorf("unexpected error: %v", err) + } + _, err = fs.Create(filepath.Join(dir0, "views", "view.html")) + if err != nil { + t.Errorf("unexpected error: %v", err) + } + + type args struct { + srcPath string + destPath string + files []string + globExps []string + ret util.IndexerRet + } + tests := []struct { + name string + args args + wantFiles map[string]bool + wantErr bool + }{ + { + name: "case 1: normal tar making", + args: args{ + srcPath: dir0, + destPath: filepath.Join("tmp", "dir1"), + files: []string{ + filepath.Join(dir0, "red.js"), + filepath.Join(dir0, "README.txt"), + filepath.Join(dir0, "views"), + filepath.Join(dir0, "views", "view.html")}, + globExps: []string{}, + ret: util.IndexerRet{ + NewFileMap: map[string]util.FileData{ + "red.js": { + RemoteAttribute: "red.js", + }, + "README.txt": { + RemoteAttribute: "README.txt", + }, + "views": { + RemoteAttribute: "views", + }, + filepath.Join("views", "view.html"): { + RemoteAttribute: "views/view.html", + }, + }, + }, + }, + wantFiles: map[string]bool{ + "red.js": true, + "views/view.html": true, + "README.txt": true, + }, + }, + { + name: "case 2: normal tar making with matching glob expression", + args: args{ + srcPath: dir0, + destPath: filepath.Join("tmp", "dir1"), + files: []string{ + filepath.Join(dir0, "red.js"), + filepath.Join(dir0, "README.txt"), + filepath.Join(dir0, "views"), + filepath.Join(dir0, "views", "view.html")}, + globExps: []string{filepath.Join(dir0, "README.txt")}, + ret: util.IndexerRet{ + NewFileMap: map[string]util.FileData{ + "red.js": { + RemoteAttribute: "red.js", + }, + "README.txt": { + RemoteAttribute: "README.txt", + }, + "views": { + RemoteAttribute: "views", + }, + filepath.Join("views", "view.html"): { + RemoteAttribute: "views/view.html", + }, + }, + }, + }, + wantFiles: map[string]bool{ + "red.js": true, + "views/view.html": true, + }, + }, + { + name: "case 3: normal tar making different remote than local", + args: args{ + srcPath: dir0, + destPath: filepath.Join("tmp", "dir1"), + files: []string{ + filepath.Join(dir0, "red.js"), + filepath.Join(dir0, "README.txt"), + filepath.Join(dir0, "views"), + filepath.Join(dir0, "views", "view.html")}, + globExps: []string{}, + ret: util.IndexerRet{ + NewFileMap: map[string]util.FileData{ + "red.js": { + RemoteAttribute: "red.js", + }, + "README.txt": { + RemoteAttribute: "text/README.txt", + 
}, + "views": { + RemoteAttribute: "views", + }, + filepath.Join("views", "view.html"): { + RemoteAttribute: "views/view.html", + }, + }, + }, + }, + wantFiles: map[string]bool{ + "red.js": true, + "views/view.html": true, + "text/README.txt": true, + }, + }, + { + name: "case 4: ignore no existent file or folder", + args: args{ + srcPath: dir0, + destPath: filepath.Join("tmp", "dir1"), + files: []string{ + filepath.Join(dir0, "red.js"), + filepath.Join(dir0, "README.txt"), + filepath.Join("blah", "views"), + filepath.Join(dir0, "views", "view.html")}, + globExps: []string{}, + ret: util.IndexerRet{ + NewFileMap: map[string]util.FileData{ + "red.js": { + RemoteAttribute: "red.js", + }, + "README.txt": { + RemoteAttribute: "text/README.txt", + }, + "views": { + RemoteAttribute: "views", + }, + filepath.Join("views", "view.html"): { + RemoteAttribute: "views/view.html", + }, + }, + }, + }, + wantFiles: map[string]bool{ + "red.js": true, + "views/view.html": true, + "text/README.txt": true, + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + reader, writer := io.Pipe() + defer reader.Close() + defer writer.Close() + + tarWriter := taro.NewWriter(writer) + go func() { + defer tarWriter.Close() + wantErr := tt.wantErr + if err := makeTar(tt.args.srcPath, tt.args.destPath, writer, tt.args.files, tt.args.globExps, tt.args.ret, fs); (err != nil) != wantErr { + t.Errorf("makeTar() error = %v, wantErr %v", err, tt.wantErr) + return + } + }() + + gotFiles := make(map[string]bool) + tarReader := taro.NewReader(reader) + for { + hdr, err := tarReader.Next() + if err == io.EOF { + break + } else if err != nil { + t.Errorf("unexpected error: %v", err) + } + + if _, ok := tt.wantFiles[hdr.Name]; !ok { + t.Errorf("unexpected file name in tar, : %q", hdr.Name) + } + + gotFiles[hdr.Name] = true + } + + for fileName := range tt.wantFiles { + if _, ok := gotFiles[fileName]; !ok { + t.Errorf("missed file, : %q", fileName) + } + } + }) + } +} diff --git a/pkg/util/file_indexer.go b/pkg/util/file_indexer.go index c25d13882bb..f88032d7995 100644 --- a/pkg/util/file_indexer.go +++ b/pkg/util/file_indexer.go @@ -5,8 +5,10 @@ import ( "io/ioutil" "os" "path/filepath" + "strings" "time" + "github.com/devfile/library/pkg/util" "github.com/openshift/odo/pkg/testingutil/filesystem" "github.com/pkg/errors" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" @@ -37,6 +39,7 @@ func NewFileIndex() *FileIndex { type FileData struct { Size int64 LastModifiedDate time.Time + RemoteAttribute string } // ReadFileIndex tries to read the odo index file from the given location and returns the data from the file @@ -138,10 +141,11 @@ func DeleteIndexFile(directory string) error { // IndexerRet is a struct that represent return value of RunIndexer function type IndexerRet struct { - FilesChanged []string - FilesDeleted []string - NewFileMap map[string]FileData - ResolvedPath string + FilesChanged []string + FilesDeleted []string + RemoteDeleted []string + NewFileMap map[string]FileData + ResolvedPath string } // RunIndexer walks the given directory and finds the files which have changed and which were deleted/renamed @@ -302,3 +306,475 @@ func WriteFile(newFileMap map[string]FileData, resolvedPath string) error { return err } + +// RunIndexerWithRemote reads the existing index from the given directory and runs the indexer on it +// with the given ignore rules +// it also adds the file index to the .gitignore file and resolves the path +func RunIndexerWithRemote(directory string, 
ignoreRules []string, remoteDirectories map[string]string) (ret IndexerRet, err error) { + directory = filepath.FromSlash(directory) + ret.ResolvedPath, err = ResolveIndexFilePath(directory) + if err != nil { + return ret, err + } + + // check for .gitignore file and add odo-file-index.json to .gitignore + gitIgnoreFile, err := CheckGitIgnoreFile(directory) + if err != nil { + return ret, err + } + + // add odo-file-index.json path to .gitignore + err = AddOdoFileIndex(gitIgnoreFile) + if err != nil { + return ret, err + } + + // read the odo index file + existingFileIndex, err := ReadFileIndex(ret.ResolvedPath) + if err != nil { + return ret, err + } + + returnedIndex, err := runIndexerWithExistingFileIndex(directory, ignoreRules, remoteDirectories, existingFileIndex) + if err != nil { + return IndexerRet{}, err + } + returnedIndex.ResolvedPath = ret.ResolvedPath + return returnedIndex, nil +} + +// runIndexerWithExistingFileIndex visits the given directory and creates the new index data +// it ignores the files and folders satisfying the ignoreRules +func runIndexerWithExistingFileIndex(directory string, ignoreRules []string, remoteDirectories map[string]string, existingFileIndex *FileIndex) (ret IndexerRet, err error) { + destPath := "" + srcPath := directory + + ret.NewFileMap = make(map[string]FileData) + + fileChanged := make(map[string]bool) + filesDeleted := make(map[string]bool) + fileRemoteChanged := make(map[string]bool) + + if len(remoteDirectories) == 0 { + // The file could be a regular file or even a folder, so use recursiveTar which handles symlinks, regular files and folders + innerRet, err := recursiveChecker(directory, filepath.Dir(srcPath), filepath.Base(srcPath), filepath.Dir(destPath), filepath.Base(destPath), ignoreRules, remoteDirectories, *existingFileIndex, filesystem.DefaultFs{}) + + if err != nil { + return IndexerRet{}, err + } + + for k, v := range innerRet.NewFileMap { + ret.NewFileMap[k] = v + } + + for _, remote := range innerRet.FilesChanged { + fileChanged[remote] = true + } + + for _, remote := range innerRet.RemoteDeleted { + fileRemoteChanged[remote] = true + } + + for _, remote := range innerRet.FilesDeleted { + filesDeleted[remote] = true + } + } + + for remoteAttribute := range remoteDirectories { + matches, err := filepath.Glob(filepath.Join(directory, remoteAttribute)) + if err != nil { + return IndexerRet{}, err + } + for _, fileName := range matches { + if checkFileExist(fileName) { + // Fetch path of source file relative to that of source base path so that it can be passed to recursiveTar + // which uses path relative to base path for taro header to correctly identify file location when untarred + + // Yes, now that the file exists, now we need to get the absolute path.. if we don't, then when we pass in: + // 'odo push --context foobar' instead of 'odo push --context ~/foobar' it will NOT work.. + fileAbsolutePath, err := util.GetAbsPath(fileName) + if err != nil { + return IndexerRet{}, err + } + klog.V(4).Infof("Got abs path: %s", fileAbsolutePath) + klog.V(4).Infof("Making %s relative to %s", srcPath, fileAbsolutePath) + + // We use "FromSlash" to make this OS-based (Windows uses \, Linux & macOS use /) + // we get the relative path by joining the two + destFile, err := filepath.Rel(filepath.FromSlash(srcPath), filepath.FromSlash(fileAbsolutePath)) + if err != nil { + return IndexerRet{}, err + } + + // Now we get the source file and join it to the base directory. 
+ srcFile := filepath.Join(filepath.Base(srcPath), destFile) + + if value, ok := remoteDirectories[filepath.ToSlash(destFile)]; ok { + destFile = value + } + + klog.V(4).Infof("makeTar srcFile: %s", srcFile) + klog.V(4).Infof("makeTar destFile: %s", destFile) + + // The file could be a regular file or even a folder, so use recursiveTar which handles symlinks, regular files and folders + innerRet, err := recursiveChecker(directory, filepath.Dir(srcPath), srcFile, filepath.Dir(destPath), destFile, ignoreRules, remoteDirectories, *existingFileIndex, filesystem.DefaultFs{}) + if err != nil { + return IndexerRet{}, err + } + + for k, v := range innerRet.NewFileMap { + ret.NewFileMap[k] = v + } + + for _, remote := range innerRet.FilesChanged { + fileChanged[remote] = true + } + + for _, remote := range innerRet.RemoteDeleted { + fileRemoteChanged[remote] = true + } + + for _, remote := range innerRet.FilesDeleted { + filesDeleted[remote] = true + } + } + } + } + + // find files which are deleted/renamed + for fileName, value := range existingFileIndex.Files { + if _, ok := ret.NewFileMap[fileName]; !ok { + klog.V(4).Infof("Deleting file: %s", fileName) + + if value.RemoteAttribute != "" { + currentRemote := value.RemoteAttribute + for _, remote := range findRemoteFolderForDeletion(currentRemote, remoteDirectories) { + fileRemoteChanged[remote] = true + } + } else { + // check the *absolute* path to the file for glob rules + fileAbsolutePath, err := GetAbsPath(filepath.Join(directory, fileName)) + if err != nil { + return ret, errors.Wrapf(err, "unable to retrieve absolute path of file %s", fileName) + } + + matched, err := util.IsGlobExpMatch(fileAbsolutePath, ignoreRules) + if err != nil { + return IndexerRet{}, err + } + if matched { + continue + } + filesDeleted[fileName] = true + } + } + } + + if len(fileRemoteChanged) > 0 { + ret.RemoteDeleted = []string{} + } + if len(fileChanged) > 0 { + ret.FilesChanged = []string{} + } + if len(filesDeleted) > 0 { + ret.FilesDeleted = []string{} + } + for remote := range fileRemoteChanged { + ret.RemoteDeleted = append(ret.RemoteDeleted, remote) + } + for remote := range fileChanged { + ret.FilesChanged = append(ret.FilesChanged, remote) + } + for remote := range filesDeleted { + ret.FilesDeleted = append(ret.FilesDeleted, remote) + } + + return ret, nil +} + +// recursiveChecker visits the current source and it's inner files and folders, if any +// the destination values are used to record the appropriate remote location for file or folder +func recursiveChecker(directory, srcBase, srcFile, destBase, destFile string, ignoreRules []string, remoteDirectories map[string]string, existingFileIndex FileIndex, fs filesystem.Filesystem) (IndexerRet, error) { + klog.V(4).Infof("recursiveTar arguments: srcBase: %s, srcFile: %s, destBase: %s, destFile: %s", srcBase, srcFile, destBase, destFile) + + // The destination is a LINUX container and thus we *must* use ToSlash in order + // to get the copying over done correctly.. 
+ destBase = filepath.ToSlash(destBase) + destFile = filepath.ToSlash(destFile) + klog.V(4).Infof("Corrected destinations: base: %s file: %s", destBase, destFile) + + joinedPath := filepath.Join(srcBase, srcFile) + matchedPathsDir, err := filepath.Glob(joinedPath) + if err != nil { + return IndexerRet{}, err + } + + joinedRelPath, err := filepath.Rel(directory, joinedPath) + if err != nil { + return IndexerRet{}, err + } + + var ret IndexerRet + ret.NewFileMap = make(map[string]FileData) + + fileChanged := make(map[string]bool) + fileRemoteChanged := make(map[string]bool) + + for _, matchedPath := range matchedPathsDir { + + // check if it matches a ignore rule + match, err := IsGlobExpMatch(matchedPath, ignoreRules) + if err != nil { + return IndexerRet{}, err + } + // the folder matches a glob rule and thus should be skipped + if match { + return IndexerRet{}, nil + } + + stat, err := fs.Stat(matchedPath) + if err != nil { + return IndexerRet{}, err + } + + if joinedRelPath != "." { + // check for changes in the size and the modified date of the file or folder + // and if the file is newly added + if _, ok := existingFileIndex.Files[joinedRelPath]; !ok { + fileChanged[matchedPath] = true + klog.V(4).Infof("file added: %s", matchedPath) + } else if !stat.ModTime().Equal(existingFileIndex.Files[joinedRelPath].LastModifiedDate) { + fileChanged[matchedPath] = true + klog.V(4).Infof("last modified date changed: %s", matchedPath) + } else if stat.Size() != existingFileIndex.Files[joinedRelPath].Size { + fileChanged[matchedPath] = true + klog.V(4).Infof("size changed: %s", matchedPath) + } + } + + if stat.IsDir() { + + if stat.Name() == fileIndexDirectory || stat.Name() == ".git" { + return IndexerRet{}, nil + } + + if joinedRelPath != "." { + folderData, folderChangedData, folderRemoteChangedData := handleRemoteDataFolder(destFile, matchedPath, joinedRelPath, remoteDirectories, existingFileIndex) + folderData.Size = stat.Size() + folderData.LastModifiedDate = stat.ModTime() + ret.NewFileMap[joinedRelPath] = folderData + + for data, value := range folderChangedData { + fileChanged[data] = value + } + + for data, value := range folderRemoteChangedData { + fileRemoteChanged[data] = value + } + } + + // read the current folder and read inner files and folders + files, err := fs.ReadDir(matchedPath) + if err != nil { + return IndexerRet{}, err + } + if len(files) == 0 { + continue + } + for _, f := range files { + if _, ok := remoteDirectories[filepath.Join(joinedRelPath, f.Name())]; ok { + continue + } + + innerRet, err := recursiveChecker(directory, srcBase, filepath.Join(srcFile, f.Name()), destBase, filepath.Join(destFile, f.Name()), ignoreRules, remoteDirectories, existingFileIndex, fs) + if err != nil { + return IndexerRet{}, err + } + + for k, v := range innerRet.NewFileMap { + ret.NewFileMap[k] = v + } + + for _, remote := range innerRet.FilesChanged { + fileChanged[remote] = true + } + for _, remote := range innerRet.RemoteDeleted { + fileRemoteChanged[remote] = true + } + } + } else { + fileData, fileChangedData, fileRemoteChangedData := handleRemoteDataFile(destFile, matchedPath, joinedRelPath, remoteDirectories, existingFileIndex) + fileData.Size = stat.Size() + fileData.LastModifiedDate = stat.ModTime() + ret.NewFileMap[joinedRelPath] = fileData + + for data, value := range fileChangedData { + fileChanged[data] = value + } + + for data, value := range fileRemoteChangedData { + fileRemoteChanged[data] = value + } + } + } + + // remove duplicates in the records + if 
len(fileRemoteChanged) > 0 { + ret.RemoteDeleted = []string{} + } + if len(fileChanged) > 0 { + ret.FilesChanged = []string{} + } + for remote := range fileRemoteChanged { + ret.RemoteDeleted = append(ret.RemoteDeleted, remote) + } + for file := range fileChanged { + ret.FilesChanged = append(ret.FilesChanged, file) + } + + return ret, nil +} + +// handleRemoteDataFile handles remote addition, deletion etc for the given file +func handleRemoteDataFile(destFile, path, relPath string, remoteDirectories map[string]string, existingFileIndex FileIndex) (FileData, map[string]bool, map[string]bool) { + destFile = filepath.ToSlash(destFile) + fileChanged := make(map[string]bool) + fileRemoteChanged := make(map[string]bool) + + remoteDeletionRequired := false + + remoteAttribute := destFile + if len(remoteDirectories) == 0 { + // if no remote attributes specified currently + remoteAttribute = "" + if existingFileIndex.Files[relPath].RemoteAttribute != "" && existingFileIndex.Files[relPath].RemoteAttribute != destFile { + // remote attribute for the file exists in the index + // but the value doesn't match the current relative path + // we need to push the current file again and delete the previous location from the container + + fileChanged[path] = true + if existingFileIndex.Files[relPath].RemoteAttribute != "" { + remoteDeletionRequired = true + } + } + } else { + if value, ok := remoteDirectories[relPath]; ok { + remoteAttribute = value + } + + if existingFileData, ok := existingFileIndex.Files[relPath]; !ok { + // if the file data doesn't exist in the existing index, we mark the file for pushing + fileChanged[path] = true + } else { + // if the remote attribute is different in the file data from the existing index + // and the remote attribute is not same as the current relative path + // we mark the file for pushing and delete the remote paths + if existingFileData.RemoteAttribute != remoteAttribute && (remoteAttribute != relPath || existingFileData.RemoteAttribute != "") { + fileChanged[path] = true + remoteDeletionRequired = true + } + } + } + + if remoteDeletionRequired { + // if remote deletion is required but the remote attribute is empty + // we use the relative path for deletion + currentRemote := existingFileIndex.Files[relPath].RemoteAttribute + if currentRemote == "" { + currentRemote = relPath + } + fileRemoteChanged[currentRemote] = true + for _, remote := range findRemoteFolderForDeletion(currentRemote, remoteDirectories) { + fileRemoteChanged[remote] = true + } + } + + return FileData{ + RemoteAttribute: filepath.ToSlash(remoteAttribute), + }, fileChanged, fileRemoteChanged +} + +// handleRemoteDataFolder handles remote addition, deletion etc for the given folder +func handleRemoteDataFolder(destFile, path, relPath string, remoteDirectories map[string]string, existingFileIndex FileIndex) (FileData, map[string]bool, map[string]bool) { + destFile = filepath.ToSlash(destFile) + remoteAttribute := destFile + + fileChanged := make(map[string]bool) + fileRemoteChanged := make(map[string]bool) + + remoteChanged := false + + if len(remoteDirectories) == 0 { + remoteAttribute = "" + // remote attribute for the folder exists in the index + // but the value doesn't match the current relative path + // we need to push the current folder again and delete the previous location from the container + + if existingFileIndex.Files[relPath].RemoteAttribute != "" && existingFileIndex.Files[relPath].RemoteAttribute != destFile { + fileChanged[path] = true + if 
existingFileIndex.Files[relPath].RemoteAttribute != "" { + remoteChanged = true + } + } + } else { + if value, ok := remoteDirectories[relPath]; ok { + remoteAttribute = value + } + + if existingFileData, ok := existingFileIndex.Files[relPath]; !ok { + fileChanged[path] = true + } else { + // if the remote attribute is different in the file data from the existing index + // and the remote attribute is not same as the current relative path + // we mark the file for pushing and delete the remote paths + if existingFileData.RemoteAttribute != remoteAttribute && (remoteAttribute != relPath || existingFileData.RemoteAttribute != "") { + fileChanged[path] = true + remoteChanged = true + } + } + } + + if remoteChanged { + // if remote deletion is required but the remote attribute is empty + // we use the relative path for deletion + currentRemote := existingFileIndex.Files[relPath].RemoteAttribute + if currentRemote == "" { + currentRemote = relPath + } + fileRemoteChanged[currentRemote] = true + for _, remote := range findRemoteFolderForDeletion(currentRemote, remoteDirectories) { + fileRemoteChanged[remote] = true + } + } + + return FileData{ + RemoteAttribute: filepath.ToSlash(remoteAttribute), + }, fileChanged, fileRemoteChanged +} + +// checkFileExist check if given file exists or not +func checkFileExist(fileName string) bool { + _, err := os.Stat(fileName) + return !os.IsNotExist(err) +} + +// findRemoteFolderForDeletion finds the remote directories which can be deleted by checking the remoteDirectories map +func findRemoteFolderForDeletion(currentRemote string, remoteDirectories map[string]string) []string { + var remoteDelete []string + currentRemote = filepath.ToSlash(currentRemote) + for currentRemote != "" && currentRemote != "." && currentRemote != "/" { + + found := false + for _, remote := range remoteDirectories { + if strings.HasPrefix(remote, currentRemote+"/") || remote == currentRemote { + found = true + break + } + } + if !found { + remoteDelete = append(remoteDelete, currentRemote) + } + currentRemote = filepath.ToSlash(filepath.Clean(filepath.Dir(currentRemote))) + } + return remoteDelete +} diff --git a/pkg/util/file_indexer_test.go b/pkg/util/file_indexer_test.go index d2ac1529a93..e6addad3507 100644 --- a/pkg/util/file_indexer_test.go +++ b/pkg/util/file_indexer_test.go @@ -4,9 +4,12 @@ import ( "io/ioutil" "os" "path/filepath" + "reflect" + "sort" "strings" "testing" + "github.com/kylelemons/godebug/pretty" "github.com/openshift/odo/pkg/testingutil/filesystem" ) @@ -243,3 +246,1254 @@ func TestGenerateNewFileDataEntry(t *testing.T) { }) } } + +func createAndStat(fileName, tempDirectoryName string, fs filesystem.Filesystem) (filesystem.File, os.FileInfo, error) { + file, err := fs.Create(filepath.Join(tempDirectoryName, fileName)) + if err != nil { + return nil, nil, err + } + stat, err := fs.Stat(file.Name()) + if err != nil { + return nil, nil, err + } + return file, stat, nil +} + +func createGitFolderAndFiles(tempDirectoryName string, fs filesystem.Filesystem) error { + err := fs.MkdirAll(filepath.Join(tempDirectoryName, ".git"), 0755) + if err != nil { + return err + } + + err = fs.MkdirAll(filepath.Join(tempDirectoryName, fileIndexDirectory), 0755) + if err != nil { + return err + } + + _, err = fs.Create(filepath.Join(tempDirectoryName, ".git", "someFile.txt")) + if err != nil { + return err + } + return nil +} + +func Test_recursiveChecker(t *testing.T) { + fs := filesystem.DefaultFs{} + + tempDirectoryName, err := fs.TempDir(os.TempDir(), "dir0") 
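To make the parent-directory cleanup concrete, here is a trimmed standalone version of the walk performed by findRemoteFolderForDeletion: starting from a stale remote path, it climbs toward the root and keeps every ancestor that no remaining remote mapping still claims. The staleRemotePaths name and the sample mappings are illustrative only.

package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// staleRemotePaths walks up from currentRemote and returns every path segment
// that is not still used, directly or as a prefix, by some remaining mapping.
func staleRemotePaths(currentRemote string, remaining map[string]string) []string {
	var toDelete []string
	currentRemote = filepath.ToSlash(currentRemote)
	for currentRemote != "" && currentRemote != "." && currentRemote != "/" {
		stillUsed := false
		for _, remote := range remaining {
			if remote == currentRemote || strings.HasPrefix(remote, currentRemote+"/") {
				stillUsed = true
				break
			}
		}
		if !stillUsed {
			toDelete = append(toDelete, currentRemote)
		}
		currentRemote = filepath.ToSlash(filepath.Clean(filepath.Dir(currentRemote)))
	}
	return toDelete
}

func main() {
	remaining := map[string]string{"server.js": "new/app/server.js"}
	// "new/Folder/README.txt" is no longer mapped anywhere, but "new" still is.
	fmt.Println(staleRemotePaths("new/Folder/README.txt", remaining))
	// Output: [new/Folder/README.txt new/Folder]
}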
+ if err != nil { + t.Errorf("unexpected error: %v", err) + } + + jsFileName := "red.js" + jsFile, jsFileStat, err := createAndStat(jsFileName, tempDirectoryName, fs) + if err != nil { + t.Errorf("unexpected error: %v", err) + } + + readmeFileName := "README.txt" + readmeFile, readmeFileStat, err := createAndStat(readmeFileName, tempDirectoryName, fs) + if err != nil { + t.Errorf("unexpected error: %v", err) + } + + viewsFolderName := "views" + viewsFolderPath := filepath.Join(tempDirectoryName, viewsFolderName) + err = fs.MkdirAll(viewsFolderPath, 0755) + if err != nil { + t.Errorf("unexpected error: %v", err) + } + + err = createGitFolderAndFiles(tempDirectoryName, fs) + if err != nil { + t.Errorf("unexpected error: %v", err) + } + + htmlRelFilePath := filepath.Join(viewsFolderName, "view.html") + htmlFile, htmlFileStat, err := createAndStat(filepath.Join("views", "view.html"), tempDirectoryName, fs) + if err != nil { + t.Errorf("unexpected error: %v", err) + } + + viewsFolderStat, err := fs.Stat(filepath.Join(tempDirectoryName, viewsFolderName)) + if err != nil { + t.Errorf("unexpected error: %v", err) + } + + defer os.RemoveAll(tempDirectoryName) + + normalFileMap := map[string]FileData{ + readmeFileName: { + Size: readmeFileStat.Size(), + LastModifiedDate: readmeFileStat.ModTime(), + }, + jsFileName: { + Size: jsFileStat.Size(), + LastModifiedDate: jsFileStat.ModTime(), + }, + viewsFolderName: { + Size: viewsFolderStat.Size(), + LastModifiedDate: viewsFolderStat.ModTime(), + }, + htmlRelFilePath: { + Size: htmlFileStat.Size(), + LastModifiedDate: htmlFileStat.ModTime(), + }, + } + + type args struct { + directory string + srcBase string + srcFile string + destBase string + destFile string + ignoreRules []string + remoteDirectories map[string]string + existingFileIndex FileIndex + } + tests := []struct { + name string + args args + want IndexerRet + emptyDir bool + wantErr bool + }{ + { + name: "case 1: existing index is empty", + args: args{ + directory: tempDirectoryName, + srcBase: tempDirectoryName, + ignoreRules: []string{}, + remoteDirectories: map[string]string{}, + existingFileIndex: FileIndex{ + Files: map[string]FileData{}, + }, + }, + want: IndexerRet{ + FilesChanged: []string{readmeFile.Name(), jsFile.Name(), viewsFolderPath, htmlFile.Name()}, + NewFileMap: normalFileMap, + }, + wantErr: false, + }, + { + name: "case 2: existing index exists and no file or folder changes occurs", + args: args{ + directory: tempDirectoryName, + srcBase: tempDirectoryName, + ignoreRules: []string{}, + remoteDirectories: map[string]string{}, + existingFileIndex: FileIndex{ + Files: normalFileMap, + }, + }, + want: IndexerRet{ + NewFileMap: normalFileMap, + }, + wantErr: false, + }, + { + name: "case 3: file size changed", + args: args{ + directory: tempDirectoryName, + srcBase: tempDirectoryName, + ignoreRules: []string{}, + remoteDirectories: map[string]string{}, + existingFileIndex: FileIndex{ + Files: map[string]FileData{ + htmlRelFilePath: normalFileMap[htmlRelFilePath], + readmeFileStat.Name(): { + Size: readmeFileStat.Size() + 100, + LastModifiedDate: readmeFileStat.ModTime(), + }, + jsFileStat.Name(): normalFileMap[jsFileStat.Name()], + viewsFolderStat.Name(): normalFileMap[viewsFolderStat.Name()], + }, + }, + }, + want: IndexerRet{ + FilesChanged: []string{readmeFile.Name()}, + NewFileMap: normalFileMap, + }, + wantErr: false, + }, + { + name: "case 4: folder size changed", + args: args{ + directory: tempDirectoryName, + srcBase: tempDirectoryName, + ignoreRules: []string{}, + 
remoteDirectories: map[string]string{}, + existingFileIndex: FileIndex{ + Files: map[string]FileData{ + htmlRelFilePath: normalFileMap[htmlRelFilePath], + readmeFileStat.Name(): normalFileMap[readmeFileStat.Name()], + jsFileStat.Name(): normalFileMap[jsFileStat.Name()], + viewsFolderStat.Name(): { + Size: viewsFolderStat.Size() + 100, + LastModifiedDate: viewsFolderStat.ModTime(), + }, + }, + }, + }, + want: IndexerRet{ + FilesChanged: []string{viewsFolderPath}, + NewFileMap: normalFileMap, + }, + wantErr: false, + }, + { + name: "case 5: file modified", + args: args{ + directory: tempDirectoryName, + srcBase: tempDirectoryName, + ignoreRules: []string{}, + remoteDirectories: map[string]string{}, + existingFileIndex: FileIndex{ + Files: map[string]FileData{ + htmlRelFilePath: normalFileMap[htmlRelFilePath], + readmeFileStat.Name(): { + Size: readmeFileStat.Size(), + LastModifiedDate: readmeFileStat.ModTime().Add(100), + }, + jsFileStat.Name(): normalFileMap[jsFileStat.Name()], + viewsFolderStat.Name(): normalFileMap[viewsFolderStat.Name()], + }, + }, + }, + want: IndexerRet{ + FilesChanged: []string{readmeFile.Name()}, + NewFileMap: normalFileMap, + }, + wantErr: false, + }, + { + name: "case 6: folder modified", + args: args{ + directory: tempDirectoryName, + srcBase: tempDirectoryName, + ignoreRules: []string{}, + remoteDirectories: map[string]string{}, + existingFileIndex: FileIndex{ + Files: map[string]FileData{ + htmlRelFilePath: normalFileMap[htmlRelFilePath], + readmeFileStat.Name(): normalFileMap[readmeFileStat.Name()], + jsFileStat.Name(): normalFileMap[jsFileStat.Name()], + viewsFolderStat.Name(): { + Size: viewsFolderStat.Size(), + LastModifiedDate: viewsFolderStat.ModTime().Add(100), + }, + }, + }, + }, + want: IndexerRet{ + FilesChanged: []string{viewsFolderPath}, + NewFileMap: normalFileMap, + }, + wantErr: false, + }, + { + name: "case 7: both file and folder modified", + args: args{ + directory: tempDirectoryName, + srcBase: tempDirectoryName, + ignoreRules: []string{}, + remoteDirectories: map[string]string{}, + existingFileIndex: FileIndex{ + Files: map[string]FileData{ + htmlRelFilePath: normalFileMap[htmlRelFilePath], + readmeFileStat.Name(): { + Size: readmeFileStat.Size() + 100, + LastModifiedDate: readmeFileStat.ModTime(), + }, + jsFileStat.Name(): normalFileMap[jsFileStat.Name()], + viewsFolderStat.Name(): { + Size: viewsFolderStat.Size(), + LastModifiedDate: viewsFolderStat.ModTime().Add(100), + }, + }, + }, + }, + want: IndexerRet{ + FilesChanged: []string{readmeFile.Name(), viewsFolderPath}, + NewFileMap: normalFileMap, + }, + wantErr: false, + }, + + { + name: "case 8: ignore file with changes if remote exists", + args: args{ + directory: tempDirectoryName, + srcBase: tempDirectoryName, + ignoreRules: []string{}, + remoteDirectories: map[string]string{ + htmlRelFilePath: "new/Folder/view.html", + }, + existingFileIndex: FileIndex{ + Files: normalFileMap, + }, + }, + want: IndexerRet{ + NewFileMap: map[string]FileData{ + readmeFileStat.Name(): { + Size: readmeFileStat.Size(), + LastModifiedDate: readmeFileStat.ModTime(), + RemoteAttribute: "README.txt", + }, + jsFileStat.Name(): { + Size: jsFileStat.Size(), + LastModifiedDate: jsFileStat.ModTime(), + RemoteAttribute: "red.js", + }, + viewsFolderStat.Name(): { + Size: viewsFolderStat.Size(), + LastModifiedDate: viewsFolderStat.ModTime(), + RemoteAttribute: "views", + }, + }, + }, + wantErr: false, + }, + { + name: "case 9: remote removed for a file containing different remote destination", + args: args{ + 
directory: tempDirectoryName, + srcBase: tempDirectoryName, + ignoreRules: []string{}, + remoteDirectories: map[string]string{}, + existingFileIndex: FileIndex{ + Files: map[string]FileData{ + htmlRelFilePath: { + Size: htmlFileStat.Size(), + LastModifiedDate: htmlFileStat.ModTime(), + RemoteAttribute: "new/Folder/view.html", + }, + readmeFileStat.Name(): normalFileMap["README.txt"], + jsFileStat.Name(): normalFileMap["red.js"], + viewsFolderStat.Name(): normalFileMap["views"], + }, + }, + }, + want: IndexerRet{ + FilesChanged: []string{htmlFile.Name()}, + RemoteDeleted: []string{"new", "new/Folder", "new/Folder/view.html"}, + NewFileMap: normalFileMap, + }, + wantErr: false, + }, + { + name: "case 10: remote removed for a folder containing different remote destination", + args: args{ + directory: tempDirectoryName, + srcBase: tempDirectoryName, + ignoreRules: []string{}, + remoteDirectories: map[string]string{}, + existingFileIndex: FileIndex{ + Files: map[string]FileData{ + htmlRelFilePath: { + Size: htmlFileStat.Size(), + LastModifiedDate: htmlFileStat.ModTime(), + RemoteAttribute: "new/Folder/view.html", + }, + readmeFileStat.Name(): normalFileMap["README.txt"], + jsFileStat.Name(): normalFileMap["red.js"], + viewsFolderStat.Name(): { + Size: viewsFolderStat.Size(), + LastModifiedDate: viewsFolderStat.ModTime(), + RemoteAttribute: "new/Folder/views", + }, + }, + }, + }, + want: IndexerRet{ + FilesChanged: []string{viewsFolderPath, htmlFile.Name()}, + RemoteDeleted: []string{"new", "new/Folder", "new/Folder/view.html", "new/Folder/views"}, + NewFileMap: normalFileMap, + }, + wantErr: false, + }, + { + name: "case 11: folder remote changed to local path", + args: args{ + directory: tempDirectoryName, + srcBase: tempDirectoryName, + srcFile: viewsFolderName, + destFile: viewsFolderName, + ignoreRules: []string{}, + remoteDirectories: map[string]string{ + viewsFolderStat.Name(): viewsFolderStat.Name(), + }, + existingFileIndex: FileIndex{ + Files: map[string]FileData{ + viewsFolderStat.Name(): { + Size: viewsFolderStat.Size(), + LastModifiedDate: viewsFolderStat.ModTime(), + RemoteAttribute: "new/Folder/views", + }, + htmlRelFilePath: { + Size: htmlFileStat.Size(), + LastModifiedDate: htmlFileStat.ModTime(), + RemoteAttribute: "new/Folder/views/view.html", + }, + }, + }, + }, + want: IndexerRet{ + FilesChanged: []string{viewsFolderPath, htmlFile.Name()}, + RemoteDeleted: []string{"new", "new/Folder", "new/Folder/views", "new/Folder/views/view.html"}, + NewFileMap: map[string]FileData{ + viewsFolderStat.Name(): { + Size: viewsFolderStat.Size(), + LastModifiedDate: viewsFolderStat.ModTime(), + RemoteAttribute: filepath.ToSlash(viewsFolderStat.Name()), + }, htmlRelFilePath: { + Size: htmlFileStat.Size(), + LastModifiedDate: htmlFileStat.ModTime(), + RemoteAttribute: filepath.ToSlash(htmlRelFilePath), + }}, + }, + wantErr: false, + }, + + { + name: "case 12: only a single file is checked and others are ignored", + args: args{ + directory: tempDirectoryName, + srcBase: filepath.Join(tempDirectoryName, "views"), + srcFile: "view.html", + ignoreRules: []string{}, + remoteDirectories: map[string]string{}, + existingFileIndex: FileIndex{ + Files: map[string]FileData{ + htmlRelFilePath: { + Size: htmlFileStat.Size() + 100, + LastModifiedDate: htmlFileStat.ModTime(), + RemoteAttribute: "", + }, + readmeFileStat.Name(): { + Size: readmeFileStat.Size() + 100, + LastModifiedDate: readmeFileStat.ModTime(), + }, + jsFileStat.Name(): normalFileMap["red.js"], + viewsFolderStat.Name(): 
normalFileMap["views"], + }, + }, + }, + want: IndexerRet{ + FilesChanged: []string{htmlFile.Name()}, + NewFileMap: map[string]FileData{ + htmlRelFilePath: { + Size: htmlFileStat.Size(), + LastModifiedDate: htmlFileStat.ModTime(), + }, + }, + }, + wantErr: false, + }, + { + name: "case 13: only a single file with a different remote is checked", + args: args{ + directory: tempDirectoryName, + srcBase: tempDirectoryName, + srcFile: "README.txt", + ignoreRules: []string{}, + remoteDirectories: map[string]string{ + readmeFileStat.Name(): "new/Folder/text/README.txt", + }, + existingFileIndex: FileIndex{ + Files: normalFileMap, + }, + }, + want: IndexerRet{ + FilesChanged: []string{readmeFile.Name()}, + RemoteDeleted: []string{filepath.ToSlash(readmeFileStat.Name())}, + NewFileMap: map[string]FileData{ + readmeFileStat.Name(): { + Size: readmeFileStat.Size(), + LastModifiedDate: readmeFileStat.ModTime(), + RemoteAttribute: "new/Folder/text/README.txt", + }}, + }, + wantErr: false, + }, + { + name: "case 14: only a single file is checked with a remote removed", + args: args{ + directory: tempDirectoryName, + srcBase: tempDirectoryName, + srcFile: "README.txt", + destBase: tempDirectoryName, + destFile: "README.txt", + ignoreRules: []string{}, + remoteDirectories: map[string]string{}, + existingFileIndex: FileIndex{ + Files: map[string]FileData{ + htmlRelFilePath: normalFileMap["views/view.html"], + readmeFileStat.Name(): { + Size: readmeFileStat.Size(), + LastModifiedDate: readmeFileStat.ModTime(), + RemoteAttribute: "new/Folder/text/README.txt", + }, + jsFileStat.Name(): normalFileMap["red.js"], + viewsFolderStat.Name(): normalFileMap["views"], + }, + }, + }, + want: IndexerRet{ + FilesChanged: []string{readmeFile.Name()}, + RemoteDeleted: []string{"new", "new/Folder", "new/Folder/text", "new/Folder/text/README.txt"}, + NewFileMap: map[string]FileData{ + readmeFileStat.Name(): normalFileMap["README.txt"], + }, + }, + wantErr: false, + }, + { + name: "case 15: only a single file is checked with the same remote path earlier", + args: args{ + directory: tempDirectoryName, + srcBase: tempDirectoryName, + srcFile: "README.txt", + destBase: tempDirectoryName, + destFile: "README.txt", + ignoreRules: []string{}, + remoteDirectories: map[string]string{}, + existingFileIndex: FileIndex{ + Files: map[string]FileData{ + htmlRelFilePath: normalFileMap["views/view.html"], + readmeFileStat.Name(): { + Size: readmeFileStat.Size(), + LastModifiedDate: readmeFileStat.ModTime(), + RemoteAttribute: "README.txt", + }, + jsFileStat.Name(): normalFileMap["red.js"], + viewsFolderStat.Name(): normalFileMap["views"], + }, + }, + }, + want: IndexerRet{ + NewFileMap: map[string]FileData{ + readmeFileStat.Name(): normalFileMap["README.txt"], + }, + }, + wantErr: false, + }, + { + name: "case 16: only a single file is checked and there is no modification", + args: args{ + directory: tempDirectoryName, + srcBase: viewsFolderPath, + srcFile: "view.html", + ignoreRules: []string{}, + remoteDirectories: map[string]string{ + htmlRelFilePath: "new/views/view.html", + }, + existingFileIndex: FileIndex{ + Files: map[string]FileData{ + htmlRelFilePath: { + Size: htmlFileStat.Size(), + LastModifiedDate: htmlFileStat.ModTime(), + RemoteAttribute: "new/views/view.html", + }, + }, + }, + }, + want: IndexerRet{ + NewFileMap: map[string]FileData{ + htmlRelFilePath: { + Size: htmlFileStat.Size(), + LastModifiedDate: htmlFileStat.ModTime(), + RemoteAttribute: "new/views/view.html", + }, + }, + }, + wantErr: false, + }, + { + name: "case 
17: file remote changed to local path", + args: args{ + directory: tempDirectoryName, + srcBase: tempDirectoryName, + srcFile: "README.txt", + ignoreRules: []string{}, + remoteDirectories: map[string]string{ + readmeFileStat.Name(): "README.txt", + }, + existingFileIndex: FileIndex{ + Files: map[string]FileData{ + readmeFileStat.Name(): { + Size: readmeFileStat.Size(), + LastModifiedDate: readmeFileStat.ModTime(), + RemoteAttribute: "new/Folder/README.txt", + }, + }, + }, + }, + want: IndexerRet{ + FilesChanged: []string{readmeFile.Name()}, + RemoteDeleted: []string{"new", "new/Folder", "new/Folder/README.txt"}, + NewFileMap: map[string]FileData{ + readmeFileStat.Name(): { + Size: readmeFileStat.Size(), + LastModifiedDate: readmeFileStat.ModTime(), + RemoteAttribute: readmeFileStat.Name(), + }}, + }, + wantErr: false, + }, + + { + name: "case 18: file doesn't exist", + args: args{ + directory: tempDirectoryName, + srcBase: viewsFolderPath, + srcFile: "views.html", + ignoreRules: []string{}, + remoteDirectories: map[string]string{}, + existingFileIndex: FileIndex{}, + }, + want: IndexerRet{ + NewFileMap: map[string]FileData{}, + }, + wantErr: false, + }, + { + name: "case 19: folder doesn't exist", + args: args{ + directory: tempDirectoryName, + srcBase: tempDirectoryName + "blah", + ignoreRules: []string{}, + remoteDirectories: map[string]string{}, + existingFileIndex: FileIndex{ + Files: map[string]FileData{}, + }, + }, + want: IndexerRet{ + NewFileMap: map[string]FileData{}, + }, + wantErr: false, + }, + + { + name: "case 20: ignore given file", + args: args{ + directory: tempDirectoryName, + srcBase: tempDirectoryName, + ignoreRules: []string{"*.html"}, + remoteDirectories: map[string]string{}, + existingFileIndex: FileIndex{ + Files: map[string]FileData{}, + }, + }, + want: IndexerRet{ + FilesChanged: []string{readmeFile.Name(), jsFile.Name(), viewsFolderPath}, + NewFileMap: map[string]FileData{ + jsFileStat.Name(): normalFileMap["red.js"], + viewsFolderStat.Name(): normalFileMap["views"], + readmeFileStat.Name(): normalFileMap["README.txt"], + }, + }, + wantErr: false, + }, + { + name: "case 21: ignore given folder", + args: args{ + directory: tempDirectoryName, + srcBase: tempDirectoryName, + ignoreRules: []string{viewsFolderPath}, + remoteDirectories: map[string]string{}, + existingFileIndex: FileIndex{ + Files: map[string]FileData{}, + }, + }, + want: IndexerRet{ + FilesChanged: []string{readmeFile.Name(), jsFile.Name()}, + NewFileMap: map[string]FileData{ + jsFileStat.Name(): normalFileMap["red.js"], + readmeFileStat.Name(): normalFileMap["README.txt"], + }, + }, + wantErr: false, + }, + + { + name: "case 22: only an empty Dir with a different remote location is checked", + args: args{ + directory: tempDirectoryName, + srcBase: filepath.Join(tempDirectoryName, "emptyDir"), + srcFile: "", + destBase: filepath.Join(tempDirectoryName, "emptyDir"), + destFile: "", + ignoreRules: []string{}, + remoteDirectories: map[string]string{ + "emptyDir": "new/Folder/", + }, + existingFileIndex: FileIndex{ + Files: normalFileMap, + }, + }, + emptyDir: true, + want: IndexerRet{ + FilesChanged: []string{filepath.Join(tempDirectoryName, "emptyDir")}, + NewFileMap: map[string]FileData{}, + }, + wantErr: false, + }, + { + name: "case 23: folder containing an empty directory", + args: args{ + directory: tempDirectoryName, + srcBase: tempDirectoryName, + ignoreRules: []string{}, + remoteDirectories: map[string]string{}, + existingFileIndex: FileIndex{ + Files: map[string]FileData{}, + }, + }, + emptyDir:
true, + want: IndexerRet{ + FilesChanged: []string{readmeFile.Name(), filepath.Join(tempDirectoryName, "emptyDir"), jsFile.Name(), viewsFolderPath, htmlFile.Name()}, + NewFileMap: normalFileMap, + }, + wantErr: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if tt.emptyDir { + emptyDirPath := filepath.Join(tempDirectoryName, "emptyDir") + err = fs.MkdirAll(emptyDirPath, 0755) + if err != nil { + t.Errorf("unexpected error: %v", err) + } + + defer func(name string) { + err := os.Remove(name) + if err != nil { + t.Errorf("unexpected error: %v", err) + } + }(emptyDirPath) + + emptyDirStat, err := fs.Stat(emptyDirPath) + if err != nil { + t.Errorf("unexpected error: %v", err) + } + + tt.want.NewFileMap[emptyDirStat.Name()] = FileData{ + Size: emptyDirStat.Size(), + LastModifiedDate: emptyDirStat.ModTime(), + RemoteAttribute: tt.args.remoteDirectories[emptyDirStat.Name()], + } + } + got, err := recursiveChecker(tt.args.directory, tt.args.srcBase, tt.args.srcFile, tt.args.destBase, tt.args.destFile, tt.args.ignoreRules, tt.args.remoteDirectories, tt.args.existingFileIndex, fs) + if (err != nil) != tt.wantErr { + t.Errorf("recursiveChecker() error = %v, wantErr %v", err, tt.wantErr) + return + } + + if err != nil && tt.wantErr { + return + } + + sort.Strings(got.FilesDeleted) + sort.Strings(got.FilesChanged) + sort.Strings(got.RemoteDeleted) + if !reflect.DeepEqual(got.FilesChanged, tt.want.FilesChanged) { + t.Errorf("recursiveChecker() FilesChanged got = %v, want %v", got.FilesChanged, tt.want.FilesChanged) + } + + if !reflect.DeepEqual(got.FilesDeleted, tt.want.FilesDeleted) { + t.Errorf("recursiveChecker() FilesDeleted got = %v, want %v", got.FilesDeleted, tt.want.FilesDeleted) + } + + if !reflect.DeepEqual(got.RemoteDeleted, tt.want.RemoteDeleted) { + t.Errorf("recursiveChecker() RemoteDeleted got = %v, want %v", got.RemoteDeleted, tt.want.RemoteDeleted) + } + + if !reflect.DeepEqual(tt.want.NewFileMap, got.NewFileMap) { + t.Errorf("recursiveChecker() new file map is different, difference = %v", pretty.Compare(got.NewFileMap, tt.want.NewFileMap)) + } + }) + } +} + +func Test_runIndexerWithExistingFileIndex(t *testing.T) { + fs := filesystem.DefaultFs{} + + tempDirectoryName, err := fs.TempDir(os.TempDir(), "dir0") + if err != nil { + t.Errorf("unexpected error: %v", err) + } + + jsFileName := "red.js" + jsFile, jsFileStat, err := createAndStat(jsFileName, tempDirectoryName, fs) + if err != nil { + t.Errorf("unexpected error: %v", err) + } + + readmeFileName := "README.txt" + readmeFile, readmeFileStat, err := createAndStat(readmeFileName, tempDirectoryName, fs) + if err != nil { + t.Errorf("unexpected error: %v", err) + } + + viewsFolderName := "views" + viewsFolderPath := filepath.Join(tempDirectoryName, viewsFolderName) + err = fs.MkdirAll(viewsFolderPath, 0755) + if err != nil { + t.Errorf("unexpected error: %v", err) + } + + err = createGitFolderAndFiles(tempDirectoryName, fs) + if err != nil { + t.Errorf("unexpected error: %v", err) + } + + htmlRelFilePath := filepath.Join(viewsFolderName, "view.html") + htmlFile, htmlFileStat, err := createAndStat(filepath.Join("views", "view.html"), tempDirectoryName, fs) + if err != nil { + t.Errorf("unexpected error: %v", err) + } + + viewsFolderStat, err := fs.Stat(filepath.Join(tempDirectoryName, viewsFolderName)) + if err != nil { + t.Errorf("unexpected error: %v", err) + } + + defer os.RemoveAll(tempDirectoryName) + + normalFileMap := map[string]FileData{ + readmeFileName: { + Size: readmeFileStat.Size(), +
LastModifiedDate: readmeFileStat.ModTime(), + }, + jsFileName: { + Size: jsFileStat.Size(), + LastModifiedDate: jsFileStat.ModTime(), + }, + viewsFolderName: { + Size: viewsFolderStat.Size(), + LastModifiedDate: viewsFolderStat.ModTime(), + }, + htmlRelFilePath: { + Size: htmlFileStat.Size(), + LastModifiedDate: htmlFileStat.ModTime(), + }, + } + + type args struct { + directory string + ignoreRules []string + remoteDirectories map[string]string + existingFileIndex *FileIndex + } + tests := []struct { + name string + args args + wantRet IndexerRet + wantErr bool + }{ + { + name: "case 1: normal directory with no existing file index data", + args: args{ + directory: tempDirectoryName, + ignoreRules: []string{}, + remoteDirectories: map[string]string{}, + existingFileIndex: &FileIndex{}, + }, + wantRet: IndexerRet{ + FilesChanged: []string{readmeFile.Name(), jsFile.Name(), viewsFolderPath, htmlFile.Name()}, + NewFileMap: normalFileMap, + }, + wantErr: false, + }, + { + name: "case 2: normal directory with existing file index data", + args: args{ + directory: tempDirectoryName, + ignoreRules: []string{}, + remoteDirectories: map[string]string{}, + existingFileIndex: &FileIndex{ + Files: normalFileMap, + }, + }, + wantRet: IndexerRet{ + NewFileMap: normalFileMap, + }, + wantErr: false, + }, + { + name: "case 3: normal directory with existing file index data and new files are added", + args: args{ + directory: tempDirectoryName, + ignoreRules: []string{}, + remoteDirectories: map[string]string{}, + existingFileIndex: &FileIndex{ + Files: map[string]FileData{ + htmlRelFilePath: normalFileMap[htmlRelFilePath], + }, + }, + }, + wantRet: IndexerRet{ + FilesChanged: []string{readmeFile.Name(), jsFile.Name(), viewsFolderPath}, + NewFileMap: normalFileMap, + }, + wantErr: false, + }, + { + name: "case 4: normal directory with existing file index data and files are deleted", + args: args{ + directory: tempDirectoryName, + ignoreRules: []string{}, + remoteDirectories: map[string]string{}, + existingFileIndex: &FileIndex{ + Files: map[string]FileData{ + htmlRelFilePath: normalFileMap[htmlRelFilePath], + jsFileStat.Name(): normalFileMap[jsFileStat.Name()], + viewsFolderStat.Name(): normalFileMap[viewsFolderStat.Name()], + readmeFileStat.Name(): normalFileMap[readmeFileStat.Name()], + "blah": {}, + }, + }, + }, + wantRet: IndexerRet{ + FilesDeleted: []string{"blah"}, + NewFileMap: normalFileMap, + }, + wantErr: false, + }, + + { + name: "case 5: with remote directories and no existing file index", + args: args{ + directory: tempDirectoryName, + ignoreRules: []string{}, + remoteDirectories: map[string]string{viewsFolderStat.Name(): "new/Folder", htmlRelFilePath: "new/Folder0/view.html"}, + existingFileIndex: &FileIndex{}, + }, + wantRet: IndexerRet{ + FilesChanged: []string{viewsFolderPath, htmlFile.Name()}, + NewFileMap: map[string]FileData{ + htmlRelFilePath: { + Size: htmlFileStat.Size(), + LastModifiedDate: htmlFileStat.ModTime(), + RemoteAttribute: "new/Folder0/view.html", + }, + viewsFolderStat.Name(): { + Size: viewsFolderStat.Size(), + LastModifiedDate: viewsFolderStat.ModTime(), + RemoteAttribute: "new/Folder", + }, + }, + }, + wantErr: false, + }, + { + name: "case 6: with remote directories and no modification", + args: args{ + directory: tempDirectoryName, + ignoreRules: []string{}, + remoteDirectories: map[string]string{htmlRelFilePath: "new/Folder0/view.html", viewsFolderStat.Name(): "new/Folder"}, + existingFileIndex: &FileIndex{ + Files: map[string]FileData{ + htmlRelFilePath: { + Size: 
htmlFileStat.Size(), + LastModifiedDate: htmlFileStat.ModTime(), + RemoteAttribute: "new/Folder0/view.html", + }, + viewsFolderStat.Name(): { + Size: viewsFolderStat.Size(), + LastModifiedDate: viewsFolderStat.ModTime(), + RemoteAttribute: "new/Folder", + }, + }, + }, + }, + wantRet: IndexerRet{ + NewFileMap: map[string]FileData{ + htmlRelFilePath: { + Size: htmlFileStat.Size(), + LastModifiedDate: htmlFileStat.ModTime(), + RemoteAttribute: "new/Folder0/view.html", + }, + viewsFolderStat.Name(): { + Size: viewsFolderStat.Size(), + LastModifiedDate: viewsFolderStat.ModTime(), + RemoteAttribute: "new/Folder", + }, + }, + }, + wantErr: false, + }, + { + name: "case 7: with remote directories and files deleted", + args: args{ + directory: tempDirectoryName, + ignoreRules: []string{}, + remoteDirectories: map[string]string{"blah": "new/Blah", htmlRelFilePath: "new/Folder0/view.html", viewsFolderStat.Name(): "new/Folder"}, + existingFileIndex: &FileIndex{ + Files: map[string]FileData{ + htmlRelFilePath: { + Size: htmlFileStat.Size(), + LastModifiedDate: htmlFileStat.ModTime(), + RemoteAttribute: "new/Folder0/view.html", + }, + viewsFolderStat.Name(): { + Size: viewsFolderStat.Size(), + LastModifiedDate: viewsFolderStat.ModTime(), + RemoteAttribute: "new/Folder", + }, + "blah": {}, + }, + }, + }, + wantRet: IndexerRet{ + FilesDeleted: []string{"blah"}, + NewFileMap: map[string]FileData{ + htmlRelFilePath: { + Size: htmlFileStat.Size(), + LastModifiedDate: htmlFileStat.ModTime(), + RemoteAttribute: "new/Folder0/view.html", + }, + viewsFolderStat.Name(): { + Size: viewsFolderStat.Size(), + LastModifiedDate: viewsFolderStat.ModTime(), + RemoteAttribute: "new/Folder", + }, + }, + }, + wantErr: false, + }, + { + name: "case 8: remote changed", + args: args{ + directory: tempDirectoryName, + ignoreRules: []string{}, + remoteDirectories: map[string]string{htmlRelFilePath: "new/Folder0/view.html", viewsFolderStat.Name(): "new/blah/Folder"}, + existingFileIndex: &FileIndex{ + Files: map[string]FileData{ + htmlRelFilePath: { + Size: htmlFileStat.Size(), + LastModifiedDate: htmlFileStat.ModTime(), + RemoteAttribute: "new/Folder0/view.html", + }, + viewsFolderStat.Name(): { + Size: viewsFolderStat.Size(), + LastModifiedDate: viewsFolderStat.ModTime(), + RemoteAttribute: "new/Folder", + }, + }, + }, + }, + wantRet: IndexerRet{ + FilesChanged: []string{viewsFolderPath}, + RemoteDeleted: []string{"new/Folder"}, + NewFileMap: map[string]FileData{ + htmlRelFilePath: { + Size: htmlFileStat.Size(), + LastModifiedDate: htmlFileStat.ModTime(), + RemoteAttribute: "new/Folder0/view.html", + }, + viewsFolderStat.Name(): { + Size: viewsFolderStat.Size(), + LastModifiedDate: viewsFolderStat.ModTime(), + RemoteAttribute: "new/blah/Folder", + }, + }, + }, + wantErr: false, + }, + { + name: "case 9: remote of a file removed", + args: args{ + directory: tempDirectoryName, + ignoreRules: []string{}, + remoteDirectories: map[string]string{htmlRelFilePath: "new/Folder0/view.html"}, + existingFileIndex: &FileIndex{ + Files: map[string]FileData{ + htmlRelFilePath: { + Size: htmlFileStat.Size(), + LastModifiedDate: htmlFileStat.ModTime(), + RemoteAttribute: "new/Folder0/view.html", + }, + viewsFolderStat.Name(): { + Size: viewsFolderStat.Size(), + LastModifiedDate: viewsFolderStat.ModTime(), + RemoteAttribute: "new/Folder", + }, + }, + }, + }, + wantRet: IndexerRet{ + RemoteDeleted: []string{"new/Folder"}, + NewFileMap: map[string]FileData{ + htmlRelFilePath: { + Size: htmlFileStat.Size(), + LastModifiedDate: 
htmlFileStat.ModTime(), + RemoteAttribute: "new/Folder0/view.html", + }, + }, + }, + wantErr: false, + }, + { + name: "case 10: all remotes removed", + args: args{ + directory: tempDirectoryName, + ignoreRules: []string{}, + remoteDirectories: map[string]string{}, + existingFileIndex: &FileIndex{ + Files: map[string]FileData{ + readmeFileStat.Name(): { + Size: readmeFileStat.Size(), + LastModifiedDate: readmeFileStat.ModTime(), + RemoteAttribute: readmeFileStat.Name(), + }, + htmlRelFilePath: { + Size: htmlFileStat.Size(), + LastModifiedDate: htmlFileStat.ModTime(), + RemoteAttribute: "new/Folder0/view.html", + }, + viewsFolderStat.Name(): { + Size: viewsFolderStat.Size(), + LastModifiedDate: viewsFolderStat.ModTime(), + RemoteAttribute: "new/Folder", + }, + }, + }, + }, + wantRet: IndexerRet{ + FilesChanged: []string{jsFile.Name(), viewsFolderPath, htmlFile.Name()}, + RemoteDeleted: []string{"new", "new/Folder", "new/Folder0", "new/Folder0/view.html"}, + NewFileMap: normalFileMap, + }, + wantErr: false, + }, + { + name: "case 11: remote added for a file but local path and remote destination are same", + args: args{ + directory: tempDirectoryName, + ignoreRules: []string{}, + remoteDirectories: map[string]string{htmlRelFilePath: htmlRelFilePath}, + existingFileIndex: &FileIndex{ + Files: map[string]FileData{ + htmlRelFilePath: { + Size: htmlFileStat.Size(), + LastModifiedDate: htmlFileStat.ModTime(), + }, + }, + }, + }, + wantRet: IndexerRet{ + NewFileMap: map[string]FileData{ + htmlRelFilePath: { + Size: htmlFileStat.Size(), + LastModifiedDate: htmlFileStat.ModTime(), + RemoteAttribute: filepath.ToSlash(htmlRelFilePath), + }, + }, + }, + wantErr: false, + }, + + { + name: "case 12: ignore a modified file due to ignore rules", + args: args{ + directory: tempDirectoryName, + ignoreRules: []string{filepath.Join(tempDirectoryName, readmeFileStat.Name())}, + remoteDirectories: map[string]string{}, + existingFileIndex: &FileIndex{ + Files: map[string]FileData{ + htmlRelFilePath: normalFileMap[htmlRelFilePath], + viewsFolderStat.Name(): normalFileMap[viewsFolderStat.Name()], + jsFileStat.Name(): normalFileMap[jsFileStat.Name()], + }, + }, + }, + wantRet: IndexerRet{ + NewFileMap: map[string]FileData{ + htmlRelFilePath: normalFileMap[htmlRelFilePath], + viewsFolderStat.Name(): normalFileMap[viewsFolderStat.Name()], + jsFileStat.Name(): normalFileMap[jsFileStat.Name()], + }, + }, + wantErr: false, + }, + { + name: "case 13: ignore a deleted file due to ignore rules", + args: args{ + directory: tempDirectoryName, + ignoreRules: []string{filepath.Join(tempDirectoryName, "blah")}, + remoteDirectories: map[string]string{}, + existingFileIndex: &FileIndex{ + Files: map[string]FileData{ + readmeFileStat.Name(): normalFileMap[readmeFileStat.Name()], + htmlRelFilePath: normalFileMap[htmlRelFilePath], + viewsFolderStat.Name(): normalFileMap[viewsFolderStat.Name()], + jsFileStat.Name(): normalFileMap[jsFileStat.Name()], + "blah": {}, + }, + }, + }, + wantRet: IndexerRet{ + NewFileMap: normalFileMap, + }, + wantErr: false, + }, + { + name: "case 14: ignore an added file due to ignore rules", + args: args{ + directory: tempDirectoryName, + ignoreRules: []string{filepath.Join(tempDirectoryName, readmeFileStat.Name())}, + remoteDirectories: map[string]string{}, + existingFileIndex: &FileIndex{ + Files: map[string]FileData{ + htmlRelFilePath: normalFileMap[htmlRelFilePath], + viewsFolderStat.Name(): normalFileMap[viewsFolderStat.Name()], + jsFileStat.Name(): normalFileMap[jsFileStat.Name()], + }, + }, + }, +
wantRet: IndexerRet{ + NewFileMap: map[string]FileData{ + htmlRelFilePath: normalFileMap[htmlRelFilePath], + viewsFolderStat.Name(): normalFileMap[viewsFolderStat.Name()], + jsFileStat.Name(): normalFileMap[jsFileStat.Name()], + }, + }, + wantErr: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + gotRet, err := runIndexerWithExistingFileIndex(tt.args.directory, tt.args.ignoreRules, tt.args.remoteDirectories, tt.args.existingFileIndex) + if (err != nil) != tt.wantErr { + t.Errorf("runIndexerWithExistingFileIndex() error = %v, wantErr %v", err, tt.wantErr) + return + } + + if err != nil && tt.wantErr { + return + } + + sort.Strings(gotRet.FilesDeleted) + sort.Strings(gotRet.FilesChanged) + sort.Strings(gotRet.RemoteDeleted) + if !reflect.DeepEqual(gotRet.FilesChanged, tt.wantRet.FilesChanged) { + t.Errorf("runIndexerWithExistingFileIndex() fileChanged gotRet = %v, want %v", gotRet.FilesChanged, tt.wantRet.FilesChanged) + } + + if !reflect.DeepEqual(gotRet.NewFileMap, tt.wantRet.NewFileMap) { + t.Errorf("runIndexerWithExistingFileIndex() new file map is different = %v", pretty.Compare(gotRet.NewFileMap, tt.wantRet.NewFileMap)) + } + + if !reflect.DeepEqual(gotRet.FilesDeleted, tt.wantRet.FilesDeleted) { + t.Errorf("runIndexerWithExistingFileIndex() files deleted gotRet = %v, want %v", gotRet.FilesDeleted, tt.wantRet.FilesDeleted) + } + + if !reflect.DeepEqual(gotRet.RemoteDeleted, tt.wantRet.RemoteDeleted) { + t.Errorf("runIndexerWithExistingFileIndex() files remote changed gotRet = %v, want %v", gotRet.RemoteDeleted, tt.wantRet.RemoteDeleted) + } + }) + } +} diff --git a/tests/examples/source/devfiles/nodejs/devfile-with-remote-attributes.yaml b/tests/examples/source/devfiles/nodejs/devfile-with-remote-attributes.yaml new file mode 100644 index 00000000000..80437b385db --- /dev/null +++ b/tests/examples/source/devfiles/nodejs/devfile-with-remote-attributes.yaml @@ -0,0 +1,52 @@ +schemaVersion: 2.0.0 +metadata: + name: nodejs +starterProjects: + - name: nodejs-starter + git: + remotes: + origin: "https://github.com/odo-devfiles/nodejs-ex.git" +components: + - name: runtime + container: + image: registry.access.redhat.com/ubi8/nodejs-12:1-36 + memoryLimit: 1024Mi + endpoints: + - name: "3000-tcp" + targetPort: 3000 + mountSources: true +commands: + - id: devbuild + exec: + component: runtime + commandLine: npm install + workingDir: ${PROJECTS_ROOT} + group: + kind: build + isDefault: true + - id: build + exec: + component: runtime + commandLine: npm install + workingDir: ${PROJECTS_ROOT} + group: + kind: build + - id: devrun + attributes: + "dev.odo.push.path:server.js": "server/server.js" + "dev.odo.push.path:test": "server/test" + "dev.odo.push.path:package.json": "package.json" + exec: + component: runtime + commandLine: npm start + workingDir: ${PROJECTS_ROOT} + group: + kind: run + isDefault: true + - id: run + exec: + component: runtime + commandLine: npm start + workingDir: ${PROJECTS_ROOT} + group: + kind: run diff --git a/tests/integration/devfile/cmd_devfile_push_test.go b/tests/integration/devfile/cmd_devfile_push_test.go index 9fba407a899..5573a6bc5c9 100644 --- a/tests/integration/devfile/cmd_devfile_push_test.go +++ b/tests/integration/devfile/cmd_devfile_push_test.go @@ -1145,4 +1145,32 @@ var _ = Describe("odo devfile push command tests", func() { helper.DontMatchAllInOutput(stdout, []string{"odo may not work as expected in the default project"}) }) }) + + Context("Testing Push with remote attributes", func() { + It("should push only 
the mentioned files at the appropriate remote destination", func() { + helper.CopyExample(filepath.Join("source", "nodejs"), commonVar.Context) + helper.CmdShouldPass("odo", "create", "nodejs", cmpName, "--context", commonVar.Context, "--project", commonVar.Project) + + helper.CopyExample(filepath.Join("source", "devfiles", "nodejs", "project"), commonVar.Context) + helper.CopyExampleDevFile(filepath.Join("source", "devfiles", "nodejs", "devfile-with-remote-attributes.yaml"), filepath.Join(commonVar.Context, "devfile.yaml")) + + // create a folder and file which shouldn't be pushed + helper.MakeDir(filepath.Join(commonVar.Context, "views")) + _, _ = helper.CreateSimpleFile(filepath.Join(commonVar.Context, "views"), "view", ".html") + + helper.ReplaceString("package.json", "node server.js", "node server/server.js") + helper.CmdShouldPass("odo", "push", "--context", commonVar.Context) + + podName := commonVar.CliRunner.GetRunningPodNameByComponent(cmpName, commonVar.Project) + stdOut := commonVar.CliRunner.ExecListDir(podName, commonVar.Project, sourcePath) + helper.MatchAllInOutput(stdOut, []string{"package.json", "server"}) + helper.DontMatchAllInOutput(stdOut, []string{"test", "views", "devfile.yaml"}) + + stdOut = commonVar.CliRunner.ExecListDir(podName, commonVar.Project, sourcePath+"/server") + helper.MatchAllInOutput(stdOut, []string{"server.js", "test"}) + + stdOut = helper.CmdShouldPass("odo", "push", "--context", commonVar.Context) + Expect(stdOut).To(ContainSubstring("No file changes detected")) + }) + }) })