diff --git a/.gitignore b/.gitignore index 8ed47e9..34595ea 100644 --- a/.gitignore +++ b/.gitignore @@ -22,8 +22,7 @@ dist/ .task/ src/i18n/out/en-US/active.en-GB.json -test/data/storage/scientist/ -test/data/research/ +test/data/research/scientist/ .DS_Store thumbs.db diff --git a/.vscode/settings.json b/.vscode/settings.json index bce455e..a4eeef9 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -4,10 +4,13 @@ "--fast" ], "cSpell.words": [ + "beezledub", "bodyclose", "chardata", "clif", "cmds", + "cmock", + "cmocks", "cobrass", "cubiest", "deadcode", @@ -19,6 +22,7 @@ "errcheck", "exportloopref", "extendio", + "faydeaudeau", "fieldalignment", "GOARCH", "goconst", diff --git a/src/app/command/magick-cmd_test.go b/src/app/command/magick-cmd_test.go index deead39..d295144 100644 --- a/src/app/command/magick-cmd_test.go +++ b/src/app/command/magick-cmd_test.go @@ -6,11 +6,9 @@ import ( . "github.com/onsi/ginkgo/v2" . "github.com/onsi/gomega" - ci18n "github.com/snivilised/cobrass/src/assistant/i18n" xi18n "github.com/snivilised/extendio/i18n" "github.com/snivilised/extendio/xfs/storage" "github.com/snivilised/pixa/src/app/command" - "github.com/snivilised/pixa/src/i18n" "github.com/snivilised/pixa/src/internal/helpers" "github.com/snivilised/pixa/src/internal/matchers" ) @@ -31,22 +29,8 @@ var _ = Describe("MagickCmd", Ordered, func() { BeforeEach(func() { xi18n.ResetTx() - err := xi18n.Use(func(uo *xi18n.UseOptions) { - uo.From = xi18n.LoadFrom{ - Path: l10nPath, - Sources: xi18n.TranslationFiles{ - i18n.PixaSourceID: xi18n.TranslationSource{ - Name: "pixa", - }, - ci18n.CobrassSourceID: xi18n.TranslationSource{ - Name: "cobrass", - }, - }, - } - }) - - if err != nil { + if err := helpers.UseI18n(l10nPath); err != nil { Fail(err.Error()) } }) diff --git a/src/app/command/root-cmd.go b/src/app/command/root-cmd.go index 9d3063b..5ad84f5 100644 --- a/src/app/command/root-cmd.go +++ b/src/app/command/root-cmd.go @@ -124,7 +124,7 @@ func (b *Bootstrap) buildRootCommand(container *assistant.CobraContainer) { profileFam := assistant.NewParamSet[store.ProfileParameterSet](rootCommand) profileFam.Native.BindAll(profileFam, rootCommand.PersistentFlags()) - rootCommand.Args = validatePositionalArgs + // ??? 
rootCommand.Args = validatePositionalArgs container.MustRegisterParamSet(RootPsName, paramSet) container.MustRegisterParamSet(PreviewFamName, previewFam) diff --git a/src/app/command/root-cmd_test.go b/src/app/command/root-cmd_test.go index 90bffab..ffae40d 100644 --- a/src/app/command/root-cmd_test.go +++ b/src/app/command/root-cmd_test.go @@ -36,6 +36,10 @@ var _ = Describe("RootCmd", Ordered, func() { configPath = filepath.Join(repo, "test", "data", "configuration") Expect(matchers.AsDirectory(configPath)).To(matchers.ExistInFS(nfs)) + + if err := helpers.UseI18n(l10nPath); err != nil { + Fail(err.Error()) + } }) BeforeEach(func() { @@ -43,7 +47,6 @@ var _ = Describe("RootCmd", Ordered, func() { Vfs: nfs, } tester = helpers.CommandTester{ - Args: []string{"./"}, Root: bootstrap.Root(func(co *command.ConfigureOptionsInfo) { co.Detector = &DetectorStub{} co.Program = &ExecutorStub{ @@ -65,14 +68,14 @@ var _ = Describe("RootCmd", Ordered, func() { return fmt.Sprintf("๐Ÿงช given: '%v', should: execute", entry.given) }, - Entry( + XEntry( nil, &rootTE{ given: "just a positional", commandLine: []string{"./"}, }, ), - Entry( + XEntry( nil, &rootTE{ given: "a family defined switch (--dry-run)", commandLine: []string{"./", "--dry-run"}, diff --git a/src/app/command/shrink-cmd.go b/src/app/command/shrink-cmd.go index 2e0d2ac..b754b27 100644 --- a/src/app/command/shrink-cmd.go +++ b/src/app/command/shrink-cmd.go @@ -107,6 +107,9 @@ func (b *Bootstrap) buildShrinkCommand(container *assistant.CobraContainer) *cob shrinkPS.Native.ThirdPartySet.KnownBy, ) + // changed is incorrect; it only contains the third party args, + // all the native args are being omitted + shrinkPS.Native.ThirdPartySet.LongChangedCL = changed fmt.Printf("%v %v Running shrink, with options: '%v', args: '%v'\n", @@ -351,7 +354,7 @@ func (b *Bootstrap) buildShrinkCommand(container *assistant.CobraContainer) *cob // the bootstrap, then access it from the func, instead of using // validatePositionalArgs // - shrinkCommand.Args = validatePositionalArgs + // shrinkCommand.Args = validatePositionalArgs return shrinkCommand } diff --git a/src/app/command/shrink-cmd_test.go b/src/app/command/shrink-cmd_test.go index a2cd2b2..2c7bc97 100644 --- a/src/app/command/shrink-cmd_test.go +++ b/src/app/command/shrink-cmd_test.go @@ -7,13 +7,10 @@ import ( . "github.com/onsi/ginkgo/v2" . 
"github.com/onsi/gomega" - ci18n "github.com/snivilised/cobrass/src/assistant/i18n" "github.com/snivilised/pixa/src/app/command" - "github.com/snivilised/pixa/src/i18n" "github.com/snivilised/pixa/src/internal/helpers" "github.com/snivilised/pixa/src/internal/matchers" - xi18n "github.com/snivilised/extendio/i18n" "github.com/snivilised/extendio/xfs/storage" ) @@ -77,22 +74,7 @@ var _ = Describe("ShrinkCmd", Ordered, func() { }) BeforeEach(func() { - err := xi18n.Use(func(uo *xi18n.UseOptions) { - uo.From = xi18n.LoadFrom{ - Path: l10nPath, - Sources: xi18n.TranslationFiles{ - i18n.PixaSourceID: xi18n.TranslationSource{ - Name: "dummy-cobrass", - }, - - ci18n.CobrassSourceID: xi18n.TranslationSource{ - Name: "dummy-cobrass", - }, - }, - } - }) - - if err != nil { + if err := helpers.UseI18n(l10nPath); err != nil { Fail(err.Error()) } }) diff --git a/src/app/proxy/enter-shrink.go b/src/app/proxy/enter-shrink.go index 9aada2d..2700c02 100644 --- a/src/app/proxy/enter-shrink.go +++ b/src/app/proxy/enter-shrink.go @@ -102,9 +102,11 @@ func (e *ShrinkEntry) PrincipalOptionsFn(o *nav.TraverseOptions) { func (e *ShrinkEntry) createFinder() *PathFinder { finder := &PathFinder{ + Scheme: e.Inputs.Root.ProfileFam.Native.Scheme, + Profile: e.Inputs.Root.ProfileFam.Native.Profile, behaviours: strategies{ - output: inlineOutputStrategy{}, - deletion: inlineDeletionStrategy{}, + output: &inlineOutputStrategy{}, + deletion: &inlineDeletionStrategy{}, }, } diff --git a/src/app/proxy/entry-base.go b/src/app/proxy/entry-base.go index 3545520..30979fe 100644 --- a/src/app/proxy/entry-base.go +++ b/src/app/proxy/entry-base.go @@ -3,6 +3,8 @@ package proxy import ( "context" "fmt" + "io/fs" + "os" "time" "github.com/samber/lo" @@ -46,6 +48,21 @@ type EntryBase struct { func (e *EntryBase) ConfigureOptions(o *nav.TraverseOptions) { e.Options = o + o.Hooks.QueryStatus = func(path string) (os.FileInfo, error) { + fi, err := e.Vfs.Lstat(path) + + return fi, err + } + o.Hooks.ReadDirectory = func(dirname string) ([]fs.DirEntry, error) { + contents, err := e.Vfs.ReadDir(dirname) + if err != nil { + return nil, err + } + + return lo.Filter(contents, func(item fs.DirEntry, index int) bool { + return item.Name() != ".DS_Store" + }), nil + } // TODO: to apply the folder filters in combination with these // file filters, we need to define a custom compound diff --git a/src/app/proxy/execution-step.go b/src/app/proxy/execution-step.go index 4d4be90..2dfecdb 100644 --- a/src/app/proxy/execution-step.go +++ b/src/app/proxy/execution-step.go @@ -6,7 +6,7 @@ import ( // Step type Step interface { - Run() error + Run(*SharedRunnerInfo) error } // Sequence @@ -19,16 +19,16 @@ type Sequence []Step type magickStep struct { shared *SharedRunnerInfo thirdPartyCL clif.ThirdPartyCommandLine + scheme string + profile string sourcePath string outputPath string journalPath string } // Run -func (s *magickStep) Run() error { +func (s *magickStep) Run(*SharedRunnerInfo) error { positional := []string{s.sourcePath} - err := s.shared.program.Execute(clif.Expand(positional, s.thirdPartyCL)...) - - return err + return s.shared.program.Execute(clif.Expand(positional, s.thirdPartyCL, s.outputPath)...) 
} diff --git a/src/app/proxy/file-manager.go b/src/app/proxy/file-manager.go index 3286d47..521e078 100644 --- a/src/app/proxy/file-manager.go +++ b/src/app/proxy/file-manager.go @@ -1,28 +1,59 @@ package proxy import ( + "fmt" + "os" + "path/filepath" + "github.com/pkg/errors" + "github.com/snivilised/extendio/xfs/nav" "github.com/snivilised/extendio/xfs/storage" ) +const ( + beezledub = os.FileMode(0o666) +) + +// FileManager knows how to translate requests into invocations on the file +// system and nothing else. type FileManager struct { vfs storage.VirtualFS finder *PathFinder } -func (fm *FileManager) setup(source string) error { - // prepare: move existing file out of the way - +// Setup prepares for operation by moving existing file out of the way, +// if applicable. +func (fm *FileManager) Setup(item *nav.TraverseItem) error { // https://pkg.go.dev/os#Rename LinkError may result // // this might not be right. it may be that we want to leave the // original alone and create other outputs; in this scenario // we don't want to rename/move the source... // - destination := fm.finder.Destination(source) + from := &destinationInfo{ + item: item, + origin: item.Parent.Path, + transparent: true, // might come from a flag + } + + if folder, file := fm.finder.Destination(from); folder != "" { + if err := fm.vfs.MkdirAll(folder, beezledub); err != nil { + return errors.Wrapf(err, "could not create parent setup for '%v'", item.Path) + } + + destination := filepath.Join(folder, file) + + if !fm.vfs.FileExists(item.Path) { + return fmt.Errorf("source file: '%v' does not exist", item.Path) + } + + if fm.vfs.FileExists(destination) { + return fmt.Errorf("destination file: '%v' already exists", destination) + } - if err := fm.vfs.Rename(source, destination); err != nil { - return errors.Wrapf(err, "could not complete setup for '%v'", source) + if err := fm.vfs.Rename(item.Path, destination); err != nil { + return errors.Wrapf(err, "could not complete setup for '%v'", item.Path) + } } return nil @@ -40,7 +71,7 @@ func (fm *FileManager) delete(target string) error { return nil } -func (fm *FileManager) tidy() error { +func (fm *FileManager) Tidy() error { // invoke deletions // delete journal file // diff --git a/src/app/proxy/path-finder.go b/src/app/proxy/path-finder.go index 8a1c896..088f7ef 100644 --- a/src/app/proxy/path-finder.go +++ b/src/app/proxy/path-finder.go @@ -2,8 +2,65 @@ package proxy import ( "path/filepath" + "strings" + + "github.com/samber/lo" + "github.com/snivilised/extendio/xfs/nav" +) + +const ( + inlineDestinationTempl = "" +) + +type ( + templateSegments []string + pfTemplatesCollection map[string]templateSegments + pfFieldValues map[string]string ) +var ( + pfTemplates pfTemplatesCollection +) + +func init() { + pfTemplates = pfTemplatesCollection{ + // we probably have to come up with better key names... + // + "setup-inline-dest-folder": templateSegments{ + "${{OUTPUT-ROOT}}", + "${{ITEM-SUB-PATH}}", + "${{TRASH-LABEL}}", + }, + + "setup-inline-dest-file-original-ext": templateSegments{ + "${{ITEM-NAME-ORIG-EXT}}", + }, + } +} + +// expand returns a string as a result of joining the segments +func (tc pfTemplatesCollection) expand(segments ...string) string { + return filepath.Join(segments...) 
+}
+
+// evaluate returns a string representing a file system path from a
+// template string containing place-holders and field values
+func (tc pfTemplatesCollection) evaluate(
+	sourceTemplate string,
+	placeHolders templateSegments,
+	values pfFieldValues,
+) string {
+	const (
+		quantity = 1
+	)
+
+	return lo.Reduce(placeHolders, func(acc, field string, _ int) string {
+		return strings.Replace(acc, field, values[field], quantity)
+	},
+		sourceTemplate,
+	)
+}
+
 // INLINE-MODE: EJECT | INLINE (should we call this a strategy?
 // they do the same thing but create a different output structure => OutputStrategy)
 //
@@ -39,7 +96,7 @@ then we revert to the default which is eject(transparent)
 -- then other flags could adjust the transparent mode
 
 if --eject not specified, then ous=inline; des=inline
-but if can be adjusted by --output , --trash
+but it can be adjusted by --output , --trash
 
 -- perhaps we have a transparency mode, ie perform renames such that the new generated
@@ -66,9 +123,12 @@ type strategies struct {
 }
 
 type PathFinder struct {
-	// is this item.Path or item.Path's parent folder?
+	Scheme  string
+	Profile string
+	// Origin is the parent of the item (item.Parent)
 	//
 	Origin string
+
 	// only the step knows this, so this should be the parent of the output
 	// for scheme, this would include scheme/profile
 	// for profile, this should include profile
@@ -78,8 +138,11 @@ type PathFinder struct {
 	// perhaps represented as a slice so it can be joined with filepath.Join
 	//
 	// if Output Path is set, then use this as the output, but also
-	// create the intermediate paths in order to implement mirroring
-	//
+	// create the intermediate paths in order to implement mirroring.
+	// It is the output as indicated by --output. If not set, then it is
+	// derived:
+	// - sampling: (inline) -- item.parent; => item.parent/SHRINK/
+	// - full: (inline) -- item.parent
 	Output string
 
 	// I think this depends on the mode (tidy/preserve)
@@ -88,12 +151,100 @@ type PathFinder struct {
 	behaviours strategies
 }
 
-func (f *PathFinder) Destination(source string) string {
-	// may be we also return a bool that indicates weather a rename
-	// should be implemented or not. this depends on the appropriate
-	// strategy. Or if we dont need to rename, we return an empty string;
-	// this is the preferred solution.
-	return filepath.Join(f.Output, source)
+type staticInfo struct {
+	trashLabel  string
+	legacyLabel string
+}
+
+type destinationInfo struct {
+	item   *nav.TraverseItem
+	origin string // in:item.Parent.Path, ej:eject-path(output???)
+	// statics *staticInfo
+	transparent bool
+	//
+	// transparent=true should be the default scenario. This means
+	// that any changes that occur leave the file system in a state
+	// where nothing appears to have changed except that files have
+	// been modified, without name changes. This of course doesn't
+	// include items that end up in TRASH and can be manually deleted
+	// by the user. The purpose of this is to require, by default, the
+	// least amount of post-processing clean-up from the user.
+	//
+	// In sampling mode, transparent may mean something different
+	// because multiple files could be created for each input file.
+	// So, in this scenario, the original file should stay intact
+	// and the result(s) should be created into the supplementary
+	// location.
+	//
+	// In full mode, transparent means the input file is moved
+	// to a trash location. The output takes the name of the original
+	// file, so that by the end of processing, the resultant file
+	// takes the place of the source file, leaving the file system
+	// in the same state as it was before processing occurred.
+	//
+	// So what happens in the non-transparent scenario? The source file
+	// remains unchanged, so the user has to look at another location
+	// to get the result. It uses the SHRINK label to create the
+	// output filename; but note, we only use the SHRINK label in
+	// scenarios where there is a potential for a filename clash
+	// (ie when the output file is in the same location as the input
+	// file), because we want to create as little friction as possible.
+	// This only occurs in adhoc mode (no profile or scheme).
+}
+
+// Destination returns the location to be used for the specified source
+// path; ie when the program runs, it uses a source file and requires the
+// destination location. The source and destination may not be in the same
+// folder, so the source's name is extracted from the source path and
+// attached to the output folder.
+//
+// should return an empty string if no move is required
+func (f *PathFinder) Destination(info *destinationInfo) (destinationFolder, destinationFile string) {
+	// TODO: we still need to get the rest of the mirror sub-path
+	// .///TRASH///<.item.Name>..ext
+	// legacyLabel := "LEGACY"
+	trashLabel := "TRASH"
+
+	// this does not take transparency into account, without modification;
+	// ie what happens if we don't want any supplemented paths?
+
+	to := lo.TernaryF(f.Output != "",
+		func() string {
+			return f.Output // eject
+		},
+		func() string {
+			return info.origin // inline
+		},
+	)
+
+	destinationFolder = func() string {
+		templateFolderSegments := pfTemplates["setup-inline-dest-folder"]
+		templateFolderPath := pfTemplates.expand(filepath.Join(templateFolderSegments...))
+		folder := pfTemplates.evaluate(templateFolderPath, templateFolderSegments, pfFieldValues{
+			"${{OUTPUT-ROOT}}":   to,
+			"${{ITEM-SUB-PATH}}": info.item.Extension.SubPath,
+			"${{TRASH-LABEL}}":   trashLabel,
+		})
+		folder = filepath.Clean(folder)
+
+		return folder
+	}()
+
+	destinationFile = func() string {
+		templateFileSegments := pfTemplates["setup-inline-dest-file-original-ext"]
+		templateFilePath := pfTemplates.expand(filepath.Join(templateFileSegments...))
+
+		file := pfTemplates.evaluate(templateFilePath, templateFileSegments, pfFieldValues{
+			"${{ITEM-NAME-ORIG-EXT}}": info.item.Extension.Name,
+		})
+		file = filepath.Clean(file)
+
+		return file
+	}()
+
+	return destinationFolder, destinationFile
 }
 
/*
diff --git a/src/app/proxy/path-finder_test.go b/src/app/proxy/path-finder_test.go
index 756ae9e..1d10c8b 100644
--- a/src/app/proxy/path-finder_test.go
+++ b/src/app/proxy/path-finder_test.go
@@ -1,13 +1,37 @@
 package proxy_test
 
 import (
+	"path/filepath"
+
 	. "github.com/onsi/ginkgo/v2"
 	.
"github.com/onsi/gomega" + "github.com/snivilised/extendio/xfs/storage" "github.com/snivilised/pixa/src/app/proxy" + "github.com/snivilised/pixa/src/internal/helpers" + "github.com/snivilised/pixa/src/internal/matchers" ) -var _ = Describe("PathFinder", func() { +var _ = Describe("PathFinder", Ordered, func() { + var ( + repo string + l10nPath string + nfs storage.VirtualFS + ) + + BeforeAll(func() { + nfs = storage.UseNativeFS() + repo = helpers.Repo(filepath.Join("..", "..", "..")) + + l10nPath = helpers.Path(repo, filepath.Join("test", "data", "l10n")) + Expect(matchers.AsDirectory(l10nPath)).To(matchers.ExistInFS(nfs)) + }) + + BeforeEach(func() { + if err := helpers.UseI18n(l10nPath); err != nil { + Fail(err.Error()) + } + }) // the PathFinder should not be aware of profile/sample, it only // know about paths. So it knows about: // - output diff --git a/src/app/proxy/runner-base.go b/src/app/proxy/runner-base.go index 490f7ab..015e094 100644 --- a/src/app/proxy/runner-base.go +++ b/src/app/proxy/runner-base.go @@ -1,7 +1,6 @@ package proxy import ( - "github.com/pkg/errors" "github.com/snivilised/cobrass" "github.com/snivilised/cobrass/src/clif" "github.com/snivilised/extendio/collections" @@ -27,6 +26,7 @@ func (r *baseRunner) profileSequence( shared: r.shared, thirdPartyCL: cl, sourcePath: itemPath, + profile: name, // outputPath: , // journalPath: , } @@ -38,15 +38,17 @@ func (r *baseRunner) schemeSequence( name, itemPath string, ) Sequence { changed := r.shared.Inputs.ParamSet.Native.ThirdPartySet.LongChangedCL - scheme, _ := r.shared.sampler.Scheme(name) // scheme already validated - sequence := make(Sequence, 0, len(scheme.Profiles)) + schemeCfg, _ := r.shared.sampler.Scheme(name) // scheme already validated + sequence := make(Sequence, 0, len(schemeCfg.Profiles)) - for _, currentProfileName := range scheme.Profiles { - cl := r.composeProfileCL(currentProfileName, changed) + for _, current := range schemeCfg.Profiles { + cl := r.composeProfileCL(current, changed) step := &magickStep{ shared: r.shared, thirdPartyCL: cl, sourcePath: itemPath, + scheme: name, + profile: current, // outputPath: , // journalPath: , } @@ -93,28 +95,7 @@ func (r *baseRunner) Run(item *nav.TraverseItem, sequence Sequence) error { iterator := collections.ForwardRunIt[Step, error](sequence, zero) each := func(s Step) error { - // TODO: need to decide a proper policy for cleaning up - // in the presence of an error. Do we allow the journal - // file to remain in place? What happens if there is a timeout? - // There are a few more things to decide about error handling. - // Perhaps we have an error policy including one that implements - // a retry. - if err := r.shared.fileManager.setup(item.Path); err != nil { - return err - } - - err := s.Run() - te := r.shared.fileManager.tidy() - - switch { - case (err != nil) && (te != nil): - return errors.Wrap(err, te.Error()) - - case (err != nil): - return err - } - - return te + return s.Run(r.shared) } while := func(_ Step, err error) bool { if resultErr == nil { @@ -127,9 +108,20 @@ func (r *baseRunner) Run(item *nav.TraverseItem, sequence Sequence) error { return err == nil } + // TODO: need to decide a proper policy for cleaning up + // in the presence of an error. Do we allow the journal + // file to remain in place? What happens if there is a timeout? + // There are a few more things to decide about error handling. + // Perhaps we have an error policy including one that implements + // a retry. 
+ // + if err := r.shared.fileManager.Setup(item); err != nil { + return err + } + iterator.RunAll(each, while) - return resultErr + return r.shared.fileManager.Tidy() } func (r *baseRunner) Reset() { diff --git a/src/app/proxy/runner-sampler_test.go b/src/app/proxy/runner-sampler_test.go index 3f635ed..4f2d41a 100644 --- a/src/app/proxy/runner-sampler_test.go +++ b/src/app/proxy/runner-sampler_test.go @@ -2,18 +2,17 @@ package proxy_test import ( "fmt" + "os" "path/filepath" . "github.com/onsi/ginkgo/v2" . "github.com/onsi/gomega" "github.com/snivilised/cobrass/src/assistant/configuration" - ci18n "github.com/snivilised/cobrass/src/assistant/i18n" + cmocks "github.com/snivilised/cobrass/src/assistant/mocks" "github.com/snivilised/cobrass/src/clif" - xi18n "github.com/snivilised/extendio/i18n" "github.com/snivilised/extendio/xfs/storage" "github.com/snivilised/pixa/src/app/command" "github.com/snivilised/pixa/src/app/proxy" - "github.com/snivilised/pixa/src/i18n" "github.com/snivilised/pixa/src/app/mocks" "github.com/snivilised/pixa/src/internal/helpers" @@ -23,8 +22,11 @@ import ( ) const ( - silent = false - verbose = true + silent = true + verbose = false + faydeaudeau = os.FileMode(0o777) + beezledub = os.FileMode(0o666) + backyardWorldsPlanet9Scan01 = "nasa/exo/Backyard Worlds - Planet 9/sessions/scan-01" ) var ( @@ -33,34 +35,128 @@ var ( _ proxy.ProfilesConfigReader = &proxy.MsProfilesConfigReader{} _ proxy.SamplerConfigReader = &proxy.MsSamplerConfigReader{} - expectedDarkEnergyExplorersScan01First2 []string - expectedDarkEnergyExplorersScan01First4 []string - expectedDarkEnergyExplorersScan01First6 []string + backyardWorldsPlanet9Scan01First2 []string + backyardWorldsPlanet9Scan01First4 []string + backyardWorldsPlanet9Scan01First6 []string + + profilesConfigData proxy.ProfilesConfigMap + samplerConfigData *proxy.MsSamplerConfig ) func init() { - expectedDarkEnergyExplorersScan01First2 = []string{ - "01_Dark-Energy-Explorers_s01.jpg", - "02_Dark-Energy-Explorers_s01.jpg", + backyardWorldsPlanet9Scan01First2 = []string{ + "01_Backyard-Worlds-Planet-9_s01.jpg", + "02_Backyard-Worlds-Planet-9_s01.jpg", } - expectedDarkEnergyExplorersScan01First4 = expectedDarkEnergyExplorersScan01First2 - expectedDarkEnergyExplorersScan01First4 = append( - expectedDarkEnergyExplorersScan01First4, + backyardWorldsPlanet9Scan01First4 = backyardWorldsPlanet9Scan01First2 + backyardWorldsPlanet9Scan01First4 = append( + backyardWorldsPlanet9Scan01First4, []string{ - "03_Dark-Energy-Explorers_s01.jpg", - "04_Dark-Energy-Explorers_s01.jpg", + "03_Backyard-Worlds-Planet-9_s01.jpg", + "04_Backyard-Worlds-Planet-9_s01.jpg", }..., ) - expectedDarkEnergyExplorersScan01First6 = expectedDarkEnergyExplorersScan01First4 - expectedDarkEnergyExplorersScan01First6 = append( - expectedDarkEnergyExplorersScan01First6, + backyardWorldsPlanet9Scan01First6 = backyardWorldsPlanet9Scan01First4 + backyardWorldsPlanet9Scan01First6 = append( + backyardWorldsPlanet9Scan01First6, []string{ - "05_Dark-Energy-Explorers_s01.jpg", - "06_Dark-Energy-Explorers_s01.jpg", + "05_Backyard-Worlds-Planet-9_s01.jpg", + "06_Backyard-Worlds-Planet-9_s01.jpg", }..., ) + + profilesConfigData = proxy.ProfilesConfigMap{ + "blur": clif.ChangedFlagsMap{ + "strip": "true", + "interlace": "plane", + "gaussian-blur": "0.05", + }, + "sf": clif.ChangedFlagsMap{ + "dry-run": "true", + "strip": "true", + "interlace": "plane", + "sampling-factor": "4:2:0", + }, + "adaptive": clif.ChangedFlagsMap{ + "strip": "true", + "interlace": "plane", + "gaussian-blur": 
"0.25", + "adaptive-resize": "60", + }, + } + + samplerConfigData = &proxy.MsSamplerConfig{ + Files: 2, + Folders: 1, + Schemes: proxy.MsSamplerSchemesConfig{ + "blur-sf": proxy.MsSchemeConfig{ + Profiles: []string{"blur", "sf"}, + }, + "adaptive-sf": proxy.MsSchemeConfig{ + Profiles: []string{"adaptive", "sf"}, + }, + "adaptive-blur": proxy.MsSchemeConfig{ + Profiles: []string{"adaptive", "blur"}, + }, + }, + } +} + +func doMockProfilesConfigsWith( + data proxy.ProfilesConfigMap, + config configuration.ViperConfig, + reader *mocks.MockProfilesConfigReader, +) { + reader.EXPECT().Read(config).DoAndReturn( + func(viper configuration.ViperConfig) (proxy.ProfilesConfig, error) { + stub := &proxy.MsProfilesConfig{ + Profiles: data, + } + + return stub, nil + }, + ).AnyTimes() +} + +func doMockSamplerConfigWith( + data *proxy.MsSamplerConfig, + config configuration.ViperConfig, + reader *mocks.MockSamplerConfigReader, +) { + reader.EXPECT().Read(config).DoAndReturn( + func(viper configuration.ViperConfig) (proxy.SamplerConfig, error) { + stub := data + + return stub, nil + }, + ).AnyTimes() +} + +func doMockConfigs( + config configuration.ViperConfig, + profilesReader *mocks.MockProfilesConfigReader, + samplerReader *mocks.MockSamplerConfigReader, +) { + doMockProfilesConfigsWith(profilesConfigData, config, profilesReader) + doMockSamplerConfigWith(samplerConfigData, config, samplerReader) +} + +func doMockViper(config *cmocks.MockViperConfig) { + config.EXPECT().ReadInConfig().DoAndReturn( + func() error { + return nil + }, + ).AnyTimes() +} + +func resetFS(index string, silent bool) (vfs storage.VirtualFS, root string) { + vfs = storage.UseMemFS() + root = helpers.Scientist(vfs, index, silent) + // Expect(matchers.AsDirectory(root)).To(matchers.ExistInFS(vfs)) + + return vfs, root } type runnerTE struct { @@ -84,28 +180,38 @@ var _ = Describe("SamplerRunner", Ordered, func() { configPath string root string config configuration.ViperConfig - nfs storage.VirtualFS + vfs storage.VirtualFS ctrl *gomock.Controller mockProfilesReader *mocks.MockProfilesConfigReader mockSamplerReader *mocks.MockSamplerConfigReader + mockViperConfig *cmocks.MockViperConfig ) BeforeAll(func() { - nfs = storage.UseNativeFS() repo = helpers.Repo(filepath.Join("..", "..", "..")) - l10nPath = helpers.Path(repo, filepath.Join("test", "data", "l10n")) - Expect(matchers.AsDirectory(l10nPath)).To(matchers.ExistInFS(nfs)) - configPath = filepath.Join(repo, "test", "data", "configuration") - Expect(matchers.AsDirectory(configPath)).To(matchers.ExistInFS(nfs)) - - root = helpers.Scientist(nfs, "nasa-scientist-index.xml", verbose) - Expect(matchers.AsDirectory(root)).To(matchers.ExistInFS(nfs)) }) BeforeEach(func() { viper.Reset() + vfs, root = resetFS("nasa-scientist-index.xml", silent) + + ctrl = gomock.NewController(GinkgoT()) + mockViperConfig = cmocks.NewMockViperConfig(ctrl) + mockProfilesReader = mocks.NewMockProfilesConfigReader(ctrl) + mockSamplerReader = mocks.NewMockSamplerConfigReader(ctrl) + doMockViper(mockViperConfig) + + // create a dummy config file in vfs + // + _ = vfs.MkdirAll(configPath, beezledub) + if _, err := vfs.Create(filepath.Join(configPath, helpers.PixaConfigTestFilename)); err != nil { + Fail(fmt.Sprintf("๐Ÿ”ฅ can't create dummy config (err: '%v')", err)) + } + + Expect(matchers.AsDirectory(configPath)).To(matchers.ExistInFS(vfs)) + config = &configuration.GlobalViperConfig{} config.SetConfigType(helpers.PixaConfigType) @@ -116,28 +222,9 @@ var _ = Describe("SamplerRunner", Ordered, func() { 
Fail(fmt.Sprintf("๐Ÿ”ฅ can't read config (err: '%v')", err)) } - err := xi18n.Use(func(uo *xi18n.UseOptions) { - uo.From = xi18n.LoadFrom{ - Path: l10nPath, - Sources: xi18n.TranslationFiles{ - i18n.PixaSourceID: xi18n.TranslationSource{ - Name: "dummy-cobrass", - }, - - ci18n.CobrassSourceID: xi18n.TranslationSource{ - Name: "dummy-cobrass", - }, - }, - } - }) - - if err != nil { + if err := helpers.UseI18n(l10nPath); err != nil { Fail(err.Error()) } - - ctrl = gomock.NewController(GinkgoT()) - mockProfilesReader = mocks.NewMockProfilesConfigReader(ctrl) - mockSamplerReader = mocks.NewMockSamplerConfigReader(ctrl) }) AfterEach(func() { @@ -146,53 +233,7 @@ var _ = Describe("SamplerRunner", Ordered, func() { DescribeTable("sampler", func(entry *samplerTE) { - mockProfilesReader.EXPECT().Read(config).DoAndReturn( - func(viper configuration.ViperConfig) (proxy.ProfilesConfig, error) { - config := &proxy.MsProfilesConfig{ - Profiles: proxy.ProfilesConfigMap{ - "blur": clif.ChangedFlagsMap{ - "strip": "true", - "interlace": "plane", - "gaussian-blur": "0.05", - }, - "sf": clif.ChangedFlagsMap{ - "dry-run": "true", - "strip": "true", - "interlace": "plane", - "sampling-factor": "4:2:0", - }, - "adaptive": clif.ChangedFlagsMap{ - "strip": "true", - "interlace": "plane", - "gaussian-blur": "0.25", - "adaptive-resize": "60", - }, - }, - } - - return config, nil - }, - ).AnyTimes() - - mockSamplerReader.EXPECT().Read(config).DoAndReturn( - func(viper configuration.ViperConfig) (proxy.SamplerConfig, error) { - return &proxy.MsSamplerConfig{ - Files: 2, - Folders: 1, - Schemes: proxy.MsSamplerSchemesConfig{ - "blur-sf": proxy.MsSchemeConfig{ - Profiles: []string{"blur", "sf"}, - }, - "adaptive-sf": proxy.MsSchemeConfig{ - Profiles: []string{"adaptive", "sf"}, - }, - "adaptive-blur": proxy.MsSchemeConfig{ - Profiles: []string{"adaptive", "blur"}, - }, - }, - }, nil - }, - ).AnyTimes() + doMockConfigs(config, mockProfilesReader, mockSamplerReader) directory := helpers.Path(root, entry.relative) options := []string{ @@ -200,12 +241,14 @@ var _ = Describe("SamplerRunner", Ordered, func() { "--dry-run", "--mode", "tidy", } + args := options + args = append(args, entry.args...) 
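+			// build the root command via the bootstrap, substituting a stubbed
+			// detector and a stubbed external program executor, so the test can
+			// drive the command tree without invoking the real program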
bootstrap := command.Bootstrap{ - Vfs: nfs, + Vfs: vfs, } tester := helpers.CommandTester{ - Args: append(options, entry.args...), + Args: args, Root: bootstrap.Root(func(co *command.ConfigureOptionsInfo) { co.Detector = &helpers.DetectorStub{} co.Program = &helpers.ExecutorStub{ @@ -223,7 +266,7 @@ var _ = Describe("SamplerRunner", Ordered, func() { _, err := tester.Execute() Expect(err).Error().To(BeNil(), - fmt.Sprintf("should pass validation due to all flag being valid (%v)", err), + fmt.Sprintf("execution result non nil (%v)", err), ) // eventually, we should assert on files created in the virtual @@ -240,16 +283,16 @@ var _ = Describe("SamplerRunner", Ordered, func() { runnerTE: runnerTE{ given: "profile", should: "sample(first) with glob filter using the defined profile", - relative: "nasa/interstellar/Dark Energy Explorers/sessions/scan-01", + relative: backyardWorldsPlanet9Scan01, args: []string{ "--sample", "--no-files", "4", - "--files-gb", "*Energy-Explorers*", + "--files-gb", "*Backyard Worlds*", "--profile", "adaptive", "--gaussian-blur", "0.51", "--interlace", "line", }, - expected: expectedDarkEnergyExplorersScan01First4, + expected: backyardWorldsPlanet9Scan01First4, }, }), @@ -257,7 +300,7 @@ var _ = Describe("SamplerRunner", Ordered, func() { runnerTE: runnerTE{ given: "profile", should: "sample(last) with glob filter using the defined profile", - relative: "nasa/interstellar/Dark Energy Explorers/sessions/scan-01", + relative: backyardWorldsPlanet9Scan01, args: []string{ "--sample", "--last", @@ -265,7 +308,7 @@ var _ = Describe("SamplerRunner", Ordered, func() { "--files-gb", "*Energy-Explorers*", "--profile", "adaptive", }, - expected: expectedDarkEnergyExplorersScan01First4, + expected: backyardWorldsPlanet9Scan01First4, }, }), @@ -273,13 +316,13 @@ var _ = Describe("SamplerRunner", Ordered, func() { runnerTE: runnerTE{ given: "profile without no-files in args", should: "sample(first) with glob filter, using no-files from config", - relative: "nasa/interstellar/Dark Energy Explorers/sessions/scan-01", + relative: backyardWorldsPlanet9Scan01, args: []string{ "--sample", "--files-gb", "*Energy-Explorers*", "--profile", "adaptive", }, - expected: expectedDarkEnergyExplorersScan01First2, + expected: backyardWorldsPlanet9Scan01First2, }, }), @@ -287,7 +330,7 @@ var _ = Describe("SamplerRunner", Ordered, func() { runnerTE: runnerTE{ given: "profile", should: "sample with regex filter using the defined profile", - relative: "nasa/interstellar/Dark Energy Explorers/sessions/scan-01", + relative: backyardWorldsPlanet9Scan01, args: []string{ "--strip", "--interlace", "plane", "--quality", "85", "--profile", "adaptive", }, @@ -305,11 +348,11 @@ var _ = Describe("SamplerRunner", Ordered, func() { runnerTE: runnerTE{ given: "scheme", should: "sample all profiles in the scheme", - relative: "nasa/interstellar/Dark Energy Explorers/sessions/scan-01", + relative: backyardWorldsPlanet9Scan01, args: []string{ "--strip", "--interlace", "plane", "--quality", "85", "--scheme", "blur-sf", }, - expected: expectedDarkEnergyExplorersScan01First6, + expected: backyardWorldsPlanet9Scan01First6, }, }), ) diff --git a/src/app/proxy/strategy-output.go b/src/app/proxy/strategy-output.go index a867343..3bf6374 100644 --- a/src/app/proxy/strategy-output.go +++ b/src/app/proxy/strategy-output.go @@ -1,10 +1,32 @@ package proxy +// the strategies look like they don't do much, so all this +// abstraction feels like overkill. 
Instead, the path finder
+// could make a single calculation of the destination path depending
+// on strategy, using a simple func closure, eg we could have funcs
+// such as inlineDestination and ejectDestination() of the form
+// func(source string) string. (rename this file strategy-funcs)
+
 type outputStrategy interface {
+	// Destination fills in the gap between the root and the destination
+	Destination(source string) string
 }
 
 type inlineOutputStrategy struct {
 }
 
+func (s *inlineOutputStrategy) Destination(source string) string {
+	_ = source
+	// .//TRASH///destination/<.item.Name>..ext
+	return ""
+}
+
 type ejectOutputStrategy struct {
 }
+
+func (s *ejectOutputStrategy) Destination(source string) string {
+	_ = source
+	// .//TRASH///destination
+	return ""
+}
diff --git a/src/internal/helpers/directory-tree-builder.go b/src/internal/helpers/directory-tree-builder.go
index 0221f5c..f71490c 100644
--- a/src/internal/helpers/directory-tree-builder.go
+++ b/src/internal/helpers/directory-tree-builder.go
@@ -18,19 +18,22 @@ const offset = 2
 const tabSize = 2
 
 type DirectoryTreeBuilder struct {
-	vfs     storage.VirtualFS
-	root    string
-	full    string
-	stack   *collections.Stack[string]
-	index   string
-	write   bool
-	depth   int
-	padding string
-	silent  bool
+	vfs       storage.VirtualFS
+	root      string
+	full      string
+	stack     *collections.Stack[string]
+	indexPath string
+	write     bool
+	depth     int
+	padding   string
+	silent    bool
 }
 
 func (r *DirectoryTreeBuilder) read() (*Directory, error) {
-	data, err := os.ReadFile(r.index) // always read from real fs
+	// /Users/plastikfan/dev/github/snivilised/pixa/test/data/research/nasa-scientist-index.xml
+	fmt.Printf("===> reading index file from: '%v'\n", r.indexPath)
+
+	data, err := os.ReadFile(r.indexPath) // always read from real fs
 
 	if err != nil {
 		return nil, err
 	}
@@ -165,22 +168,29 @@ func Scientist(vfs storage.VirtualFS, index string, silent bool) string {
 	indexPath := filepath.Join(research, index)
 	utils.Must(ensure(scientist, indexPath, vfs, silent))
 
+	// RESEARCH: '/Users/plastikfan/dev/github/snivilised/pixa/Test/data/research'
+	fmt.Printf("💥 ---> RESEARCH: '%v'\n", research)
+
+	// SCIENTIST: '/Users/plastikfan/dev/github/snivilised/pixa/Test/data/research/scientist'
+	fmt.Printf("💥 ---> SCIENTIST: '%v'\n", scientist)
+	// -->created in test/data/scientist
+	// require test/data/research/scientist
 	return scientist
 }
 
-func ensure(root, index string, vfs storage.VirtualFS, silent bool) error {
+func ensure(root, indexPath string, vfs storage.VirtualFS, silent bool) error {
 	if vfs.DirectoryExists(root) {
 		return nil
 	}
 
 	parent, _ := utils.SplitParent(root)
 	builder := DirectoryTreeBuilder{
-		vfs:    vfs,
-		root:   root,
-		stack:  collections.NewStackWith([]string{parent}),
-		index:  index,
-		write:  doWrite,
-		silent: silent,
+		vfs:       vfs,
+		root:      root,
+		stack:     collections.NewStackWith([]string{parent}),
+		indexPath: indexPath,
+		write:     doWrite,
+		silent:    silent,
 	}
 
 	return builder.walk()
diff --git a/src/internal/helpers/test-utils.go b/src/internal/helpers/test-utils.go
index 067c1ca..06d2e39 100644
--- a/src/internal/helpers/test-utils.go
+++ b/src/internal/helpers/test-utils.go
@@ -7,6 +7,10 @@ import (
 	"runtime"
 	"strings"
 
+	ci18n "github.com/snivilised/cobrass/src/assistant/i18n"
+	"github.com/snivilised/pixa/src/i18n"
+
+	xi18n "github.com/snivilised/extendio/i18n"
 	"golang.org/x/text/language"
 )
 
@@ -68,6 +72,23 @@ func Log() string {
 	panic("could not get root path")
 }
 
+func UseI18n(l10nPath string) error {
+	return xi18n.Use(func(uo *xi18n.UseOptions) {
+		uo.From = xi18n.LoadFrom{
+			Path: l10nPath,
+			Sources: xi18n.TranslationFiles{
+				i18n.PixaSourceID: xi18n.TranslationSource{
+					Name: "dummy-cobrass",
+				},
+
+				ci18n.CobrassSourceID: xi18n.TranslationSource{
+					Name: "dummy-cobrass",
+				},
+			},
+		}
+	})
+}
+
 type DetectorStub struct {
 }
 
diff --git a/test/data/configuration/pixa-test.yml b/test/data/configuration/pixa-test.yml
index f10d03d..ed9487b 100644
--- a/test/data/configuration/pixa-test.yml
+++ b/test/data/configuration/pixa-test.yml
@@ -29,4 +29,5 @@ advanced:
   external-program-execution-retry: 0
   legacy-file-dot-suffix: LEGACY
   journal-file-dot-suffix: JOURNAL
-  trash-folder: TRASH
+  trash-label: TRASH
+  shrink-label: SHRINK
diff --git a/test/data/research/citizen-scientist-index.xml b/test/data/research/citizen-scientist-index.xml
new file mode 100644
index 0000000..b188202
--- /dev/null
+++ b/test/data/research/citizen-scientist-index.xml
@@ -0,0 +1,655 @@
+ [655 new lines: XML index describing the citizen-scientist test directory tree; element markup not reproduced here, trailing totals: 177, 656]
\ No newline at end of file
diff --git a/test/data/research/nasa-scientist-index.xml b/test/data/research/nasa-scientist-index.xml
new file mode 100644
index 0000000..50bd931
--- /dev/null
+++ b/test/data/research/nasa-scientist-index.xml
@@ -0,0 +1,70 @@
+ [70 new lines: XML index describing the nasa-scientist test directory tree; element markup not reproduced here]
\ No newline at end of file
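Note on the new path composition in src/app/proxy/path-finder.go: Destination first joins the template segments with expand and then substitutes each ${{...}} place-holder via evaluate; FileManager.Setup then creates the resulting folder with MkdirAll and renames the source into it. The following is a minimal, self-contained sketch of that template mechanism; the shapes mirror the diff, but the field values (output root, item sub-path) are invented purely for illustration.

```go
// Sketch of the pfTemplates expand/evaluate mechanism from path-finder.go.
// The values below are hypothetical; in the real code they come from the
// traverse item, the TRASH label and the --output root.
package main

import (
	"fmt"
	"path/filepath"
	"strings"

	"github.com/samber/lo"
)

type (
	templateSegments []string
	pfFieldValues    map[string]string
)

// expand joins the template segments into a single path template
func expand(segments ...string) string {
	return filepath.Join(segments...)
}

// evaluate replaces each place-holder in the template with its field value
func evaluate(sourceTemplate string, placeHolders templateSegments, values pfFieldValues) string {
	return lo.Reduce(placeHolders, func(acc, field string, _ int) string {
		return strings.Replace(acc, field, values[field], 1)
	}, sourceTemplate)
}

func main() {
	// mirrors the "setup-inline-dest-folder" template segments
	segments := templateSegments{
		"${{OUTPUT-ROOT}}",
		"${{ITEM-SUB-PATH}}",
		"${{TRASH-LABEL}}",
	}

	folder := evaluate(expand(segments...), segments, pfFieldValues{
		"${{OUTPUT-ROOT}}":   "/tmp/pixa-output", // hypothetical --output root
		"${{ITEM-SUB-PATH}}": "sessions/scan-01", // hypothetical item sub-path
		"${{TRASH-LABEL}}":   "TRASH",
	})

	// => /tmp/pixa-output/sessions/scan-01/TRASH
	fmt.Println(filepath.Clean(folder))
}
```

This is the folder shape that FileManager.Setup creates before renaming item.Path to filepath.Join(folder, file).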