Fix execution from outside CWD
Improve IO performance
Add caching for IO operations
Fix dependency caching
Add cache by modifiedAt filestat, avoiding checksums
sonalys committed Jun 8, 2024
1 parent 0528302 commit 80461ef
Showing 15 changed files with 250 additions and 213 deletions.
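The headline change is the cache-invalidation strategy: a lock-file entry now records the source file's modification time, and the content checksum is only recomputed when the mtime no longer matches. Roughly, the fast path looks like the sketch below (Entry and its fields are a simplification, not the project's exact types):

package cache

import (
	"crypto/sha256"
	"encoding/hex"
	"os"
	"time"
)

// Entry is a simplified lock-file record; the real lock file keeps
// comparable fields (Hash, Dependencies, ModifiedAt) per source file.
type Entry struct {
	Hash       string
	ModifiedAt time.Time
}

// needsRegen reports whether a file has to be re-hashed and re-generated.
// Fast path: an identical mtime means the cached hash is trusted as-is,
// so the file contents are never read.
func needsRegen(path string, e Entry) (bool, error) {
	stat, err := os.Stat(path)
	if err != nil {
		return false, err
	}
	if !e.ModifiedAt.IsZero() && stat.ModTime().Equal(e.ModifiedAt) {
		return false, nil // unchanged since the last run
	}
	data, err := os.ReadFile(path)
	if err != nil {
		return false, err
	}
	sum := sha256.Sum256(data)
	return hex.EncodeToString(sum[:]) != e.Hash, nil
}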
33 changes: 17 additions & 16 deletions ast.go
@@ -9,28 +9,29 @@ import (
"github.com/sonalys/fake/internal/packages"
)

func (file *ParsedFile) importConflictResolution(importUsedName string, importPath string) string {
info, ok := file.OriginalImports[importUsedName]
// If the original import is found, use either name or alias.
if ok {
pkgInfo := file.ImportsPathMap[importPath]
if pkgInfo.Alias != "" {
file.UsedImports[pkgInfo.Alias] = struct{}{}
return pkgInfo.Alias
}
file.UsedImports[pkgInfo.Name] = struct{}{}
return pkgInfo.Name
func (file *ParsedFile) importConflictResolution() string {
if file.importResolved {
return file.importAlias
}
var alias string = file.PkgName
info, ok := file.Imports[file.PkgName]
// Conflict detected for a package with different path.
if ok && info.Path != file.PkgPath {
alias = fmt.Sprintf("%s1", alias)
}
info = &imports.ImportEntry{
PackageInfo: packages.PackageInfo{
PackageInfo: &packages.PackageInfo{
Path: file.PkgPath,
Name: file.PkgName,
},
Alias: alias,
}
file.Imports[file.PkgName] = info
file.Imports[alias] = info
file.ImportsPathMap[file.PkgPath] = info
file.UsedImports[file.PkgName] = struct{}{}
return file.PkgName
file.UsedImports[alias] = struct{}{}
file.importAlias = alias
file.importResolved = true
return alias
}

func (f *ParsedInterface) printAstExpr(expr ast.Expr) string {
@@ -52,7 +53,7 @@ func (f *ParsedInterface) printAstExpr(expr ast.Expr) string {
return fieldType.Name
}
}
return fmt.Sprintf("%s.%s", file.importConflictResolution(file.PkgName, file.PkgPath), fieldType.Name)
return fmt.Sprintf("%s.%s", file.importConflictResolution(), fieldType.Name)
case *ast.SelectorExpr:
// Type from another package.
pkgName := fmt.Sprint(fieldType.X)
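In ast.go, the self-import of the mocked package is now resolved once per file and memoized, with a "1" suffix appended when a different package of the same name is already imported. A standalone sketch of that strategy (fileState is illustrative, not the project's type):

package astgen

// fileState is an illustrative stand-in for ParsedFile.
type fileState struct {
	pkgName, pkgPath string
	imports          map[string]string // used name or alias -> import path
	resolved         bool
	alias            string
}

// resolveSelfImportAlias mirrors the idea in importConflictResolution:
// import the mocked package under its own name unless that name already
// refers to a different path, in which case append "1" (e.g. "client"
// becomes "client1"). The result is computed once and cached.
func (f *fileState) resolveSelfImportAlias() string {
	if f.resolved {
		return f.alias
	}
	alias := f.pkgName
	if existing, ok := f.imports[f.pkgName]; ok && existing != f.pkgPath {
		alias += "1"
	}
	f.imports[alias] = f.pkgPath
	f.alias = alias
	f.resolved = true
	return alias
}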
11 changes: 9 additions & 2 deletions file.go
@@ -7,6 +7,7 @@ import (
"io"
"slices"

"github.com/rs/zerolog/log"
"github.com/sonalys/fake/internal/imports"
)

@@ -20,6 +21,9 @@ type ParsedFile struct {
OriginalImports map[string]*imports.ImportEntry
ImportsPathMap map[string]*imports.ImportEntry
UsedImports map[string]struct{}

importResolved bool
importAlias string
}

func (f *ParsedFile) ListInterfaces(names ...string) []*ParsedInterface {
@@ -59,9 +63,12 @@ func (f *ParsedFile) writeImports(w io.Writer) {
fmt.Fprintf(w, "\t\"testing\"\n")
fmt.Fprintf(w, "\tmockSetup \"github.com/sonalys/fake/boilerplate\"\n")
for name := range f.UsedImports {
info := f.Imports[name]
info, ok := f.Imports[name]
if !ok {
log.Fatal().Msg("inconsistency between usedImports and imports state")
}
fmt.Fprintf(w, "\t")
if info.Alias != "" {
if info.Alias != "" && info.Alias != info.PackageInfo.Name {
fmt.Fprintf(w, "%s ", info.Alias)
}
fmt.Fprintf(w, "\"%s\"\n", info.Path)
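writeImports also stops emitting redundant aliases: an alias is only printed when it differs from the package's declared name, and a used import with no matching entry is treated as a fatal state inconsistency rather than a silent nil dereference. The alias rule in isolation, as a sketch rather than the project's code:

package gen

import (
	"fmt"
	"io"
)

// writeImportLine prints one import spec, adding an alias only when it is
// set and actually differs from the package's real name.
func writeImportLine(w io.Writer, alias, pkgName, importPath string) {
	fmt.Fprint(w, "\t")
	if alias != "" && alias != pkgName {
		fmt.Fprintf(w, "%s ", alias)
	}
	fmt.Fprintf(w, "%q\n", importPath)
}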
37 changes: 33 additions & 4 deletions generator.go
@@ -1,19 +1,48 @@
package fake

import (
"go/ast"
"go/token"
"os"
"path"

"github.com/sonalys/fake/internal/files"
"github.com/sonalys/fake/internal/imports"
"golang.org/x/mod/modfile"
)

// Generator is the controller for the whole module, caching files and holding metadata.
type Generator struct {
FileSet *token.FileSet
MockPackageName string

cachedPackageInfo func(f *ast.File) (nameMap, pathMap map[string]*imports.ImportEntry)
goModFilename string
goMod *modfile.File
}

// NewGenerator will create a new mock generator for the specified module.
func NewGenerator(n string) *Generator {
return &Generator{
FileSet: token.NewFileSet(),
MockPackageName: n,
func NewGenerator(pkgName, baseDir string) (*Generator, error) {
goModPath, err := files.FindFile(baseDir, "go.mod")
if err != nil {
return nil, err
}
// Read the contents of the go.mod file
modFileContent, err := os.ReadFile(goModPath)
if err != nil {
return nil, err
}
// Parse the go.mod file
modFile, err := modfile.Parse(goModPath, modFileContent, nil)
if err != nil {
return nil, err
}

return &Generator{
FileSet: token.NewFileSet(),
goModFilename: goModPath,
goMod: modFile,
MockPackageName: pkgName,
cachedPackageInfo: imports.CachedImportInformation(path.Dir(goModPath)),
}, nil
}
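NewGenerator now needs a directory inside the target module so it can locate and parse go.mod up front, and it returns an error instead of a bare value. A caller therefore looks roughly like this (the base directory "." is illustrative; the updated test below passes "testdata"):

package main

import (
	"log"

	"github.com/sonalys/fake"
)

func main() {
	// "mocks" is the package name used for generated mocks; the second
	// argument is a directory from which go.mod can be located.
	g, err := fake.NewGenerator("mocks", ".")
	if err != nil {
		log.Fatalf("creating generator: %v", err)
	}
	_ = g // use g.ParseFile(...) etc. as in the tests
}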
10 changes: 6 additions & 4 deletions generator_test.go
@@ -8,10 +8,12 @@ import (
)

func Test_Generate(t *testing.T) {
// output := t.TempDir()
output := "out"
output := t.TempDir()
// output := "out"
// os.RemoveAll(output) // no caching
Run([]string{"testdata"}, output, nil)
g := NewGenerator("mocks")
_, err := g.ParseFile(path.Join(output, "testdata", "stub.gen.go"))
g, err := NewGenerator("mocks", "testdata")
require.NoError(t, err)
_, err = g.ParseFile(path.Join(output, "testdata", "stub.gen.go"))
require.NoError(t, err)
}
14 changes: 12 additions & 2 deletions interface.go
@@ -24,10 +24,16 @@ type ParsedInterface struct {
// type B[J any] interface{ Method() J }
// it should have method Method() T when implementing A mock.
TranslateGenericNames []string

fieldsCache []*ParsedField
}

func (i *ParsedInterface) ListFields() []*ParsedField {
return i.ParsedFile.Generator.listInterfaceFields(i, i.ParsedFile.Imports)
if i.fieldsCache != nil {
return i.fieldsCache
}
i.fieldsCache = i.ParsedFile.Generator.listInterfaceFields(i, i.ParsedFile.Imports)
return i.fieldsCache
}

// ListInterfaceFields receives an interface to translate fields into fields.
@@ -37,6 +43,9 @@ func (g *Generator) listInterfaceFields(i *ParsedInterface, imports map[string]*
if i == nil || i.Ref.Methods == nil {
return nil
}
if i.fieldsCache != nil {
return i.fieldsCache
}
var resp []*ParsedField
for _, field := range i.Ref.Methods.List {
switch t := field.Type.(type) {
@@ -90,6 +99,7 @@ func (g *Generator) listInterfaceFields(i *ParsedInterface, imports map[string]*
}
}
resp = deduplicatedResp
i.fieldsCache = resp
return resp
}

@@ -124,7 +134,7 @@ func (g *Generator) parseInterface(ident *ast.SelectorExpr, f *ParsedFile) *Pars
if !ok {
return nil
}
pkg, ok := pkgs.Parse(pkgInfo.Path)
pkg, ok := pkgs.Parse(g.goModFilename, pkgInfo.Path)
if !ok {
return nil
}
111 changes: 64 additions & 47 deletions internal/caching/hash.go
@@ -10,6 +10,7 @@ import (
"path"
"sort"
"strings"
"time"

"github.com/rs/zerolog/log"
"github.com/sonalys/fake/internal/files"
@@ -29,26 +30,25 @@ func getImportsHash(filePath string, dependencies map[string]string) (string, er
sort.Strings(imports)
var b strings.Builder
for _, importPath := range imports {
if hash, ok := dependencies[importPath]; ok {
b.WriteString(hash)
for path, hash := range dependencies {
if strings.Contains(importPath, path) {
b.WriteString(hash)
break
}
}
}
return b.String(), nil
}

func GetUncachedFiles(inputs, ignore []string, outputDir string) (map[string]LockfileHandler, error) {
lockFilePath := path.Join(outputDir, lockFilename)
lockFilePath = strings.ReplaceAll(lockFilePath, "internal", "internal_")
groupLockFiles, err := readLockFile(lockFilePath)
if err != nil {
return nil, fmt.Errorf("reading %s file: %w", lockFilename, err)
}
var dependencies map[string]string
if len(groupLockFiles) > 0 {
dependencies, err = gosum.Parse(inputs[0])
if err != nil {
return nil, fmt.Errorf("parsing go.sum file: %w", err)
}
dependencies, err := gosum.Parse(inputs[0])
if err != nil {
return nil, fmt.Errorf("parsing go.sum file: %w", err)
}
goFiles, err := files.ListGoFiles(inputs, append(ignore, outputDir))
if err != nil {
@@ -57,52 +57,69 @@ func GetUncachedFiles(inputs, ignore []string, outputDir string) (map[string]Loc
out := make(map[string]LockfileHandler, len(groupLockFiles))

cachedHasher := getFileHasher(len(goFiles))
// TODO: split into a function.
for _, filePathList := range files.GroupByDirectory(goFiles) {
for _, filePath := range filePathList {
entry, ok := groupLockFiles[filePath]
// If file is not in lock file hashes, then we delay hash calculation for after the mock generation.
// this makes it faster by avoiding calculation of useless files.
if !ok {
out[filePath] = &UnhashedLockFile{
Filepath: filePath,
Dependencies: dependencies,
}
continue
}
stat, _ := os.Stat(filePath)
if !entry.ModifiedAt.IsZero() && !stat.ModTime().IsZero() && stat.ModTime().Equal(entry.ModifiedAt) {
entry.exists = true
out[filePath] = &entry
continue
}
importsHash, err := getImportsHash(filePath, dependencies)
if err != nil {
return nil, err

gomod, err := files.FindFile(inputs[0], "go.mod")
if err != nil {
return nil, fmt.Errorf("input is not part of a go module")
}

for _, absPath := range goFiles {
relPath, err := files.GetRelativePath(gomod, absPath)
if err != nil {
return nil, err
}
entry, ok := groupLockFiles[relPath]
// If file is not in lock file hashes, then we delay hash calculation for after the mock generation.
// this makes it faster by avoiding calculation of useless files.
if !ok {
out[relPath] = &UnhashedLockFile{
Filepath: absPath,
Dependencies: dependencies,
}
hash, err := cachedHasher(filePath)
continue
}
var modAt time.Time
if !entry.ModifiedAt.IsZero() {
stat, err := os.Stat(absPath)
if err != nil {
return nil, fmt.Errorf("hashing file: %w", err)
return nil, fmt.Errorf("could not get file stats: %w", err)
}
if entry.Hash == hash && entry.Dependencies == importsHash {
// Mark file as processed, to further delete unused entries.
modAt := stat.ModTime()
if !stat.ModTime().IsZero() && modAt.Equal(entry.ModifiedAt) {
entry.exists = true
out[filePath] = &entry
entry.filepath = absPath
out[relPath] = &entry
continue
}
out[filePath] = &HashedLockFile{
changed: true,
exists: true,
Hash: hash,
Dependencies: importsHash,
ModifiedAt: stat.ModTime(),
}
}
importsHash, err := getImportsHash(absPath, dependencies)
if err != nil {
return nil, err
}
hash, err := cachedHasher(absPath)
if err != nil {
return nil, fmt.Errorf("hashing file: %w", err)
}
if entry.Hash == hash && entry.Dependencies == importsHash {
// Mark file as processed, to further delete unused entries.
entry.exists = true
entry.filepath = absPath
out[relPath] = &entry
continue
}
out[relPath] = &HashedLockFile{
changed: true,
exists: true,
Hash: hash,
filepath: absPath,
Dependencies: importsHash,
ModifiedAt: modAt,
}
}
for filePath := range groupLockFiles {
if _, ok := out[filePath]; !ok {
for relPath := range groupLockFiles {
if _, ok := out[relPath]; !ok {
// Remove empty files from our new lock file.
rmFileName := files.GenerateOutputFileName(filePath, outputDir)
rmFileName := files.GenerateOutputFileName(relPath, outputDir)
os.Remove(rmFileName)
log.Info().Msgf("removing legacy mock from %s", rmFileName)
}
@@ -119,7 +136,7 @@ func loadPackageImports(file string) ([]string, error) {
if err != nil {
return nil, err
}
var imports []string
imports := make([]string, 0, 30)
for _, pkg := range pkgs {
for imp := range pkg.Imports {
imports = append(imports, imp)
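The lock file is now keyed by paths relative to the module root rather than by whatever paths the current working directory yields, which is what makes running the generator from outside the module ("execution from outside CWD") cache-stable. A minimal version of that normalization using only the standard library (GetRelativePath's exact semantics are assumed):

package cache

import "path/filepath"

// lockKey turns an absolute source path into a stable lock-file key,
// relative to the module root (the directory containing go.mod), so cache
// entries match regardless of the directory the tool is invoked from.
func lockKey(goModPath, absFile string) (string, error) {
	root := filepath.Dir(goModPath)
	rel, err := filepath.Rel(root, absFile)
	if err != nil {
		return "", err
	}
	return filepath.ToSlash(rel), nil
}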