From decfe8ed84647b7819574154bd8b2daa7ec2dddf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20L=C3=A9pine?= Date: Tue, 26 Nov 2024 12:45:22 +0100 Subject: [PATCH 01/16] optimize aggregations using parallelization --- src/Analyzer/Aggregator.go | 64 +++++++++++++++++++++++++++++++------- 1 file changed, 53 insertions(+), 11 deletions(-) diff --git a/src/Analyzer/Aggregator.go b/src/Analyzer/Aggregator.go index 4b29659..1b4a85d 100644 --- a/src/Analyzer/Aggregator.go +++ b/src/Analyzer/Aggregator.go @@ -3,6 +3,7 @@ package Analyzer import ( "math" "regexp" + "sync" "github.com/halleck45/ast-metrics/src/Engine" pb "github.com/halleck45/ast-metrics/src/NodeType" @@ -235,23 +236,64 @@ func (r *Aggregator) executeAggregationOnFiles(files []*pb.File) ProjectAggregat byLanguage.NbFiles++ // Make calculations: sums of metrics, etc. - r.calculateSums(file, &projectAggregated.ByFile) - r.calculateSums(file, &projectAggregated.ByClass) - r.calculateSums(file, &projectAggregated.Combined) - r.calculateSums(file, &byLanguage) + var wg sync.WaitGroup + wg.Add(4) + + go func() { + defer wg.Done() + r.calculateSums(file, &projectAggregated.ByFile) + }() + + go func() { + defer wg.Done() + r.calculateSums(file, &projectAggregated.ByClass) + }() + + go func() { + defer wg.Done() + r.calculateSums(file, &projectAggregated.Combined) + }() + + go func() { + defer wg.Done() + r.calculateSums(file, &byLanguage) + }() + + wg.Wait() projectAggregated.ByProgrammingLanguage[file.ProgrammingLanguage] = byLanguage } - // Consolidate averages - r.consolidate(&projectAggregated.ByFile) - r.consolidate(&projectAggregated.ByClass) - r.consolidate(&projectAggregated.Combined) + // Consolidate averages using goroutines + var wg sync.WaitGroup + wg.Add(3) - // by language + go func() { + defer wg.Done() + r.consolidate(&projectAggregated.ByFile) + }() + + go func() { + defer wg.Done() + r.consolidate(&projectAggregated.ByClass) + }() + + go func() { + defer wg.Done() + r.consolidate(&projectAggregated.Combined) + }() + + wg.Wait() + + // by language in parallel + wg.Add(len(projectAggregated.ByProgrammingLanguage)) for lng, byLanguage := range projectAggregated.ByProgrammingLanguage { - r.consolidate(&byLanguage) - projectAggregated.ByProgrammingLanguage[lng] = byLanguage + go func(language string, langAggregated Aggregated) { + defer wg.Done() + r.consolidate(&langAggregated) + projectAggregated.ByProgrammingLanguage[language] = langAggregated + }(lng, byLanguage) } + wg.Wait() // Risks riskAnalyzer := NewRiskAnalyzer() From dbdb70a59d1b23d3d56af160d08cd48b10cb24cf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20L=C3=A9pine?= Date: Tue, 26 Nov 2024 12:59:58 +0100 Subject: [PATCH 02/16] make some process parallel --- src/Analyzer/Aggregator.go | 186 +++++++++++++++++++------------------ 1 file changed, 96 insertions(+), 90 deletions(-) diff --git a/src/Analyzer/Aggregator.go b/src/Analyzer/Aggregator.go index 1b4a85d..44a21d6 100644 --- a/src/Analyzer/Aggregator.go +++ b/src/Analyzer/Aggregator.go @@ -402,122 +402,112 @@ func (r *Aggregator) consolidate(aggregated *Aggregated) { classes := Engine.GetClassesInFile(file) functions := file.Stmts.StmtFunction - for _, class := range classes { - if class.LinesOfCode == nil { - continue - } - loc += class.LinesOfCode.LinesOfCode - lloc += class.LinesOfCode.LogicalLinesOfCode - cloc += class.LinesOfCode.CommentLinesOfCode - } - - for _, function := range functions { - if function.LinesOfCode == nil { - continue - } - loc += 
function.LinesOfCode.LinesOfCode - lloc += function.LinesOfCode.LogicalLinesOfCode - cloc += function.LinesOfCode.CommentLinesOfCode - } - file.Stmts.Analyze.Volume.Loc = &loc - file.Stmts.Analyze.Volume.Lloc = &lloc - file.Stmts.Analyze.Volume.Cloc = &cloc - - // File analysis should be the sum of all methods and classes in the file - // That's useful when we navigate over the files instead of the classes + // Initialize file complexity if needed if file.Stmts.Analyze.Complexity.Cyclomatic == nil { file.Stmts.Analyze.Complexity.Cyclomatic = &zero } + + // Process functions in a single loop for _, function := range functions { - if function.Stmts.Analyze == nil || function.Stmts.Analyze.Complexity == nil { - continue + // Handle LOC + if function.LinesOfCode != nil { + loc += function.LinesOfCode.LinesOfCode + lloc += function.LinesOfCode.LogicalLinesOfCode + cloc += function.LinesOfCode.CommentLinesOfCode } - if function.Stmts.Analyze.Complexity != nil { + // Handle complexity + if function.Stmts.Analyze != nil && function.Stmts.Analyze.Complexity != nil { *file.Stmts.Analyze.Complexity.Cyclomatic += *function.Stmts.Analyze.Complexity.Cyclomatic } } - // Coupling - // Store relations, with counter + // Process classes for _, class := range classes { - if class.Stmts == nil || class.Stmts.Analyze == nil { - continue + // Handle LOC + if class.LinesOfCode != nil { + loc += class.LinesOfCode.LinesOfCode + lloc += class.LinesOfCode.LogicalLinesOfCode + cloc += class.LinesOfCode.CommentLinesOfCode } - if class.Stmts.Analyze.Coupling == nil { - class.Stmts.Analyze.Coupling = &pb.Coupling{ - Efferent: 0, - Afferent: 0, - } - } - class.Stmts.Analyze.Coupling.Afferent = 0 - if class.Name == nil { - // avoid nil pointer during tests - continue + // Handle coupling + if class.Stmts != nil && class.Stmts.Analyze != nil { + if class.Stmts.Analyze.Coupling == nil { + class.Stmts.Analyze.Coupling = &pb.Coupling{ + Efferent: 0, + Afferent: 0, + } + } + class.Stmts.Analyze.Coupling.Afferent = 0 + + if class.Name != nil { + // if in hashmap + if _, ok := aggregated.ClassesAfferentCoupling[class.Name.Qualified]; ok { + class.Stmts.Analyze.Coupling.Afferent = int32(aggregated.ClassesAfferentCoupling[class.Name.Qualified]) + file.Stmts.Analyze.Coupling.Afferent += class.Stmts.Analyze.Coupling.Afferent + } + + // instability + if class.Stmts.Analyze.Coupling.Afferent > 0 || class.Stmts.Analyze.Coupling.Efferent > 0 { + instability := float32(class.Stmts.Analyze.Coupling.Efferent) / float32(class.Stmts.Analyze.Coupling.Efferent+class.Stmts.Analyze.Coupling.Afferent) + class.Stmts.Analyze.Coupling.Instability = instability + aggregated.AverageInstability += float64(instability) + } + } } + } - // if in hashmap - if _, ok := aggregated.ClassesAfferentCoupling[class.Name.Qualified]; ok { - class.Stmts.Analyze.Coupling.Afferent = int32(aggregated.ClassesAfferentCoupling[class.Name.Qualified]) + file.Stmts.Analyze.Volume.Loc = &loc + file.Stmts.Analyze.Volume.Lloc = &lloc + file.Stmts.Analyze.Volume.Cloc = &cloc - file.Stmts.Analyze.Coupling.Afferent += class.Stmts.Analyze.Coupling.Afferent - } + dependencies := file.Stmts.StmtExternalDependencies - // instability - if class.Stmts.Analyze.Coupling.Afferent > 0 || class.Stmts.Analyze.Coupling.Efferent > 0 { - // Ce / (Ce + Ca) - instability := float32(class.Stmts.Analyze.Coupling.Efferent) / float32(class.Stmts.Analyze.Coupling.Efferent+class.Stmts.Analyze.Coupling.Afferent) - class.Stmts.Analyze.Coupling.Instability = instability + for _, dependency := range 
dependencies { - // to consolidate - aggregated.AverageInstability += float64(instability) + if dependency == nil { + continue } - } - dependencies := file.Stmts.StmtExternalDependencies - - if dependencies != nil { - for _, dependency := range dependencies { - namespaceTo := dependency.Namespace - namespaceFrom := dependency.From - - // Keep only 2 levels in namespace - reg := regexp.MustCompile("[^A-Za-z0-9.]+") - separator := reg.FindString(namespaceFrom) - parts := reg.Split(namespaceTo, -1) - if len(parts) > 2 { - namespaceTo = parts[0] + separator + parts[1] - } + namespaceTo := dependency.Namespace + namespaceFrom := dependency.From - parts = reg.Split(namespaceFrom, -1) - if len(parts) > 2 { - namespaceFrom = parts[0] + separator + parts[1] - } + // Keep only 2 levels in namespace + reg := regexp.MustCompile("[^A-Za-z0-9.]+") + separator := reg.FindString(namespaceFrom) + parts := reg.Split(namespaceTo, -1) + if len(parts) > 2 { + namespaceTo = parts[0] + separator + parts[1] + } - // if same, continue - if namespaceFrom == namespaceTo { - continue - } + parts = reg.Split(namespaceFrom, -1) + if len(parts) > 2 { + namespaceFrom = parts[0] + separator + parts[1] + } - // if root namespace, continue - if namespaceFrom == "" || namespaceTo == "" { - continue - } + // if same, continue + if namespaceFrom == namespaceTo { + continue + } - // create the map if not exists - if _, ok := aggregated.PackageRelations[namespaceFrom]; !ok { - aggregated.PackageRelations[namespaceFrom] = make(map[string]int) - } + // if root namespace, continue + if namespaceFrom == "" || namespaceTo == "" { + continue + } - if _, ok := aggregated.PackageRelations[namespaceFrom][namespaceTo]; !ok { - aggregated.PackageRelations[namespaceFrom][namespaceTo] = 0 - } + // create the map if not exists + if _, ok := aggregated.PackageRelations[namespaceFrom]; !ok { + aggregated.PackageRelations[namespaceFrom] = make(map[string]int) + } - // increment the counter - aggregated.PackageRelations[namespaceFrom][namespaceTo]++ + if _, ok := aggregated.PackageRelations[namespaceFrom][namespaceTo]; !ok { + aggregated.PackageRelations[namespaceFrom][namespaceTo] = 0 } + + // increment the counter + aggregated.PackageRelations[namespaceFrom][namespaceTo]++ } } @@ -527,15 +517,31 @@ func (r *Aggregator) consolidate(aggregated *Aggregated) { // Count commits for the period based on `ResultOfGitAnalysis` data aggregated.ResultOfGitAnalysis = r.gitSummaries if aggregated.ResultOfGitAnalysis != nil { + var wg sync.WaitGroup + var mu sync.Mutex + for _, result := range aggregated.ResultOfGitAnalysis { - aggregated.CommitCountForPeriod += result.CountCommitsForLanguage + wg.Add(1) + go func(res ResultOfGitAnalysis) { + defer wg.Done() + mu.Lock() + aggregated.CommitCountForPeriod += res.CountCommitsForLanguage + mu.Unlock() + }(result) } + wg.Wait() } // Bus factor and other metrics based on aggregated data + var wg sync.WaitGroup + wg.Add(len(r.analyzers)) for _, analyzer := range r.analyzers { - analyzer.Calculate(aggregated) + go func(a AggregateAnalyzer) { + defer wg.Done() + a.Calculate(aggregated) + }(analyzer) } + wg.Wait() } // Add an analyzer to the aggregator From 0c5a9199b47873a254d44c48f35c244131c976c8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20L=C3=A9pine?= Date: Tue, 26 Nov 2024 13:05:12 +0100 Subject: [PATCH 03/16] traverse file in parallel --- src/Analyzer/Aggregator.go | 269 ++++++++++++++++++++----------------- 1 file changed, 143 insertions(+), 126 deletions(-) diff --git 
a/src/Analyzer/Aggregator.go b/src/Analyzer/Aggregator.go index 44a21d6..855a6be 100644 --- a/src/Analyzer/Aggregator.go +++ b/src/Analyzer/Aggregator.go @@ -263,7 +263,7 @@ func (r *Aggregator) executeAggregationOnFiles(files []*pb.File) ProjectAggregat projectAggregated.ByProgrammingLanguage[file.ProgrammingLanguage] = byLanguage } - // Consolidate averages using goroutines + // Consolidate averages var wg sync.WaitGroup wg.Add(3) @@ -284,7 +284,7 @@ func (r *Aggregator) executeAggregationOnFiles(files []*pb.File) ProjectAggregat wg.Wait() - // by language in parallel + // by language wg.Add(len(projectAggregated.ByProgrammingLanguage)) for lng, byLanguage := range projectAggregated.ByProgrammingLanguage { go func(language string, langAggregated Aggregated) { @@ -354,162 +354,180 @@ func (r *Aggregator) consolidate(aggregated *Aggregated) { aggregated.Cloc = 0 aggregated.Lloc = 0 - for _, file := range aggregated.ConcernedFiles { - - if file.LinesOfCode == nil { - continue - } + var wg sync.WaitGroup + var mu sync.Mutex - aggregated.Loc += int(file.LinesOfCode.LinesOfCode) - aggregated.Cloc += int(file.LinesOfCode.CommentLinesOfCode) - aggregated.Lloc += int(file.LinesOfCode.LogicalLinesOfCode) + for _, file := range aggregated.ConcernedFiles { + wg.Add(1) + go func(file *pb.File) { + defer wg.Done() - // Calculate alternate MI using average MI per method when file has no class - if file.Stmts.StmtClass == nil || len(file.Stmts.StmtClass) == 0 { - if file.Stmts.Analyze.Maintainability == nil { - file.Stmts.Analyze.Maintainability = &pb.Maintainability{} + if file.LinesOfCode == nil { + return } - methods := file.Stmts.StmtFunction - if methods == nil || len(methods) == 0 { - continue - } - averageForFile := float32(0) - for _, method := range methods { - if method.Stmts.Analyze == nil || method.Stmts.Analyze.Maintainability == nil { - continue + mu.Lock() + aggregated.Loc += int(file.LinesOfCode.LinesOfCode) + aggregated.Cloc += int(file.LinesOfCode.CommentLinesOfCode) + aggregated.Lloc += int(file.LinesOfCode.LogicalLinesOfCode) + mu.Unlock() + + // Calculate alternate MI using average MI per method when file has no class + if file.Stmts.StmtClass == nil || len(file.Stmts.StmtClass) == 0 { + if file.Stmts.Analyze.Maintainability == nil { + file.Stmts.Analyze.Maintainability = &pb.Maintainability{} } - averageForFile += float32(*method.Stmts.Analyze.Maintainability.MaintainabilityIndex) - } - averageForFile = averageForFile / float32(len(methods)) - file.Stmts.Analyze.Maintainability.MaintainabilityIndex = &averageForFile - } - // LOC of file is the sum of all classes and methods - // That's useful when we navigate over the files instead of the classes - zero := int32(0) - loc := int32(0) - lloc := int32(0) - cloc := int32(0) - - if file.Stmts.Analyze.Volume == nil { - file.Stmts.Analyze.Volume = &pb.Volume{ - Lloc: &zero, - Cloc: &zero, - Loc: &zero, + methods := file.Stmts.StmtFunction + if methods == nil || len(methods) == 0 { + return + } + averageForFile := float32(0) + for _, method := range methods { + if method.Stmts.Analyze == nil || method.Stmts.Analyze.Maintainability == nil { + continue + } + averageForFile += float32(*method.Stmts.Analyze.Maintainability.MaintainabilityIndex) + } + averageForFile = averageForFile / float32(len(methods)) + file.Stmts.Analyze.Maintainability.MaintainabilityIndex = &averageForFile + } + + // LOC of file is the sum of all classes and methods + // That's useful when we navigate over the files instead of the classes + zero := int32(0) + loc := 
int32(0) + lloc := int32(0) + cloc := int32(0) + + if file.Stmts.Analyze.Volume == nil { + file.Stmts.Analyze.Volume = &pb.Volume{ + Lloc: &zero, + Cloc: &zero, + Loc: &zero, + } } - } - classes := Engine.GetClassesInFile(file) - functions := file.Stmts.StmtFunction + classes := Engine.GetClassesInFile(file) + functions := file.Stmts.StmtFunction - // Initialize file complexity if needed - if file.Stmts.Analyze.Complexity.Cyclomatic == nil { - file.Stmts.Analyze.Complexity.Cyclomatic = &zero - } - - // Process functions in a single loop - for _, function := range functions { - // Handle LOC - if function.LinesOfCode != nil { - loc += function.LinesOfCode.LinesOfCode - lloc += function.LinesOfCode.LogicalLinesOfCode - cloc += function.LinesOfCode.CommentLinesOfCode + // Initialize file complexity if needed + if file.Stmts.Analyze.Complexity.Cyclomatic == nil { + file.Stmts.Analyze.Complexity.Cyclomatic = &zero } - // Handle complexity - if function.Stmts.Analyze != nil && function.Stmts.Analyze.Complexity != nil { - *file.Stmts.Analyze.Complexity.Cyclomatic += *function.Stmts.Analyze.Complexity.Cyclomatic - } - } + // Process functions + for _, function := range functions { + // Handle LOC + if function.LinesOfCode != nil { + loc += function.LinesOfCode.LinesOfCode + lloc += function.LinesOfCode.LogicalLinesOfCode + cloc += function.LinesOfCode.CommentLinesOfCode + } - // Process classes - for _, class := range classes { - // Handle LOC - if class.LinesOfCode != nil { - loc += class.LinesOfCode.LinesOfCode - lloc += class.LinesOfCode.LogicalLinesOfCode - cloc += class.LinesOfCode.CommentLinesOfCode + // Handle complexity + if function.Stmts.Analyze != nil && function.Stmts.Analyze.Complexity != nil { + *file.Stmts.Analyze.Complexity.Cyclomatic += *function.Stmts.Analyze.Complexity.Cyclomatic + } } - // Handle coupling - if class.Stmts != nil && class.Stmts.Analyze != nil { - if class.Stmts.Analyze.Coupling == nil { - class.Stmts.Analyze.Coupling = &pb.Coupling{ - Efferent: 0, - Afferent: 0, - } + // Process classes + for _, class := range classes { + // Handle LOC + if class.LinesOfCode != nil { + loc += class.LinesOfCode.LinesOfCode + lloc += class.LinesOfCode.LogicalLinesOfCode + cloc += class.LinesOfCode.CommentLinesOfCode } - class.Stmts.Analyze.Coupling.Afferent = 0 - if class.Name != nil { - // if in hashmap - if _, ok := aggregated.ClassesAfferentCoupling[class.Name.Qualified]; ok { - class.Stmts.Analyze.Coupling.Afferent = int32(aggregated.ClassesAfferentCoupling[class.Name.Qualified]) - file.Stmts.Analyze.Coupling.Afferent += class.Stmts.Analyze.Coupling.Afferent + // Handle coupling + if class.Stmts != nil && class.Stmts.Analyze != nil { + if class.Stmts.Analyze.Coupling == nil { + class.Stmts.Analyze.Coupling = &pb.Coupling{ + Efferent: 0, + Afferent: 0, + } } - - // instability - if class.Stmts.Analyze.Coupling.Afferent > 0 || class.Stmts.Analyze.Coupling.Efferent > 0 { - instability := float32(class.Stmts.Analyze.Coupling.Efferent) / float32(class.Stmts.Analyze.Coupling.Efferent+class.Stmts.Analyze.Coupling.Afferent) - class.Stmts.Analyze.Coupling.Instability = instability - aggregated.AverageInstability += float64(instability) + class.Stmts.Analyze.Coupling.Afferent = 0 + + if class.Name != nil { + mu.Lock() + // if in hashmap + if _, ok := aggregated.ClassesAfferentCoupling[class.Name.Qualified]; ok { + class.Stmts.Analyze.Coupling.Afferent = int32(aggregated.ClassesAfferentCoupling[class.Name.Qualified]) + file.Stmts.Analyze.Coupling.Afferent += 
class.Stmts.Analyze.Coupling.Afferent + } + + // instability + if class.Stmts.Analyze.Coupling.Afferent > 0 || class.Stmts.Analyze.Coupling.Efferent > 0 { + instability := float32(class.Stmts.Analyze.Coupling.Efferent) / float32(class.Stmts.Analyze.Coupling.Efferent+class.Stmts.Analyze.Coupling.Afferent) + class.Stmts.Analyze.Coupling.Instability = instability + aggregated.AverageInstability += float64(instability) + } + mu.Unlock() } } } - } - file.Stmts.Analyze.Volume.Loc = &loc - file.Stmts.Analyze.Volume.Lloc = &lloc - file.Stmts.Analyze.Volume.Cloc = &cloc + file.Stmts.Analyze.Volume.Loc = &loc + file.Stmts.Analyze.Volume.Lloc = &lloc + file.Stmts.Analyze.Volume.Cloc = &cloc - dependencies := file.Stmts.StmtExternalDependencies + dependencies := file.Stmts.StmtExternalDependencies - for _, dependency := range dependencies { + for _, dependency := range dependencies { + if dependency == nil { + continue + } - if dependency == nil { - continue - } + namespaceTo := dependency.Namespace + namespaceFrom := dependency.From - namespaceTo := dependency.Namespace - namespaceFrom := dependency.From + // Keep only 2 levels in namespace + reg := regexp.MustCompile("[^A-Za-z0-9.]+") - // Keep only 2 levels in namespace - reg := regexp.MustCompile("[^A-Za-z0-9.]+") - separator := reg.FindString(namespaceFrom) - parts := reg.Split(namespaceTo, -1) - if len(parts) > 2 { - namespaceTo = parts[0] + separator + parts[1] - } + if namespaceFrom == "" || namespaceTo == "" { + continue + } - parts = reg.Split(namespaceFrom, -1) - if len(parts) > 2 { - namespaceFrom = parts[0] + separator + parts[1] - } + separator := reg.FindString(namespaceFrom) + parts := reg.Split(namespaceTo, -1) + if len(parts) > 2 { + namespaceTo = parts[0] + separator + parts[1] + } - // if same, continue - if namespaceFrom == namespaceTo { - continue - } + parts = reg.Split(namespaceFrom, -1) + if len(parts) > 2 { + namespaceFrom = parts[0] + separator + parts[1] + } - // if root namespace, continue - if namespaceFrom == "" || namespaceTo == "" { - continue - } + // if same, continue + if namespaceFrom == namespaceTo { + continue + } - // create the map if not exists - if _, ok := aggregated.PackageRelations[namespaceFrom]; !ok { - aggregated.PackageRelations[namespaceFrom] = make(map[string]int) - } + // if root namespace, continue + if namespaceFrom == "" || namespaceTo == "" { + continue + } - if _, ok := aggregated.PackageRelations[namespaceFrom][namespaceTo]; !ok { - aggregated.PackageRelations[namespaceFrom][namespaceTo] = 0 - } + mu.Lock() + // create the map if not exists + if _, ok := aggregated.PackageRelations[namespaceFrom]; !ok { + aggregated.PackageRelations[namespaceFrom] = make(map[string]int) + } - // increment the counter - aggregated.PackageRelations[namespaceFrom][namespaceTo]++ - } + if _, ok := aggregated.PackageRelations[namespaceFrom][namespaceTo]; !ok { + aggregated.PackageRelations[namespaceFrom][namespaceTo] = 0 + } + + // increment the counter + aggregated.PackageRelations[namespaceFrom][namespaceTo]++ + mu.Unlock() + } + }(file) } + wg.Wait() // Consolidate aggregated.AverageInstability = aggregated.AverageInstability / float64(aggregated.NbClasses) @@ -533,7 +551,6 @@ func (r *Aggregator) consolidate(aggregated *Aggregated) { } // Bus factor and other metrics based on aggregated data - var wg sync.WaitGroup wg.Add(len(r.analyzers)) for _, analyzer := range r.analyzers { go func(a AggregateAnalyzer) { From 25f9abf3c457fc4870c28f12ea280b3c452a6602 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Jean-Fran=C3=A7ois=20L=C3=A9pine?= Date: Tue, 26 Nov 2024 13:14:25 +0100 Subject: [PATCH 04/16] avoid useless float conversions, in order to speedup the application --- src/Analyzer/Aggregator.go | 166 ++++++++++-------- src/Analyzer/Comparator.go | 80 ++++----- .../Component/MaintainabilityIndexVisitor.go | 14 +- src/Analyzer/RiskAnalyzer.go | 30 ++-- src/Analyzer/Volume/HalsteadMetricsVisitor.go | 52 +++--- src/Cli/ComponentClassTable.go | 4 +- src/Cli/Styles.go | 8 +- src/Engine/util.go | 4 +- src/Pkg/Cleaner/cleaner.go | 13 +- src/Report/MarkdownReportGenerator.go | 4 +- src/Report/types.go | 50 +++--- src/Ui/ComponentBarchart.go | 9 +- ...ntBarchartCyclomaticByMethodRepartition.go | 6 +- ...ComponentBarchartLocByMethodRepartition.go | 6 +- ...BarchartMaintainabilityIndexRepartition.go | 6 +- src/Ui/ComponentLineChartGitActivity.go | 8 +- 16 files changed, 235 insertions(+), 225 deletions(-) diff --git a/src/Analyzer/Aggregator.go b/src/Analyzer/Aggregator.go index 855a6be..2870ea4 100644 --- a/src/Analyzer/Aggregator.go +++ b/src/Analyzer/Aggregator.go @@ -33,33 +33,33 @@ type Aggregated struct { Loc int Cloc int Lloc int - AverageMethodsPerClass float64 - AverageLocPerMethod float64 - AverageLlocPerMethod float64 - AverageClocPerMethod float64 - AverageCyclomaticComplexityPerMethod float64 - AverageCyclomaticComplexityPerClass float64 + AverageMethodsPerClass float32 + AverageLocPerMethod float32 + AverageLlocPerMethod float32 + AverageClocPerMethod float32 + AverageCyclomaticComplexityPerMethod float32 + AverageCyclomaticComplexityPerClass float32 MinCyclomaticComplexity int MaxCyclomaticComplexity int - AverageHalsteadDifficulty float64 - AverageHalsteadEffort float64 - AverageHalsteadVolume float64 - AverageHalsteadTime float64 - AverageHalsteadBugs float64 - SumHalsteadDifficulty float64 - SumHalsteadEffort float64 - SumHalsteadVolume float64 - SumHalsteadTime float64 - SumHalsteadBugs float64 - AverageMI float64 - AverageMIwoc float64 - AverageMIcw float64 - AverageMIPerMethod float64 - AverageMIwocPerMethod float64 - AverageMIcwPerMethod float64 - AverageAfferentCoupling float64 - AverageEfferentCoupling float64 - AverageInstability float64 + AverageHalsteadDifficulty float32 + AverageHalsteadEffort float32 + AverageHalsteadVolume float32 + AverageHalsteadTime float32 + AverageHalsteadBugs float32 + SumHalsteadDifficulty float32 + SumHalsteadEffort float32 + SumHalsteadVolume float32 + SumHalsteadTime float32 + SumHalsteadBugs float32 + AverageMI float32 + AverageMIwoc float32 + AverageMIcw float32 + AverageMIPerMethod float32 + AverageMIwocPerMethod float32 + AverageMIcwPerMethod float32 + AverageAfferentCoupling float32 + AverageEfferentCoupling float32 + AverageInstability float32 CommitCountForPeriod int CommittedFilesCountForPeriod int // for example if one commit concerns 10 files, it will be 10 BusFactor int @@ -245,7 +245,7 @@ func (r *Aggregator) executeAggregationOnFiles(files []*pb.File) ProjectAggregat }() go func() { - defer wg.Done() + defer wg.Done() r.calculateSums(file, &projectAggregated.ByClass) }() @@ -306,37 +306,37 @@ func (r *Aggregator) executeAggregationOnFiles(files []*pb.File) ProjectAggregat func (r *Aggregator) consolidate(aggregated *Aggregated) { if aggregated.NbClasses > 0 { - aggregated.AverageMethodsPerClass = float64(aggregated.NbMethods) / float64(aggregated.NbClasses) - aggregated.AverageCyclomaticComplexityPerClass = aggregated.AverageCyclomaticComplexityPerClass / float64(aggregated.NbClasses) + aggregated.AverageMethodsPerClass 
= float32(aggregated.NbMethods) / float32(aggregated.NbClasses) + aggregated.AverageCyclomaticComplexityPerClass = aggregated.AverageCyclomaticComplexityPerClass / float32(aggregated.NbClasses) } else { aggregated.AverageMethodsPerClass = 0 aggregated.AverageCyclomaticComplexityPerClass = 0 } if aggregated.AverageMI > 0 { - aggregated.AverageMI = aggregated.AverageMI / float64(aggregated.NbClasses) - aggregated.AverageMIwoc = aggregated.AverageMIwoc / float64(aggregated.NbClasses) - aggregated.AverageMIcw = aggregated.AverageMIcw / float64(aggregated.NbClasses) + aggregated.AverageMI = aggregated.AverageMI / float32(aggregated.NbClasses) + aggregated.AverageMIwoc = aggregated.AverageMIwoc / float32(aggregated.NbClasses) + aggregated.AverageMIcw = aggregated.AverageMIcw / float32(aggregated.NbClasses) } if aggregated.AverageInstability > 0 { - aggregated.AverageEfferentCoupling = aggregated.AverageEfferentCoupling / float64(aggregated.NbClasses) - aggregated.AverageAfferentCoupling = aggregated.AverageAfferentCoupling / float64(aggregated.NbClasses) + aggregated.AverageEfferentCoupling = aggregated.AverageEfferentCoupling / float32(aggregated.NbClasses) + aggregated.AverageAfferentCoupling = aggregated.AverageAfferentCoupling / float32(aggregated.NbClasses) } if aggregated.NbMethods > 0 { - aggregated.AverageLocPerMethod = aggregated.AverageLocPerMethod / float64(aggregated.NbMethods) - aggregated.AverageClocPerMethod = aggregated.AverageClocPerMethod / float64(aggregated.NbMethods) - aggregated.AverageLlocPerMethod = aggregated.AverageLlocPerMethod / float64(aggregated.NbMethods) - aggregated.AverageCyclomaticComplexityPerMethod = aggregated.AverageCyclomaticComplexityPerMethod / float64(aggregated.NbMethods) - aggregated.AverageMIPerMethod = aggregated.AverageMIPerMethod / float64(aggregated.NbMethods) - aggregated.AverageMIwocPerMethod = aggregated.AverageMIwocPerMethod / float64(aggregated.NbMethods) - aggregated.AverageMIcwPerMethod = aggregated.AverageMIcwPerMethod / float64(aggregated.NbMethods) - aggregated.AverageHalsteadDifficulty = aggregated.AverageHalsteadDifficulty / float64(aggregated.NbClasses) - aggregated.AverageHalsteadEffort = aggregated.AverageHalsteadEffort / float64(aggregated.NbClasses) - aggregated.AverageHalsteadVolume = aggregated.AverageHalsteadVolume / float64(aggregated.NbClasses) - aggregated.AverageHalsteadTime = aggregated.AverageHalsteadTime / float64(aggregated.NbClasses) - aggregated.AverageHalsteadBugs = aggregated.AverageHalsteadBugs / float64(aggregated.NbClasses) + aggregated.AverageLocPerMethod = aggregated.AverageLocPerMethod / float32(aggregated.NbMethods) + aggregated.AverageClocPerMethod = aggregated.AverageClocPerMethod / float32(aggregated.NbMethods) + aggregated.AverageLlocPerMethod = aggregated.AverageLlocPerMethod / float32(aggregated.NbMethods) + aggregated.AverageCyclomaticComplexityPerMethod = aggregated.AverageCyclomaticComplexityPerMethod / float32(aggregated.NbMethods) + aggregated.AverageMIPerMethod = aggregated.AverageMIPerMethod / float32(aggregated.NbMethods) + aggregated.AverageMIwocPerMethod = aggregated.AverageMIwocPerMethod / float32(aggregated.NbMethods) + aggregated.AverageMIcwPerMethod = aggregated.AverageMIcwPerMethod / float32(aggregated.NbMethods) + aggregated.AverageHalsteadDifficulty = aggregated.AverageHalsteadDifficulty / float32(aggregated.NbClasses) + aggregated.AverageHalsteadEffort = aggregated.AverageHalsteadEffort / float32(aggregated.NbClasses) + aggregated.AverageHalsteadVolume = 
aggregated.AverageHalsteadVolume / float32(aggregated.NbClasses) + aggregated.AverageHalsteadTime = aggregated.AverageHalsteadTime / float32(aggregated.NbClasses) + aggregated.AverageHalsteadBugs = aggregated.AverageHalsteadBugs / float32(aggregated.NbClasses) } // if langage without classes @@ -357,6 +357,8 @@ func (r *Aggregator) consolidate(aggregated *Aggregated) { var wg sync.WaitGroup var mu sync.Mutex + reg := regexp.MustCompile("[^A-Za-z0-9.]+") + for _, file := range aggregated.ConcernedFiles { wg.Add(1) go func(file *pb.File) { @@ -372,14 +374,19 @@ func (r *Aggregator) consolidate(aggregated *Aggregated) { aggregated.Lloc += int(file.LinesOfCode.LogicalLinesOfCode) mu.Unlock() + // Create local variables for file processing + localFile := &pb.File{ + Stmts: file.Stmts, + } + // Calculate alternate MI using average MI per method when file has no class - if file.Stmts.StmtClass == nil || len(file.Stmts.StmtClass) == 0 { - if file.Stmts.Analyze.Maintainability == nil { - file.Stmts.Analyze.Maintainability = &pb.Maintainability{} + if len(localFile.Stmts.StmtClass) == 0 { + if localFile.Stmts.Analyze.Maintainability == nil { + localFile.Stmts.Analyze.Maintainability = &pb.Maintainability{} } methods := file.Stmts.StmtFunction - if methods == nil || len(methods) == 0 { + if len(methods) == 0 { return } averageForFile := float32(0) @@ -390,9 +397,14 @@ func (r *Aggregator) consolidate(aggregated *Aggregated) { averageForFile += float32(*method.Stmts.Analyze.Maintainability.MaintainabilityIndex) } averageForFile = averageForFile / float32(len(methods)) - file.Stmts.Analyze.Maintainability.MaintainabilityIndex = &averageForFile + localFile.Stmts.Analyze.Maintainability.MaintainabilityIndex = &averageForFile } + // Update the original file with processed data + mu.Lock() + file.Stmts = localFile.Stmts + mu.Unlock() + // LOC of file is the sum of all classes and methods // That's useful when we navigate over the files instead of the classes zero := int32(0) @@ -462,7 +474,7 @@ func (r *Aggregator) consolidate(aggregated *Aggregated) { if class.Stmts.Analyze.Coupling.Afferent > 0 || class.Stmts.Analyze.Coupling.Efferent > 0 { instability := float32(class.Stmts.Analyze.Coupling.Efferent) / float32(class.Stmts.Analyze.Coupling.Efferent+class.Stmts.Analyze.Coupling.Afferent) class.Stmts.Analyze.Coupling.Instability = instability - aggregated.AverageInstability += float64(instability) + aggregated.AverageInstability += instability } mu.Unlock() } @@ -483,13 +495,11 @@ func (r *Aggregator) consolidate(aggregated *Aggregated) { namespaceTo := dependency.Namespace namespaceFrom := dependency.From - // Keep only 2 levels in namespace - reg := regexp.MustCompile("[^A-Za-z0-9.]+") - if namespaceFrom == "" || namespaceTo == "" { continue } + // Keep only 2 levels in namespace separator := reg.FindString(namespaceFrom) parts := reg.Split(namespaceTo, -1) if len(parts) > 2 { @@ -530,14 +540,14 @@ func (r *Aggregator) consolidate(aggregated *Aggregated) { wg.Wait() // Consolidate - aggregated.AverageInstability = aggregated.AverageInstability / float64(aggregated.NbClasses) + aggregated.AverageInstability = aggregated.AverageInstability / float32(aggregated.NbClasses) // Count commits for the period based on `ResultOfGitAnalysis` data aggregated.ResultOfGitAnalysis = r.gitSummaries if aggregated.ResultOfGitAnalysis != nil { var wg sync.WaitGroup var mu sync.Mutex - + for _, result := range aggregated.ResultOfGitAnalysis { wg.Add(1) go func(res ResultOfGitAnalysis) { @@ -627,28 +637,28 @@ func (r 
*Aggregator) calculateSums(file *pb.File, specificAggregation *Aggregate // Average cyclomatic complexity per method if function.Stmts.Analyze != nil && function.Stmts.Analyze.Complexity != nil { if function.Stmts.Analyze.Complexity.Cyclomatic != nil { - specificAggregation.AverageCyclomaticComplexityPerMethod += float64(*function.Stmts.Analyze.Complexity.Cyclomatic) + specificAggregation.AverageCyclomaticComplexityPerMethod += float32(*function.Stmts.Analyze.Complexity.Cyclomatic) } } // Average maintainability index per method if function.Stmts.Analyze != nil && function.Stmts.Analyze.Maintainability != nil { if function.Stmts.Analyze.Maintainability.MaintainabilityIndex != nil && !math.IsNaN(float64(*function.Stmts.Analyze.Maintainability.MaintainabilityIndex)) { - specificAggregation.AverageMIPerMethod += float64(*function.Stmts.Analyze.Maintainability.MaintainabilityIndex) - specificAggregation.AverageMIwocPerMethod += float64(*function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments) - specificAggregation.AverageMIcwPerMethod += float64(*function.Stmts.Analyze.Maintainability.CommentWeight) + specificAggregation.AverageMIPerMethod += *function.Stmts.Analyze.Maintainability.MaintainabilityIndex + specificAggregation.AverageMIwocPerMethod += *function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments + specificAggregation.AverageMIcwPerMethod += *function.Stmts.Analyze.Maintainability.CommentWeight } } // average lines of code per method if function.Stmts.Analyze != nil && function.Stmts.Analyze.Volume != nil { if function.Stmts.Analyze.Volume.Loc != nil { - specificAggregation.AverageLocPerMethod += float64(*function.Stmts.Analyze.Volume.Loc) + specificAggregation.AverageLocPerMethod += float32(*function.Stmts.Analyze.Volume.Loc) } if function.Stmts.Analyze.Volume.Cloc != nil { - specificAggregation.AverageClocPerMethod += float64(*function.Stmts.Analyze.Volume.Cloc) + specificAggregation.AverageClocPerMethod += float32(*function.Stmts.Analyze.Volume.Cloc) } if function.Stmts.Analyze.Volume.Lloc != nil { - specificAggregation.AverageLlocPerMethod += float64(*function.Stmts.Analyze.Volume.Lloc) + specificAggregation.AverageLlocPerMethod += float32(*function.Stmts.Analyze.Volume.Lloc) } } } @@ -667,22 +677,22 @@ func (r *Aggregator) calculateSums(file *pb.File, specificAggregation *Aggregate // Maintainability Index if class.Stmts.Analyze.Maintainability != nil { if class.Stmts.Analyze.Maintainability.MaintainabilityIndex != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Maintainability.MaintainabilityIndex)) { - specificAggregation.AverageMI += float64(*class.Stmts.Analyze.Maintainability.MaintainabilityIndex) - specificAggregation.AverageMIwoc += float64(*class.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments) - specificAggregation.AverageMIcw += float64(*class.Stmts.Analyze.Maintainability.CommentWeight) + specificAggregation.AverageMI += *class.Stmts.Analyze.Maintainability.MaintainabilityIndex + specificAggregation.AverageMIwoc += *class.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments + specificAggregation.AverageMIcw += *class.Stmts.Analyze.Maintainability.CommentWeight } } // Coupling if class.Stmts.Analyze.Coupling != nil { - specificAggregation.AverageInstability += float64(class.Stmts.Analyze.Coupling.Instability) - specificAggregation.AverageEfferentCoupling += float64(class.Stmts.Analyze.Coupling.Efferent) - specificAggregation.AverageAfferentCoupling += float64(class.Stmts.Analyze.Coupling.Afferent) + 
specificAggregation.AverageInstability += class.Stmts.Analyze.Coupling.Instability + specificAggregation.AverageEfferentCoupling += float32(class.Stmts.Analyze.Coupling.Efferent) + specificAggregation.AverageAfferentCoupling += float32(class.Stmts.Analyze.Coupling.Afferent) } // cyclomatic complexity per class if class.Stmts.Analyze.Complexity != nil && class.Stmts.Analyze.Complexity.Cyclomatic != nil { - specificAggregation.AverageCyclomaticComplexityPerClass += float64(*class.Stmts.Analyze.Complexity.Cyclomatic) + specificAggregation.AverageCyclomaticComplexityPerClass += float32(*class.Stmts.Analyze.Complexity.Cyclomatic) if specificAggregation.MinCyclomaticComplexity == 0 || int(*class.Stmts.Analyze.Complexity.Cyclomatic) < specificAggregation.MinCyclomaticComplexity { specificAggregation.MinCyclomaticComplexity = int(*class.Stmts.Analyze.Complexity.Cyclomatic) } @@ -694,20 +704,20 @@ func (r *Aggregator) calculateSums(file *pb.File, specificAggregation *Aggregate // Halstead if class.Stmts.Analyze.Volume != nil { if class.Stmts.Analyze.Volume.HalsteadDifficulty != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Volume.HalsteadDifficulty)) { - specificAggregation.AverageHalsteadDifficulty += float64(*class.Stmts.Analyze.Volume.HalsteadDifficulty) - specificAggregation.SumHalsteadDifficulty += float64(*class.Stmts.Analyze.Volume.HalsteadDifficulty) + specificAggregation.AverageHalsteadDifficulty += *class.Stmts.Analyze.Volume.HalsteadDifficulty + specificAggregation.SumHalsteadDifficulty += *class.Stmts.Analyze.Volume.HalsteadDifficulty } if class.Stmts.Analyze.Volume.HalsteadEffort != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Volume.HalsteadEffort)) { - specificAggregation.AverageHalsteadEffort += float64(*class.Stmts.Analyze.Volume.HalsteadEffort) - specificAggregation.SumHalsteadEffort += float64(*class.Stmts.Analyze.Volume.HalsteadEffort) + specificAggregation.AverageHalsteadEffort += *class.Stmts.Analyze.Volume.HalsteadEffort + specificAggregation.SumHalsteadEffort += *class.Stmts.Analyze.Volume.HalsteadEffort } if class.Stmts.Analyze.Volume.HalsteadVolume != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Volume.HalsteadVolume)) { - specificAggregation.AverageHalsteadVolume += float64(*class.Stmts.Analyze.Volume.HalsteadVolume) - specificAggregation.SumHalsteadVolume += float64(*class.Stmts.Analyze.Volume.HalsteadVolume) + specificAggregation.AverageHalsteadVolume += *class.Stmts.Analyze.Volume.HalsteadVolume + specificAggregation.SumHalsteadVolume += *class.Stmts.Analyze.Volume.HalsteadVolume } if class.Stmts.Analyze.Volume.HalsteadTime != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Volume.HalsteadTime)) { - specificAggregation.AverageHalsteadTime += float64(*class.Stmts.Analyze.Volume.HalsteadTime) - specificAggregation.SumHalsteadTime += float64(*class.Stmts.Analyze.Volume.HalsteadTime) + specificAggregation.AverageHalsteadTime += *class.Stmts.Analyze.Volume.HalsteadTime + specificAggregation.SumHalsteadTime += *class.Stmts.Analyze.Volume.HalsteadTime } } diff --git a/src/Analyzer/Comparator.go b/src/Analyzer/Comparator.go index eb4057c..1ab94a2 100644 --- a/src/Analyzer/Comparator.go +++ b/src/Analyzer/Comparator.go @@ -9,9 +9,9 @@ type Comparator struct { } const ( - ADDED = "added" - DELETED = "deleted" - MODIFIED = "modified" + ADDED = "added" + DELETED = "deleted" + MODIFIED = "modified" UNCHANGED = "unchanged" ) @@ -33,37 +33,37 @@ type Comparaison struct { Loc int Cloc int Lloc int - AverageMethodsPerClass float64 - AverageLocPerMethod float64 - 
AverageLlocPerMethod float64 - AverageClocPerMethod float64 - AverageCyclomaticComplexityPerMethod float64 - AverageCyclomaticComplexityPerClass float64 + AverageMethodsPerClass float32 + AverageLocPerMethod float32 + AverageLlocPerMethod float32 + AverageClocPerMethod float32 + AverageCyclomaticComplexityPerMethod float32 + AverageCyclomaticComplexityPerClass float32 MinCyclomaticComplexity int MaxCyclomaticComplexity int - AverageHalsteadDifficulty float64 - AverageHalsteadEffort float64 - AverageHalsteadVolume float64 - AverageHalsteadTime float64 - AverageHalsteadBugs float64 - SumHalsteadDifficulty float64 - SumHalsteadEffort float64 - SumHalsteadVolume float64 - SumHalsteadTime float64 - SumHalsteadBugs float64 - AverageMI float64 - AverageMIwoc float64 - AverageMIcw float64 - AverageMIPerMethod float64 - AverageMIwocPerMethod float64 - AverageMIcwPerMethod float64 - AverageAfferentCoupling float64 - AverageEfferentCoupling float64 - AverageInstability float64 + AverageHalsteadDifficulty float32 + AverageHalsteadEffort float32 + AverageHalsteadVolume float32 + AverageHalsteadTime float32 + AverageHalsteadBugs float32 + SumHalsteadDifficulty float32 + SumHalsteadEffort float32 + SumHalsteadVolume float32 + SumHalsteadTime float32 + SumHalsteadBugs float32 + AverageMI float32 + AverageMIwoc float32 + AverageMIcw float32 + AverageMIPerMethod float32 + AverageMIwocPerMethod float32 + AverageMIcwPerMethod float32 + AverageAfferentCoupling float32 + AverageEfferentCoupling float32 + AverageInstability float32 CommitCountForPeriod int CommittedFilesCountForPeriod int // for example if one commit concerns 10 files, it will be 10 BusFactor int - Risk float64 + Risk float32 ChangedFiles []ChangedFile NbNewFiles int NbDeletedFiles int @@ -218,36 +218,36 @@ func (c *Comparator) Compare(first Aggregated, second Aggregated) Comparaison { // Cyclomatic complexity if file.Stmts.Analyze.Complexity != nil && file2.Stmts.Analyze.Complexity != nil { - change.Comparaison.AverageCyclomaticComplexityPerMethod = float64(*file.Stmts.Analyze.Complexity.Cyclomatic) - float64(*file2.Stmts.Analyze.Complexity.Cyclomatic) + change.Comparaison.AverageCyclomaticComplexityPerMethod = float32(*file.Stmts.Analyze.Complexity.Cyclomatic) - float32(*file2.Stmts.Analyze.Complexity.Cyclomatic) } // Halstead if file.Stmts.Analyze.Volume != nil && file.Stmts.Analyze.Volume.HalsteadDifficulty != nil && file2.Stmts.Analyze.Volume != nil && file2.Stmts.Analyze.Volume.HalsteadDifficulty != nil { - change.Comparaison.AverageHalsteadDifficulty = float64(*file.Stmts.Analyze.Volume.HalsteadDifficulty) - float64(*file2.Stmts.Analyze.Volume.HalsteadDifficulty) - change.Comparaison.AverageHalsteadEffort = float64(*file.Stmts.Analyze.Volume.HalsteadEffort) - float64(*file2.Stmts.Analyze.Volume.HalsteadEffort) - change.Comparaison.AverageHalsteadVolume = float64(*file.Stmts.Analyze.Volume.HalsteadVolume) - float64(*file2.Stmts.Analyze.Volume.HalsteadVolume) - change.Comparaison.AverageHalsteadTime = float64(*file.Stmts.Analyze.Volume.HalsteadTime) - float64(*file2.Stmts.Analyze.Volume.HalsteadTime) + change.Comparaison.AverageHalsteadDifficulty = *file.Stmts.Analyze.Volume.HalsteadDifficulty - *file2.Stmts.Analyze.Volume.HalsteadDifficulty + change.Comparaison.AverageHalsteadEffort = *file.Stmts.Analyze.Volume.HalsteadEffort - *file2.Stmts.Analyze.Volume.HalsteadEffort + change.Comparaison.AverageHalsteadVolume = *file.Stmts.Analyze.Volume.HalsteadVolume - *file2.Stmts.Analyze.Volume.HalsteadVolume + change.Comparaison.AverageHalsteadTime 
= *file.Stmts.Analyze.Volume.HalsteadTime - *file2.Stmts.Analyze.Volume.HalsteadTime } // Maintainability index if file.Stmts.Analyze.Maintainability != nil && file2.Stmts.Analyze.Maintainability != nil && file.Stmts.Analyze.Maintainability.MaintainabilityIndex != nil && file2.Stmts.Analyze.Maintainability.MaintainabilityIndex != nil { - change.Comparaison.AverageMI = float64(*file.Stmts.Analyze.Maintainability.MaintainabilityIndex) - float64(*file2.Stmts.Analyze.Maintainability.MaintainabilityIndex) + change.Comparaison.AverageMI = *file.Stmts.Analyze.Maintainability.MaintainabilityIndex - *file2.Stmts.Analyze.Maintainability.MaintainabilityIndex } if file.Stmts.Analyze.Maintainability != nil && file2.Stmts.Analyze.Maintainability != nil && file.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments != nil && file2.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments != nil { - change.Comparaison.AverageMIwoc = float64(*file.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments) - float64(*file2.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments) + change.Comparaison.AverageMIwoc = *file.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments - *file2.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments } // Coupling if file.Stmts.Analyze.Coupling != nil && file2.Stmts.Analyze.Coupling != nil { - change.Comparaison.AverageAfferentCoupling = float64(file.Stmts.Analyze.Coupling.Afferent) - float64(file2.Stmts.Analyze.Coupling.Afferent) - change.Comparaison.AverageEfferentCoupling = float64(file.Stmts.Analyze.Coupling.Efferent) - float64(file2.Stmts.Analyze.Coupling.Efferent) - change.Comparaison.AverageInstability = float64(file.Stmts.Analyze.Coupling.Instability) - float64(file2.Stmts.Analyze.Coupling.Instability) + change.Comparaison.AverageAfferentCoupling = float32(file.Stmts.Analyze.Coupling.Afferent) - float32(file2.Stmts.Analyze.Coupling.Afferent) + change.Comparaison.AverageEfferentCoupling = float32(file.Stmts.Analyze.Coupling.Efferent) - float32(file2.Stmts.Analyze.Coupling.Efferent) + change.Comparaison.AverageInstability = file.Stmts.Analyze.Coupling.Instability - file2.Stmts.Analyze.Coupling.Instability } // Risk if file.Stmts.Analyze.Risk != nil && file2.Stmts.Analyze.Risk != nil { - change.Comparaison.Risk = float64(file.Stmts.Analyze.Risk.Score) - float64(file2.Stmts.Analyze.Risk.Score) + change.Comparaison.Risk = file.Stmts.Analyze.Risk.Score - file2.Stmts.Analyze.Risk.Score // check if not NaN if change.Comparaison.Risk != change.Comparaison.Risk { change.Comparaison.Risk = 0 diff --git a/src/Analyzer/Component/MaintainabilityIndexVisitor.go b/src/Analyzer/Component/MaintainabilityIndexVisitor.go index 67406b0..4b051ac 100644 --- a/src/Analyzer/Component/MaintainabilityIndexVisitor.go +++ b/src/Analyzer/Component/MaintainabilityIndexVisitor.go @@ -66,23 +66,23 @@ func (v *MaintainabilityIndexVisitor) Calculate(stmts *pb.Stmts) { var cloc int32 = *stmts.Analyze.Volume.Cloc var cyclomatic int32 = *stmts.Analyze.Complexity.Cyclomatic var halsteadVolume float32 = *stmts.Analyze.Volume.HalsteadVolume - var MIwoC float64 = 0 - var MI float64 = 0 - var commentWeight float64 = 0 + var MIwoC float32 = 0 + var MI float32 = 0 + var commentWeight float32 = 0 // // maintainability index without comment - MIwoC = max((171- + MIwoC = float32(math.Max((171- (5.2*math.Log(float64(halsteadVolume)))- (0.23*float64(cyclomatic))- - (16.2*math.Log(float64(lloc))))*100/171, 0) + (16.2*math.Log(float64(lloc))))*100/171, 0)) 
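	// For reference (annotation, not a line of the patch): the expression above is
	// the usual maintainability index without comments, rescaled to 0..100:
	//   MIwoC = max(0, (171 - 5.2*ln(V) - 0.23*CC - 16.2*ln(LLOC)) * 100 / 171)
	// where V is the Halstead volume, CC the cyclomatic complexity and LLOC the
	// logical lines of code. Wrapping the result in float32() only narrows the
	// stored value; the intermediate math.Max/math.Log calls still run in float64.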
- if math.IsInf(MIwoC, 0) { + if math.IsInf(float64(MIwoC), 0) { MIwoC = 171 } if loc > 0 { CM := float64(cloc) / float64(loc) - commentWeight = 50 * math.Sin(math.Sqrt(2.4*CM)) + commentWeight = float32(50 * math.Sin(math.Sqrt(2.4*CM))) } MI = MIwoC + commentWeight diff --git a/src/Analyzer/RiskAnalyzer.go b/src/Analyzer/RiskAnalyzer.go index 8f04049..54fb2aa 100644 --- a/src/Analyzer/RiskAnalyzer.go +++ b/src/Analyzer/RiskAnalyzer.go @@ -16,9 +16,9 @@ func NewRiskAnalyzer() *RiskAnalyzer { func (v *RiskAnalyzer) Analyze(project ProjectAggregated) { - maxComplexity := 0.0 - maxCyclomatic := 0.0 - maxCommits := 0.0 + var maxComplexity float32 = 0 + var maxCyclomatic int32 = 0 + var maxCommits int = 0 // get bounds for _, file := range project.Combined.ConcernedFiles { @@ -32,20 +32,20 @@ func (v *RiskAnalyzer) Analyze(project ProjectAggregated) { // OOP file for _, class := range classes { - maintainability := float64(128 - *class.Stmts.Analyze.Maintainability.MaintainabilityIndex) + maintainability := 128 - *class.Stmts.Analyze.Maintainability.MaintainabilityIndex if maintainability > maxComplexity { maxComplexity = maintainability } } // all files (procedural and OOP) - cyclomatic := float64(*file.Stmts.Analyze.Complexity.Cyclomatic) + cyclomatic := *file.Stmts.Analyze.Complexity.Cyclomatic if cyclomatic > maxCyclomatic { maxCyclomatic = cyclomatic } - if float64(len(commits)) > maxCommits { - maxCommits = float64(len(commits)) + if len(commits) > maxCommits { + maxCommits = len(commits) } } @@ -68,7 +68,7 @@ func (v *RiskAnalyzer) Analyze(project ProjectAggregated) { continue } - risk := v.GetRisk(maxCommits, maxComplexity, nbCommits, int(128-*class.Stmts.Analyze.Maintainability.MaintainabilityIndex)) + risk := v.GetRisk(int32(maxCommits), maxComplexity, nbCommits, int(128-*class.Stmts.Analyze.Maintainability.MaintainabilityIndex)) file.Stmts.Analyze.Risk.Score += float32(risk) } @@ -77,24 +77,24 @@ func (v *RiskAnalyzer) Analyze(project ProjectAggregated) { continue } - cyclo := float64(*file.Stmts.Analyze.Complexity.Cyclomatic) - risk := v.GetRisk(maxCommits, maxCyclomatic, nbCommits, int(cyclo)) + cyclo := *file.Stmts.Analyze.Complexity.Cyclomatic + risk := v.GetRisk(int32(maxCommits), float32(maxCyclomatic), nbCommits, int(cyclo)) file.Stmts.Analyze.Risk.Score += float32(risk) } } -func (v *RiskAnalyzer) GetRisk(maxCommits float64, maxComplexity float64, nbCommits int, complexity int) float32 { +func (v *RiskAnalyzer) GetRisk(maxCommits int32, maxComplexity float32, nbCommits int, complexity int) float32 { // Calculate the horizontal and vertical distance from the "top right" corner. - horizontalDistance := maxCommits - float64(nbCommits) - verticalDistance := maxComplexity - float64(complexity) + horizontalDistance := float32(maxCommits) - float32(nbCommits) + verticalDistance := maxComplexity - float32(complexity) // Normalize these values over time, we first divide by the maximum values, to always end up with distances between 0 and 1. - normalizedHorizontalDistance := horizontalDistance / maxCommits + normalizedHorizontalDistance := horizontalDistance / float32(maxCommits) normalizedVerticalDistance := verticalDistance / maxComplexity // Calculate the distance of this class from the "top right" corner, using the simple formula A^2 + B^2 = C^2; or: C = sqrt(A^2 + B^2)). 
- distanceFromTopRightCorner := math.Sqrt(math.Pow(normalizedHorizontalDistance, 2) + math.Pow(normalizedVerticalDistance, 2)) + distanceFromTopRightCorner := math.Sqrt(math.Pow(float64(normalizedHorizontalDistance), 2) + math.Pow(float64(normalizedVerticalDistance), 2)) // The resulting value will be between 0 and sqrt(2). A short distance is bad, so in order to end up with a high score, we invert the value by subtracting it from 1. risk := 1 - distanceFromTopRightCorner diff --git a/src/Analyzer/Volume/HalsteadMetricsVisitor.go b/src/Analyzer/Volume/HalsteadMetricsVisitor.go index c76a13e..11608e6 100644 --- a/src/Analyzer/Volume/HalsteadMetricsVisitor.go +++ b/src/Analyzer/Volume/HalsteadMetricsVisitor.go @@ -30,11 +30,11 @@ func (v *HalsteadMetricsVisitor) Visit(stmts *pb.Stmts, parents *pb.Stmts) { var N int32 // program length (N) var N1 int32 var N2 int32 - var hatN float64 = 0 // estimated program length (𝑁̂) - var V float64 = 0 // volume (V) - var D float64 = 0 // difficulty (D) - var E float64 = 0 // effort (E) - var T float64 = 0 // time required to program (T) + var hatN float32 = 0 // estimated program length (𝑁̂) + var V float32 = 0 // volume (V) + var D float32 = 0 // difficulty (D) + var E float32 = 0 // effort (E) + var T float32 = 0 // time required to program (T) for _, stmt := range parents.StmtFunction { if stmt.Stmts == nil { @@ -74,20 +74,20 @@ func (v *HalsteadMetricsVisitor) Visit(stmts *pb.Stmts, parents *pb.Stmts) { N = int32(N1 + N2) // Calculate estimated program length (𝑁̂) - hatN = float64(n1)*math.Log2(float64(n1)) + float64(n2)*math.Log2(float64(n2)) - if math.IsNaN(hatN) { + hatN = float32(n1)*float32(math.Log2(float64(n1))) + float32(n2)*float32(math.Log2(float64(n2))) + if math.IsNaN(float64(hatN)) { hatN = 0 } // Calculate volume (V) - V = float64(N) * math.Log2(float64(n)) - if math.IsNaN(V) { + V = float32(N) * float32(math.Log2(float64(n))) + if math.IsNaN(float64(V)) { V = 0 } // Calculate difficulty (D) - D = float64(n1) / 2 * float64(N2) / float64(n2) - if math.IsNaN(D) { + D = float32(n1)/2*float32(N2)/float32(n2) + if math.IsNaN(float64(D)) { D = 0 } @@ -135,11 +135,11 @@ func (v *HalsteadMetricsVisitor) LeaveNode(stmts *pb.Stmts) { var n int32 = 0 var N int32 = 0 - var hatN float64 - var V float64 - var D float64 - var E float64 - var T float64 + var hatN float32 + var V float32 + var D float32 + var E float32 + var T float32 // initialize default values hatN = 0 @@ -155,11 +155,11 @@ func (v *HalsteadMetricsVisitor) LeaveNode(stmts *pb.Stmts) { } n += int32(*method.Stmts.Analyze.Volume.HalsteadVocabulary) N += int32(*method.Stmts.Analyze.Volume.HalsteadLength) - hatN += float64(*method.Stmts.Analyze.Volume.HalsteadEstimatedLength) - V += float64(*method.Stmts.Analyze.Volume.HalsteadVolume) - D += float64(*method.Stmts.Analyze.Volume.HalsteadDifficulty) - E += float64(*method.Stmts.Analyze.Volume.HalsteadEffort) - T += float64(*method.Stmts.Analyze.Volume.HalsteadTime) + hatN += *method.Stmts.Analyze.Volume.HalsteadEstimatedLength + V += *method.Stmts.Analyze.Volume.HalsteadVolume + D += *method.Stmts.Analyze.Volume.HalsteadDifficulty + E += *method.Stmts.Analyze.Volume.HalsteadEffort + T += *method.Stmts.Analyze.Volume.HalsteadTime } } @@ -167,11 +167,11 @@ func (v *HalsteadMetricsVisitor) LeaveNode(stmts *pb.Stmts) { if len(stmt.Stmts.StmtFunction) > 0 { n = n / int32(len(stmt.Stmts.StmtFunction)) N = N / int32(len(stmt.Stmts.StmtFunction)) - hatN = hatN / float64(len(stmt.Stmts.StmtFunction)) - V = V / float64(len(stmt.Stmts.StmtFunction)) - 
D = D / float64(len(stmt.Stmts.StmtFunction)) - E = E / float64(len(stmt.Stmts.StmtFunction)) - T = T / float64(len(stmt.Stmts.StmtFunction)) + hatN = hatN / float32(len(stmt.Stmts.StmtFunction)) + V = V / float32(len(stmt.Stmts.StmtFunction)) + D = D / float32(len(stmt.Stmts.StmtFunction)) + E = E / float32(len(stmt.Stmts.StmtFunction)) + T = T / float32(len(stmt.Stmts.StmtFunction)) } // convert float to float32 diff --git a/src/Cli/ComponentClassTable.go b/src/Cli/ComponentClassTable.go index bcc84d7..e5aeab2 100644 --- a/src/Cli/ComponentClassTable.go +++ b/src/Cli/ComponentClassTable.go @@ -150,7 +150,7 @@ func (v *ComponentTableClass) Init() { strconv.Itoa(int(*class.Stmts.Analyze.Volume.Loc)), strconv.Itoa(int(*class.Stmts.Analyze.Complexity.Cyclomatic)), strconv.Itoa(int(*class.Stmts.Analyze.Volume.HalsteadLength)), - fmt.Sprintf("%.2f", ToFixed(float64(*class.Stmts.Analyze.Volume.HalsteadVolume), 2)), + fmt.Sprintf("%.2f", ToFixed(*class.Stmts.Analyze.Volume.HalsteadVolume, 2)), DecorateMaintainabilityIndex(int(*class.Stmts.Analyze.Maintainability.MaintainabilityIndex), class.Stmts.Analyze), }) } @@ -171,7 +171,7 @@ func (v *ComponentTableClass) Init() { strconv.Itoa(int(*class.Stmts.Analyze.Volume.Loc)), strconv.Itoa(int(*class.Stmts.Analyze.Complexity.Cyclomatic)), strconv.Itoa(int(*class.Stmts.Analyze.Volume.HalsteadLength)), - fmt.Sprintf("%.2f", ToFixed(float64(*class.Stmts.Analyze.Volume.HalsteadVolume), 2)), + fmt.Sprintf("%.2f", ToFixed(*class.Stmts.Analyze.Volume.HalsteadVolume, 2)), DecorateMaintainabilityIndex(int(*class.Stmts.Analyze.Maintainability.MaintainabilityIndex), class.Stmts.Analyze), }) } diff --git a/src/Cli/Styles.go b/src/Cli/Styles.go index 72e3d5b..704817b 100644 --- a/src/Cli/Styles.go +++ b/src/Cli/Styles.go @@ -125,11 +125,11 @@ func DecorateMaintainabilityIndex(mi int, analyze *pb.Analyze) string { return "🟢 " + strconv.Itoa(mi) } -func Round(num float64) int { - return int(num + math.Copysign(0.5, num)) +func Round(num float32) int { + return int(num + float32(math.Copysign(0.5, float64(num)))) } -func ToFixed(num float64, precision int) float64 { +func ToFixed(num float32, precision int) float32 { output := math.Pow(10, float64(precision)) - return float64(Round(num*output)) / output + return float32(Round(num*float32(output))) / float32(output) } diff --git a/src/Engine/util.go b/src/Engine/util.go index 614fa7a..5a09de0 100644 --- a/src/Engine/util.go +++ b/src/Engine/util.go @@ -133,7 +133,7 @@ func GetFunctionsInFile(file *pb.File) []*pb.StmtFunction { } // render as HTML -func HtmlChartLine(data *orderedmap.OrderedMap[string, float64], label string, id string) string { +func HtmlChartLine(data *orderedmap.OrderedMap[string, float32], label string, id string) string { series := "[" for _, key := range data.Keys() { value, _ := data.Get(key) @@ -237,7 +237,7 @@ if (document.getElementById("` + id + `") && typeof ApexCharts !== 'undefined') } // render as HTML -func HtmlChartArea(data *orderedmap.OrderedMap[string, float64], label string, id string) string { +func HtmlChartArea(data *orderedmap.OrderedMap[string, float32], label string, id string) string { values := "[" keys := "[" diff --git a/src/Pkg/Cleaner/cleaner.go b/src/Pkg/Cleaner/cleaner.go index 9f386a6..61adc6b 100644 --- a/src/Pkg/Cleaner/cleaner.go +++ b/src/Pkg/Cleaner/cleaner.go @@ -7,12 +7,12 @@ import ( ) var ( - defaultFloat64 float64 = 0 + defaultfloat32 float32 = 0 defaultFloat32 float32 = 0 ) // The CleanVal removes all NaN values from any value -// and sets them to the 
default float64 value, which is 0. +// and sets them to the default float32 value, which is 0. // For float32 values, it also sets them to 0. // // This function accepts a pointer because it needs @@ -60,18 +60,13 @@ func cleanSlice(v reflect.Value) { func cleanField(field reflect.Value) { switch field.Kind() { - case reflect.Float32, reflect.Float64: + case reflect.Float32: f := field.Float() isInvalidAndCanSet := field.CanSet() && (math.IsNaN(f) || math.IsInf(f, 0)) if !isInvalidAndCanSet { return } - switch field.Kind() { - case reflect.Float64: - field.Set(reflect.ValueOf(defaultFloat64)) - case reflect.Float32: - field.Set(reflect.ValueOf(defaultFloat32)) - } + field.Set(reflect.ValueOf(defaultFloat32)) } } diff --git a/src/Report/MarkdownReportGenerator.go b/src/Report/MarkdownReportGenerator.go index 98cd3bf..a854539 100644 --- a/src/Report/MarkdownReportGenerator.go +++ b/src/Report/MarkdownReportGenerator.go @@ -165,9 +165,9 @@ func (v *MarkdownReportGenerator) RegisterFilters() { // format it if number > 1000000 { - return pongo2.AsValue(fmt.Sprintf("%.1f M", float64(number)/1000000)), nil + return pongo2.AsValue(fmt.Sprintf("%.1f M", number/1000000)), nil } else if number > 1000 { - return pongo2.AsValue(fmt.Sprintf("%.1f K", float64(number)/1000)), nil + return pongo2.AsValue(fmt.Sprintf("%.1f K", number/1000)), nil } return pongo2.AsValue(number), nil diff --git a/src/Report/types.go b/src/Report/types.go index a6d7e1a..b5c7e37 100644 --- a/src/Report/types.go +++ b/src/Report/types.go @@ -16,33 +16,33 @@ type report struct { Loc int `json:"loc,omitempty"` Cloc int `json:"cloc,omitempty"` Lloc int `json:"lloc,omitempty"` - AverageMethodsPerClass float64 `json:"averageMethodsPerClass,omitempty"` - AverageLocPerMethod float64 `json:"averageLocPerMethod,omitempty"` - AverageLlocPerMethod float64 `json:"averageLlocPerMethod,omitempty"` - AverageClocPerMethod float64 `json:"averageClocPerMethod,omitempty"` - AverageCyclomaticComplexityPerMethod float64 `json:"averageCyclomaticComplexityPerMethod,omitempty"` - AverageCyclomaticComplexityPerClass float64 `json:"averageCyclomaticComplexityPerClass,omitempty"` + AverageMethodsPerClass float32 `json:"averageMethodsPerClass,omitempty"` + AverageLocPerMethod float32 `json:"averageLocPerMethod,omitempty"` + AverageLlocPerMethod float32 `json:"averageLlocPerMethod,omitempty"` + AverageClocPerMethod float32 `json:"averageClocPerMethod,omitempty"` + AverageCyclomaticComplexityPerMethod float32 `json:"averageCyclomaticComplexityPerMethod,omitempty"` + AverageCyclomaticComplexityPerClass float32 `json:"averageCyclomaticComplexityPerClass,omitempty"` MinCyclomaticComplexity int `json:"minCyclomaticComplexity,omitempty"` MaxCyclomaticComplexity int `json:"maxCyclomaticComplexity,omitempty"` - AverageHalsteadDifficulty float64 `json:"averageHalsteadDifficulty,omitempty"` - AverageHalsteadEffort float64 `json:"averageHalsteadEffort,omitempty"` - AverageHalsteadVolume float64 `json:"averageHalsteadVolume,omitempty"` - AverageHalsteadTime float64 `json:"averageHalsteadTime,omitempty"` - AverageHalsteadBugs float64 `json:"averageHalsteadBugs,omitempty"` - SumHalsteadDifficulty float64 `json:"sumHalsteadDifficulty,omitempty"` - SumHalsteadEffort float64 `json:"sumHalsteadEffort,omitempty"` - SumHalsteadVolume float64 `json:"sumHalsteadVolume,omitempty"` - SumHalsteadTime float64 `json:"sumHalsteadTime,omitempty"` - SumHalsteadBugs float64 `json:"sumHalsteadBugs,omitempty"` - AverageMI float64 `json:"averageMI,omitempty"` - AverageMIwoc float64 
`json:"averageMIwoc,omitempty"` - AverageMIcw float64 `json:"averageMIcw,omitempty"` - AverageMIPerMethod float64 `json:"averageMIPerMethod,omitempty"` - AverageMIwocPerMethod float64 `json:"averageMIwocPerMethod,omitempty"` - AverageMIcwPerMethod float64 `json:"averageMIcwPerMethod,omitempty"` - AverageAfferentCoupling float64 `json:"averageAfferentCoupling,omitempty"` - AverageEfferentCoupling float64 `json:"averageEfferentCoupling,omitempty"` - AverageInstability float64 `json:"averageInstability,omitempty"` + AverageHalsteadDifficulty float32 `json:"averageHalsteadDifficulty,omitempty"` + AverageHalsteadEffort float32 `json:"averageHalsteadEffort,omitempty"` + AverageHalsteadVolume float32 `json:"averageHalsteadVolume,omitempty"` + AverageHalsteadTime float32 `json:"averageHalsteadTime,omitempty"` + AverageHalsteadBugs float32 `json:"averageHalsteadBugs,omitempty"` + SumHalsteadDifficulty float32 `json:"sumHalsteadDifficulty,omitempty"` + SumHalsteadEffort float32 `json:"sumHalsteadEffort,omitempty"` + SumHalsteadVolume float32 `json:"sumHalsteadVolume,omitempty"` + SumHalsteadTime float32 `json:"sumHalsteadTime,omitempty"` + SumHalsteadBugs float32 `json:"sumHalsteadBugs,omitempty"` + AverageMI float32 `json:"averageMI,omitempty"` + AverageMIwoc float32 `json:"averageMIwoc,omitempty"` + AverageMIcw float32 `json:"averageMIcw,omitempty"` + AverageMIPerMethod float32 `json:"averageMIPerMethod,omitempty"` + AverageMIwocPerMethod float32 `json:"averageMIwocPerMethod,omitempty"` + AverageMIcwPerMethod float32 `json:"averageMIcwPerMethod,omitempty"` + AverageAfferentCoupling float32 `json:"averageAfferentCoupling,omitempty"` + AverageEfferentCoupling float32 `json:"averageEfferentCoupling,omitempty"` + AverageInstability float32 `json:"averageInstability,omitempty"` CommitCountForPeriod int `json:"commitCountForPeriod,omitempty"` CommittedFilesCountForPeriod int `json:"committedFilesCountForPeriod,omitempty"` // for example if one commit concerns 10 files, it will be 10 BusFactor int `json:"busFactor,omitempty"` diff --git a/src/Ui/ComponentBarchart.go b/src/Ui/ComponentBarchart.go index 1c683ce..e27a52e 100644 --- a/src/Ui/ComponentBarchart.go +++ b/src/Ui/ComponentBarchart.go @@ -5,7 +5,7 @@ import ( ) type ComponentBarchart struct { - data map[string]float64 + data map[string]float32 height int barWidth int } @@ -20,7 +20,12 @@ func (c *ComponentBarchart) AsTerminalElement() string { c.barWidth = 8 } - graph := bar.Draw(c.data, bar.Options{ + data := make(map[string]float64) + for key, value := range c.data { + data[key] = float64(value) + } + + graph := bar.Draw(data, bar.Options{ Chart: bar.Chart{ Height: c.height, }, diff --git a/src/Ui/ComponentBarchartCyclomaticByMethodRepartition.go b/src/Ui/ComponentBarchartCyclomaticByMethodRepartition.go index 482d842..c678bca 100644 --- a/src/Ui/ComponentBarchartCyclomaticByMethodRepartition.go +++ b/src/Ui/ComponentBarchartCyclomaticByMethodRepartition.go @@ -15,7 +15,7 @@ type ComponentBarchartCyclomaticByMethodRepartition struct { func (c *ComponentBarchartCyclomaticByMethodRepartition) AsTerminalElement() string { dataOrdered := c.GetData() - data := make(map[string]float64) + data := make(map[string]float32) for _, k := range dataOrdered.Keys() { value, _ := dataOrdered.Get(k) data[k] = value @@ -27,8 +27,8 @@ func (c *ComponentBarchartCyclomaticByMethodRepartition) AsTerminalElement() str return graph.AsTerminalElement() } -func (c *ComponentBarchartCyclomaticByMethodRepartition) GetData() *orderedmap.OrderedMap[string, float64] { - data 
:= orderedmap.NewOrderedMap[string, float64]() +func (c *ComponentBarchartCyclomaticByMethodRepartition) GetData() *orderedmap.OrderedMap[string, float32] { + data := orderedmap.NewOrderedMap[string, float32]() rangeOfLabels := []string{"0-5", "5-20", "> 20"} rangeOfValues := []int32{5, 20, 999999} diff --git a/src/Ui/ComponentBarchartLocByMethodRepartition.go b/src/Ui/ComponentBarchartLocByMethodRepartition.go index cf5bea0..2f8529c 100644 --- a/src/Ui/ComponentBarchartLocByMethodRepartition.go +++ b/src/Ui/ComponentBarchartLocByMethodRepartition.go @@ -16,7 +16,7 @@ type ComponentBarchartLocByMethodRepartition struct { // Render is the method to render the component func (c *ComponentBarchartLocByMethodRepartition) AsTerminalElement() string { dataOrdered := c.GetData() - data := make(map[string]float64) + data := make(map[string]float32) for _, k := range dataOrdered.Keys() { value, _ := dataOrdered.Get(k) data[k] = value @@ -33,8 +33,8 @@ func (c *ComponentBarchartLocByMethodRepartition) AsHtml() string { return Engine.HtmlChartLine(data, "Number of files", "chart-loc-by-method") } -func (c *ComponentBarchartLocByMethodRepartition) GetData() *orderedmap.OrderedMap[string, float64] { - data := orderedmap.NewOrderedMap[string, float64]() +func (c *ComponentBarchartLocByMethodRepartition) GetData() *orderedmap.OrderedMap[string, float32] { + data := orderedmap.NewOrderedMap[string, float32]() rangeOfLabels := []string{"< 15", "< 35", "< 50", "> 50"} rangeOfValues := []int32{15, 35, 50, 999999} diff --git a/src/Ui/ComponentBarchartMaintainabilityIndexRepartition.go b/src/Ui/ComponentBarchartMaintainabilityIndexRepartition.go index 2f79b94..3261623 100644 --- a/src/Ui/ComponentBarchartMaintainabilityIndexRepartition.go +++ b/src/Ui/ComponentBarchartMaintainabilityIndexRepartition.go @@ -22,7 +22,7 @@ func (c *ComponentBarchartMaintainabilityIndexRepartition) AsHtml() string { // Render is the method to render the component func (c *ComponentBarchartMaintainabilityIndexRepartition) AsTerminalElement() string { dataOrdered := c.GetData() - data := make(map[string]float64) + data := make(map[string]float32) for _, k := range dataOrdered.Keys() { value, _ := dataOrdered.Get(k) data[k] = value @@ -33,8 +33,8 @@ func (c *ComponentBarchartMaintainabilityIndexRepartition) AsTerminalElement() s } // GetData returns the data for the barchart -func (c *ComponentBarchartMaintainabilityIndexRepartition) GetData() *orderedmap.OrderedMap[string, float64] { - data := orderedmap.NewOrderedMap[string, float64]() +func (c *ComponentBarchartMaintainabilityIndexRepartition) GetData() *orderedmap.OrderedMap[string, float32] { + data := orderedmap.NewOrderedMap[string, float32]() rangeOfLabels := []string{"🔴 < 64", "🟡 < 85", "🟢 > 85"} rangeOfValues := []float32{64, 85, 1000} diff --git a/src/Ui/ComponentLineChartGitActivity.go b/src/Ui/ComponentLineChartGitActivity.go index 55e6a0c..9fec44c 100644 --- a/src/Ui/ComponentLineChartGitActivity.go +++ b/src/Ui/ComponentLineChartGitActivity.go @@ -18,7 +18,7 @@ type ComponentLineChartGitActivity struct { // Render is the method to render the component func (c *ComponentLineChartGitActivity) AsTerminalElement() string { dataOrdered := c.GetData() - data := make(map[string]float64) + data := make(map[string]float32) for _, k := range dataOrdered.Keys() { value, _ := dataOrdered.Get(k) data[k] = value @@ -35,9 +35,9 @@ func (c *ComponentLineChartGitActivity) AsHtml() string { return Engine.HtmlChartArea(data, "Number of commits", "chart-git") } -func (c 
*ComponentLineChartGitActivity) GetData() *orderedmap.OrderedMap[string, float64] { - //data := make(map[string]float64)* - data := orderedmap.NewOrderedMap[string, float64]() +func (c *ComponentLineChartGitActivity) GetData() *orderedmap.OrderedMap[string, float32] { + //data := make(map[string]float32)* + data := orderedmap.NewOrderedMap[string, float32]() // 1 year ago oneYearAgo := time.Now().AddDate(-1, 0, 0) From 7b79fce72066f3499fc1af13e08e6a3c97dbb84c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20L=C3=A9pine?= Date: Tue, 26 Nov 2024 13:23:50 +0100 Subject: [PATCH 05/16] configure parallelisation according the number of available CPU --- src/Analyzer/Aggregator.go | 347 +++++++++++++++++++----------------- src/Analyzer/AstAnalyzer.go | 35 ++-- 2 files changed, 202 insertions(+), 180 deletions(-) diff --git a/src/Analyzer/Aggregator.go b/src/Analyzer/Aggregator.go index 2870ea4..89eaffd 100644 --- a/src/Analyzer/Aggregator.go +++ b/src/Analyzer/Aggregator.go @@ -3,6 +3,7 @@ package Analyzer import ( "math" "regexp" + "runtime" "sync" "github.com/halleck45/ast-metrics/src/Engine" @@ -356,187 +357,199 @@ func (r *Aggregator) consolidate(aggregated *Aggregated) { var wg sync.WaitGroup var mu sync.Mutex + numWorkers := runtime.NumCPU() + filesChan := make(chan *pb.File, numWorkers) reg := regexp.MustCompile("[^A-Za-z0-9.]+") - for _, file := range aggregated.ConcernedFiles { - wg.Add(1) - go func(file *pb.File) { - defer wg.Done() - - if file.LinesOfCode == nil { - return - } - - mu.Lock() - aggregated.Loc += int(file.LinesOfCode.LinesOfCode) - aggregated.Cloc += int(file.LinesOfCode.CommentLinesOfCode) - aggregated.Lloc += int(file.LinesOfCode.LogicalLinesOfCode) - mu.Unlock() - - // Create local variables for file processing - localFile := &pb.File{ - Stmts: file.Stmts, - } - - // Calculate alternate MI using average MI per method when file has no class - if len(localFile.Stmts.StmtClass) == 0 { - if localFile.Stmts.Analyze.Maintainability == nil { - localFile.Stmts.Analyze.Maintainability = &pb.Maintainability{} - } - - methods := file.Stmts.StmtFunction - if len(methods) == 0 { - return - } - averageForFile := float32(0) - for _, method := range methods { - if method.Stmts.Analyze == nil || method.Stmts.Analyze.Maintainability == nil { - continue + for i := 0; i < numWorkers; i++ { + go func() { + for file := range filesChan { + + wg.Add(1) + go func(file *pb.File) { + defer wg.Done() + + if file.LinesOfCode == nil { + return } - averageForFile += float32(*method.Stmts.Analyze.Maintainability.MaintainabilityIndex) - } - averageForFile = averageForFile / float32(len(methods)) - localFile.Stmts.Analyze.Maintainability.MaintainabilityIndex = &averageForFile - } - - // Update the original file with processed data - mu.Lock() - file.Stmts = localFile.Stmts - mu.Unlock() - - // LOC of file is the sum of all classes and methods - // That's useful when we navigate over the files instead of the classes - zero := int32(0) - loc := int32(0) - lloc := int32(0) - cloc := int32(0) - - if file.Stmts.Analyze.Volume == nil { - file.Stmts.Analyze.Volume = &pb.Volume{ - Lloc: &zero, - Cloc: &zero, - Loc: &zero, - } - } - - classes := Engine.GetClassesInFile(file) - functions := file.Stmts.StmtFunction - - // Initialize file complexity if needed - if file.Stmts.Analyze.Complexity.Cyclomatic == nil { - file.Stmts.Analyze.Complexity.Cyclomatic = &zero - } - - // Process functions - for _, function := range functions { - // Handle LOC - if function.LinesOfCode != nil { - loc += 
function.LinesOfCode.LinesOfCode - lloc += function.LinesOfCode.LogicalLinesOfCode - cloc += function.LinesOfCode.CommentLinesOfCode - } - - // Handle complexity - if function.Stmts.Analyze != nil && function.Stmts.Analyze.Complexity != nil { - *file.Stmts.Analyze.Complexity.Cyclomatic += *function.Stmts.Analyze.Complexity.Cyclomatic - } - } - - // Process classes - for _, class := range classes { - // Handle LOC - if class.LinesOfCode != nil { - loc += class.LinesOfCode.LinesOfCode - lloc += class.LinesOfCode.LogicalLinesOfCode - cloc += class.LinesOfCode.CommentLinesOfCode - } - - // Handle coupling - if class.Stmts != nil && class.Stmts.Analyze != nil { - if class.Stmts.Analyze.Coupling == nil { - class.Stmts.Analyze.Coupling = &pb.Coupling{ - Efferent: 0, - Afferent: 0, + + mu.Lock() + aggregated.Loc += int(file.LinesOfCode.LinesOfCode) + aggregated.Cloc += int(file.LinesOfCode.CommentLinesOfCode) + aggregated.Lloc += int(file.LinesOfCode.LogicalLinesOfCode) + mu.Unlock() + + // Create local variables for file processing + localFile := &pb.File{ + Stmts: file.Stmts, + } + + // Calculate alternate MI using average MI per method when file has no class + if len(localFile.Stmts.StmtClass) == 0 { + if localFile.Stmts.Analyze.Maintainability == nil { + localFile.Stmts.Analyze.Maintainability = &pb.Maintainability{} + } + + methods := file.Stmts.StmtFunction + if len(methods) == 0 { + return + } + averageForFile := float32(0) + for _, method := range methods { + if method.Stmts.Analyze == nil || method.Stmts.Analyze.Maintainability == nil { + continue + } + averageForFile += float32(*method.Stmts.Analyze.Maintainability.MaintainabilityIndex) } + averageForFile = averageForFile / float32(len(methods)) + localFile.Stmts.Analyze.Maintainability.MaintainabilityIndex = &averageForFile } - class.Stmts.Analyze.Coupling.Afferent = 0 - - if class.Name != nil { + + // Update the original file with processed data + mu.Lock() + file.Stmts = localFile.Stmts + mu.Unlock() + + // LOC of file is the sum of all classes and methods + // That's useful when we navigate over the files instead of the classes + zero := int32(0) + loc := int32(0) + lloc := int32(0) + cloc := int32(0) + + if file.Stmts.Analyze.Volume == nil { + file.Stmts.Analyze.Volume = &pb.Volume{ + Lloc: &zero, + Cloc: &zero, + Loc: &zero, + } + } + + classes := Engine.GetClassesInFile(file) + functions := file.Stmts.StmtFunction + + // Initialize file complexity if needed + if file.Stmts.Analyze.Complexity.Cyclomatic == nil { + file.Stmts.Analyze.Complexity.Cyclomatic = &zero + } + + // Process functions + for _, function := range functions { + // Handle LOC + if function.LinesOfCode != nil { + loc += function.LinesOfCode.LinesOfCode + lloc += function.LinesOfCode.LogicalLinesOfCode + cloc += function.LinesOfCode.CommentLinesOfCode + } + + // Handle complexity + if function.Stmts.Analyze != nil && function.Stmts.Analyze.Complexity != nil { + *file.Stmts.Analyze.Complexity.Cyclomatic += *function.Stmts.Analyze.Complexity.Cyclomatic + } + } + + // Process classes + for _, class := range classes { + // Handle LOC + if class.LinesOfCode != nil { + loc += class.LinesOfCode.LinesOfCode + lloc += class.LinesOfCode.LogicalLinesOfCode + cloc += class.LinesOfCode.CommentLinesOfCode + } + + // Handle coupling + if class.Stmts != nil && class.Stmts.Analyze != nil { + if class.Stmts.Analyze.Coupling == nil { + class.Stmts.Analyze.Coupling = &pb.Coupling{ + Efferent: 0, + Afferent: 0, + } + } + class.Stmts.Analyze.Coupling.Afferent = 0 + + if class.Name != 
nil { + mu.Lock() + // if in hashmap + if _, ok := aggregated.ClassesAfferentCoupling[class.Name.Qualified]; ok { + class.Stmts.Analyze.Coupling.Afferent = int32(aggregated.ClassesAfferentCoupling[class.Name.Qualified]) + file.Stmts.Analyze.Coupling.Afferent += class.Stmts.Analyze.Coupling.Afferent + } + + // instability + if class.Stmts.Analyze.Coupling.Afferent > 0 || class.Stmts.Analyze.Coupling.Efferent > 0 { + instability := float32(class.Stmts.Analyze.Coupling.Efferent) / float32(class.Stmts.Analyze.Coupling.Efferent+class.Stmts.Analyze.Coupling.Afferent) + class.Stmts.Analyze.Coupling.Instability = instability + aggregated.AverageInstability += instability + } + mu.Unlock() + } + } + } + + file.Stmts.Analyze.Volume.Loc = &loc + file.Stmts.Analyze.Volume.Lloc = &lloc + file.Stmts.Analyze.Volume.Cloc = &cloc + + dependencies := file.Stmts.StmtExternalDependencies + + for _, dependency := range dependencies { + if dependency == nil { + continue + } + + namespaceTo := dependency.Namespace + namespaceFrom := dependency.From + + if namespaceFrom == "" || namespaceTo == "" { + continue + } + + // Keep only 2 levels in namespace + separator := reg.FindString(namespaceFrom) + parts := reg.Split(namespaceTo, -1) + if len(parts) > 2 { + namespaceTo = parts[0] + separator + parts[1] + } + + parts = reg.Split(namespaceFrom, -1) + if len(parts) > 2 { + namespaceFrom = parts[0] + separator + parts[1] + } + + // if same, continue + if namespaceFrom == namespaceTo { + continue + } + + // if root namespace, continue + if namespaceFrom == "" || namespaceTo == "" { + continue + } + mu.Lock() - // if in hashmap - if _, ok := aggregated.ClassesAfferentCoupling[class.Name.Qualified]; ok { - class.Stmts.Analyze.Coupling.Afferent = int32(aggregated.ClassesAfferentCoupling[class.Name.Qualified]) - file.Stmts.Analyze.Coupling.Afferent += class.Stmts.Analyze.Coupling.Afferent + // create the map if not exists + if _, ok := aggregated.PackageRelations[namespaceFrom]; !ok { + aggregated.PackageRelations[namespaceFrom] = make(map[string]int) } - - // instability - if class.Stmts.Analyze.Coupling.Afferent > 0 || class.Stmts.Analyze.Coupling.Efferent > 0 { - instability := float32(class.Stmts.Analyze.Coupling.Efferent) / float32(class.Stmts.Analyze.Coupling.Efferent+class.Stmts.Analyze.Coupling.Afferent) - class.Stmts.Analyze.Coupling.Instability = instability - aggregated.AverageInstability += instability + + if _, ok := aggregated.PackageRelations[namespaceFrom][namespaceTo]; !ok { + aggregated.PackageRelations[namespaceFrom][namespaceTo] = 0 } + + // increment the counter + aggregated.PackageRelations[namespaceFrom][namespaceTo]++ mu.Unlock() } - } + }(file) } + }() + } - file.Stmts.Analyze.Volume.Loc = &loc - file.Stmts.Analyze.Volume.Lloc = &lloc - file.Stmts.Analyze.Volume.Cloc = &cloc - - dependencies := file.Stmts.StmtExternalDependencies - - for _, dependency := range dependencies { - if dependency == nil { - continue - } - - namespaceTo := dependency.Namespace - namespaceFrom := dependency.From - - if namespaceFrom == "" || namespaceTo == "" { - continue - } - - // Keep only 2 levels in namespace - separator := reg.FindString(namespaceFrom) - parts := reg.Split(namespaceTo, -1) - if len(parts) > 2 { - namespaceTo = parts[0] + separator + parts[1] - } - - parts = reg.Split(namespaceFrom, -1) - if len(parts) > 2 { - namespaceFrom = parts[0] + separator + parts[1] - } - - // if same, continue - if namespaceFrom == namespaceTo { - continue - } - - // if root namespace, continue - if namespaceFrom == "" || 
namespaceTo == "" { - continue - } - - mu.Lock() - // create the map if not exists - if _, ok := aggregated.PackageRelations[namespaceFrom]; !ok { - aggregated.PackageRelations[namespaceFrom] = make(map[string]int) - } - - if _, ok := aggregated.PackageRelations[namespaceFrom][namespaceTo]; !ok { - aggregated.PackageRelations[namespaceFrom][namespaceTo] = 0 - } - - // increment the counter - aggregated.PackageRelations[namespaceFrom][namespaceTo]++ - mu.Unlock() - } - }(file) + for _, file := range aggregated.ConcernedFiles { + filesChan <- file } + wg.Wait() // Consolidate diff --git a/src/Analyzer/AstAnalyzer.go b/src/Analyzer/AstAnalyzer.go index 1b83374..c0039bf 100644 --- a/src/Analyzer/AstAnalyzer.go +++ b/src/Analyzer/AstAnalyzer.go @@ -2,6 +2,7 @@ package Analyzer import ( "io/ioutil" + "runtime" "strconv" "sync" @@ -32,21 +33,29 @@ func Start(workdir *Storage.Workdir, progressbar *pterm.SpinnerPrinter) []*pb.Fi channelResult := make(chan *pb.File, len(astFiles)) nbParsingFiles := 0 - // in parallel, 8 process max, analyze each AST file running the runAnalysis function - for _, file := range astFiles { - wg.Add(1) - nbParsingFiles++ - go func(file string) { - defer wg.Done() - executeFileAnalysis(file, channelResult) - // details is the number of files processed / total number of files - details := strconv.Itoa(nbParsingFiles) + "/" + strconv.Itoa(len(astFiles)) - - if progressbar != nil { - progressbar.UpdateText("Analyzing (" + details + ")") + // analyze each AST file running the runAnalysis function + numWorkers := runtime.NumCPU() + mu := sync.Mutex{} + filesChan := make(chan string, numWorkers) + + for i := 0; i < numWorkers; i++ { + go func() { + for file := range filesChan { + executeFileAnalysis(file, channelResult) + + mu.Lock() + details := strconv.Itoa(nbParsingFiles) + "/" + strconv.Itoa(len(astFiles)) + mu.Unlock() + + if progressbar != nil { + progressbar.UpdateText("Analyzing (" + details + ")") + } } + }() + } - }(file) + for _, file := range astFiles { + filesChan <- file } wg.Wait() From c6e0c35ad2412f7034348ef4e361012c30037124 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20L=C3=A9pine?= Date: Tue, 26 Nov 2024 13:30:01 +0100 Subject: [PATCH 06/16] monkey testing --- Makefile | 10 ++++-- scripts/monkey-test.sh | 72 +++++++++++++++++++++++++++++++++++++ src/Analyzer/Aggregator.go | 11 ++++-- src/Analyzer/AstAnalyzer.go | 7 ++-- 4 files changed, 93 insertions(+), 7 deletions(-) create mode 100644 scripts/monkey-test.sh diff --git a/Makefile b/Makefile index 9183ae6..e304a79 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,4 @@ -.PHONY: install build +.PHONY: install build monkey-test PROTOC_VERSION=24.4 ARCHITECTURE=linux-x86_64 @@ -41,4 +41,10 @@ test: go clean -testcache find . -type d -iname ".ast-metrics-cache" -exec rm -rf "{}" \; || true go test ./... 
- @echo "\e[34m\033[1mDONE \033[0m\e[39m\n" \ No newline at end of file + @echo "\e[34m\033[1mDONE \033[0m\e[39m\n" + +# monkey test: download random PHP and Go packages from top 100 and analyze them +monkey-test: + @echo "\e[34m\033[1m-> Monkey testing\033[0m\e[39m\n" + bash scripts/monkey-test.sh + @echo "\e[34m\033[1mDONE \033[0m\e[39m\n" diff --git a/scripts/monkey-test.sh b/scripts/monkey-test.sh new file mode 100644 index 0000000..7f5ae27 --- /dev/null +++ b/scripts/monkey-test.sh @@ -0,0 +1,72 @@ +set -e + +# number of packages to download +PACKAGES_COUNT=100 + +workdir=$(mktemp -d) +echo "Working in $workdir" +if [ -z "$workdir" ]; then + echo "Workdir not found" + exit 1 +fi + +# cleanup reports +rm -f ast-metrics-report.json + + +# sort TOP packages randomly +url="https://packagist.org/explore/popular.json?per_page=100" +# shuffle 100 packages +packages=$(curl -s $url | jq -r '.packages[].name' | shuf) +# take only $PACKAGES_COUNT packages +packages=$(echo "$packages" | head -n $PACKAGES_COUNT) + +echo "Downloading $PACKAGES_COUNT packages" +for package in $packages; +do + echo " Downloading $package" + repository=$(curl -s https://packagist.org/packages/$package.json | jq -r '.package.repository') + zipUrl="$repository/archive/refs/heads/master.zip" + # generate random name for destination + name=$(uuidgen) + destination="$workdir/$name" + echo " Downloading $zipUrl to $destination" + curl -s -L -o $destination.zip $zipUrl + + # if zip contains HTML, like "Just a moment...", then skip + if grep -q " /dev/null + rm $destination.zip +done + +echo "Analyzing $workdir" +time go run . analyze --ci $workdir + +# Ensure that report is generated +if [ ! -f ast-metrics-report.json ]; then + echo "Report not generated" + exit 1 +else + echo "Report generated" +fi + + +# Count number of analyzed files +# | **PHP** | 122.0 K | 🟢 112 | 1.21 | 12 | +line=$(cat build/report.md |grep '**PHP**'|head -n 1) +separator="|" +linesOfCode=$(echo $line | awk -F "$separator" '{print $3}') +echo "Analyzed $linesOfCode lines of code" + + +echo "Done" \ No newline at end of file diff --git a/src/Analyzer/Aggregator.go b/src/Analyzer/Aggregator.go index 89eaffd..7a4e98b 100644 --- a/src/Analyzer/Aggregator.go +++ b/src/Analyzer/Aggregator.go @@ -511,6 +511,10 @@ func (r *Aggregator) consolidate(aggregated *Aggregated) { if len(parts) > 2 { namespaceTo = parts[0] + separator + parts[1] } + + if namespaceFrom == "" || namespaceTo == "" { + continue + } parts = reg.Split(namespaceFrom, -1) if len(parts) > 2 { @@ -574,14 +578,15 @@ func (r *Aggregator) consolidate(aggregated *Aggregated) { } // Bus factor and other metrics based on aggregated data - wg.Add(len(r.analyzers)) + var wgAnalyzers sync.WaitGroup + wgAnalyzers.Add(len(r.analyzers)) for _, analyzer := range r.analyzers { go func(a AggregateAnalyzer) { - defer wg.Done() + defer wgAnalyzers.Done() a.Calculate(aggregated) }(analyzer) } - wg.Wait() + wgAnalyzers.Wait() } // Add an analyzer to the aggregator diff --git a/src/Analyzer/AstAnalyzer.go b/src/Analyzer/AstAnalyzer.go index c0039bf..515fe02 100644 --- a/src/Analyzer/AstAnalyzer.go +++ b/src/Analyzer/AstAnalyzer.go @@ -32,6 +32,7 @@ func Start(workdir *Storage.Workdir, progressbar *pterm.SpinnerPrinter) []*pb.Fi // https://stackoverflow.com/questions/58743038/why-does-this-goroutine-not-call-wg-done channelResult := make(chan *pb.File, len(astFiles)) + nbParsingFiles := 0 // analyze each AST file running the runAnalysis function numWorkers := runtime.NumCPU() @@ -41,11 +42,13 @@ func Start(workdir 
*Storage.Workdir, progressbar *pterm.SpinnerPrinter) []*pb.Fi for i := 0; i < numWorkers; i++ { go func() { for file := range filesChan { + mu.Lock() + nbParsingFiles++ + mu.Unlock() + executeFileAnalysis(file, channelResult) - mu.Lock() details := strconv.Itoa(nbParsingFiles) + "/" + strconv.Itoa(len(astFiles)) - mu.Unlock() if progressbar != nil { progressbar.UpdateText("Analyzing (" + details + ")") From 242e77f7af38667901ba42b11facbf219485c2fb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20L=C3=A9pine?= Date: Wed, 27 Nov 2024 06:43:15 +0100 Subject: [PATCH 07/16] speed up application, using the same double type everywhere --- src/Analyzer/Aggregator.go | 170 +++++++++--------- src/Analyzer/Aggregator_test.go | 72 ++++---- src/Analyzer/Comparator.go | 58 +++--- .../Component/MaintainabilityIndexVisitor.go | 20 +-- .../MaintainabilityIndexVisitor_test.go | 69 +++---- src/Analyzer/RiskAnalyzer.go | 20 +-- src/Analyzer/Volume/HalsteadMetricsVisitor.go | 60 +++---- .../Volume/HalsteadMetricsVisitor_test.go | 36 ++-- src/Cli/ComponentFileTable.go | 4 +- src/Cli/ComponentTableClass_test.go | 10 +- src/Cli/Styles.go | 8 +- src/Engine/util.go | 4 +- src/NodeType/NodeType.pb.go | 40 ++--- src/Pkg/Cleaner/cleaner.go | 12 +- src/Report/OpenMetricsGenerator_test.go | 4 +- src/Report/types.go | 70 ++++---- src/Ui/ComponentBarchart.go | 2 +- ...ntBarchartCyclomaticByMethodRepartition.go | 6 +- ...ComponentBarchartLocByMethodRepartition.go | 6 +- ...BarchartMaintainabilityIndexRepartition.go | 8 +- src/Ui/ComponentLineChartGitActivity.go | 8 +- 21 files changed, 344 insertions(+), 343 deletions(-) diff --git a/src/Analyzer/Aggregator.go b/src/Analyzer/Aggregator.go index 7a4e98b..e88e89c 100644 --- a/src/Analyzer/Aggregator.go +++ b/src/Analyzer/Aggregator.go @@ -34,33 +34,33 @@ type Aggregated struct { Loc int Cloc int Lloc int - AverageMethodsPerClass float32 - AverageLocPerMethod float32 - AverageLlocPerMethod float32 - AverageClocPerMethod float32 - AverageCyclomaticComplexityPerMethod float32 - AverageCyclomaticComplexityPerClass float32 + AverageMethodsPerClass float64 + AverageLocPerMethod float64 + AverageLlocPerMethod float64 + AverageClocPerMethod float64 + AverageCyclomaticComplexityPerMethod float64 + AverageCyclomaticComplexityPerClass float64 MinCyclomaticComplexity int MaxCyclomaticComplexity int - AverageHalsteadDifficulty float32 - AverageHalsteadEffort float32 - AverageHalsteadVolume float32 - AverageHalsteadTime float32 - AverageHalsteadBugs float32 - SumHalsteadDifficulty float32 - SumHalsteadEffort float32 - SumHalsteadVolume float32 - SumHalsteadTime float32 - SumHalsteadBugs float32 - AverageMI float32 - AverageMIwoc float32 - AverageMIcw float32 - AverageMIPerMethod float32 - AverageMIwocPerMethod float32 - AverageMIcwPerMethod float32 - AverageAfferentCoupling float32 - AverageEfferentCoupling float32 - AverageInstability float32 + AverageHalsteadDifficulty float64 + AverageHalsteadEffort float64 + AverageHalsteadVolume float64 + AverageHalsteadTime float64 + AverageHalsteadBugs float64 + SumHalsteadDifficulty float64 + SumHalsteadEffort float64 + SumHalsteadVolume float64 + SumHalsteadTime float64 + SumHalsteadBugs float64 + AverageMI float64 + AverageMIwoc float64 + AverageMIcw float64 + AverageMIPerMethod float64 + AverageMIwocPerMethod float64 + AverageMIcwPerMethod float64 + AverageAfferentCoupling float64 + AverageEfferentCoupling float64 + AverageInstability float64 CommitCountForPeriod int CommittedFilesCountForPeriod int // for example if one commit 
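The analyzers above move from one goroutine per file to a fixed pool of runtime.NumCPU() workers fed through a channel. For reference, a complete version of that pattern needs three pieces working together: the WaitGroup must account for every worker (or every item), the producer must close the jobs channel so the workers' range loops can end, and only then is Wait guaranteed to return after all work is done. A minimal sketch under those assumptions, with processFile standing in for the real per-file analysis:

    package main

    import (
        "fmt"
        "runtime"
        "sync"
    )

    func processFile(path string) string {
        // placeholder for the real per-file analysis
        return "analyzed " + path
    }

    func main() {
        files := []string{"a.go", "b.go", "c.go", "d.go"}

        numWorkers := runtime.NumCPU()
        jobs := make(chan string)
        results := make(chan string, len(files))

        var wg sync.WaitGroup
        wg.Add(numWorkers) // one Add per worker, matched by one Done per worker

        for i := 0; i < numWorkers; i++ {
            go func() {
                defer wg.Done()
                for path := range jobs { // loop ends when jobs is closed
                    results <- processFile(path)
                }
            }()
        }

        for _, f := range files {
            jobs <- f
        }
        close(jobs) // lets the workers' range loops terminate
        wg.Wait()
        close(results)

        for r := range results {
            fmt.Println(r)
        }
    }

Buffering the results channel to len(files) lets every worker finish its sends without a separate consumer goroutine running in parallel.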
concerns 10 files, it will be 10 BusFactor int @@ -307,37 +307,37 @@ func (r *Aggregator) executeAggregationOnFiles(files []*pb.File) ProjectAggregat func (r *Aggregator) consolidate(aggregated *Aggregated) { if aggregated.NbClasses > 0 { - aggregated.AverageMethodsPerClass = float32(aggregated.NbMethods) / float32(aggregated.NbClasses) - aggregated.AverageCyclomaticComplexityPerClass = aggregated.AverageCyclomaticComplexityPerClass / float32(aggregated.NbClasses) + aggregated.AverageMethodsPerClass = float64(aggregated.NbMethods) / float64(aggregated.NbClasses) + aggregated.AverageCyclomaticComplexityPerClass = aggregated.AverageCyclomaticComplexityPerClass / float64(aggregated.NbClasses) } else { aggregated.AverageMethodsPerClass = 0 aggregated.AverageCyclomaticComplexityPerClass = 0 } if aggregated.AverageMI > 0 { - aggregated.AverageMI = aggregated.AverageMI / float32(aggregated.NbClasses) - aggregated.AverageMIwoc = aggregated.AverageMIwoc / float32(aggregated.NbClasses) - aggregated.AverageMIcw = aggregated.AverageMIcw / float32(aggregated.NbClasses) + aggregated.AverageMI = aggregated.AverageMI / float64(aggregated.NbClasses) + aggregated.AverageMIwoc = aggregated.AverageMIwoc / float64(aggregated.NbClasses) + aggregated.AverageMIcw = aggregated.AverageMIcw / float64(aggregated.NbClasses) } if aggregated.AverageInstability > 0 { - aggregated.AverageEfferentCoupling = aggregated.AverageEfferentCoupling / float32(aggregated.NbClasses) - aggregated.AverageAfferentCoupling = aggregated.AverageAfferentCoupling / float32(aggregated.NbClasses) + aggregated.AverageEfferentCoupling = aggregated.AverageEfferentCoupling / float64(aggregated.NbClasses) + aggregated.AverageAfferentCoupling = aggregated.AverageAfferentCoupling / float64(aggregated.NbClasses) } if aggregated.NbMethods > 0 { - aggregated.AverageLocPerMethod = aggregated.AverageLocPerMethod / float32(aggregated.NbMethods) - aggregated.AverageClocPerMethod = aggregated.AverageClocPerMethod / float32(aggregated.NbMethods) - aggregated.AverageLlocPerMethod = aggregated.AverageLlocPerMethod / float32(aggregated.NbMethods) - aggregated.AverageCyclomaticComplexityPerMethod = aggregated.AverageCyclomaticComplexityPerMethod / float32(aggregated.NbMethods) - aggregated.AverageMIPerMethod = aggregated.AverageMIPerMethod / float32(aggregated.NbMethods) - aggregated.AverageMIwocPerMethod = aggregated.AverageMIwocPerMethod / float32(aggregated.NbMethods) - aggregated.AverageMIcwPerMethod = aggregated.AverageMIcwPerMethod / float32(aggregated.NbMethods) - aggregated.AverageHalsteadDifficulty = aggregated.AverageHalsteadDifficulty / float32(aggregated.NbClasses) - aggregated.AverageHalsteadEffort = aggregated.AverageHalsteadEffort / float32(aggregated.NbClasses) - aggregated.AverageHalsteadVolume = aggregated.AverageHalsteadVolume / float32(aggregated.NbClasses) - aggregated.AverageHalsteadTime = aggregated.AverageHalsteadTime / float32(aggregated.NbClasses) - aggregated.AverageHalsteadBugs = aggregated.AverageHalsteadBugs / float32(aggregated.NbClasses) + aggregated.AverageLocPerMethod = aggregated.AverageLocPerMethod / float64(aggregated.NbMethods) + aggregated.AverageClocPerMethod = aggregated.AverageClocPerMethod / float64(aggregated.NbMethods) + aggregated.AverageLlocPerMethod = aggregated.AverageLlocPerMethod / float64(aggregated.NbMethods) + aggregated.AverageCyclomaticComplexityPerMethod = aggregated.AverageCyclomaticComplexityPerMethod / float64(aggregated.NbMethods) + aggregated.AverageMIPerMethod = aggregated.AverageMIPerMethod / 
float64(aggregated.NbMethods) + aggregated.AverageMIwocPerMethod = aggregated.AverageMIwocPerMethod / float64(aggregated.NbMethods) + aggregated.AverageMIcwPerMethod = aggregated.AverageMIcwPerMethod / float64(aggregated.NbMethods) + aggregated.AverageHalsteadDifficulty = aggregated.AverageHalsteadDifficulty / float64(aggregated.NbClasses) + aggregated.AverageHalsteadEffort = aggregated.AverageHalsteadEffort / float64(aggregated.NbClasses) + aggregated.AverageHalsteadVolume = aggregated.AverageHalsteadVolume / float64(aggregated.NbClasses) + aggregated.AverageHalsteadTime = aggregated.AverageHalsteadTime / float64(aggregated.NbClasses) + aggregated.AverageHalsteadBugs = aggregated.AverageHalsteadBugs / float64(aggregated.NbClasses) } // if langage without classes @@ -369,55 +369,55 @@ func (r *Aggregator) consolidate(aggregated *Aggregated) { wg.Add(1) go func(file *pb.File) { defer wg.Done() - + if file.LinesOfCode == nil { return } - + mu.Lock() aggregated.Loc += int(file.LinesOfCode.LinesOfCode) aggregated.Cloc += int(file.LinesOfCode.CommentLinesOfCode) aggregated.Lloc += int(file.LinesOfCode.LogicalLinesOfCode) mu.Unlock() - + // Create local variables for file processing localFile := &pb.File{ - Stmts: file.Stmts, + Stmts: file.Stmts, } - + // Calculate alternate MI using average MI per method when file has no class if len(localFile.Stmts.StmtClass) == 0 { if localFile.Stmts.Analyze.Maintainability == nil { localFile.Stmts.Analyze.Maintainability = &pb.Maintainability{} } - + methods := file.Stmts.StmtFunction if len(methods) == 0 { return } - averageForFile := float32(0) + averageForFile := float64(0) for _, method := range methods { if method.Stmts.Analyze == nil || method.Stmts.Analyze.Maintainability == nil { continue } - averageForFile += float32(*method.Stmts.Analyze.Maintainability.MaintainabilityIndex) + averageForFile += float64(*method.Stmts.Analyze.Maintainability.MaintainabilityIndex) } - averageForFile = averageForFile / float32(len(methods)) + averageForFile = averageForFile / float64(len(methods)) localFile.Stmts.Analyze.Maintainability.MaintainabilityIndex = &averageForFile } - + // Update the original file with processed data mu.Lock() file.Stmts = localFile.Stmts mu.Unlock() - + // LOC of file is the sum of all classes and methods // That's useful when we navigate over the files instead of the classes zero := int32(0) loc := int32(0) lloc := int32(0) cloc := int32(0) - + if file.Stmts.Analyze.Volume == nil { file.Stmts.Analyze.Volume = &pb.Volume{ Lloc: &zero, @@ -425,15 +425,15 @@ func (r *Aggregator) consolidate(aggregated *Aggregated) { Loc: &zero, } } - + classes := Engine.GetClassesInFile(file) functions := file.Stmts.StmtFunction - + // Initialize file complexity if needed if file.Stmts.Analyze.Complexity.Cyclomatic == nil { file.Stmts.Analyze.Complexity.Cyclomatic = &zero } - + // Process functions for _, function := range functions { // Handle LOC @@ -442,13 +442,13 @@ func (r *Aggregator) consolidate(aggregated *Aggregated) { lloc += function.LinesOfCode.LogicalLinesOfCode cloc += function.LinesOfCode.CommentLinesOfCode } - + // Handle complexity if function.Stmts.Analyze != nil && function.Stmts.Analyze.Complexity != nil { *file.Stmts.Analyze.Complexity.Cyclomatic += *function.Stmts.Analyze.Complexity.Cyclomatic } } - + // Process classes for _, class := range classes { // Handle LOC @@ -457,7 +457,7 @@ func (r *Aggregator) consolidate(aggregated *Aggregated) { lloc += class.LinesOfCode.LogicalLinesOfCode cloc += class.LinesOfCode.CommentLinesOfCode } - + 
// Handle coupling if class.Stmts != nil && class.Stmts.Analyze != nil { if class.Stmts.Analyze.Coupling == nil { @@ -467,7 +467,7 @@ func (r *Aggregator) consolidate(aggregated *Aggregated) { } } class.Stmts.Analyze.Coupling.Afferent = 0 - + if class.Name != nil { mu.Lock() // if in hashmap @@ -475,10 +475,10 @@ func (r *Aggregator) consolidate(aggregated *Aggregated) { class.Stmts.Analyze.Coupling.Afferent = int32(aggregated.ClassesAfferentCoupling[class.Name.Qualified]) file.Stmts.Analyze.Coupling.Afferent += class.Stmts.Analyze.Coupling.Afferent } - + // instability if class.Stmts.Analyze.Coupling.Afferent > 0 || class.Stmts.Analyze.Coupling.Efferent > 0 { - instability := float32(class.Stmts.Analyze.Coupling.Efferent) / float32(class.Stmts.Analyze.Coupling.Efferent+class.Stmts.Analyze.Coupling.Afferent) + instability := float64(class.Stmts.Analyze.Coupling.Efferent) / float64(class.Stmts.Analyze.Coupling.Efferent+class.Stmts.Analyze.Coupling.Afferent) class.Stmts.Analyze.Coupling.Instability = instability aggregated.AverageInstability += instability } @@ -486,25 +486,25 @@ func (r *Aggregator) consolidate(aggregated *Aggregated) { } } } - + file.Stmts.Analyze.Volume.Loc = &loc file.Stmts.Analyze.Volume.Lloc = &lloc file.Stmts.Analyze.Volume.Cloc = &cloc - + dependencies := file.Stmts.StmtExternalDependencies - + for _, dependency := range dependencies { if dependency == nil { continue } - + namespaceTo := dependency.Namespace namespaceFrom := dependency.From - + if namespaceFrom == "" || namespaceTo == "" { continue } - + // Keep only 2 levels in namespace separator := reg.FindString(namespaceFrom) parts := reg.Split(namespaceTo, -1) @@ -515,32 +515,32 @@ func (r *Aggregator) consolidate(aggregated *Aggregated) { if namespaceFrom == "" || namespaceTo == "" { continue } - + parts = reg.Split(namespaceFrom, -1) if len(parts) > 2 { namespaceFrom = parts[0] + separator + parts[1] } - + // if same, continue if namespaceFrom == namespaceTo { continue } - + // if root namespace, continue if namespaceFrom == "" || namespaceTo == "" { continue } - + mu.Lock() // create the map if not exists if _, ok := aggregated.PackageRelations[namespaceFrom]; !ok { aggregated.PackageRelations[namespaceFrom] = make(map[string]int) } - + if _, ok := aggregated.PackageRelations[namespaceFrom][namespaceTo]; !ok { aggregated.PackageRelations[namespaceFrom][namespaceTo] = 0 } - + // increment the counter aggregated.PackageRelations[namespaceFrom][namespaceTo]++ mu.Unlock() @@ -557,7 +557,7 @@ func (r *Aggregator) consolidate(aggregated *Aggregated) { wg.Wait() // Consolidate - aggregated.AverageInstability = aggregated.AverageInstability / float32(aggregated.NbClasses) + aggregated.AverageInstability = aggregated.AverageInstability / float64(aggregated.NbClasses) // Count commits for the period based on `ResultOfGitAnalysis` data aggregated.ResultOfGitAnalysis = r.gitSummaries @@ -655,7 +655,7 @@ func (r *Aggregator) calculateSums(file *pb.File, specificAggregation *Aggregate // Average cyclomatic complexity per method if function.Stmts.Analyze != nil && function.Stmts.Analyze.Complexity != nil { if function.Stmts.Analyze.Complexity.Cyclomatic != nil { - specificAggregation.AverageCyclomaticComplexityPerMethod += float32(*function.Stmts.Analyze.Complexity.Cyclomatic) + specificAggregation.AverageCyclomaticComplexityPerMethod += float64(*function.Stmts.Analyze.Complexity.Cyclomatic) } } @@ -670,13 +670,13 @@ func (r *Aggregator) calculateSums(file *pb.File, specificAggregation *Aggregate // average lines of code 
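Because several workers touch the same Aggregated value, the running sums and the PackageRelations map are updated under a sync.Mutex; unsynchronized concurrent map writes are a fatal error in Go. A small self-contained sketch of that guard on a nested relation counter, using a made-up edge list:

    package main

    import (
        "fmt"
        "sync"
    )

    func main() {
        relations := make(map[string]map[string]int)
        var mu sync.Mutex
        var wg sync.WaitGroup

        edges := [][2]string{
            {"App", "Domain"}, {"App", "Infra"}, {"App", "Domain"}, {"Infra", "Domain"},
        }

        for _, e := range edges {
            wg.Add(1)
            go func(from, to string) {
                defer wg.Done()
                mu.Lock()
                defer mu.Unlock()
                if _, ok := relations[from]; !ok {
                    relations[from] = make(map[string]int) // create inner map once
                }
                relations[from][to]++ // safe: protected by the mutex
            }(e[0], e[1])
        }

        wg.Wait()
        fmt.Println(relations) // map[App:map[Domain:2 Infra:1] Infra:map[Domain:1]]
    }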
per method if function.Stmts.Analyze != nil && function.Stmts.Analyze.Volume != nil { if function.Stmts.Analyze.Volume.Loc != nil { - specificAggregation.AverageLocPerMethod += float32(*function.Stmts.Analyze.Volume.Loc) + specificAggregation.AverageLocPerMethod += float64(*function.Stmts.Analyze.Volume.Loc) } if function.Stmts.Analyze.Volume.Cloc != nil { - specificAggregation.AverageClocPerMethod += float32(*function.Stmts.Analyze.Volume.Cloc) + specificAggregation.AverageClocPerMethod += float64(*function.Stmts.Analyze.Volume.Cloc) } if function.Stmts.Analyze.Volume.Lloc != nil { - specificAggregation.AverageLlocPerMethod += float32(*function.Stmts.Analyze.Volume.Lloc) + specificAggregation.AverageLlocPerMethod += float64(*function.Stmts.Analyze.Volume.Lloc) } } } @@ -704,13 +704,13 @@ func (r *Aggregator) calculateSums(file *pb.File, specificAggregation *Aggregate // Coupling if class.Stmts.Analyze.Coupling != nil { specificAggregation.AverageInstability += class.Stmts.Analyze.Coupling.Instability - specificAggregation.AverageEfferentCoupling += float32(class.Stmts.Analyze.Coupling.Efferent) - specificAggregation.AverageAfferentCoupling += float32(class.Stmts.Analyze.Coupling.Afferent) + specificAggregation.AverageEfferentCoupling += float64(class.Stmts.Analyze.Coupling.Efferent) + specificAggregation.AverageAfferentCoupling += float64(class.Stmts.Analyze.Coupling.Afferent) } // cyclomatic complexity per class if class.Stmts.Analyze.Complexity != nil && class.Stmts.Analyze.Complexity.Cyclomatic != nil { - specificAggregation.AverageCyclomaticComplexityPerClass += float32(*class.Stmts.Analyze.Complexity.Cyclomatic) + specificAggregation.AverageCyclomaticComplexityPerClass += float64(*class.Stmts.Analyze.Complexity.Cyclomatic) if specificAggregation.MinCyclomaticComplexity == 0 || int(*class.Stmts.Analyze.Complexity.Cyclomatic) < specificAggregation.MinCyclomaticComplexity { specificAggregation.MinCyclomaticComplexity = int(*class.Stmts.Analyze.Complexity.Cyclomatic) } diff --git a/src/Analyzer/Aggregator_test.go b/src/Analyzer/Aggregator_test.go index 7f850d0..f548a6f 100644 --- a/src/Analyzer/Aggregator_test.go +++ b/src/Analyzer/Aggregator_test.go @@ -170,9 +170,9 @@ func TestAggregates(t *testing.T) { Stmts: &pb.Stmts{ Analyze: &pb.Analyze{ Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float32(120), - MaintainabilityIndexWithoutComments: proto.Float32(48), - CommentWeight: proto.Float32(40), + MaintainabilityIndex: proto.Float64(120), + MaintainabilityIndexWithoutComments: proto.Float64(48), + CommentWeight: proto.Float64(40), }, }, }, @@ -182,9 +182,9 @@ func TestAggregates(t *testing.T) { Stmts: &pb.Stmts{ Analyze: &pb.Analyze{ Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float32(85), - MaintainabilityIndexWithoutComments: proto.Float32(48), - CommentWeight: proto.Float32(40), + MaintainabilityIndex: proto.Float64(85), + MaintainabilityIndexWithoutComments: proto.Float64(48), + CommentWeight: proto.Float64(40), }, }, }, @@ -194,9 +194,9 @@ func TestAggregates(t *testing.T) { Stmts: &pb.Stmts{ Analyze: &pb.Analyze{ Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float32(65), - MaintainabilityIndexWithoutComments: proto.Float32(48), - CommentWeight: proto.Float32(40), + MaintainabilityIndex: proto.Float64(65), + MaintainabilityIndexWithoutComments: proto.Float64(48), + CommentWeight: proto.Float64(40), }, }, }, @@ -206,9 +206,9 @@ func TestAggregates(t *testing.T) { Stmts: &pb.Stmts{ Analyze: &pb.Analyze{ 
Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float32(100), - MaintainabilityIndexWithoutComments: proto.Float32(48), - CommentWeight: proto.Float32(40), + MaintainabilityIndex: proto.Float64(100), + MaintainabilityIndexWithoutComments: proto.Float64(48), + CommentWeight: proto.Float64(40), }, }, }, @@ -234,9 +234,9 @@ func TestAggregates(t *testing.T) { Stmts: &pb.Stmts{ Analyze: &pb.Analyze{ Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float32(70), - MaintainabilityIndexWithoutComments: proto.Float32(48), - CommentWeight: proto.Float32(40), + MaintainabilityIndex: proto.Float64(70), + MaintainabilityIndexWithoutComments: proto.Float64(48), + CommentWeight: proto.Float64(40), }, }, }, @@ -246,9 +246,9 @@ func TestAggregates(t *testing.T) { Stmts: &pb.Stmts{ Analyze: &pb.Analyze{ Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float32(100), - MaintainabilityIndexWithoutComments: proto.Float32(48), - CommentWeight: proto.Float32(40), + MaintainabilityIndex: proto.Float64(100), + MaintainabilityIndexWithoutComments: proto.Float64(48), + CommentWeight: proto.Float64(40), }, }, }, @@ -287,9 +287,9 @@ func TestAggregates(t *testing.T) { Stmts: &pb.Stmts{ Analyze: &pb.Analyze{ Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float32(75), - MaintainabilityIndexWithoutComments: proto.Float32(48), - CommentWeight: proto.Float32(40), + MaintainabilityIndex: proto.Float64(75), + MaintainabilityIndexWithoutComments: proto.Float64(48), + CommentWeight: proto.Float64(40), }, }, }, @@ -299,9 +299,9 @@ func TestAggregates(t *testing.T) { Stmts: &pb.Stmts{ Analyze: &pb.Analyze{ Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float32(120), - MaintainabilityIndexWithoutComments: proto.Float32(48), - CommentWeight: proto.Float32(40), + MaintainabilityIndex: proto.Float64(120), + MaintainabilityIndexWithoutComments: proto.Float64(48), + CommentWeight: proto.Float64(40), }, }, }, @@ -327,9 +327,9 @@ func TestAggregates(t *testing.T) { Stmts: &pb.Stmts{ Analyze: &pb.Analyze{ Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float32(90), - MaintainabilityIndexWithoutComments: proto.Float32(48), - CommentWeight: proto.Float32(40), + MaintainabilityIndex: proto.Float64(90), + MaintainabilityIndexWithoutComments: proto.Float64(48), + CommentWeight: proto.Float64(40), }, }, }, @@ -371,9 +371,9 @@ func TestAggregates(t *testing.T) { Stmts: &pb.Stmts{ Analyze: &pb.Analyze{ Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float32(120), - MaintainabilityIndexWithoutComments: proto.Float32(48), - CommentWeight: proto.Float32(40), + MaintainabilityIndex: proto.Float64(120), + MaintainabilityIndexWithoutComments: proto.Float64(48), + CommentWeight: proto.Float64(40), }, }, }, @@ -472,9 +472,9 @@ func TestCalculateMaintainabilityIndex(t *testing.T) { Stmts: &pb.Stmts{ Analyze: &pb.Analyze{ Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float32(15), - MaintainabilityIndexWithoutComments: proto.Float32(20), - CommentWeight: proto.Float32(25), + MaintainabilityIndex: proto.Float64(15), + MaintainabilityIndexWithoutComments: proto.Float64(20), + CommentWeight: proto.Float64(25), }, }, }, @@ -483,9 +483,9 @@ func TestCalculateMaintainabilityIndex(t *testing.T) { Stmts: &pb.Stmts{ Analyze: &pb.Analyze{ Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float32(30), - MaintainabilityIndexWithoutComments: proto.Float32(35), - CommentWeight: 
proto.Float32(40), + MaintainabilityIndex: proto.Float64(30), + MaintainabilityIndexWithoutComments: proto.Float64(35), + CommentWeight: proto.Float64(40), }, }, }, diff --git a/src/Analyzer/Comparator.go b/src/Analyzer/Comparator.go index 1ab94a2..fabb819 100644 --- a/src/Analyzer/Comparator.go +++ b/src/Analyzer/Comparator.go @@ -33,37 +33,37 @@ type Comparaison struct { Loc int Cloc int Lloc int - AverageMethodsPerClass float32 - AverageLocPerMethod float32 - AverageLlocPerMethod float32 - AverageClocPerMethod float32 - AverageCyclomaticComplexityPerMethod float32 - AverageCyclomaticComplexityPerClass float32 + AverageMethodsPerClass float64 + AverageLocPerMethod float64 + AverageLlocPerMethod float64 + AverageClocPerMethod float64 + AverageCyclomaticComplexityPerMethod float64 + AverageCyclomaticComplexityPerClass float64 MinCyclomaticComplexity int MaxCyclomaticComplexity int - AverageHalsteadDifficulty float32 - AverageHalsteadEffort float32 - AverageHalsteadVolume float32 - AverageHalsteadTime float32 - AverageHalsteadBugs float32 - SumHalsteadDifficulty float32 - SumHalsteadEffort float32 - SumHalsteadVolume float32 - SumHalsteadTime float32 - SumHalsteadBugs float32 - AverageMI float32 - AverageMIwoc float32 - AverageMIcw float32 - AverageMIPerMethod float32 - AverageMIwocPerMethod float32 - AverageMIcwPerMethod float32 - AverageAfferentCoupling float32 - AverageEfferentCoupling float32 - AverageInstability float32 + AverageHalsteadDifficulty float64 + AverageHalsteadEffort float64 + AverageHalsteadVolume float64 + AverageHalsteadTime float64 + AverageHalsteadBugs float64 + SumHalsteadDifficulty float64 + SumHalsteadEffort float64 + SumHalsteadVolume float64 + SumHalsteadTime float64 + SumHalsteadBugs float64 + AverageMI float64 + AverageMIwoc float64 + AverageMIcw float64 + AverageMIPerMethod float64 + AverageMIwocPerMethod float64 + AverageMIcwPerMethod float64 + AverageAfferentCoupling float64 + AverageEfferentCoupling float64 + AverageInstability float64 CommitCountForPeriod int CommittedFilesCountForPeriod int // for example if one commit concerns 10 files, it will be 10 BusFactor int - Risk float32 + Risk float64 ChangedFiles []ChangedFile NbNewFiles int NbDeletedFiles int @@ -218,7 +218,7 @@ func (c *Comparator) Compare(first Aggregated, second Aggregated) Comparaison { // Cyclomatic complexity if file.Stmts.Analyze.Complexity != nil && file2.Stmts.Analyze.Complexity != nil { - change.Comparaison.AverageCyclomaticComplexityPerMethod = float32(*file.Stmts.Analyze.Complexity.Cyclomatic) - float32(*file2.Stmts.Analyze.Complexity.Cyclomatic) + change.Comparaison.AverageCyclomaticComplexityPerMethod = float64(*file.Stmts.Analyze.Complexity.Cyclomatic) - float64(*file2.Stmts.Analyze.Complexity.Cyclomatic) } // Halstead @@ -240,8 +240,8 @@ func (c *Comparator) Compare(first Aggregated, second Aggregated) Comparaison { // Coupling if file.Stmts.Analyze.Coupling != nil && file2.Stmts.Analyze.Coupling != nil { - change.Comparaison.AverageAfferentCoupling = float32(file.Stmts.Analyze.Coupling.Afferent) - float32(file2.Stmts.Analyze.Coupling.Afferent) - change.Comparaison.AverageEfferentCoupling = float32(file.Stmts.Analyze.Coupling.Efferent) - float32(file2.Stmts.Analyze.Coupling.Efferent) + change.Comparaison.AverageAfferentCoupling = float64(file.Stmts.Analyze.Coupling.Afferent) - float64(file2.Stmts.Analyze.Coupling.Afferent) + change.Comparaison.AverageEfferentCoupling = float64(file.Stmts.Analyze.Coupling.Efferent) - float64(file2.Stmts.Analyze.Coupling.Efferent) 
change.Comparaison.AverageInstability = file.Stmts.Analyze.Coupling.Instability - file2.Stmts.Analyze.Coupling.Instability } diff --git a/src/Analyzer/Component/MaintainabilityIndexVisitor.go b/src/Analyzer/Component/MaintainabilityIndexVisitor.go index 4b051ac..9435978 100644 --- a/src/Analyzer/Component/MaintainabilityIndexVisitor.go +++ b/src/Analyzer/Component/MaintainabilityIndexVisitor.go @@ -65,13 +65,13 @@ func (v *MaintainabilityIndexVisitor) Calculate(stmts *pb.Stmts) { var lloc int32 = *stmts.Analyze.Volume.Lloc var cloc int32 = *stmts.Analyze.Volume.Cloc var cyclomatic int32 = *stmts.Analyze.Complexity.Cyclomatic - var halsteadVolume float32 = *stmts.Analyze.Volume.HalsteadVolume - var MIwoC float32 = 0 - var MI float32 = 0 - var commentWeight float32 = 0 + var halsteadVolume float64 = *stmts.Analyze.Volume.HalsteadVolume + var MIwoC float64 = 0 + var MI float64 = 0 + var commentWeight float64 = 0 // // maintainability index without comment - MIwoC = float32(math.Max((171- + MIwoC = float64(math.Max((171- (5.2*math.Log(float64(halsteadVolume)))- (0.23*float64(cyclomatic))- (16.2*math.Log(float64(lloc))))*100/171, 0)) @@ -82,7 +82,7 @@ func (v *MaintainabilityIndexVisitor) Calculate(stmts *pb.Stmts) { if loc > 0 { CM := float64(cloc) / float64(loc) - commentWeight = float32(50 * math.Sin(math.Sqrt(2.4*CM))) + commentWeight = float64(50 * math.Sin(math.Sqrt(2.4*CM))) } MI = MIwoC + commentWeight @@ -94,9 +94,9 @@ func (v *MaintainabilityIndexVisitor) Calculate(stmts *pb.Stmts) { commentWeight = 0 } - MI32 := float32(MI) - MIwoC32 := float32(MIwoC) - commentWeight32 := float32(commentWeight) + MI32 := float64(MI) + MIwoC32 := float64(MIwoC) + commentWeight32 := float64(commentWeight) if stmts.Analyze.Maintainability == nil { stmts.Analyze.Maintainability = &pb.Maintainability{} @@ -104,7 +104,7 @@ func (v *MaintainabilityIndexVisitor) Calculate(stmts *pb.Stmts) { if loc == 0 { // when class has no code - MI32 = float32(171) + MI32 = float64(171) } stmts.Analyze.Maintainability.MaintainabilityIndex = &MI32 diff --git a/src/Analyzer/Component/MaintainabilityIndexVisitor_test.go b/src/Analyzer/Component/MaintainabilityIndexVisitor_test.go index 444c44f..fbfdad3 100644 --- a/src/Analyzer/Component/MaintainabilityIndexVisitor_test.go +++ b/src/Analyzer/Component/MaintainabilityIndexVisitor_test.go @@ -1,50 +1,51 @@ package Analyzer import ( - "testing" - pb "github.com/halleck45/ast-metrics/src/NodeType" + "testing" + + pb "github.com/halleck45/ast-metrics/src/NodeType" ) func TestItCalculateMaintainabilityIndex(t *testing.T) { - visitor := MaintainabilityIndexVisitor{} + visitor := MaintainabilityIndexVisitor{} - stmts := pb.Stmts{} - class1 := pb.StmtClass{} - class1.Stmts = &pb.Stmts{} - stmts.StmtClass = append(stmts.StmtClass, &class1) + stmts := pb.Stmts{} + class1 := pb.StmtClass{} + class1.Stmts = &pb.Stmts{} + stmts.StmtClass = append(stmts.StmtClass, &class1) - stmts.Analyze = &pb.Analyze{} - stmts.Analyze.Volume = &pb.Volume{} + stmts.Analyze = &pb.Analyze{} + stmts.Analyze.Volume = &pb.Volume{} - loc := int32(10) - lloc := int32(8) - cloc := int32(2) - cyclomatic := int32(3) - halsteadVolume := float32(10) + loc := int32(10) + lloc := int32(8) + cloc := int32(2) + cyclomatic := int32(3) + halsteadVolume := float64(10) - stmts.Analyze.Volume.Loc = &loc - stmts.Analyze.Volume.Lloc = &lloc - stmts.Analyze.Volume.Cloc = &cloc - stmts.Analyze.Complexity = &pb.Complexity{} - stmts.Analyze.Complexity.Cyclomatic = &cyclomatic - stmts.Analyze.Volume.HalsteadVolume = &halsteadVolume 
+ stmts.Analyze.Volume.Loc = &loc + stmts.Analyze.Volume.Lloc = &lloc + stmts.Analyze.Volume.Cloc = &cloc + stmts.Analyze.Complexity = &pb.Complexity{} + stmts.Analyze.Complexity.Cyclomatic = &cyclomatic + stmts.Analyze.Volume.HalsteadVolume = &halsteadVolume - visitor.Calculate(&stmts) + visitor.Calculate(&stmts) - MI := int(*stmts.Analyze.Maintainability.MaintainabilityIndex) - MIwoc := int(*stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments) - commentWeight := int(*stmts.Analyze.Maintainability.CommentWeight) + MI := int(*stmts.Analyze.Maintainability.MaintainabilityIndex) + MIwoc := int(*stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments) + commentWeight := int(*stmts.Analyze.Maintainability.CommentWeight) - if MI != 104 { - t.Error("Expected 104, got ", MI) - } + if MI != 104 { + t.Error("Expected 104, got ", MI) + } - if MIwoc != 72 { - t.Error("Expected 72, got ", MIwoc) - } + if MIwoc != 72 { + t.Error("Expected 72, got ", MIwoc) + } - if commentWeight != 31 { - t.Error("Expected 31, got ", commentWeight) - } -} \ No newline at end of file + if commentWeight != 31 { + t.Error("Expected 31, got ", commentWeight) + } +} diff --git a/src/Analyzer/RiskAnalyzer.go b/src/Analyzer/RiskAnalyzer.go index 54fb2aa..94cab36 100644 --- a/src/Analyzer/RiskAnalyzer.go +++ b/src/Analyzer/RiskAnalyzer.go @@ -16,7 +16,7 @@ func NewRiskAnalyzer() *RiskAnalyzer { func (v *RiskAnalyzer) Analyze(project ProjectAggregated) { - var maxComplexity float32 = 0 + var maxComplexity float64 = 0 var maxCyclomatic int32 = 0 var maxCommits int = 0 @@ -53,7 +53,7 @@ func (v *RiskAnalyzer) Analyze(project ProjectAggregated) { for _, file := range project.Combined.ConcernedFiles { if file.Stmts.Analyze.Risk == nil { - file.Stmts.Analyze.Risk = &pb.Risk{Score: float32(0)} + file.Stmts.Analyze.Risk = &pb.Risk{Score: float64(0)} } nbCommits := 0 @@ -69,7 +69,7 @@ func (v *RiskAnalyzer) Analyze(project ProjectAggregated) { } risk := v.GetRisk(int32(maxCommits), maxComplexity, nbCommits, int(128-*class.Stmts.Analyze.Maintainability.MaintainabilityIndex)) - file.Stmts.Analyze.Risk.Score += float32(risk) + file.Stmts.Analyze.Risk.Score += float64(risk) } // Procedural file. We put risk on the file itself, according to the cyclomatic complexity. @@ -78,19 +78,19 @@ func (v *RiskAnalyzer) Analyze(project ProjectAggregated) { } cyclo := *file.Stmts.Analyze.Complexity.Cyclomatic - risk := v.GetRisk(int32(maxCommits), float32(maxCyclomatic), nbCommits, int(cyclo)) - file.Stmts.Analyze.Risk.Score += float32(risk) + risk := v.GetRisk(int32(maxCommits), float64(maxCyclomatic), nbCommits, int(cyclo)) + file.Stmts.Analyze.Risk.Score += float64(risk) } } -func (v *RiskAnalyzer) GetRisk(maxCommits int32, maxComplexity float32, nbCommits int, complexity int) float32 { +func (v *RiskAnalyzer) GetRisk(maxCommits int32, maxComplexity float64, nbCommits int, complexity int) float64 { // Calculate the horizontal and vertical distance from the "top right" corner. - horizontalDistance := float32(maxCommits) - float32(nbCommits) - verticalDistance := maxComplexity - float32(complexity) + horizontalDistance := float64(maxCommits) - float64(nbCommits) + verticalDistance := maxComplexity - float64(complexity) // Normalize these values over time, we first divide by the maximum values, to always end up with distances between 0 and 1. 
- normalizedHorizontalDistance := horizontalDistance / float32(maxCommits) + normalizedHorizontalDistance := horizontalDistance / float64(maxCommits) normalizedVerticalDistance := verticalDistance / maxComplexity // Calculate the distance of this class from the "top right" corner, using the simple formula A^2 + B^2 = C^2; or: C = sqrt(A^2 + B^2)). @@ -99,5 +99,5 @@ func (v *RiskAnalyzer) GetRisk(maxCommits int32, maxComplexity float32, nbCommit // The resulting value will be between 0 and sqrt(2). A short distance is bad, so in order to end up with a high score, we invert the value by subtracting it from 1. risk := 1 - distanceFromTopRightCorner - return float32(risk) + return float64(risk) } diff --git a/src/Analyzer/Volume/HalsteadMetricsVisitor.go b/src/Analyzer/Volume/HalsteadMetricsVisitor.go index 11608e6..d11af04 100644 --- a/src/Analyzer/Volume/HalsteadMetricsVisitor.go +++ b/src/Analyzer/Volume/HalsteadMetricsVisitor.go @@ -30,11 +30,11 @@ func (v *HalsteadMetricsVisitor) Visit(stmts *pb.Stmts, parents *pb.Stmts) { var N int32 // program length (N) var N1 int32 var N2 int32 - var hatN float32 = 0 // estimated program length (𝑁̂) - var V float32 = 0 // volume (V) - var D float32 = 0 // difficulty (D) - var E float32 = 0 // effort (E) - var T float32 = 0 // time required to program (T) + var hatN float64 = 0 // estimated program length (𝑁̂) + var V float64 = 0 // volume (V) + var D float64 = 0 // difficulty (D) + var E float64 = 0 // effort (E) + var T float64 = 0 // time required to program (T) for _, stmt := range parents.StmtFunction { if stmt.Stmts == nil { @@ -74,19 +74,19 @@ func (v *HalsteadMetricsVisitor) Visit(stmts *pb.Stmts, parents *pb.Stmts) { N = int32(N1 + N2) // Calculate estimated program length (𝑁̂) - hatN = float32(n1)*float32(math.Log2(float64(n1))) + float32(n2)*float32(math.Log2(float64(n2))) + hatN = float64(n1)*float64(math.Log2(float64(n1))) + float64(n2)*float64(math.Log2(float64(n2))) if math.IsNaN(float64(hatN)) { hatN = 0 } // Calculate volume (V) - V = float32(N) * float32(math.Log2(float64(n))) + V = float64(N) * float64(math.Log2(float64(n))) if math.IsNaN(float64(V)) { V = 0 } // Calculate difficulty (D) - D = float32(n1)/2*float32(N2)/float32(n2) + D = float64(n1) / 2 * float64(N2) / float64(n2) if math.IsNaN(float64(D)) { D = 0 } @@ -97,12 +97,12 @@ func (v *HalsteadMetricsVisitor) Visit(stmts *pb.Stmts, parents *pb.Stmts) { // Calculate time required to program (T) T = E / 18 - // convert float to float32 - V32 := float32(V) - hatN32 := float32(hatN) - D32 := float32(D) - E32 := float32(E) - T32 := float32(T) + // convert float to float64 + V32 := float64(V) + hatN32 := float64(hatN) + D32 := float64(D) + E32 := float64(E) + T32 := float64(T) // Assign to result if stmt.Stmts.Analyze == nil { @@ -135,11 +135,11 @@ func (v *HalsteadMetricsVisitor) LeaveNode(stmts *pb.Stmts) { var n int32 = 0 var N int32 = 0 - var hatN float32 - var V float32 - var D float32 - var E float32 - var T float32 + var hatN float64 + var V float64 + var D float64 + var E float64 + var T float64 // initialize default values hatN = 0 @@ -167,19 +167,19 @@ func (v *HalsteadMetricsVisitor) LeaveNode(stmts *pb.Stmts) { if len(stmt.Stmts.StmtFunction) > 0 { n = n / int32(len(stmt.Stmts.StmtFunction)) N = N / int32(len(stmt.Stmts.StmtFunction)) - hatN = hatN / float32(len(stmt.Stmts.StmtFunction)) - V = V / float32(len(stmt.Stmts.StmtFunction)) - D = D / float32(len(stmt.Stmts.StmtFunction)) - E = E / float32(len(stmt.Stmts.StmtFunction)) - T = T / 
float32(len(stmt.Stmts.StmtFunction)) + hatN = hatN / float64(len(stmt.Stmts.StmtFunction)) + V = V / float64(len(stmt.Stmts.StmtFunction)) + D = D / float64(len(stmt.Stmts.StmtFunction)) + E = E / float64(len(stmt.Stmts.StmtFunction)) + T = T / float64(len(stmt.Stmts.StmtFunction)) } - // convert float to float32 - V32 := float32(V) - hatN32 := float32(hatN) - D32 := float32(D) - E32 := float32(E) - T32 := float32(T) + // convert float to float64 + V32 := float64(V) + hatN32 := float64(hatN) + D32 := float64(D) + E32 := float64(E) + T32 := float64(T) // Assign to result if stmt.Stmts.Analyze == nil { diff --git a/src/Analyzer/Volume/HalsteadMetricsVisitor_test.go b/src/Analyzer/Volume/HalsteadMetricsVisitor_test.go index 43aa78d..affa79d 100644 --- a/src/Analyzer/Volume/HalsteadMetricsVisitor_test.go +++ b/src/Analyzer/Volume/HalsteadMetricsVisitor_test.go @@ -92,15 +92,15 @@ func TestHalsteadMetricsVisitor(t *testing.T) { t.Errorf("Expected 4, got %d", *pbFile.Stmts.StmtFunction[0].Stmts.Analyze.Volume.HalsteadLength) } - if *pbFile.Stmts.StmtFunction[0].Stmts.Analyze.Volume.HalsteadEstimatedLength != float32(4.754887502163469) { + if *pbFile.Stmts.StmtFunction[0].Stmts.Analyze.Volume.HalsteadEstimatedLength != float64(4.754887502163469) { t.Errorf("Expected 4.754887502163469, got %f", *pbFile.Stmts.StmtFunction[0].Stmts.Analyze.Volume.HalsteadEstimatedLength) } - if *pbFile.Stmts.StmtFunction[0].Stmts.Analyze.Volume.HalsteadVolume != float32(8) { + if *pbFile.Stmts.StmtFunction[0].Stmts.Analyze.Volume.HalsteadVolume != float64(8) { t.Errorf("Expected 8, got %f", *pbFile.Stmts.StmtFunction[0].Stmts.Analyze.Volume.HalsteadVolume) } - if *pbFile.Stmts.StmtFunction[0].Stmts.Analyze.Volume.HalsteadDifficulty != float32(1.5) { + if *pbFile.Stmts.StmtFunction[0].Stmts.Analyze.Volume.HalsteadDifficulty != float64(1.5) { t.Errorf("Expected 1.5, got %f", *pbFile.Stmts.StmtFunction[0].Stmts.Analyze.Volume.HalsteadDifficulty) } } @@ -119,11 +119,11 @@ func TestHalsteadMetricsVisitor_LeaveNode(t *testing.T) { Volume: &pb.Volume{ HalsteadVocabulary: proto.Int32(2), HalsteadLength: proto.Int32(2), - HalsteadEstimatedLength: proto.Float32(2.5), - HalsteadVolume: proto.Float32(2.5), - HalsteadDifficulty: proto.Float32(2.5), - HalsteadEffort: proto.Float32(2.5), - HalsteadTime: proto.Float32(2.5), + HalsteadEstimatedLength: proto.Float64(2.5), + HalsteadVolume: proto.Float64(2.5), + HalsteadDifficulty: proto.Float64(2.5), + HalsteadEffort: proto.Float64(2.5), + HalsteadTime: proto.Float64(2.5), }, }, }, @@ -134,11 +134,11 @@ func TestHalsteadMetricsVisitor_LeaveNode(t *testing.T) { Volume: &pb.Volume{ HalsteadVocabulary: proto.Int32(4), HalsteadLength: proto.Int32(4), - HalsteadEstimatedLength: proto.Float32(4.5), - HalsteadVolume: proto.Float32(4.5), - HalsteadDifficulty: proto.Float32(4.5), - HalsteadEffort: proto.Float32(4.5), - HalsteadTime: proto.Float32(4.5), + HalsteadEstimatedLength: proto.Float64(4.5), + HalsteadVolume: proto.Float64(4.5), + HalsteadDifficulty: proto.Float64(4.5), + HalsteadEffort: proto.Float64(4.5), + HalsteadTime: proto.Float64(4.5), }, }, }, @@ -159,23 +159,23 @@ func TestHalsteadMetricsVisitor_LeaveNode(t *testing.T) { t.Errorf("Expected 3, got %d", *stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadLength) } - if *stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadEstimatedLength != float32(3.5) { + if *stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadEstimatedLength != float64(3.5) { t.Errorf("Expected 3.5, got %f", 
*stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadEstimatedLength) } - if *stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadVolume != float32(3.5) { + if *stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadVolume != float64(3.5) { t.Errorf("Expected 3.5, got %f", *stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadVolume) } - if *stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadDifficulty != float32(3.5) { + if *stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadDifficulty != float64(3.5) { t.Errorf("Expected 3.5, got %f", *stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadDifficulty) } - if *stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadEffort != float32(3.5) { + if *stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadEffort != float64(3.5) { t.Errorf("Expected 3.5, got %f", *stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadEffort) } - if *stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadTime != float32(3.5) { + if *stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadTime != float64(3.5) { t.Errorf("Expected 3.5, got %f", *stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadTime) } } diff --git a/src/Cli/ComponentFileTable.go b/src/Cli/ComponentFileTable.go index 98b7c07..bfebd09 100644 --- a/src/Cli/ComponentFileTable.go +++ b/src/Cli/ComponentFileTable.go @@ -116,9 +116,9 @@ func (v *ComponentFileTable) Init() { cyclo = int(*file.Stmts.Analyze.Complexity.Cyclomatic) } - risk := float32(0.0) + risk := float64(0.0) if file.Stmts != nil && file.Stmts.Analyze != nil && file.Stmts.Analyze.Risk != nil { - risk = float32(file.Stmts.Analyze.Risk.Score) + risk = float64(file.Stmts.Analyze.Risk.Score) } // truncate filename, but to the left diff --git a/src/Cli/ComponentTableClass_test.go b/src/Cli/ComponentTableClass_test.go index 50f1b76..39ce6f2 100644 --- a/src/Cli/ComponentTableClass_test.go +++ b/src/Cli/ComponentTableClass_test.go @@ -41,11 +41,11 @@ func TestNewComponentTableClass(t *testing.T) { func TestComponentTableClass_Render(t *testing.T) { - mi := float32(120) + mi := float64(120) ccn := int32(5) loc := int32(100) halsteadLength := int32(100) - halsteadVolume := float32(100) + halsteadVolume := float64(100) files := []*pb.File{ { @@ -89,14 +89,14 @@ func TestComponentTableClass_Render(t *testing.T) { func TestComponentTableClass_Sort(t *testing.T) { // class 1 - mi1 := float32(120) + mi1 := float64(120) ccn1 := int32(5) loc1 := int32(100) halsteadLength1 := int32(5) - halsteadVolume := float32(7) + halsteadVolume := float64(7) // class 2 - mi2 := float32(110) + mi2 := float64(110) ccn2 := int32(10) loc2 := int32(80) halsteadLength2 := int32(7) diff --git a/src/Cli/Styles.go b/src/Cli/Styles.go index 704817b..f476476 100644 --- a/src/Cli/Styles.go +++ b/src/Cli/Styles.go @@ -125,11 +125,11 @@ func DecorateMaintainabilityIndex(mi int, analyze *pb.Analyze) string { return "🟢 " + strconv.Itoa(mi) } -func Round(num float32) int { - return int(num + float32(math.Copysign(0.5, float64(num)))) +func Round(num float64) int { + return int(num + float64(math.Copysign(0.5, float64(num)))) } -func ToFixed(num float32, precision int) float32 { +func ToFixed(num float64, precision int) float64 { output := math.Pow(10, float64(precision)) - return float32(Round(num*float32(output))) / float32(output) + return float64(Round(num*float64(output))) / float64(output) } diff --git a/src/Engine/util.go b/src/Engine/util.go index 5a09de0..614fa7a 100644 --- a/src/Engine/util.go +++ b/src/Engine/util.go @@ -133,7 +133,7 @@ func GetFunctionsInFile(file *pb.File) []*pb.StmtFunction { } // render as HTML -func HtmlChartLine(data 
*orderedmap.OrderedMap[string, float32], label string, id string) string { +func HtmlChartLine(data *orderedmap.OrderedMap[string, float64], label string, id string) string { series := "[" for _, key := range data.Keys() { value, _ := data.Get(key) @@ -237,7 +237,7 @@ if (document.getElementById("` + id + `") && typeof ApexCharts !== 'undefined') } // render as HTML -func HtmlChartArea(data *orderedmap.OrderedMap[string, float32], label string, id string) string { +func HtmlChartArea(data *orderedmap.OrderedMap[string, float64], label string, id string) string { values := "[" keys := "[" diff --git a/src/NodeType/NodeType.pb.go b/src/NodeType/NodeType.pb.go index 514b32a..dd1bab4 100644 --- a/src/NodeType/NodeType.pb.go +++ b/src/NodeType/NodeType.pb.go @@ -1767,11 +1767,11 @@ type Volume struct { Cloc *int32 `protobuf:"varint,3,opt,name=cloc,proto3,oneof" json:"cloc,omitempty"` HalsteadVocabulary *int32 `protobuf:"varint,4,opt,name=halsteadVocabulary,proto3,oneof" json:"halsteadVocabulary,omitempty"` HalsteadLength *int32 `protobuf:"varint,5,opt,name=halsteadLength,proto3,oneof" json:"halsteadLength,omitempty"` - HalsteadVolume *float32 `protobuf:"fixed32,6,opt,name=halsteadVolume,proto3,oneof" json:"halsteadVolume,omitempty"` - HalsteadDifficulty *float32 `protobuf:"fixed32,7,opt,name=halsteadDifficulty,proto3,oneof" json:"halsteadDifficulty,omitempty"` - HalsteadEffort *float32 `protobuf:"fixed32,8,opt,name=halsteadEffort,proto3,oneof" json:"halsteadEffort,omitempty"` - HalsteadTime *float32 `protobuf:"fixed32,9,opt,name=halsteadTime,proto3,oneof" json:"halsteadTime,omitempty"` - HalsteadEstimatedLength *float32 `protobuf:"fixed32,10,opt,name=halsteadEstimatedLength,proto3,oneof" json:"halsteadEstimatedLength,omitempty"` + HalsteadVolume *float64 `protobuf:"fixed32,6,opt,name=halsteadVolume,proto3,oneof" json:"halsteadVolume,omitempty"` + HalsteadDifficulty *float64 `protobuf:"fixed32,7,opt,name=halsteadDifficulty,proto3,oneof" json:"halsteadDifficulty,omitempty"` + HalsteadEffort *float64 `protobuf:"fixed32,8,opt,name=halsteadEffort,proto3,oneof" json:"halsteadEffort,omitempty"` + HalsteadTime *float64 `protobuf:"fixed32,9,opt,name=halsteadTime,proto3,oneof" json:"halsteadTime,omitempty"` + HalsteadEstimatedLength *float64 `protobuf:"fixed32,10,opt,name=halsteadEstimatedLength,proto3,oneof" json:"halsteadEstimatedLength,omitempty"` } func (x *Volume) Reset() { @@ -1841,35 +1841,35 @@ func (x *Volume) GetHalsteadLength() int32 { return 0 } -func (x *Volume) GetHalsteadVolume() float32 { +func (x *Volume) GetHalsteadVolume() float64 { if x != nil && x.HalsteadVolume != nil { return *x.HalsteadVolume } return 0 } -func (x *Volume) GetHalsteadDifficulty() float32 { +func (x *Volume) GetHalsteadDifficulty() float64 { if x != nil && x.HalsteadDifficulty != nil { return *x.HalsteadDifficulty } return 0 } -func (x *Volume) GetHalsteadEffort() float32 { +func (x *Volume) GetHalsteadEffort() float64 { if x != nil && x.HalsteadEffort != nil { return *x.HalsteadEffort } return 0 } -func (x *Volume) GetHalsteadTime() float32 { +func (x *Volume) GetHalsteadTime() float64 { if x != nil && x.HalsteadTime != nil { return *x.HalsteadTime } return 0 } -func (x *Volume) GetHalsteadEstimatedLength() float32 { +func (x *Volume) GetHalsteadEstimatedLength() float64 { if x != nil && x.HalsteadEstimatedLength != nil { return *x.HalsteadEstimatedLength } @@ -1881,9 +1881,9 @@ type Maintainability struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - MaintainabilityIndex *float32 
`protobuf:"fixed32,1,opt,name=maintainabilityIndex,proto3,oneof" json:"maintainabilityIndex,omitempty"` - MaintainabilityIndexWithoutComments *float32 `protobuf:"fixed32,2,opt,name=maintainabilityIndexWithoutComments,proto3,oneof" json:"maintainabilityIndexWithoutComments,omitempty"` - CommentWeight *float32 `protobuf:"fixed32,3,opt,name=commentWeight,proto3,oneof" json:"commentWeight,omitempty"` + MaintainabilityIndex *float64 `protobuf:"fixed32,1,opt,name=maintainabilityIndex,proto3,oneof" json:"maintainabilityIndex,omitempty"` + MaintainabilityIndexWithoutComments *float64 `protobuf:"fixed32,2,opt,name=maintainabilityIndexWithoutComments,proto3,oneof" json:"maintainabilityIndexWithoutComments,omitempty"` + CommentWeight *float64 `protobuf:"fixed32,3,opt,name=commentWeight,proto3,oneof" json:"commentWeight,omitempty"` } func (x *Maintainability) Reset() { @@ -1918,21 +1918,21 @@ func (*Maintainability) Descriptor() ([]byte, []int) { return file_proto_NodeType_proto_rawDescGZIP(), []int{25} } -func (x *Maintainability) GetMaintainabilityIndex() float32 { +func (x *Maintainability) GetMaintainabilityIndex() float64 { if x != nil && x.MaintainabilityIndex != nil { return *x.MaintainabilityIndex } return 0 } -func (x *Maintainability) GetMaintainabilityIndexWithoutComments() float32 { +func (x *Maintainability) GetMaintainabilityIndexWithoutComments() float64 { if x != nil && x.MaintainabilityIndexWithoutComments != nil { return *x.MaintainabilityIndexWithoutComments } return 0 } -func (x *Maintainability) GetCommentWeight() float32 { +func (x *Maintainability) GetCommentWeight() float64 { if x != nil && x.CommentWeight != nil { return *x.CommentWeight } @@ -2076,7 +2076,7 @@ type Risk struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - Score float32 `protobuf:"fixed32,1,opt,name=score,proto3" json:"score,omitempty"` // score of risk. Lower is better + Score float64 `protobuf:"fixed32,1,opt,name=score,proto3" json:"score,omitempty"` // score of risk. Lower is better } func (x *Risk) Reset() { @@ -2111,7 +2111,7 @@ func (*Risk) Descriptor() ([]byte, []int) { return file_proto_NodeType_proto_rawDescGZIP(), []int{28} } -func (x *Risk) GetScore() float32 { +func (x *Risk) GetScore() float64 { if x != nil { return x.Score } @@ -2128,7 +2128,7 @@ type Coupling struct { Afferent int32 `protobuf:"varint,1,opt,name=afferent,proto3" json:"afferent,omitempty"` // number of classes that depends on this class Efferent int32 `protobuf:"varint,2,opt,name=efferent,proto3" json:"efferent,omitempty"` // number of classes that this class depends on - Instability float32 `protobuf:"fixed32,3,opt,name=instability,proto3" json:"instability,omitempty"` // instability of the class + Instability float64 `protobuf:"fixed32,3,opt,name=instability,proto3" json:"instability,omitempty"` // instability of the class } func (x *Coupling) Reset() { @@ -2177,7 +2177,7 @@ func (x *Coupling) GetEfferent() int32 { return 0 } -func (x *Coupling) GetInstability() float32 { +func (x *Coupling) GetInstability() float64 { if x != nil { return x.Instability } diff --git a/src/Pkg/Cleaner/cleaner.go b/src/Pkg/Cleaner/cleaner.go index 61adc6b..77a8209 100644 --- a/src/Pkg/Cleaner/cleaner.go +++ b/src/Pkg/Cleaner/cleaner.go @@ -7,13 +7,13 @@ import ( ) var ( - defaultfloat32 float32 = 0 - defaultFloat32 float32 = 0 + defaultfloat64 float64 = 0 + defaultFloat64 float64 = 0 ) // The CleanVal removes all NaN values from any value -// and sets them to the default float32 value, which is 0. 
-// For float32 values, it also sets them to 0. +// and sets them to the default float64 value, which is 0. +// For float64 values, it also sets them to 0. // // This function accepts a pointer because it needs // to modify the provided value. @@ -60,13 +60,13 @@ func cleanSlice(v reflect.Value) { func cleanField(field reflect.Value) { switch field.Kind() { - case reflect.Float32: + case reflect.Float64: f := field.Float() isInvalidAndCanSet := field.CanSet() && (math.IsNaN(f) || math.IsInf(f, 0)) if !isInvalidAndCanSet { return } - field.Set(reflect.ValueOf(defaultFloat32)) + field.Set(reflect.ValueOf(defaultFloat64)) } } diff --git a/src/Report/OpenMetricsGenerator_test.go b/src/Report/OpenMetricsGenerator_test.go index 6e785a9..b3d2956 100644 --- a/src/Report/OpenMetricsGenerator_test.go +++ b/src/Report/OpenMetricsGenerator_test.go @@ -46,8 +46,8 @@ func TestGenerateOpenMetricsReports(t *testing.T) { Cloc: proto.Int32(20), }, Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float32(75.5), - MaintainabilityIndexWithoutComments: proto.Float32(70.0), + MaintainabilityIndex: proto.Float64(75.5), + MaintainabilityIndexWithoutComments: proto.Float64(70.0), }, Coupling: &pb.Coupling{ Afferent: *proto.Int32(5), diff --git a/src/Report/types.go b/src/Report/types.go index b5c7e37..4f57fbf 100644 --- a/src/Report/types.go +++ b/src/Report/types.go @@ -16,33 +16,33 @@ type report struct { Loc int `json:"loc,omitempty"` Cloc int `json:"cloc,omitempty"` Lloc int `json:"lloc,omitempty"` - AverageMethodsPerClass float32 `json:"averageMethodsPerClass,omitempty"` - AverageLocPerMethod float32 `json:"averageLocPerMethod,omitempty"` - AverageLlocPerMethod float32 `json:"averageLlocPerMethod,omitempty"` - AverageClocPerMethod float32 `json:"averageClocPerMethod,omitempty"` - AverageCyclomaticComplexityPerMethod float32 `json:"averageCyclomaticComplexityPerMethod,omitempty"` - AverageCyclomaticComplexityPerClass float32 `json:"averageCyclomaticComplexityPerClass,omitempty"` + AverageMethodsPerClass float64 `json:"averageMethodsPerClass,omitempty"` + AverageLocPerMethod float64 `json:"averageLocPerMethod,omitempty"` + AverageLlocPerMethod float64 `json:"averageLlocPerMethod,omitempty"` + AverageClocPerMethod float64 `json:"averageClocPerMethod,omitempty"` + AverageCyclomaticComplexityPerMethod float64 `json:"averageCyclomaticComplexityPerMethod,omitempty"` + AverageCyclomaticComplexityPerClass float64 `json:"averageCyclomaticComplexityPerClass,omitempty"` MinCyclomaticComplexity int `json:"minCyclomaticComplexity,omitempty"` MaxCyclomaticComplexity int `json:"maxCyclomaticComplexity,omitempty"` - AverageHalsteadDifficulty float32 `json:"averageHalsteadDifficulty,omitempty"` - AverageHalsteadEffort float32 `json:"averageHalsteadEffort,omitempty"` - AverageHalsteadVolume float32 `json:"averageHalsteadVolume,omitempty"` - AverageHalsteadTime float32 `json:"averageHalsteadTime,omitempty"` - AverageHalsteadBugs float32 `json:"averageHalsteadBugs,omitempty"` - SumHalsteadDifficulty float32 `json:"sumHalsteadDifficulty,omitempty"` - SumHalsteadEffort float32 `json:"sumHalsteadEffort,omitempty"` - SumHalsteadVolume float32 `json:"sumHalsteadVolume,omitempty"` - SumHalsteadTime float32 `json:"sumHalsteadTime,omitempty"` - SumHalsteadBugs float32 `json:"sumHalsteadBugs,omitempty"` - AverageMI float32 `json:"averageMI,omitempty"` - AverageMIwoc float32 `json:"averageMIwoc,omitempty"` - AverageMIcw float32 `json:"averageMIcw,omitempty"` - AverageMIPerMethod float32 
`json:"averageMIPerMethod,omitempty"` - AverageMIwocPerMethod float32 `json:"averageMIwocPerMethod,omitempty"` - AverageMIcwPerMethod float32 `json:"averageMIcwPerMethod,omitempty"` - AverageAfferentCoupling float32 `json:"averageAfferentCoupling,omitempty"` - AverageEfferentCoupling float32 `json:"averageEfferentCoupling,omitempty"` - AverageInstability float32 `json:"averageInstability,omitempty"` + AverageHalsteadDifficulty float64 `json:"averageHalsteadDifficulty,omitempty"` + AverageHalsteadEffort float64 `json:"averageHalsteadEffort,omitempty"` + AverageHalsteadVolume float64 `json:"averageHalsteadVolume,omitempty"` + AverageHalsteadTime float64 `json:"averageHalsteadTime,omitempty"` + AverageHalsteadBugs float64 `json:"averageHalsteadBugs,omitempty"` + SumHalsteadDifficulty float64 `json:"sumHalsteadDifficulty,omitempty"` + SumHalsteadEffort float64 `json:"sumHalsteadEffort,omitempty"` + SumHalsteadVolume float64 `json:"sumHalsteadVolume,omitempty"` + SumHalsteadTime float64 `json:"sumHalsteadTime,omitempty"` + SumHalsteadBugs float64 `json:"sumHalsteadBugs,omitempty"` + AverageMI float64 `json:"averageMI,omitempty"` + AverageMIwoc float64 `json:"averageMIwoc,omitempty"` + AverageMIcw float64 `json:"averageMIcw,omitempty"` + AverageMIPerMethod float64 `json:"averageMIPerMethod,omitempty"` + AverageMIwocPerMethod float64 `json:"averageMIwocPerMethod,omitempty"` + AverageMIcwPerMethod float64 `json:"averageMIcwPerMethod,omitempty"` + AverageAfferentCoupling float64 `json:"averageAfferentCoupling,omitempty"` + AverageEfferentCoupling float64 `json:"averageEfferentCoupling,omitempty"` + AverageInstability float64 `json:"averageInstability,omitempty"` CommitCountForPeriod int `json:"commitCountForPeriod,omitempty"` CommittedFilesCountForPeriod int `json:"committedFilesCountForPeriod,omitempty"` // for example if one commit concerns 10 files, it will be 10 BusFactor int `json:"busFactor,omitempty"` @@ -79,19 +79,19 @@ type complexity struct { } type risk struct { - Score float32 `json:"score,omitempty"` // score of risk. Lower is better + Score float64 `json:"score,omitempty"` // score of risk. 
Lower is better } type coupling struct { Afferent int32 `json:"afferent,omitempty"` // number of classes that depends on this class Efferent int32 `json:"efferent,omitempty"` // number of classes that this class depends on - Instability float32 `json:"instability,omitempty"` // instability of the class + Instability float64 `json:"instability,omitempty"` // instability of the class } type maintainability struct { - MaintainabilityIndex float32 `json:"maintainabilityIndex,omitempty"` - MaintainabilityIndexWithoutComments float32 `json:"maintainabilityIndexWithoutComments,omitempty"` - CommentWeight float32 `json:"commentWeight,omitempty"` + MaintainabilityIndex float64 `json:"maintainabilityIndex,omitempty"` + MaintainabilityIndexWithoutComments float64 `json:"maintainabilityIndexWithoutComments,omitempty"` + CommentWeight float64 `json:"commentWeight,omitempty"` } type volume struct { @@ -100,9 +100,9 @@ type volume struct { Cloc int32 `json:"cloc,omitempty"` HalsteadVocabulary int32 `json:"halsteadVocabulary,omitempty"` HalsteadLength int32 `json:"halsteadLength,omitempty"` - HalsteadVolume float32 `json:"halsteadVolume,omitempty"` - HalsteadDifficulty float32 `json:"halsteadDifficulty,omitempty"` - HalsteadEffort float32 `json:"halsteadEffort,omitempty"` - HalsteadTime float32 `json:"halsteadTime,omitempty"` - HalsteadEstimatedLength float32 `json:"halsteadEstimatedLength,omitempty"` + HalsteadVolume float64 `json:"halsteadVolume,omitempty"` + HalsteadDifficulty float64 `json:"halsteadDifficulty,omitempty"` + HalsteadEffort float64 `json:"halsteadEffort,omitempty"` + HalsteadTime float64 `json:"halsteadTime,omitempty"` + HalsteadEstimatedLength float64 `json:"halsteadEstimatedLength,omitempty"` } diff --git a/src/Ui/ComponentBarchart.go b/src/Ui/ComponentBarchart.go index e27a52e..63005e4 100644 --- a/src/Ui/ComponentBarchart.go +++ b/src/Ui/ComponentBarchart.go @@ -5,7 +5,7 @@ import ( ) type ComponentBarchart struct { - data map[string]float32 + data map[string]float64 height int barWidth int } diff --git a/src/Ui/ComponentBarchartCyclomaticByMethodRepartition.go b/src/Ui/ComponentBarchartCyclomaticByMethodRepartition.go index c678bca..482d842 100644 --- a/src/Ui/ComponentBarchartCyclomaticByMethodRepartition.go +++ b/src/Ui/ComponentBarchartCyclomaticByMethodRepartition.go @@ -15,7 +15,7 @@ type ComponentBarchartCyclomaticByMethodRepartition struct { func (c *ComponentBarchartCyclomaticByMethodRepartition) AsTerminalElement() string { dataOrdered := c.GetData() - data := make(map[string]float32) + data := make(map[string]float64) for _, k := range dataOrdered.Keys() { value, _ := dataOrdered.Get(k) data[k] = value @@ -27,8 +27,8 @@ func (c *ComponentBarchartCyclomaticByMethodRepartition) AsTerminalElement() str return graph.AsTerminalElement() } -func (c *ComponentBarchartCyclomaticByMethodRepartition) GetData() *orderedmap.OrderedMap[string, float32] { - data := orderedmap.NewOrderedMap[string, float32]() +func (c *ComponentBarchartCyclomaticByMethodRepartition) GetData() *orderedmap.OrderedMap[string, float64] { + data := orderedmap.NewOrderedMap[string, float64]() rangeOfLabels := []string{"0-5", "5-20", "> 20"} rangeOfValues := []int32{5, 20, 999999} diff --git a/src/Ui/ComponentBarchartLocByMethodRepartition.go b/src/Ui/ComponentBarchartLocByMethodRepartition.go index 2f8529c..cf5bea0 100644 --- a/src/Ui/ComponentBarchartLocByMethodRepartition.go +++ b/src/Ui/ComponentBarchartLocByMethodRepartition.go @@ -16,7 +16,7 @@ type ComponentBarchartLocByMethodRepartition struct { // 
Render is the method to render the component func (c *ComponentBarchartLocByMethodRepartition) AsTerminalElement() string { dataOrdered := c.GetData() - data := make(map[string]float32) + data := make(map[string]float64) for _, k := range dataOrdered.Keys() { value, _ := dataOrdered.Get(k) data[k] = value @@ -33,8 +33,8 @@ func (c *ComponentBarchartLocByMethodRepartition) AsHtml() string { return Engine.HtmlChartLine(data, "Number of files", "chart-loc-by-method") } -func (c *ComponentBarchartLocByMethodRepartition) GetData() *orderedmap.OrderedMap[string, float32] { - data := orderedmap.NewOrderedMap[string, float32]() +func (c *ComponentBarchartLocByMethodRepartition) GetData() *orderedmap.OrderedMap[string, float64] { + data := orderedmap.NewOrderedMap[string, float64]() rangeOfLabels := []string{"< 15", "< 35", "< 50", "> 50"} rangeOfValues := []int32{15, 35, 50, 999999} diff --git a/src/Ui/ComponentBarchartMaintainabilityIndexRepartition.go b/src/Ui/ComponentBarchartMaintainabilityIndexRepartition.go index 3261623..c7526e2 100644 --- a/src/Ui/ComponentBarchartMaintainabilityIndexRepartition.go +++ b/src/Ui/ComponentBarchartMaintainabilityIndexRepartition.go @@ -22,7 +22,7 @@ func (c *ComponentBarchartMaintainabilityIndexRepartition) AsHtml() string { // Render is the method to render the component func (c *ComponentBarchartMaintainabilityIndexRepartition) AsTerminalElement() string { dataOrdered := c.GetData() - data := make(map[string]float32) + data := make(map[string]float64) for _, k := range dataOrdered.Keys() { value, _ := dataOrdered.Get(k) data[k] = value @@ -33,11 +33,11 @@ func (c *ComponentBarchartMaintainabilityIndexRepartition) AsTerminalElement() s } // GetData returns the data for the barchart -func (c *ComponentBarchartMaintainabilityIndexRepartition) GetData() *orderedmap.OrderedMap[string, float32] { - data := orderedmap.NewOrderedMap[string, float32]() +func (c *ComponentBarchartMaintainabilityIndexRepartition) GetData() *orderedmap.OrderedMap[string, float64] { + data := orderedmap.NewOrderedMap[string, float64]() rangeOfLabels := []string{"🔴 < 64", "🟡 < 85", "🟢 > 85"} - rangeOfValues := []float32{64, 85, 1000} + rangeOfValues := []float64{64, 85, 1000} for _, r := range rangeOfLabels { data.Set(r, 0) } diff --git a/src/Ui/ComponentLineChartGitActivity.go b/src/Ui/ComponentLineChartGitActivity.go index 9fec44c..55e6a0c 100644 --- a/src/Ui/ComponentLineChartGitActivity.go +++ b/src/Ui/ComponentLineChartGitActivity.go @@ -18,7 +18,7 @@ type ComponentLineChartGitActivity struct { // Render is the method to render the component func (c *ComponentLineChartGitActivity) AsTerminalElement() string { dataOrdered := c.GetData() - data := make(map[string]float32) + data := make(map[string]float64) for _, k := range dataOrdered.Keys() { value, _ := dataOrdered.Get(k) data[k] = value @@ -35,9 +35,9 @@ func (c *ComponentLineChartGitActivity) AsHtml() string { return Engine.HtmlChartArea(data, "Number of commits", "chart-git") } -func (c *ComponentLineChartGitActivity) GetData() *orderedmap.OrderedMap[string, float32] { - //data := make(map[string]float32)* - data := orderedmap.NewOrderedMap[string, float32]() +func (c *ComponentLineChartGitActivity) GetData() *orderedmap.OrderedMap[string, float64] { + //data := make(map[string]float64)* + data := orderedmap.NewOrderedMap[string, float64]() // 1 year ago oneYearAgo := time.Now().AddDate(-1, 0, 0) From 7ac7d48bbefe95e31907cabb5c0f4b142b169c8b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20L=C3=A9pine?= Date: Wed, 
27 Nov 2024 07:34:21 +0100 Subject: [PATCH 08/16] deal with concurrency issue in tests --- src/Analyzer/Aggregator.go | 18 ++-- src/Analyzer/Aggregator_test.go | 117 ++++++++++++++------------ src/Analyzer/AstAnalyzer.go | 6 +- src/Report/MarkdownReportGenerator.go | 4 +- 4 files changed, 82 insertions(+), 63 deletions(-) diff --git a/src/Analyzer/Aggregator.go b/src/Analyzer/Aggregator.go index e88e89c..aab49d7 100644 --- a/src/Analyzer/Aggregator.go +++ b/src/Analyzer/Aggregator.go @@ -356,6 +356,7 @@ func (r *Aggregator) consolidate(aggregated *Aggregated) { aggregated.Lloc = 0 var wg sync.WaitGroup + var wgByCpu sync.WaitGroup var mu sync.Mutex numWorkers := runtime.NumCPU() filesChan := make(chan *pb.File, numWorkers) @@ -363,7 +364,12 @@ func (r *Aggregator) consolidate(aggregated *Aggregated) { reg := regexp.MustCompile("[^A-Za-z0-9.]+") for i := 0; i < numWorkers; i++ { + + wgByCpu.Add(1) + go func() { + defer wgByCpu.Done() + for file := range filesChan { wg.Add(1) @@ -404,12 +410,12 @@ func (r *Aggregator) consolidate(aggregated *Aggregated) { } averageForFile = averageForFile / float64(len(methods)) localFile.Stmts.Analyze.Maintainability.MaintainabilityIndex = &averageForFile - } - // Update the original file with processed data - mu.Lock() - file.Stmts = localFile.Stmts - mu.Unlock() + // Update the original file with processed data + mu.Lock() + file.Stmts = localFile.Stmts + mu.Unlock() + } // LOC of file is the sum of all classes and methods // That's useful when we navigate over the files instead of the classes @@ -555,6 +561,8 @@ func (r *Aggregator) consolidate(aggregated *Aggregated) { } wg.Wait() + close(filesChan) + wgByCpu.Wait() // Consolidate aggregated.AverageInstability = aggregated.AverageInstability / float64(aggregated.NbClasses) diff --git a/src/Analyzer/Aggregator_test.go b/src/Analyzer/Aggregator_test.go index f548a6f..ec4c27a 100644 --- a/src/Analyzer/Aggregator_test.go +++ b/src/Analyzer/Aggregator_test.go @@ -80,69 +80,76 @@ func TestConsolidate(t *testing.T) { } func TestCalculate(t *testing.T) { - aggregator := Aggregator{} - stmts := pb.Stmts{ - StmtFunction: []*pb.StmtFunction{ - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Complexity: &pb.Complexity{ - Cyclomatic: proto.Int32(10), + + t.Run("TestCalculate", func(t *testing.T) { + aggregator := Aggregator{} + stmts := pb.Stmts{ + StmtFunction: []*pb.StmtFunction{ + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Complexity: &pb.Complexity{ + Cyclomatic: proto.Int32(10), + }, }, }, }, - }, - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Complexity: &pb.Complexity{ - Cyclomatic: proto.Int32(20), + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Complexity: &pb.Complexity{ + Cyclomatic: proto.Int32(20), + }, }, }, }, }, - }, - StmtClass: []*pb.StmtClass{ - {}, {}, {}, - }, - Analyze: &pb.Analyze{ - Volume: &pb.Volume{ - Loc: proto.Int32(100), - Cloc: proto.Int32(200), - Lloc: proto.Int32(300), + StmtClass: []*pb.StmtClass{ + {}, {}, {}, }, - }, - } - file := pb.File{ - Stmts: &stmts, - } - aggregated := Aggregated{} - aggregator.calculateSums(&file, &aggregated) - aggregator.consolidate(&aggregated) - - if aggregated.NbMethods != 2 { - t.Errorf("Expected 2, got %d", aggregated.NbMethods) - } - - if aggregated.NbClasses != 3 { - t.Errorf("Expected 3 classes, got %d", aggregated.NbClasses) - } - - if aggregated.AverageCyclomaticComplexityPerMethod != 15 { - t.Errorf("Expected AverageCyclomaticComplexityPerMethod, got %f", aggregated.AverageCyclomaticComplexityPerMethod) - } - - if aggregated.Loc != 
100 { - t.Errorf("Expected 100, got %d", aggregated.Loc) - } - - if aggregated.Cloc != 200 { - t.Errorf("Expected 200, got %d", aggregated.Cloc) - } - - if aggregated.Lloc != 300 { - t.Errorf("Expected 300, got %d", aggregated.Lloc) - } + Analyze: &pb.Analyze{ + Volume: &pb.Volume{ + Loc: proto.Int32(100), + Cloc: proto.Int32(200), + Lloc: proto.Int32(300), + }, + }, + } + file := pb.File{ + Stmts: &stmts, + Path: "test.foo", + } + aggregated := Aggregated{} + aggregator.calculateSums(&file, &aggregated) + aggregated.ConcernedFiles = []*pb.File{ + &file, + } + aggregator.consolidate(&aggregated) + + if aggregated.NbMethods != 2 { + t.Errorf("Expected 2, got %d", aggregated.NbMethods) + } + + if aggregated.NbClasses != 3 { + t.Errorf("Expected 3 classes, got %d", aggregated.NbClasses) + } + + if aggregated.AverageCyclomaticComplexityPerMethod != 15 { + t.Errorf("Expected AverageCyclomaticComplexityPerMethod, got %f", aggregated.AverageCyclomaticComplexityPerMethod) + } + + if aggregated.Loc != 100 { + t.Errorf("Expected 100, got %d", aggregated.Loc) + } + + if aggregated.Cloc != 200 { + t.Errorf("Expected 200, got %d", aggregated.Cloc) + } + + if aggregated.Lloc != 300 { + t.Errorf("Expected 300, got %d", aggregated.Lloc) + } + }) } func TestAggregates(t *testing.T) { diff --git a/src/Analyzer/AstAnalyzer.go b/src/Analyzer/AstAnalyzer.go index 515fe02..cc98748 100644 --- a/src/Analyzer/AstAnalyzer.go +++ b/src/Analyzer/AstAnalyzer.go @@ -26,13 +26,13 @@ func Start(workdir *Storage.Workdir, progressbar *pterm.SpinnerPrinter) []*pb.Fi // Wait for end of all goroutines var wg sync.WaitGroup + var wgByCpu sync.WaitGroup // store results // channel should have value // https://stackoverflow.com/questions/58743038/why-does-this-goroutine-not-call-wg-done channelResult := make(chan *pb.File, len(astFiles)) - nbParsingFiles := 0 // analyze each AST file running the runAnalysis function numWorkers := runtime.NumCPU() @@ -40,7 +40,9 @@ func Start(workdir *Storage.Workdir, progressbar *pterm.SpinnerPrinter) []*pb.Fi filesChan := make(chan string, numWorkers) for i := 0; i < numWorkers; i++ { + wgByCpu.Add(1) go func() { + defer wgByCpu.Done() for file := range filesChan { mu.Lock() nbParsingFiles++ @@ -62,6 +64,8 @@ func Start(workdir *Storage.Workdir, progressbar *pterm.SpinnerPrinter) []*pb.Fi } wg.Wait() + close(filesChan) + wgByCpu.Wait() if progressbar != nil { progressbar.Info("AST Analysis finished") } diff --git a/src/Report/MarkdownReportGenerator.go b/src/Report/MarkdownReportGenerator.go index a854539..98cd3bf 100644 --- a/src/Report/MarkdownReportGenerator.go +++ b/src/Report/MarkdownReportGenerator.go @@ -165,9 +165,9 @@ func (v *MarkdownReportGenerator) RegisterFilters() { // format it if number > 1000000 { - return pongo2.AsValue(fmt.Sprintf("%.1f M", number/1000000)), nil + return pongo2.AsValue(fmt.Sprintf("%.1f M", float64(number)/1000000)), nil } else if number > 1000 { - return pongo2.AsValue(fmt.Sprintf("%.1f K", number/1000)), nil + return pongo2.AsValue(fmt.Sprintf("%.1f K", float64(number)/1000)), nil } return pongo2.AsValue(number), nil From 0a374e1b200b549d865148de244766fcdc51a14b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20L=C3=A9pine?= Date: Wed, 27 Nov 2024 07:47:10 +0100 Subject: [PATCH 09/16] fix race issue --- proto/NodeType.proto | 20 +- src/Analyzer/Aggregator copy.go.php | 996 ++++++++++++++++ src/Analyzer/Aggregator.go | 1040 +++++++++-------- src/Analyzer/Aggregator_test.go | 555 ++++----- src/Analyzer/AstAnalyzer.go | 30 +- 
src/Analyzer/Comparator.go | 59 +- src/Analyzer/Volume/HalsteadMetricsVisitor.go | 34 +- src/Cli/ComponentStatisticsOverview.go | 6 +- src/Cli/ScreenSummary.go | 14 +- src/Engine/NodeTypeEnsurer.go | 38 + src/NodeType/NodeType.pb.go | 40 +- src/Report/HtmlReportGenerator.go | 4 + src/Report/JsonReportGenerator.go | 60 +- src/Report/templates/html/index.html | 14 +- 14 files changed, 1974 insertions(+), 936 deletions(-) create mode 100644 src/Analyzer/Aggregator copy.go.php create mode 100644 src/Engine/NodeTypeEnsurer.go diff --git a/proto/NodeType.proto b/proto/NodeType.proto index 759bc1b..b7e4e68 100644 --- a/proto/NodeType.proto +++ b/proto/NodeType.proto @@ -209,16 +209,16 @@ message Volume { optional int32 cloc = 3; optional int32 halsteadVocabulary = 4; optional int32 halsteadLength = 5; - optional float halsteadVolume = 6; - optional float halsteadDifficulty = 7; - optional float halsteadEffort = 8; - optional float halsteadTime = 9; - optional float halsteadEstimatedLength = 10; + optional double halsteadVolume = 6; + optional double halsteadDifficulty = 7; + optional double halsteadEffort = 8; + optional double halsteadTime = 9; + optional double halsteadEstimatedLength = 10; } message Maintainability { - optional float maintainabilityIndex = 1; - optional float maintainabilityIndexWithoutComments = 2; - optional float commentWeight = 3; + optional double maintainabilityIndex = 1; + optional double maintainabilityIndexWithoutComments = 2; + optional double commentWeight = 3; } // ------------------------------------ @@ -239,7 +239,7 @@ message Commit { // -- Risk // ------------------------------------ message Risk { - float score = 1; // score of risk. Lower is better + double score = 1; // score of risk. Lower is better } // ------------------------------------ @@ -248,5 +248,5 @@ message Risk { message Coupling { int32 afferent = 1; // number of classes that depends on this class int32 efferent = 2; // number of classes that this class depends on - float instability = 3; // instability of the class + double instability = 3; // instability of the class } \ No newline at end of file diff --git a/src/Analyzer/Aggregator copy.go.php b/src/Analyzer/Aggregator copy.go.php new file mode 100644 index 0000000..77473f6 --- /dev/null +++ b/src/Analyzer/Aggregator copy.go.php @@ -0,0 +1,996 @@ +package AnalyzerOld + +import ( + "math" + "os" + "regexp" + "runtime" + "sync" + + "github.com/halleck45/ast-metrics/src/Engine" + pb "github.com/halleck45/ast-metrics/src/NodeType" + "github.com/halleck45/ast-metrics/src/Scm" +) + +type ProjectAggregated struct { + ByFile Aggregated + ByClass Aggregated + Combined Aggregated + ByProgrammingLanguage map[string]Aggregated + ErroredFiles []*pb.File + Evaluation *EvaluationResult + Comparaison *ProjectComparaison +} + +type AggregateResult struct { + Sum float64 + Min float64 + Max float64 + Avg float64 + Counter int +} + +func NewAggregateResult() AggregateResult { + return AggregateResult{ + Sum: 0, + Min: 0, + Max: 0, + Avg: 0, + Counter: 0, + } +} + +type Aggregated struct { + ProgrammingLanguages map[string]int + ConcernedFiles []*pb.File + Comparaison *Comparaison + // hashmap of classes, just with the qualified name, used for afferent coupling calculation + ClassesAfferentCoupling map[string]int + NbFiles int + NbFunctions int + NbClasses int + NbClassesWithCode int + NbMethods int + Loc AggregateResult + Cloc AggregateResult + Lloc AggregateResult + MethodsPerClass AggregateResult + LocPerClass AggregateResult + LocPerMethod AggregateResult + 
ClocPerMethod AggregateResult + CyclomaticComplexityPerMethod AggregateResult + CyclomaticComplexityPerClass AggregateResult + HalsteadEffort AggregateResult + HalsteadVolume AggregateResult + HalsteadTime AggregateResult + HalsteadBugs AggregateResult + MaintainabilityIndex AggregateResult + MaintainabilityIndexWithoutComments AggregateResult + MaintainabilityCommentWeight AggregateResult + Instability AggregateResult + EfferentCoupling AggregateResult + AfferentCoupling AggregateResult + MaintainabilityPerMethod AggregateResult + MaintainabilityPerMethodWithoutComments AggregateResult + MaintainabilityCommentWeightPerMethod AggregateResult + CommitCountForPeriod int + CommittedFilesCountForPeriod int + BusFactor int + TopCommitters []TopCommitter + ResultOfGitAnalysis []ResultOfGitAnalysis + PackageRelations map[string]map[string]int // counter of dependencies. Ex: A -> B -> 2 +} + +type ProjectComparaison struct { + ByFile Comparaison + ByClass Comparaison + Combined Comparaison + ByProgrammingLanguage map[string]Comparaison +} + +type Aggregator struct { + files []*pb.File + projectAggregated ProjectAggregated + analyzers []AggregateAnalyzer + gitSummaries []ResultOfGitAnalysis + ComparedFiles []*pb.File + ComparedBranch string +} + +type TopCommitter struct { + Name string + Count int +} + +type ResultOfGitAnalysis struct { + ProgrammingLanguage string + ReportRootDir string + CountCommits int + CountCommiters int + CountCommitsForLanguage int + CountCommitsIgnored int + GitRepository Scm.GitRepository +} + +func NewAggregator(files []*pb.File, gitSummaries []ResultOfGitAnalysis) *Aggregator { + return &Aggregator{ + files: files, + gitSummaries: gitSummaries, + } +} + +type AggregateAnalyzer interface { + Calculate(aggregate *Aggregated) +} + +func newAggregated() Aggregated { + return Aggregated{ + ProgrammingLanguages: make(map[string]int), + ConcernedFiles: make([]*pb.File, 0), + ClassesAfferentCoupling: make(map[string]int), + NbClasses: 0, + NbClassesWithCode: 0, + NbMethods: 0, + NbFunctions: 0, + Loc: NewAggregateResult(), + MethodsPerClass: NewAggregateResult(), + LocPerClass: NewAggregateResult(), + LocPerMethod: NewAggregateResult(), + ClocPerMethod: NewAggregateResult(), + CyclomaticComplexityPerMethod: NewAggregateResult(), + CyclomaticComplexityPerClass: NewAggregateResult(), + HalsteadEffort: NewAggregateResult(), + HalsteadVolume: NewAggregateResult(), + HalsteadTime: NewAggregateResult(), + HalsteadBugs: NewAggregateResult(), + MaintainabilityIndex: NewAggregateResult(), + MaintainabilityIndexWithoutComments: NewAggregateResult(), + MaintainabilityCommentWeight: NewAggregateResult(), + Instability: NewAggregateResult(), + EfferentCoupling: NewAggregateResult(), + AfferentCoupling: NewAggregateResult(), + MaintainabilityPerMethod: NewAggregateResult(), + MaintainabilityPerMethodWithoutComments: NewAggregateResult(), + MaintainabilityCommentWeightPerMethod: NewAggregateResult(), + CommitCountForPeriod: 0, + CommittedFilesCountForPeriod: 0, + BusFactor: 0, + TopCommitters: make([]TopCommitter, 0), + ResultOfGitAnalysis: nil, + PackageRelations: make(map[string]map[string]int), + } +} + +func (r *Aggregator) Aggregates() ProjectAggregated { + + // We create a new aggregated object for each type of aggregation + r.projectAggregated = r.executeAggregationOnFiles(r.files) + + // Do the same for the comparaison files (if needed) + if r.ComparedFiles != nil { + comparaidAggregated := r.executeAggregationOnFiles(r.ComparedFiles) + + // Compare + comparaison := 
ProjectComparaison{} + comparator := NewComparator(r.ComparedBranch) + comparaison.Combined = comparator.Compare(r.projectAggregated.Combined, comparaidAggregated.Combined) + r.projectAggregated.Combined.Comparaison = &comparaison.Combined + + comparaison.ByClass = comparator.Compare(r.projectAggregated.ByClass, comparaidAggregated.ByClass) + r.projectAggregated.ByClass.Comparaison = &comparaison.ByClass + + comparaison.ByFile = comparator.Compare(r.projectAggregated.ByFile, comparaidAggregated.ByFile) + r.projectAggregated.ByFile.Comparaison = &comparaison.ByFile + + // By language + comparaison.ByProgrammingLanguage = make(map[string]Comparaison) + for lng, byLanguage := range r.projectAggregated.ByProgrammingLanguage { + if _, ok := comparaidAggregated.ByProgrammingLanguage[lng]; !ok { + continue + } + c := comparator.Compare(byLanguage, comparaidAggregated.ByProgrammingLanguage[lng]) + comparaison.ByProgrammingLanguage[lng] = c + + // assign to the original object (slow, but otherwise we need to change the whole structure ByProgrammingLanguage map) + // @see https://stackoverflow.com/questions/42605337/cannot-assign-to-struct-field-in-a-map + // Feel free to change this + entry := r.projectAggregated.ByProgrammingLanguage[lng] + entry.Comparaison = &c + r.projectAggregated.ByProgrammingLanguage[lng] = entry + } + r.projectAggregated.Comparaison = &comparaison + } + + return r.projectAggregated +} + +func (r *Aggregator) executeAggregationOnFiles(files []*pb.File) ProjectAggregated { + + // do the sums. Group files by number of processors + var wg sync.WaitGroup + numberOfProcessors := runtime.NumCPU() + + // Split the files into chunks + chunkSize := len(files) / numberOfProcessors + chunks := make([][]*pb.File, numberOfProcessors) + for i := 0; i < numberOfProcessors; i++ { + start := i * chunkSize + end := start + chunkSize + if i == numberOfProcessors-1 { + end = len(files) + } + chunks[i] = files[start:end] + } + + // Prepare results + aggregateByFileChunk := newAggregated() + aggregateByClassChunk := newAggregated() + + // for each programming language, we create a separeted result + aggregateByLanguageChunk := make(map[string]Aggregated) + for _, file := range files { + if file.ProgrammingLanguage == "" { + continue + } + if _, ok := aggregateByLanguageChunk[file.ProgrammingLanguage]; !ok { + aggregateByLanguageChunk[file.ProgrammingLanguage] = newAggregated() + } + } + + // Create channels for the results + resultsByClass := make(chan *Aggregated, numberOfProcessors) + resultsByFile := make(chan *Aggregated, numberOfProcessors) + resultsByProgrammingLanguage := make(chan map[string]Aggregated, numberOfProcessors) + + // Process each chunk of files + chunkIndex := 0 + for i := 0; i < numberOfProcessors; i++ { + + wg.Add(1) + + // Reduce results : we want to get sums, and to count calculated values into a AggregateResult + go func(files []*pb.File) { + defer wg.Done() + + // the process deal with its own chunk + for _, file := range files { + aggregateByFileChunk = r.mapSums(file, aggregateByFileChunk) + aggregateByClassChunk = r.mapSums(file, aggregateByClassChunk) + aggregateByLanguageChunk[file.ProgrammingLanguage] = r.mapSums(file, aggregateByLanguageChunk[file.ProgrammingLanguage]) + } + + // Send the result to the channels + resultsByClass <- aggregateByClassChunk + resultsByFile <- aggregateByFileChunk + resultsByProgrammingLanguage <- aggregateByLanguageChunk + + }(chunks[chunkIndex]) + chunkIndex++ + } + + wg.Wait() + close(resultsByClass) + close(resultsByFile) + 
close(resultsByProgrammingLanguage) + + // Now we have chunk of sums. We want to reduce its into a single object + aggregatedByClass := newAggregated() + for chunk := range resultsByClass { + aggregatedByClass = r.calculateSums(aggregatedByClass, chunk) + } + + // @todo : en parallèle, et traiter les par fichier, par programming language + + // Now we have sums. We want to reduce metrics and get the averages + + // en parallèle, on traite le cas la complexité cyclomatique + // group 1: 4, 4, 5 + // group 2: 6, 1, 1 + + // sum = 0 + // for each group + // sum += group + + wg.Wait() + os.Exit(0) + + // We create a new aggregated object for each type of aggregation + // ByFile, ByClass, Combined + projectAggregated := ProjectAggregated{} + projectAggregated.ByFile = newAggregated() + projectAggregated.ByClass = newAggregated() + projectAggregated.Combined = newAggregated() + + // Count files + projectAggregated.ByClass.NbFiles = len(files) + projectAggregated.ByFile.NbFiles = len(files) + projectAggregated.Combined.NbFiles = len(files) + + // Prepare errors + projectAggregated.ErroredFiles = make([]*pb.File, 0) + + for _, file := range files { + + // Files with errors + if file.Errors != nil && len(file.Errors) > 0 { + projectAggregated.ErroredFiles = append(projectAggregated.ErroredFiles, file) + } + + if file.Stmts == nil { + continue + } + + // By language + if projectAggregated.ByProgrammingLanguage == nil { + projectAggregated.ByProgrammingLanguage = make(map[string]Aggregated) + } + if _, ok := projectAggregated.ByProgrammingLanguage[file.ProgrammingLanguage]; !ok { + projectAggregated.ByProgrammingLanguage[file.ProgrammingLanguage] = newAggregated() + + } + byLanguage := projectAggregated.ByProgrammingLanguage[file.ProgrammingLanguage] + byLanguage.NbFiles++ + + // Prepare structure of file, in orger to avoid to checking its type in the future + + // Make calculations: sums of metrics, etc. 
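// The block below fans the same file out to four goroutines, one per aggregate
// view (ByFile, ByClass, Combined and the per-language bucket), and joins them
// with a sync.WaitGroup before the per-language entry is written back. A more
// compact sketch of the same fan-out, using the calculateSums(file *pb.File,
// agg *Aggregated) signature defined later in this file, could look like:
//
//	var wg sync.WaitGroup
//	for _, agg := range []*Aggregated{&projectAggregated.ByFile, &projectAggregated.ByClass, &projectAggregated.Combined, &byLanguage} {
//		wg.Add(1)
//		go func(a *Aggregated) {
//			defer wg.Done()
//			r.calculateSums(file, a)
//		}(agg)
//	}
//	wg.Wait()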
+ var wg sync.WaitGroup + wg.Add(4) + + go func() { + defer wg.Done() + localFile := file + r.calculateSums(localFile, &projectAggregated.ByFile) + }() + + go func() { + defer wg.Done() + localFile := file + r.calculateSums(localFile, &projectAggregated.ByClass) + }() + + go func() { + defer wg.Done() + localFile := file + r.calculateSums(localFile, &projectAggregated.Combined) + }() + + go func() { + defer wg.Done() + localFile := file + r.calculateSums(localFile, &byLanguage) + }() + + wg.Wait() + projectAggregated.ByProgrammingLanguage[file.ProgrammingLanguage] = byLanguage + } + + // Consolidate averages + var wg sync.WaitGroup + wg.Add(3) + + go func() { + defer wg.Done() + r.consolidate(&projectAggregated.ByFile) + }() + + go func() { + defer wg.Done() + r.consolidate(&projectAggregated.ByClass) + }() + + go func() { + defer wg.Done() + r.consolidate(&projectAggregated.Combined) + }() + + // by language + wg.Add(len(projectAggregated.ByProgrammingLanguage)) + for lng, byLanguage := range projectAggregated.ByProgrammingLanguage { + go func(language string, langAggregated Aggregated) { + defer wg.Done() + r.consolidate(&langAggregated) + projectAggregated.ByProgrammingLanguage[language] = langAggregated + }(lng, byLanguage) + } + wg.Wait() + + // Risks + riskAnalyzer := NewRiskAnalyzer() + riskAnalyzer.Analyze(projectAggregated) + + return projectAggregated +} + +// Consolidate the aggregated data +func (r *Aggregator) consolidate(aggregated *Aggregated) { + + if aggregated.NbClasses > 0 { + aggregated.AverageMethodsPerClass = float64(aggregated.NbMethods) / float64(aggregated.NbClasses) + aggregated.AverageCyclomaticComplexityPerClass = aggregated.AverageCyclomaticComplexityPerClass / float64(aggregated.NbClasses) + } else { + aggregated.AverageMethodsPerClass = 0 + aggregated.AverageCyclomaticComplexityPerClass = 0 + } + + if aggregated.AverageMI > 0 { + aggregated.AverageMI = aggregated.AverageMI / float64(aggregated.NbClasses) + aggregated.AverageMIwoc = aggregated.AverageMIwoc / float64(aggregated.NbClasses) + aggregated.AverageMIcw = aggregated.AverageMIcw / float64(aggregated.NbClasses) + } + + if aggregated.AverageInstability > 0 { + aggregated.AverageEfferentCoupling = aggregated.AverageEfferentCoupling / float64(aggregated.NbClasses) + aggregated.AverageAfferentCoupling = aggregated.AverageAfferentCoupling / float64(aggregated.NbClasses) + } + + if aggregated.NbMethods > 0 { + aggregated.AverageLocPerMethod = aggregated.AverageLocPerMethod / float64(aggregated.NbMethods) + aggregated.AverageClocPerMethod = aggregated.AverageClocPerMethod / float64(aggregated.NbMethods) + aggregated.AverageLlocPerMethod = aggregated.AverageLlocPerMethod / float64(aggregated.NbMethods) + aggregated.AverageCyclomaticComplexityPerMethod = aggregated.AverageCyclomaticComplexityPerMethod / float64(aggregated.NbMethods) + aggregated.AverageMIPerMethod = aggregated.AverageMIPerMethod / float64(aggregated.NbMethods) + aggregated.AverageMIwocPerMethod = aggregated.AverageMIwocPerMethod / float64(aggregated.NbMethods) + aggregated.AverageMIcwPerMethod = aggregated.AverageMIcwPerMethod / float64(aggregated.NbMethods) + aggregated.AverageHalsteadDifficulty = aggregated.AverageHalsteadDifficulty / float64(aggregated.NbClasses) + aggregated.AverageHalsteadEffort = aggregated.AverageHalsteadEffort / float64(aggregated.NbClasses) + aggregated.AverageHalsteadVolume = aggregated.AverageHalsteadVolume / float64(aggregated.NbClasses) + aggregated.AverageHalsteadTime = aggregated.AverageHalsteadTime / 
float64(aggregated.NbClasses) + aggregated.AverageHalsteadBugs = aggregated.AverageHalsteadBugs / float64(aggregated.NbClasses) + } + + // if langage without classes + if aggregated.NbClasses == 0 { + aggregated.AverageMI = aggregated.AverageMIPerMethod + aggregated.AverageMIwoc = aggregated.AverageMIwocPerMethod + aggregated.AverageMIcw = aggregated.AverageMIcwPerMethod + aggregated.AverageInstability = 0 + aggregated.AverageEfferentCoupling = 0 + aggregated.AverageAfferentCoupling = 0 + } + + // Total locs: increment loc of each file + aggregated.Loc = 0 + aggregated.Cloc = 0 + aggregated.Lloc = 0 + + reg := regexp.MustCompile("[^A-Za-z0-9.]+") + + for _, file := range aggregated.ConcernedFiles { + + if file.LinesOfCode == nil { + return + } + + aggregated.Loc += int(file.LinesOfCode.LinesOfCode) + aggregated.Cloc += int(file.LinesOfCode.CommentLinesOfCode) + aggregated.Lloc += int(file.LinesOfCode.LogicalLinesOfCode) + + // Create local variables for file processing + localFile := &pb.File{ + Stmts: file.Stmts, + } + + // Calculate alternate MI using average MI per method when file has no class + if len(localFile.Stmts.StmtClass) == 0 { + if localFile.Stmts.Analyze.Maintainability == nil { + localFile.Stmts.Analyze.Maintainability = &pb.Maintainability{} + } + + methods := file.Stmts.StmtFunction + if len(methods) == 0 { + return + } + averageForFile := float64(0) + for _, method := range methods { + if method.Stmts.Analyze == nil || method.Stmts.Analyze.Maintainability == nil { + continue + } + averageForFile += float64(*method.Stmts.Analyze.Maintainability.MaintainabilityIndex) + } + averageForFile = averageForFile / float64(len(methods)) + localFile.Stmts.Analyze.Maintainability.MaintainabilityIndex = &averageForFile + + // Update the original file with processed data + file.Stmts = localFile.Stmts + } + + // LOC of file is the sum of all classes and methods + // That's useful when we navigate over the files instead of the classes + zero := int32(0) + loc := int32(0) + lloc := int32(0) + cloc := int32(0) + + if file.Stmts.Analyze.Volume == nil { + file.Stmts.Analyze.Volume = &pb.Volume{ + Lloc: &zero, + Cloc: &zero, + Loc: &zero, + } + } + + classes := Engine.GetClassesInFile(file) + functions := file.Stmts.StmtFunction + + // Initialize file complexity if needed + if file.Stmts.Analyze.Complexity.Cyclomatic == nil { + file.Stmts.Analyze.Complexity.Cyclomatic = &zero + } + + // Process functions + for _, function := range functions { + // Handle LOC + if function.LinesOfCode != nil { + loc += function.LinesOfCode.LinesOfCode + lloc += function.LinesOfCode.LogicalLinesOfCode + cloc += function.LinesOfCode.CommentLinesOfCode + } + + // Handle complexity + if function.Stmts.Analyze != nil && function.Stmts.Analyze.Complexity != nil { + *file.Stmts.Analyze.Complexity.Cyclomatic += *function.Stmts.Analyze.Complexity.Cyclomatic + } + } + + // Process classes + for _, class := range classes { + // Handle LOC + if class.LinesOfCode != nil { + loc += class.LinesOfCode.LinesOfCode + lloc += class.LinesOfCode.LogicalLinesOfCode + cloc += class.LinesOfCode.CommentLinesOfCode + } + + // Handle coupling + if class.Stmts != nil && class.Stmts.Analyze != nil { + if class.Stmts.Analyze.Coupling == nil { + class.Stmts.Analyze.Coupling = &pb.Coupling{ + Efferent: 0, + Afferent: 0, + } + } + class.Stmts.Analyze.Coupling.Afferent = 0 + + if class.Name != nil { + // if in hashmap + if _, ok := aggregated.ClassesAfferentCoupling[class.Name.Qualified]; ok { + class.Stmts.Analyze.Coupling.Afferent = 
int32(aggregated.ClassesAfferentCoupling[class.Name.Qualified]) + file.Stmts.Analyze.Coupling.Afferent += class.Stmts.Analyze.Coupling.Afferent + } + + // instability + if class.Stmts.Analyze.Coupling.Afferent > 0 || class.Stmts.Analyze.Coupling.Efferent > 0 { + instability := float64(class.Stmts.Analyze.Coupling.Efferent) / float64(class.Stmts.Analyze.Coupling.Efferent+class.Stmts.Analyze.Coupling.Afferent) + class.Stmts.Analyze.Coupling.Instability = instability + aggregated.AverageInstability += instability + } + } + } + } + + file.Stmts.Analyze.Volume.Loc = &loc + file.Stmts.Analyze.Volume.Lloc = &lloc + file.Stmts.Analyze.Volume.Cloc = &cloc + + dependencies := file.Stmts.StmtExternalDependencies + + for _, dependency := range dependencies { + if dependency == nil { + continue + } + + namespaceTo := dependency.Namespace + namespaceFrom := dependency.From + + if namespaceFrom == "" || namespaceTo == "" { + continue + } + + // Keep only 2 levels in namespace + separator := reg.FindString(namespaceFrom) + parts := reg.Split(namespaceTo, -1) + if len(parts) > 2 { + namespaceTo = parts[0] + separator + parts[1] + } + + if namespaceFrom == "" || namespaceTo == "" { + continue + } + + parts = reg.Split(namespaceFrom, -1) + if len(parts) > 2 { + namespaceFrom = parts[0] + separator + parts[1] + } + + // if same, continue + if namespaceFrom == namespaceTo { + continue + } + + // if root namespace, continue + if namespaceFrom == "" || namespaceTo == "" { + continue + } + + // create the map if not exists + if _, ok := aggregated.PackageRelations[namespaceFrom]; !ok { + aggregated.PackageRelations[namespaceFrom] = make(map[string]int) + } + + if _, ok := aggregated.PackageRelations[namespaceFrom][namespaceTo]; !ok { + aggregated.PackageRelations[namespaceFrom][namespaceTo] = 0 + } + + // increment the counter + aggregated.PackageRelations[namespaceFrom][namespaceTo]++ + } + } + + // Consolidate + aggregated.AverageInstability = aggregated.AverageInstability / float64(aggregated.NbClasses) + + // Count commits for the period based on `ResultOfGitAnalysis` data + aggregated.ResultOfGitAnalysis = r.gitSummaries + if aggregated.ResultOfGitAnalysis != nil { + for _, result := range aggregated.ResultOfGitAnalysis { + aggregated.CommitCountForPeriod += result.CountCommitsForLanguage + } + } + + // Bus factor and other metrics based on aggregated data + for _, analyzer := range r.analyzers { + analyzer.Calculate(aggregated) + } +} + +// Add an analyzer to the aggregator +// You can add multiple analyzers. 
See the example of RiskAnalyzer +func (r *Aggregator) WithAggregateAnalyzer(analyzer AggregateAnalyzer) { + r.analyzers = append(r.analyzers, analyzer) +} + +func (r *Aggregator) WithComparaison(allResultsCloned []*pb.File, comparedBranch string) { + r.ComparedFiles = allResultsCloned + r.ComparedBranch = comparedBranch +} + +// Calculate the aggregated data +func (r *Aggregator) calculateSums(file *pb.File, specificAggregation *Aggregated) { + classes := Engine.GetClassesInFile(file) + functions := Engine.GetFunctionsInFile(file) + + if specificAggregation.ConcernedFiles == nil { + specificAggregation.ConcernedFiles = make([]*pb.File, 0) + } + + specificAggregation.ConcernedFiles = append(specificAggregation.ConcernedFiles, file) + + // Number of classes + specificAggregation.NbClasses += len(classes) + + // Prepare the file for analysis + if file.Stmts == nil { + return + } + + if file.LinesOfCode == nil && file.Stmts.Analyze.Volume != nil { + file.LinesOfCode = &pb.LinesOfCode{ + LinesOfCode: *file.Stmts.Analyze.Volume.Loc, + CommentLinesOfCode: *file.Stmts.Analyze.Volume.Cloc, + LogicalLinesOfCode: *file.Stmts.Analyze.Volume.Lloc, + } + } + + // Functions + for _, function := range functions { + + if function == nil || function.Stmts == nil { + continue + } + + specificAggregation.NbMethods++ + + // Average cyclomatic complexity per method + if function.Stmts.Analyze != nil && function.Stmts.Analyze.Complexity != nil { + if function.Stmts.Analyze.Complexity.Cyclomatic != nil { + specificAggregation.AverageCyclomaticComplexityPerMethod += float64(*function.Stmts.Analyze.Complexity.Cyclomatic) + } + } + + // Average maintainability index per method + if function.Stmts.Analyze != nil && function.Stmts.Analyze.Maintainability != nil { + if function.Stmts.Analyze.Maintainability.MaintainabilityIndex != nil && !math.IsNaN(float64(*function.Stmts.Analyze.Maintainability.MaintainabilityIndex)) { + specificAggregation.AverageMIPerMethod += *function.Stmts.Analyze.Maintainability.MaintainabilityIndex + specificAggregation.AverageMIwocPerMethod += *function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments + specificAggregation.AverageMIcwPerMethod += *function.Stmts.Analyze.Maintainability.CommentWeight + } + } + // average lines of code per method + if function.Stmts.Analyze != nil && function.Stmts.Analyze.Volume != nil { + if function.Stmts.Analyze.Volume.Loc != nil { + specificAggregation.AverageLocPerMethod += float64(*function.Stmts.Analyze.Volume.Loc) + } + if function.Stmts.Analyze.Volume.Cloc != nil { + specificAggregation.AverageClocPerMethod += float64(*function.Stmts.Analyze.Volume.Cloc) + } + if function.Stmts.Analyze.Volume.Lloc != nil { + specificAggregation.AverageLlocPerMethod += float64(*function.Stmts.Analyze.Volume.Lloc) + } + } + } + + for _, class := range classes { + + if class == nil || class.Stmts == nil { + continue + } + + // Number of classes with code + //if class.LinesOfCode != nil && class.LinesOfCode.LinesOfCode > 0 { + specificAggregation.NbClassesWithCode++ + //} + + // Maintainability Index + if class.Stmts.Analyze.Maintainability != nil { + if class.Stmts.Analyze.Maintainability.MaintainabilityIndex != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Maintainability.MaintainabilityIndex)) { + specificAggregation.AverageMI += *class.Stmts.Analyze.Maintainability.MaintainabilityIndex + specificAggregation.AverageMIwoc += *class.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments + specificAggregation.AverageMIcw += 
*class.Stmts.Analyze.Maintainability.CommentWeight + } + } + + // Coupling + if class.Stmts.Analyze.Coupling != nil { + specificAggregation.AverageInstability += class.Stmts.Analyze.Coupling.Instability + specificAggregation.AverageEfferentCoupling += float64(class.Stmts.Analyze.Coupling.Efferent) + specificAggregation.AverageAfferentCoupling += float64(class.Stmts.Analyze.Coupling.Afferent) + } + + // cyclomatic complexity per class + if class.Stmts.Analyze.Complexity != nil && class.Stmts.Analyze.Complexity.Cyclomatic != nil { + specificAggregation.AverageCyclomaticComplexityPerClass += float64(*class.Stmts.Analyze.Complexity.Cyclomatic) + if specificAggregation.MinCyclomaticComplexity == 0 || int(*class.Stmts.Analyze.Complexity.Cyclomatic) < specificAggregation.MinCyclomaticComplexity { + specificAggregation.MinCyclomaticComplexity = int(*class.Stmts.Analyze.Complexity.Cyclomatic) + } + if specificAggregation.MaxCyclomaticComplexity == 0 || int(*class.Stmts.Analyze.Complexity.Cyclomatic) > specificAggregation.MaxCyclomaticComplexity { + specificAggregation.MaxCyclomaticComplexity = int(*class.Stmts.Analyze.Complexity.Cyclomatic) + } + } + + // Halstead + if class.Stmts.Analyze.Volume != nil { + if class.Stmts.Analyze.Volume.HalsteadDifficulty != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Volume.HalsteadDifficulty)) { + specificAggregation.AverageHalsteadDifficulty += *class.Stmts.Analyze.Volume.HalsteadDifficulty + specificAggregation.SumHalsteadDifficulty += *class.Stmts.Analyze.Volume.HalsteadDifficulty + } + if class.Stmts.Analyze.Volume.HalsteadEffort != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Volume.HalsteadEffort)) { + specificAggregation.AverageHalsteadEffort += *class.Stmts.Analyze.Volume.HalsteadEffort + specificAggregation.SumHalsteadEffort += *class.Stmts.Analyze.Volume.HalsteadEffort + } + if class.Stmts.Analyze.Volume.HalsteadVolume != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Volume.HalsteadVolume)) { + specificAggregation.AverageHalsteadVolume += *class.Stmts.Analyze.Volume.HalsteadVolume + specificAggregation.SumHalsteadVolume += *class.Stmts.Analyze.Volume.HalsteadVolume + } + if class.Stmts.Analyze.Volume.HalsteadTime != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Volume.HalsteadTime)) { + specificAggregation.AverageHalsteadTime += *class.Stmts.Analyze.Volume.HalsteadTime + specificAggregation.SumHalsteadTime += *class.Stmts.Analyze.Volume.HalsteadTime + } + } + + // Coupling + if class.Stmts.Analyze.Coupling == nil { + class.Stmts.Analyze.Coupling = &pb.Coupling{ + Efferent: 0, + Afferent: 0, + } + } + class.Stmts.Analyze.Coupling.Efferent = 0 + uniqueDependencies := make(map[string]bool) + for _, dependency := range class.Stmts.StmtExternalDependencies { + dependencyName := dependency.ClassName + + // check if dependency is already in hashmap + if _, ok := specificAggregation.ClassesAfferentCoupling[dependencyName]; !ok { + specificAggregation.ClassesAfferentCoupling[dependencyName] = 0 + } + specificAggregation.ClassesAfferentCoupling[dependencyName]++ + + // check if dependency is unique + if _, ok := uniqueDependencies[dependencyName]; !ok { + uniqueDependencies[dependencyName] = true + } + } + + class.Stmts.Analyze.Coupling.Efferent = int32(len(uniqueDependencies)) + + // Add dependencies to file + if file.Stmts.Analyze.Coupling == nil { + file.Stmts.Analyze.Coupling = &pb.Coupling{ + Efferent: 0, + Afferent: 0, + } + } + if file.Stmts.StmtExternalDependencies == nil { + file.Stmts.StmtExternalDependencies = 
make([]*pb.StmtExternalDependency, 0) + } + + file.Stmts.Analyze.Coupling.Efferent += class.Stmts.Analyze.Coupling.Efferent + file.Stmts.Analyze.Coupling.Afferent += class.Stmts.Analyze.Coupling.Afferent + file.Stmts.StmtExternalDependencies = append(file.Stmts.StmtExternalDependencies, class.Stmts.StmtExternalDependencies...) + } + + // consolidate coupling for file + if len(classes) > 0 && file.Stmts.Analyze.Coupling != nil { + file.Stmts.Analyze.Coupling.Efferent = file.Stmts.Analyze.Coupling.Efferent / int32(len(classes)) + file.Stmts.Analyze.Coupling.Afferent = file.Stmts.Analyze.Coupling.Afferent / int32(len(classes)) + } + +} + +func (r *Aggregator) mapSums(file *pb.File, specificAggregation *Aggregated) *Aggregated { + classes := Engine.GetClassesInFile(file) + functions := Engine.GetFunctionsInFile(file) + + specificAggregation.ConcernedFiles = append(specificAggregation.ConcernedFiles, file) + + // Number of classes + specificAggregation.NbClasses += len(classes) + + // Prepare the file for analysis + if file.Stmts == nil { + return + } + + if file.LinesOfCode == nil && file.Stmts.Analyze.Volume != nil { + file.LinesOfCode = &pb.LinesOfCode{ + LinesOfCode: *file.Stmts.Analyze.Volume.Loc, + CommentLinesOfCode: *file.Stmts.Analyze.Volume.Cloc, + LogicalLinesOfCode: *file.Stmts.Analyze.Volume.Lloc, + } + } + + // Functions + for _, function := range functions { + + if function == nil || function.Stmts == nil { + continue + } + + specificAggregation.NbMethods++ + + // Average cyclomatic complexity per method + if function.Stmts.Analyze != nil && function.Stmts.Analyze.Complexity != nil { + if function.Stmts.Analyze.Complexity.Cyclomatic != nil { + specificAggregation.AverageCyclomaticComplexityPerMethod += float64(*function.Stmts.Analyze.Complexity.Cyclomatic) + } + } + + // Average maintainability index per method + if function.Stmts.Analyze != nil && function.Stmts.Analyze.Maintainability != nil { + if function.Stmts.Analyze.Maintainability.MaintainabilityIndex != nil && !math.IsNaN(float64(*function.Stmts.Analyze.Maintainability.MaintainabilityIndex)) { + specificAggregation.AverageMIPerMethod += *function.Stmts.Analyze.Maintainability.MaintainabilityIndex + specificAggregation.AverageMIwocPerMethod += *function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments + specificAggregation.AverageMIcwPerMethod += *function.Stmts.Analyze.Maintainability.CommentWeight + } + } + // average lines of code per method + if function.Stmts.Analyze != nil && function.Stmts.Analyze.Volume != nil { + if function.Stmts.Analyze.Volume.Loc != nil { + specificAggregation.AverageLocPerMethod += float64(*function.Stmts.Analyze.Volume.Loc) + } + if function.Stmts.Analyze.Volume.Cloc != nil { + specificAggregation.AverageClocPerMethod += float64(*function.Stmts.Analyze.Volume.Cloc) + } + if function.Stmts.Analyze.Volume.Lloc != nil { + specificAggregation.AverageLlocPerMethod += float64(*function.Stmts.Analyze.Volume.Lloc) + } + } + } + + for _, class := range classes { + + if class == nil || class.Stmts == nil { + continue + } + + // Number of classes with code + //if class.LinesOfCode != nil && class.LinesOfCode.LinesOfCode > 0 { + specificAggregation.NbClassesWithCode++ + //} + + // Maintainability Index + if class.Stmts.Analyze.Maintainability != nil { + if class.Stmts.Analyze.Maintainability.MaintainabilityIndex != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Maintainability.MaintainabilityIndex)) { + specificAggregation.AverageMI += 
*class.Stmts.Analyze.Maintainability.MaintainabilityIndex + specificAggregation.AverageMIwoc += *class.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments + specificAggregation.AverageMIcw += *class.Stmts.Analyze.Maintainability.CommentWeight + } + } + + // Coupling + if class.Stmts.Analyze.Coupling != nil { + specificAggregation.AverageInstability += class.Stmts.Analyze.Coupling.Instability + specificAggregation.AverageEfferentCoupling += float64(class.Stmts.Analyze.Coupling.Efferent) + specificAggregation.AverageAfferentCoupling += float64(class.Stmts.Analyze.Coupling.Afferent) + } + + // cyclomatic complexity per class + if class.Stmts.Analyze.Complexity != nil && class.Stmts.Analyze.Complexity.Cyclomatic != nil { + specificAggregation.AverageCyclomaticComplexityPerClass += float64(*class.Stmts.Analyze.Complexity.Cyclomatic) + if specificAggregation.MinCyclomaticComplexity == 0 || int(*class.Stmts.Analyze.Complexity.Cyclomatic) < specificAggregation.MinCyclomaticComplexity { + specificAggregation.MinCyclomaticComplexity = int(*class.Stmts.Analyze.Complexity.Cyclomatic) + } + if specificAggregation.MaxCyclomaticComplexity == 0 || int(*class.Stmts.Analyze.Complexity.Cyclomatic) > specificAggregation.MaxCyclomaticComplexity { + specificAggregation.MaxCyclomaticComplexity = int(*class.Stmts.Analyze.Complexity.Cyclomatic) + } + } + + // Halstead + if class.Stmts.Analyze.Volume != nil { + if class.Stmts.Analyze.Volume.HalsteadDifficulty != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Volume.HalsteadDifficulty)) { + specificAggregation.AverageHalsteadDifficulty += *class.Stmts.Analyze.Volume.HalsteadDifficulty + specificAggregation.SumHalsteadDifficulty += *class.Stmts.Analyze.Volume.HalsteadDifficulty + } + if class.Stmts.Analyze.Volume.HalsteadEffort != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Volume.HalsteadEffort)) { + specificAggregation.AverageHalsteadEffort += *class.Stmts.Analyze.Volume.HalsteadEffort + specificAggregation.SumHalsteadEffort += *class.Stmts.Analyze.Volume.HalsteadEffort + } + if class.Stmts.Analyze.Volume.HalsteadVolume != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Volume.HalsteadVolume)) { + specificAggregation.AverageHalsteadVolume += *class.Stmts.Analyze.Volume.HalsteadVolume + specificAggregation.SumHalsteadVolume += *class.Stmts.Analyze.Volume.HalsteadVolume + } + if class.Stmts.Analyze.Volume.HalsteadTime != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Volume.HalsteadTime)) { + specificAggregation.AverageHalsteadTime += *class.Stmts.Analyze.Volume.HalsteadTime + specificAggregation.SumHalsteadTime += *class.Stmts.Analyze.Volume.HalsteadTime + } + } + + // Coupling + if class.Stmts.Analyze.Coupling == nil { + class.Stmts.Analyze.Coupling = &pb.Coupling{ + Efferent: 0, + Afferent: 0, + } + } + class.Stmts.Analyze.Coupling.Efferent = 0 + uniqueDependencies := make(map[string]bool) + for _, dependency := range class.Stmts.StmtExternalDependencies { + dependencyName := dependency.ClassName + + // check if dependency is already in hashmap + if _, ok := specificAggregation.ClassesAfferentCoupling[dependencyName]; !ok { + specificAggregation.ClassesAfferentCoupling[dependencyName] = 0 + } + specificAggregation.ClassesAfferentCoupling[dependencyName]++ + + // check if dependency is unique + if _, ok := uniqueDependencies[dependencyName]; !ok { + uniqueDependencies[dependencyName] = true + } + } + + class.Stmts.Analyze.Coupling.Efferent = int32(len(uniqueDependencies)) + + // Add dependencies to file + if file.Stmts.Analyze.Coupling == nil { 
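Aside: the coupling bookkeeping in this hunk reduces to three numbers per class: efferent coupling (count of distinct external dependencies), afferent coupling (how many times other classes name it as a dependency), and instability Ce / (Ce + Ca). A minimal, self-contained sketch of that calculation follows; the types are hypothetical stand-ins for the pb.* protobuf nodes, not part of the patch.

package main

import "fmt"

// classDep is a simplified, illustrative stand-in for a parsed class node.
type classDep struct {
	Name string
	Deps []string // qualified names of external dependencies
}

// couplingFor computes Ce, Ca and instability for every class, mirroring the
// Ce / (Ce + Ca) formula used in the hunk above. Afferent coupling counts each
// occurrence, efferent coupling counts distinct dependencies.
func couplingFor(classes []classDep) map[string][3]float64 {
	afferent := map[string]int{}
	efferent := map[string]int{}

	for _, c := range classes {
		unique := map[string]bool{}
		for _, d := range c.Deps {
			afferent[d]++
			unique[d] = true
		}
		efferent[c.Name] = len(unique)
	}

	out := map[string][3]float64{}
	for _, c := range classes {
		ce := float64(efferent[c.Name])
		ca := float64(afferent[c.Name])
		instability := 0.0
		if ce+ca > 0 {
			instability = ce / (ce + ca)
		}
		out[c.Name] = [3]float64{ce, ca, instability}
	}
	return out
}

func main() {
	classes := []classDep{
		{Name: "App\\Repo", Deps: []string{"App\\Db", "App\\Db"}},
		{Name: "App\\Db"},
	}
	// App\Repo: Ce=1 Ca=0 I=1 ; App\Db: Ce=0 Ca=2 I=0
	fmt.Println(couplingFor(classes))
}
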
+ file.Stmts.Analyze.Coupling = &pb.Coupling{ + Efferent: 0, + Afferent: 0, + } + } + if file.Stmts.StmtExternalDependencies == nil { + file.Stmts.StmtExternalDependencies = make([]*pb.StmtExternalDependency, 0) + } + + file.Stmts.Analyze.Coupling.Efferent += class.Stmts.Analyze.Coupling.Efferent + file.Stmts.Analyze.Coupling.Afferent += class.Stmts.Analyze.Coupling.Afferent + file.Stmts.StmtExternalDependencies = append(file.Stmts.StmtExternalDependencies, class.Stmts.StmtExternalDependencies...) + } + + // consolidate coupling for file + if len(classes) > 0 && file.Stmts.Analyze.Coupling != nil { + file.Stmts.Analyze.Coupling.Efferent = file.Stmts.Analyze.Coupling.Efferent / int32(len(classes)) + file.Stmts.Analyze.Coupling.Afferent = file.Stmts.Analyze.Coupling.Afferent / int32(len(classes)) + } +} diff --git a/src/Analyzer/Aggregator.go b/src/Analyzer/Aggregator.go index aab49d7..97e0ce4 100644 --- a/src/Analyzer/Aggregator.go +++ b/src/Analyzer/Aggregator.go @@ -21,52 +21,65 @@ type ProjectAggregated struct { Comparaison *ProjectComparaison } +type AggregateResult struct { + Sum float64 + Min float64 + Max float64 + Avg float64 + Counter int +} + +func NewAggregateResult() AggregateResult { + return AggregateResult{ + Sum: 0, + Min: 0, + Max: 0, + Avg: 0, + Counter: 0, + } +} + type Aggregated struct { - ConcernedFiles []*pb.File - Comparaison *Comparaison + ProgrammingLanguages map[string]int + ConcernedFiles []*pb.File + Comparaison *Comparaison // hashmap of classes, just with the qualified name, used for afferent coupling calculation - ClassesAfferentCoupling map[string]int - NbFiles int - NbFunctions int - NbClasses int - NbClassesWithCode int - NbMethods int - Loc int - Cloc int - Lloc int - AverageMethodsPerClass float64 - AverageLocPerMethod float64 - AverageLlocPerMethod float64 - AverageClocPerMethod float64 - AverageCyclomaticComplexityPerMethod float64 - AverageCyclomaticComplexityPerClass float64 - MinCyclomaticComplexity int - MaxCyclomaticComplexity int - AverageHalsteadDifficulty float64 - AverageHalsteadEffort float64 - AverageHalsteadVolume float64 - AverageHalsteadTime float64 - AverageHalsteadBugs float64 - SumHalsteadDifficulty float64 - SumHalsteadEffort float64 - SumHalsteadVolume float64 - SumHalsteadTime float64 - SumHalsteadBugs float64 - AverageMI float64 - AverageMIwoc float64 - AverageMIcw float64 - AverageMIPerMethod float64 - AverageMIwocPerMethod float64 - AverageMIcwPerMethod float64 - AverageAfferentCoupling float64 - AverageEfferentCoupling float64 - AverageInstability float64 - CommitCountForPeriod int - CommittedFilesCountForPeriod int // for example if one commit concerns 10 files, it will be 10 - BusFactor int - TopCommitters []TopCommitter - ResultOfGitAnalysis []ResultOfGitAnalysis - PackageRelations map[string]map[string]int // counter of dependencies. 
Ex: A -> B -> 2 + ClassesAfferentCoupling map[string]int + NbFiles int + NbFunctions int + NbClasses int + NbClassesWithCode int + NbMethods int + Loc AggregateResult + Cloc AggregateResult + Lloc AggregateResult + MethodsPerClass AggregateResult + LocPerClass AggregateResult + LocPerMethod AggregateResult + LlocPerMethod AggregateResult + ClocPerMethod AggregateResult + CyclomaticComplexityPerMethod AggregateResult + CyclomaticComplexityPerClass AggregateResult + HalsteadDifficulty AggregateResult + HalsteadEffort AggregateResult + HalsteadVolume AggregateResult + HalsteadTime AggregateResult + HalsteadBugs AggregateResult + MaintainabilityIndex AggregateResult + MaintainabilityIndexWithoutComments AggregateResult + MaintainabilityCommentWeight AggregateResult + Instability AggregateResult + EfferentCoupling AggregateResult + AfferentCoupling AggregateResult + MaintainabilityPerMethod AggregateResult + MaintainabilityPerMethodWithoutComments AggregateResult + MaintainabilityCommentWeightPerMethod AggregateResult + CommitCountForPeriod int + CommittedFilesCountForPeriod int + BusFactor int + TopCommitters []TopCommitter + ResultOfGitAnalysis []ResultOfGitAnalysis + PackageRelations map[string]map[string]int // counter of dependencies. Ex: A -> B -> 2 } type ProjectComparaison struct { @@ -113,44 +126,39 @@ type AggregateAnalyzer interface { func newAggregated() Aggregated { return Aggregated{ - ConcernedFiles: make([]*pb.File, 0), - ClassesAfferentCoupling: make(map[string]int), - NbClasses: 0, - NbClassesWithCode: 0, - NbMethods: 0, - NbFunctions: 0, - Loc: 0, - Cloc: 0, - Lloc: 0, - AverageLocPerMethod: 0, - AverageLlocPerMethod: 0, - AverageClocPerMethod: 0, - AverageCyclomaticComplexityPerMethod: 0, - AverageCyclomaticComplexityPerClass: 0, - MinCyclomaticComplexity: 0, - MaxCyclomaticComplexity: 0, - AverageHalsteadDifficulty: 0, - AverageHalsteadEffort: 0, - AverageHalsteadVolume: 0, - AverageHalsteadTime: 0, - AverageHalsteadBugs: 0, - SumHalsteadDifficulty: 0, - SumHalsteadEffort: 0, - SumHalsteadVolume: 0, - SumHalsteadTime: 0, - SumHalsteadBugs: 0, - AverageMI: 0, - AverageMIwoc: 0, - AverageMIcw: 0, - AverageMIPerMethod: 0, - AverageMIwocPerMethod: 0, - AverageAfferentCoupling: 0, - AverageEfferentCoupling: 0, - AverageInstability: 0, - AverageMIcwPerMethod: 0, - CommitCountForPeriod: 0, - ResultOfGitAnalysis: nil, - PackageRelations: make(map[string]map[string]int), + ProgrammingLanguages: make(map[string]int), + ConcernedFiles: make([]*pb.File, 0), + ClassesAfferentCoupling: make(map[string]int), + NbClasses: 0, + NbClassesWithCode: 0, + NbMethods: 0, + NbFunctions: 0, + Loc: NewAggregateResult(), + MethodsPerClass: NewAggregateResult(), + LocPerClass: NewAggregateResult(), + LocPerMethod: NewAggregateResult(), + ClocPerMethod: NewAggregateResult(), + CyclomaticComplexityPerMethod: NewAggregateResult(), + CyclomaticComplexityPerClass: NewAggregateResult(), + HalsteadEffort: NewAggregateResult(), + HalsteadVolume: NewAggregateResult(), + HalsteadTime: NewAggregateResult(), + HalsteadBugs: NewAggregateResult(), + MaintainabilityIndex: NewAggregateResult(), + MaintainabilityIndexWithoutComments: NewAggregateResult(), + MaintainabilityCommentWeight: NewAggregateResult(), + Instability: NewAggregateResult(), + EfferentCoupling: NewAggregateResult(), + AfferentCoupling: NewAggregateResult(), + MaintainabilityPerMethod: NewAggregateResult(), + MaintainabilityPerMethodWithoutComments: NewAggregateResult(), + MaintainabilityCommentWeightPerMethod: NewAggregateResult(), + 
CommitCountForPeriod: 0, + CommittedFilesCountForPeriod: 0, + BusFactor: 0, + TopCommitters: make([]TopCommitter, 0), + ResultOfGitAnalysis: nil, + PackageRelations: make(map[string]map[string]int), } } @@ -199,402 +207,160 @@ func (r *Aggregator) Aggregates() ProjectAggregated { func (r *Aggregator) executeAggregationOnFiles(files []*pb.File) ProjectAggregated { - // We create a new aggregated object for each type of aggregation - // ByFile, ByClass, Combined - projectAggregated := ProjectAggregated{} - projectAggregated.ByFile = newAggregated() - projectAggregated.ByClass = newAggregated() - projectAggregated.Combined = newAggregated() + projectAggregated := ProjectAggregated{ + ByFile: newAggregated(), + ByClass: newAggregated(), + Combined: newAggregated(), + ByProgrammingLanguage: make(map[string]Aggregated), + ErroredFiles: make([]*pb.File, 0), + Evaluation: nil, + Comparaison: nil, + } - // Count files - projectAggregated.ByClass.NbFiles = len(files) - projectAggregated.ByFile.NbFiles = len(files) - projectAggregated.Combined.NbFiles = len(files) + // do the sums. Group files by number of processors + var wg sync.WaitGroup + numberOfProcessors := runtime.NumCPU() + + // Split the files into chunks + chunkSize := len(files) / numberOfProcessors + chunks := make([][]*pb.File, numberOfProcessors) + for i := 0; i < numberOfProcessors; i++ { + start := i * chunkSize + end := start + chunkSize + if i == numberOfProcessors-1 { + end = len(files) + } + chunks[i] = files[start:end] + } - // Prepare errors - projectAggregated.ErroredFiles = make([]*pb.File, 0) + // Prepare results + aggregateByFileChunk := newAggregated() + aggregateByClassChunk := newAggregated() + // for each programming language, we create a separeted result + aggregateByLanguageChunk := make(map[string]Aggregated) for _, file := range files { - - // Files with errors - if file.Errors != nil && len(file.Errors) > 0 { - projectAggregated.ErroredFiles = append(projectAggregated.ErroredFiles, file) - } - - if file.Stmts == nil { + if file.ProgrammingLanguage == "" { continue } - - // By language - if projectAggregated.ByProgrammingLanguage == nil { - projectAggregated.ByProgrammingLanguage = make(map[string]Aggregated) + if _, ok := aggregateByLanguageChunk[file.ProgrammingLanguage]; !ok { + aggregateByLanguageChunk[file.ProgrammingLanguage] = newAggregated() } - if _, ok := projectAggregated.ByProgrammingLanguage[file.ProgrammingLanguage]; !ok { - projectAggregated.ByProgrammingLanguage[file.ProgrammingLanguage] = newAggregated() + } - } - byLanguage := projectAggregated.ByProgrammingLanguage[file.ProgrammingLanguage] - byLanguage.NbFiles++ + // Create channels for the results + resultsByClass := make(chan *Aggregated, numberOfProcessors) + resultsByFile := make(chan *Aggregated, numberOfProcessors) + resultsByProgrammingLanguage := make(chan *map[string]Aggregated, numberOfProcessors) - // Make calculations: sums of metrics, etc. - var wg sync.WaitGroup - wg.Add(4) + // Deadlock prevention + mu := sync.Mutex{} - go func() { - defer wg.Done() - r.calculateSums(file, &projectAggregated.ByFile) - }() + // Process each chunk of files + // Please ensure that there is no data race here. 
If needed, use the mutex + chunkIndex := 0 + for i := 0; i < numberOfProcessors; i++ { - go func() { - defer wg.Done() - r.calculateSums(file, &projectAggregated.ByClass) - }() + wg.Add(1) - go func() { + // Reduce results : we want to get sums, and to count calculated values into a AggregateResult + go func(files []*pb.File) { defer wg.Done() - r.calculateSums(file, &projectAggregated.Combined) - }() - go func() { - defer wg.Done() - r.calculateSums(file, &byLanguage) - }() + // the process deal with its own chunk + for _, file := range files { + localFile := file + + if file.Stmts == nil { + continue + } + + // by file + result := r.mapSums(localFile, aggregateByFileChunk) + result.ConcernedFiles = append(result.ConcernedFiles, localFile) + aggregateByFileChunk = result - wg.Wait() - projectAggregated.ByProgrammingLanguage[file.ProgrammingLanguage] = byLanguage + // by class + result = r.mapSums(localFile, aggregateByClassChunk) + result.ConcernedFiles = append(result.ConcernedFiles, localFile) + aggregateByClassChunk = result + + // by language + mu.Lock() + byLanguage := r.mapSums(localFile, aggregateByLanguageChunk[localFile.ProgrammingLanguage]) + byLanguage.ConcernedFiles = append(byLanguage.ConcernedFiles, localFile) + aggregateByLanguageChunk[localFile.ProgrammingLanguage] = byLanguage + mu.Unlock() + } + + // Send the result to the channels + resultsByClass <- &aggregateByClassChunk + resultsByFile <- &aggregateByFileChunk + resultsByProgrammingLanguage <- &aggregateByLanguageChunk + + }(chunks[chunkIndex]) + chunkIndex++ } - // Consolidate averages - var wg sync.WaitGroup - wg.Add(3) + wg.Wait() + close(resultsByClass) + close(resultsByFile) + close(resultsByProgrammingLanguage) + // Now we have chunk of sums. We want to reduce its into a single object + wg.Add(1) go func() { defer wg.Done() - r.consolidate(&projectAggregated.ByFile) + for chunk := range resultsByClass { + r := r.mergeChunks(projectAggregated.ByClass, chunk) + projectAggregated.ByClass = r + } }() + wg.Add(1) go func() { defer wg.Done() - r.consolidate(&projectAggregated.ByClass) + for chunk := range resultsByFile { + r := r.mergeChunks(projectAggregated.ByFile, chunk) + projectAggregated.ByFile = r + } }() + wg.Add(1) go func() { + mu.Lock() defer wg.Done() - r.consolidate(&projectAggregated.Combined) + defer mu.Unlock() + + for chunk := range resultsByProgrammingLanguage { + for k, v := range *chunk { + projectAggregated.ByProgrammingLanguage[k] = v + } + } }() wg.Wait() - // by language - wg.Add(len(projectAggregated.ByProgrammingLanguage)) - for lng, byLanguage := range projectAggregated.ByProgrammingLanguage { - go func(language string, langAggregated Aggregated) { - defer wg.Done() - r.consolidate(&langAggregated) - projectAggregated.ByProgrammingLanguage[language] = langAggregated - }(lng, byLanguage) + // Now we have sums. 
We want to reduce metrics and get the averages + projectAggregated.ByClass = r.reduceMetrics(projectAggregated.ByClass) + projectAggregated.ByFile = r.reduceMetrics(projectAggregated.ByFile) + for k, v := range projectAggregated.ByProgrammingLanguage { + v = r.reduceMetrics(v) + projectAggregated.ByProgrammingLanguage[k] = v } - wg.Wait() + + // Coupling (should be done separately, to avoid race condition) + projectAggregated.ByClass = r.mapCoupling(&projectAggregated.ByClass) + projectAggregated.ByFile = r.mapCoupling(&projectAggregated.ByFile) // Risks riskAnalyzer := NewRiskAnalyzer() riskAnalyzer.Analyze(projectAggregated) - return projectAggregated -} - -// Consolidate the aggregated data -func (r *Aggregator) consolidate(aggregated *Aggregated) { - - if aggregated.NbClasses > 0 { - aggregated.AverageMethodsPerClass = float64(aggregated.NbMethods) / float64(aggregated.NbClasses) - aggregated.AverageCyclomaticComplexityPerClass = aggregated.AverageCyclomaticComplexityPerClass / float64(aggregated.NbClasses) - } else { - aggregated.AverageMethodsPerClass = 0 - aggregated.AverageCyclomaticComplexityPerClass = 0 - } - - if aggregated.AverageMI > 0 { - aggregated.AverageMI = aggregated.AverageMI / float64(aggregated.NbClasses) - aggregated.AverageMIwoc = aggregated.AverageMIwoc / float64(aggregated.NbClasses) - aggregated.AverageMIcw = aggregated.AverageMIcw / float64(aggregated.NbClasses) - } - - if aggregated.AverageInstability > 0 { - aggregated.AverageEfferentCoupling = aggregated.AverageEfferentCoupling / float64(aggregated.NbClasses) - aggregated.AverageAfferentCoupling = aggregated.AverageAfferentCoupling / float64(aggregated.NbClasses) - } - - if aggregated.NbMethods > 0 { - aggregated.AverageLocPerMethod = aggregated.AverageLocPerMethod / float64(aggregated.NbMethods) - aggregated.AverageClocPerMethod = aggregated.AverageClocPerMethod / float64(aggregated.NbMethods) - aggregated.AverageLlocPerMethod = aggregated.AverageLlocPerMethod / float64(aggregated.NbMethods) - aggregated.AverageCyclomaticComplexityPerMethod = aggregated.AverageCyclomaticComplexityPerMethod / float64(aggregated.NbMethods) - aggregated.AverageMIPerMethod = aggregated.AverageMIPerMethod / float64(aggregated.NbMethods) - aggregated.AverageMIwocPerMethod = aggregated.AverageMIwocPerMethod / float64(aggregated.NbMethods) - aggregated.AverageMIcwPerMethod = aggregated.AverageMIcwPerMethod / float64(aggregated.NbMethods) - aggregated.AverageHalsteadDifficulty = aggregated.AverageHalsteadDifficulty / float64(aggregated.NbClasses) - aggregated.AverageHalsteadEffort = aggregated.AverageHalsteadEffort / float64(aggregated.NbClasses) - aggregated.AverageHalsteadVolume = aggregated.AverageHalsteadVolume / float64(aggregated.NbClasses) - aggregated.AverageHalsteadTime = aggregated.AverageHalsteadTime / float64(aggregated.NbClasses) - aggregated.AverageHalsteadBugs = aggregated.AverageHalsteadBugs / float64(aggregated.NbClasses) - } - - // if langage without classes - if aggregated.NbClasses == 0 { - aggregated.AverageMI = aggregated.AverageMIPerMethod - aggregated.AverageMIwoc = aggregated.AverageMIwocPerMethod - aggregated.AverageMIcw = aggregated.AverageMIcwPerMethod - aggregated.AverageInstability = 0 - aggregated.AverageEfferentCoupling = 0 - aggregated.AverageAfferentCoupling = 0 - } - - // Total locs: increment loc of each file - aggregated.Loc = 0 - aggregated.Cloc = 0 - aggregated.Lloc = 0 - - var wg sync.WaitGroup - var wgByCpu sync.WaitGroup - var mu sync.Mutex - numWorkers := runtime.NumCPU() - filesChan := 
make(chan *pb.File, numWorkers) - - reg := regexp.MustCompile("[^A-Za-z0-9.]+") - - for i := 0; i < numWorkers; i++ { - - wgByCpu.Add(1) - - go func() { - defer wgByCpu.Done() - - for file := range filesChan { - - wg.Add(1) - go func(file *pb.File) { - defer wg.Done() - - if file.LinesOfCode == nil { - return - } - - mu.Lock() - aggregated.Loc += int(file.LinesOfCode.LinesOfCode) - aggregated.Cloc += int(file.LinesOfCode.CommentLinesOfCode) - aggregated.Lloc += int(file.LinesOfCode.LogicalLinesOfCode) - mu.Unlock() - - // Create local variables for file processing - localFile := &pb.File{ - Stmts: file.Stmts, - } - - // Calculate alternate MI using average MI per method when file has no class - if len(localFile.Stmts.StmtClass) == 0 { - if localFile.Stmts.Analyze.Maintainability == nil { - localFile.Stmts.Analyze.Maintainability = &pb.Maintainability{} - } - - methods := file.Stmts.StmtFunction - if len(methods) == 0 { - return - } - averageForFile := float64(0) - for _, method := range methods { - if method.Stmts.Analyze == nil || method.Stmts.Analyze.Maintainability == nil { - continue - } - averageForFile += float64(*method.Stmts.Analyze.Maintainability.MaintainabilityIndex) - } - averageForFile = averageForFile / float64(len(methods)) - localFile.Stmts.Analyze.Maintainability.MaintainabilityIndex = &averageForFile - - // Update the original file with processed data - mu.Lock() - file.Stmts = localFile.Stmts - mu.Unlock() - } - - // LOC of file is the sum of all classes and methods - // That's useful when we navigate over the files instead of the classes - zero := int32(0) - loc := int32(0) - lloc := int32(0) - cloc := int32(0) - - if file.Stmts.Analyze.Volume == nil { - file.Stmts.Analyze.Volume = &pb.Volume{ - Lloc: &zero, - Cloc: &zero, - Loc: &zero, - } - } - - classes := Engine.GetClassesInFile(file) - functions := file.Stmts.StmtFunction - - // Initialize file complexity if needed - if file.Stmts.Analyze.Complexity.Cyclomatic == nil { - file.Stmts.Analyze.Complexity.Cyclomatic = &zero - } - - // Process functions - for _, function := range functions { - // Handle LOC - if function.LinesOfCode != nil { - loc += function.LinesOfCode.LinesOfCode - lloc += function.LinesOfCode.LogicalLinesOfCode - cloc += function.LinesOfCode.CommentLinesOfCode - } - - // Handle complexity - if function.Stmts.Analyze != nil && function.Stmts.Analyze.Complexity != nil { - *file.Stmts.Analyze.Complexity.Cyclomatic += *function.Stmts.Analyze.Complexity.Cyclomatic - } - } - - // Process classes - for _, class := range classes { - // Handle LOC - if class.LinesOfCode != nil { - loc += class.LinesOfCode.LinesOfCode - lloc += class.LinesOfCode.LogicalLinesOfCode - cloc += class.LinesOfCode.CommentLinesOfCode - } - - // Handle coupling - if class.Stmts != nil && class.Stmts.Analyze != nil { - if class.Stmts.Analyze.Coupling == nil { - class.Stmts.Analyze.Coupling = &pb.Coupling{ - Efferent: 0, - Afferent: 0, - } - } - class.Stmts.Analyze.Coupling.Afferent = 0 - - if class.Name != nil { - mu.Lock() - // if in hashmap - if _, ok := aggregated.ClassesAfferentCoupling[class.Name.Qualified]; ok { - class.Stmts.Analyze.Coupling.Afferent = int32(aggregated.ClassesAfferentCoupling[class.Name.Qualified]) - file.Stmts.Analyze.Coupling.Afferent += class.Stmts.Analyze.Coupling.Afferent - } - - // instability - if class.Stmts.Analyze.Coupling.Afferent > 0 || class.Stmts.Analyze.Coupling.Efferent > 0 { - instability := float64(class.Stmts.Analyze.Coupling.Efferent) / 
float64(class.Stmts.Analyze.Coupling.Efferent+class.Stmts.Analyze.Coupling.Afferent) - class.Stmts.Analyze.Coupling.Instability = instability - aggregated.AverageInstability += instability - } - mu.Unlock() - } - } - } - - file.Stmts.Analyze.Volume.Loc = &loc - file.Stmts.Analyze.Volume.Lloc = &lloc - file.Stmts.Analyze.Volume.Cloc = &cloc - - dependencies := file.Stmts.StmtExternalDependencies - - for _, dependency := range dependencies { - if dependency == nil { - continue - } - - namespaceTo := dependency.Namespace - namespaceFrom := dependency.From - - if namespaceFrom == "" || namespaceTo == "" { - continue - } - - // Keep only 2 levels in namespace - separator := reg.FindString(namespaceFrom) - parts := reg.Split(namespaceTo, -1) - if len(parts) > 2 { - namespaceTo = parts[0] + separator + parts[1] - } - - if namespaceFrom == "" || namespaceTo == "" { - continue - } - - parts = reg.Split(namespaceFrom, -1) - if len(parts) > 2 { - namespaceFrom = parts[0] + separator + parts[1] - } - - // if same, continue - if namespaceFrom == namespaceTo { - continue - } - - // if root namespace, continue - if namespaceFrom == "" || namespaceTo == "" { - continue - } - - mu.Lock() - // create the map if not exists - if _, ok := aggregated.PackageRelations[namespaceFrom]; !ok { - aggregated.PackageRelations[namespaceFrom] = make(map[string]int) - } - - if _, ok := aggregated.PackageRelations[namespaceFrom][namespaceTo]; !ok { - aggregated.PackageRelations[namespaceFrom][namespaceTo] = 0 - } - - // increment the counter - aggregated.PackageRelations[namespaceFrom][namespaceTo]++ - mu.Unlock() - } - }(file) - } - }() - } - - for _, file := range aggregated.ConcernedFiles { - filesChan <- file - } - - wg.Wait() - close(filesChan) - wgByCpu.Wait() - - // Consolidate - aggregated.AverageInstability = aggregated.AverageInstability / float64(aggregated.NbClasses) - - // Count commits for the period based on `ResultOfGitAnalysis` data - aggregated.ResultOfGitAnalysis = r.gitSummaries - if aggregated.ResultOfGitAnalysis != nil { - var wg sync.WaitGroup - var mu sync.Mutex - - for _, result := range aggregated.ResultOfGitAnalysis { - wg.Add(1) - go func(res ResultOfGitAnalysis) { - defer wg.Done() - mu.Lock() - aggregated.CommitCountForPeriod += res.CountCommitsForLanguage - mu.Unlock() - }(result) - } - wg.Wait() - } + // For all languages + projectAggregated.Combined = projectAggregated.ByFile - // Bus factor and other metrics based on aggregated data - var wgAnalyzers sync.WaitGroup - wgAnalyzers.Add(len(r.analyzers)) - for _, analyzer := range r.analyzers { - go func(a AggregateAnalyzer) { - defer wgAnalyzers.Done() - a.Calculate(aggregated) - }(analyzer) - } - wgAnalyzers.Wait() + return projectAggregated } // Add an analyzer to the aggregator @@ -608,30 +374,17 @@ func (r *Aggregator) WithComparaison(allResultsCloned []*pb.File, comparedBranch r.ComparedBranch = comparedBranch } -// Calculate the aggregated data -func (r *Aggregator) calculateSums(file *pb.File, specificAggregation *Aggregated) { +func (r *Aggregator) mapSums(file *pb.File, specificAggregation Aggregated) Aggregated { classes := Engine.GetClassesInFile(file) functions := Engine.GetFunctionsInFile(file) - if specificAggregation.ConcernedFiles == nil { - specificAggregation.ConcernedFiles = make([]*pb.File, 0) - } - - specificAggregation.ConcernedFiles = append(specificAggregation.ConcernedFiles, file) + // copy the specific aggregation to new object to avoid side effects + result := specificAggregation // Number of classes - 
specificAggregation.NbClasses += len(classes) + result.NbClasses += len(classes) - // Prepare the file for analysis - if file.Stmts == nil { - return - } - - if file.Stmts.Analyze == nil { - file.Stmts.Analyze = &pb.Analyze{} - } - - // lines of code (it should be done in the analayzer. This case occurs only in test, or when the analyzer has issue) + // Ensure LOC is set if file.LinesOfCode == nil && file.Stmts.Analyze.Volume != nil { file.LinesOfCode = &pb.LinesOfCode{ LinesOfCode: *file.Stmts.Analyze.Volume.Loc, @@ -640,16 +393,12 @@ func (r *Aggregator) calculateSums(file *pb.File, specificAggregation *Aggregate } } - // Prepare the file for analysis - if file.Stmts.Analyze == nil { - file.Stmts.Analyze = &pb.Analyze{} - } - if file.Stmts.Analyze.Complexity == nil { - zero := int32(0) - file.Stmts.Analyze.Complexity = &pb.Complexity{ - Cyclomatic: &zero, - } - } + result.Loc.Sum += float64(file.LinesOfCode.LinesOfCode) + result.Loc.Counter++ + result.Cloc.Sum += float64(file.LinesOfCode.CommentLinesOfCode) + result.Cloc.Counter++ + result.Lloc.Sum += float64(file.LinesOfCode.LogicalLinesOfCode) + result.Lloc.Counter++ // Functions for _, function := range functions { @@ -658,33 +407,52 @@ func (r *Aggregator) calculateSums(file *pb.File, specificAggregation *Aggregate continue } - specificAggregation.NbMethods++ + result.NbMethods++ // Average cyclomatic complexity per method if function.Stmts.Analyze != nil && function.Stmts.Analyze.Complexity != nil { if function.Stmts.Analyze.Complexity.Cyclomatic != nil { - specificAggregation.AverageCyclomaticComplexityPerMethod += float64(*function.Stmts.Analyze.Complexity.Cyclomatic) + + // @todo: only for functions and methods of classes (not interfaces) + // otherwise, average may be lower than 1 + ccn := float64(*function.Stmts.Analyze.Complexity.Cyclomatic) + result.CyclomaticComplexityPerMethod.Sum += ccn + result.CyclomaticComplexityPerMethod.Counter++ + if specificAggregation.CyclomaticComplexityPerMethod.Min == 0 || ccn < specificAggregation.CyclomaticComplexityPerMethod.Min { + result.CyclomaticComplexityPerMethod.Min = ccn + } + if specificAggregation.CyclomaticComplexityPerMethod.Max == 0 || ccn > specificAggregation.CyclomaticComplexityPerMethod.Max { + result.CyclomaticComplexityPerMethod.Max = ccn + } } } // Average maintainability index per method if function.Stmts.Analyze != nil && function.Stmts.Analyze.Maintainability != nil { if function.Stmts.Analyze.Maintainability.MaintainabilityIndex != nil && !math.IsNaN(float64(*function.Stmts.Analyze.Maintainability.MaintainabilityIndex)) { - specificAggregation.AverageMIPerMethod += *function.Stmts.Analyze.Maintainability.MaintainabilityIndex - specificAggregation.AverageMIwocPerMethod += *function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments - specificAggregation.AverageMIcwPerMethod += *function.Stmts.Analyze.Maintainability.CommentWeight + result.MaintainabilityIndex.Sum += *function.Stmts.Analyze.Maintainability.MaintainabilityIndex + result.MaintainabilityIndex.Counter++ + if specificAggregation.MaintainabilityIndex.Min == 0 || *function.Stmts.Analyze.Maintainability.MaintainabilityIndex < specificAggregation.MaintainabilityIndex.Min { + result.MaintainabilityIndex.Min = *function.Stmts.Analyze.Maintainability.MaintainabilityIndex + } + if specificAggregation.MaintainabilityIndex.Max == 0 || *function.Stmts.Analyze.Maintainability.MaintainabilityIndex > specificAggregation.MaintainabilityIndex.Max { + result.MaintainabilityIndex.Max = 
*function.Stmts.Analyze.Maintainability.MaintainabilityIndex + } } } // average lines of code per method if function.Stmts.Analyze != nil && function.Stmts.Analyze.Volume != nil { if function.Stmts.Analyze.Volume.Loc != nil { - specificAggregation.AverageLocPerMethod += float64(*function.Stmts.Analyze.Volume.Loc) + result.LocPerMethod.Sum += float64(*function.Stmts.Analyze.Volume.Loc) + result.LocPerMethod.Counter++ } if function.Stmts.Analyze.Volume.Cloc != nil { - specificAggregation.AverageClocPerMethod += float64(*function.Stmts.Analyze.Volume.Cloc) + result.ClocPerMethod.Sum += float64(*function.Stmts.Analyze.Volume.Cloc) + result.ClocPerMethod.Counter++ } if function.Stmts.Analyze.Volume.Lloc != nil { - specificAggregation.AverageLlocPerMethod += float64(*function.Stmts.Analyze.Volume.Lloc) + result.LlocPerMethod.Sum += float64(*function.Stmts.Analyze.Volume.Lloc) + result.LlocPerMethod.Counter++ } } } @@ -697,53 +465,73 @@ func (r *Aggregator) calculateSums(file *pb.File, specificAggregation *Aggregate // Number of classes with code //if class.LinesOfCode != nil && class.LinesOfCode.LinesOfCode > 0 { - specificAggregation.NbClassesWithCode++ + result.NbClassesWithCode++ //} // Maintainability Index if class.Stmts.Analyze.Maintainability != nil { if class.Stmts.Analyze.Maintainability.MaintainabilityIndex != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Maintainability.MaintainabilityIndex)) { - specificAggregation.AverageMI += *class.Stmts.Analyze.Maintainability.MaintainabilityIndex - specificAggregation.AverageMIwoc += *class.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments - specificAggregation.AverageMIcw += *class.Stmts.Analyze.Maintainability.CommentWeight + result.MaintainabilityIndex.Sum += *class.Stmts.Analyze.Maintainability.MaintainabilityIndex + result.MaintainabilityIndex.Counter++ + if specificAggregation.MaintainabilityIndex.Min == 0 || *class.Stmts.Analyze.Maintainability.MaintainabilityIndex < specificAggregation.MaintainabilityIndex.Min { + result.MaintainabilityIndex.Min = *class.Stmts.Analyze.Maintainability.MaintainabilityIndex + } + if specificAggregation.MaintainabilityIndex.Max == 0 || *class.Stmts.Analyze.Maintainability.MaintainabilityIndex > specificAggregation.MaintainabilityIndex.Max { + result.MaintainabilityIndex.Max = *class.Stmts.Analyze.Maintainability.MaintainabilityIndex + } + } + if class.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments)) { + result.MaintainabilityIndexWithoutComments.Sum += *class.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments + result.MaintainabilityIndexWithoutComments.Counter++ + if specificAggregation.MaintainabilityIndexWithoutComments.Min == 0 || *class.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments < specificAggregation.MaintainabilityIndexWithoutComments.Min { + result.MaintainabilityIndexWithoutComments.Min = *class.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments + } + if specificAggregation.MaintainabilityIndexWithoutComments.Max == 0 || *class.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments > specificAggregation.MaintainabilityIndexWithoutComments.Max { + result.MaintainabilityIndexWithoutComments.Max = *class.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments + } } } // Coupling if class.Stmts.Analyze.Coupling != nil { - specificAggregation.AverageInstability += 
class.Stmts.Analyze.Coupling.Instability - specificAggregation.AverageEfferentCoupling += float64(class.Stmts.Analyze.Coupling.Efferent) - specificAggregation.AverageAfferentCoupling += float64(class.Stmts.Analyze.Coupling.Afferent) + result.EfferentCoupling.Sum += float64(class.Stmts.Analyze.Coupling.Efferent) + result.EfferentCoupling.Counter++ + result.AfferentCoupling.Sum += float64(class.Stmts.Analyze.Coupling.Afferent) + result.AfferentCoupling.Counter++ + result.Instability.Sum += float64(class.Stmts.Analyze.Coupling.Instability) + result.Instability.Counter++ } // cyclomatic complexity per class if class.Stmts.Analyze.Complexity != nil && class.Stmts.Analyze.Complexity.Cyclomatic != nil { - specificAggregation.AverageCyclomaticComplexityPerClass += float64(*class.Stmts.Analyze.Complexity.Cyclomatic) - if specificAggregation.MinCyclomaticComplexity == 0 || int(*class.Stmts.Analyze.Complexity.Cyclomatic) < specificAggregation.MinCyclomaticComplexity { - specificAggregation.MinCyclomaticComplexity = int(*class.Stmts.Analyze.Complexity.Cyclomatic) + + result.CyclomaticComplexityPerClass.Sum += float64(*class.Stmts.Analyze.Complexity.Cyclomatic) + result.CyclomaticComplexityPerClass.Counter++ + if specificAggregation.CyclomaticComplexityPerClass.Min == 0 || float64(*class.Stmts.Analyze.Complexity.Cyclomatic) < specificAggregation.CyclomaticComplexityPerClass.Min { + result.CyclomaticComplexityPerClass.Min = float64(*class.Stmts.Analyze.Complexity.Cyclomatic) } - if specificAggregation.MaxCyclomaticComplexity == 0 || int(*class.Stmts.Analyze.Complexity.Cyclomatic) > specificAggregation.MaxCyclomaticComplexity { - specificAggregation.MaxCyclomaticComplexity = int(*class.Stmts.Analyze.Complexity.Cyclomatic) + if specificAggregation.CyclomaticComplexityPerClass.Max == 0 || float64(*class.Stmts.Analyze.Complexity.Cyclomatic) > specificAggregation.CyclomaticComplexityPerClass.Max { + result.CyclomaticComplexityPerClass.Max = float64(*class.Stmts.Analyze.Complexity.Cyclomatic) } } // Halstead if class.Stmts.Analyze.Volume != nil { - if class.Stmts.Analyze.Volume.HalsteadDifficulty != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Volume.HalsteadDifficulty)) { - specificAggregation.AverageHalsteadDifficulty += *class.Stmts.Analyze.Volume.HalsteadDifficulty - specificAggregation.SumHalsteadDifficulty += *class.Stmts.Analyze.Volume.HalsteadDifficulty + if class.Stmts.Analyze.Volume.HalsteadDifficulty != nil && !math.IsNaN(*class.Stmts.Analyze.Volume.HalsteadDifficulty) { + result.HalsteadDifficulty.Sum += *class.Stmts.Analyze.Volume.HalsteadDifficulty + result.HalsteadDifficulty.Counter++ } - if class.Stmts.Analyze.Volume.HalsteadEffort != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Volume.HalsteadEffort)) { - specificAggregation.AverageHalsteadEffort += *class.Stmts.Analyze.Volume.HalsteadEffort - specificAggregation.SumHalsteadEffort += *class.Stmts.Analyze.Volume.HalsteadEffort + if class.Stmts.Analyze.Volume.HalsteadEffort != nil && !math.IsNaN(*class.Stmts.Analyze.Volume.HalsteadEffort) { + result.HalsteadEffort.Sum += *class.Stmts.Analyze.Volume.HalsteadEffort + result.HalsteadEffort.Counter++ } - if class.Stmts.Analyze.Volume.HalsteadVolume != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Volume.HalsteadVolume)) { - specificAggregation.AverageHalsteadVolume += *class.Stmts.Analyze.Volume.HalsteadVolume - specificAggregation.SumHalsteadVolume += *class.Stmts.Analyze.Volume.HalsteadVolume + if class.Stmts.Analyze.Volume.HalsteadVolume != nil && 
!math.IsNaN(*class.Stmts.Analyze.Volume.HalsteadVolume) { + result.HalsteadVolume.Sum += *class.Stmts.Analyze.Volume.HalsteadVolume + result.HalsteadVolume.Counter++ } - if class.Stmts.Analyze.Volume.HalsteadTime != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Volume.HalsteadTime)) { - specificAggregation.AverageHalsteadTime += *class.Stmts.Analyze.Volume.HalsteadTime - specificAggregation.SumHalsteadTime += *class.Stmts.Analyze.Volume.HalsteadTime + if class.Stmts.Analyze.Volume.HalsteadTime != nil && !math.IsNaN(*class.Stmts.Analyze.Volume.HalsteadTime) { + result.HalsteadTime.Sum += *class.Stmts.Analyze.Volume.HalsteadTime + result.HalsteadTime.Counter++ } } @@ -754,24 +542,6 @@ func (r *Aggregator) calculateSums(file *pb.File, specificAggregation *Aggregate Afferent: 0, } } - class.Stmts.Analyze.Coupling.Efferent = 0 - uniqueDependencies := make(map[string]bool) - for _, dependency := range class.Stmts.StmtExternalDependencies { - dependencyName := dependency.ClassName - - // check if dependency is already in hashmap - if _, ok := specificAggregation.ClassesAfferentCoupling[dependencyName]; !ok { - specificAggregation.ClassesAfferentCoupling[dependencyName] = 0 - } - specificAggregation.ClassesAfferentCoupling[dependencyName]++ - - // check if dependency is unique - if _, ok := uniqueDependencies[dependencyName]; !ok { - uniqueDependencies[dependencyName] = true - } - } - - class.Stmts.Analyze.Coupling.Efferent = int32(len(uniqueDependencies)) // Add dependencies to file if file.Stmts.Analyze.Coupling == nil { @@ -790,9 +560,243 @@ func (r *Aggregator) calculateSums(file *pb.File, specificAggregation *Aggregate } // consolidate coupling for file - if file.Stmts.Analyze.Coupling != nil && len(classes) > 0 { + if len(classes) > 0 && file.Stmts.Analyze.Coupling != nil { file.Stmts.Analyze.Coupling.Efferent = file.Stmts.Analyze.Coupling.Efferent / int32(len(classes)) file.Stmts.Analyze.Coupling.Afferent = file.Stmts.Analyze.Coupling.Afferent / int32(len(classes)) } + return result +} + +func (r *Aggregator) mergeChunks(aggregated Aggregated, chunk *Aggregated) Aggregated { + + result := aggregated + result.ConcernedFiles = append(result.ConcernedFiles, chunk.ConcernedFiles...) 
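Aside: the map/merge/reduce shape used in mergeChunks and reduceMetrics is easy to state on its own. Each worker accumulates a Sum and a Counter per metric, chunks are merged by adding those two fields, and the average is only derived at the end, guarded against empty counters. The sketch below illustrates that shape; the field names follow the AggregateResult struct introduced by this patch, while the helper names and values are illustrative only.

package main

import "fmt"

// aggregateResult mirrors the Sum / Counter / Avg fields of AggregateResult;
// Min and Max are omitted here for brevity.
type aggregateResult struct {
	Sum     float64
	Counter int
	Avg     float64
}

// merge adds the partial sums produced by two workers.
func merge(a, b aggregateResult) aggregateResult {
	return aggregateResult{Sum: a.Sum + b.Sum, Counter: a.Counter + b.Counter}
}

// reduce derives the average once all chunks have been merged,
// with the same Counter > 0 guard as reduceMetrics.
func reduce(r aggregateResult) aggregateResult {
	if r.Counter > 0 {
		r.Avg = r.Sum / float64(r.Counter)
	}
	return r
}

func main() {
	chunk1 := aggregateResult{Sum: 12, Counter: 3} // e.g. cyclomatic complexity of 3 methods
	chunk2 := aggregateResult{Sum: 8, Counter: 2}
	fmt.Printf("%+v\n", reduce(merge(chunk1, chunk2))) // {Sum:20 Counter:5 Avg:4}
}
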
+ result.NbFiles += chunk.NbFiles + result.NbClasses += chunk.NbClasses + result.NbClassesWithCode += chunk.NbClassesWithCode + result.NbMethods += chunk.NbMethods + + result.Loc.Sum += chunk.Loc.Sum + result.Loc.Counter += chunk.Loc.Counter + result.Cloc.Sum += chunk.Cloc.Sum + result.Cloc.Counter += chunk.Cloc.Counter + result.Lloc.Sum += chunk.Lloc.Sum + result.Lloc.Counter += chunk.Lloc.Counter + + result.MethodsPerClass.Sum += chunk.MethodsPerClass.Sum + result.MethodsPerClass.Counter += chunk.MethodsPerClass.Counter + result.LocPerClass.Sum += chunk.LocPerClass.Sum + result.LocPerClass.Counter += chunk.LocPerClass.Counter + result.LocPerMethod.Sum += chunk.LocPerMethod.Sum + result.LocPerMethod.Counter += chunk.LocPerMethod.Counter + result.CyclomaticComplexityPerMethod.Sum += chunk.CyclomaticComplexityPerMethod.Sum + result.CyclomaticComplexityPerMethod.Counter += chunk.CyclomaticComplexityPerMethod.Counter + + result.CyclomaticComplexityPerClass.Sum += chunk.CyclomaticComplexityPerClass.Sum + result.CyclomaticComplexityPerClass.Counter += chunk.CyclomaticComplexityPerClass.Counter + + result.HalsteadDifficulty.Sum += chunk.HalsteadDifficulty.Sum + result.HalsteadDifficulty.Counter += chunk.HalsteadDifficulty.Counter + result.HalsteadEffort.Sum += chunk.HalsteadEffort.Sum + result.HalsteadEffort.Counter += chunk.HalsteadEffort.Counter + result.HalsteadVolume.Sum += chunk.HalsteadVolume.Sum + result.HalsteadVolume.Counter += chunk.HalsteadVolume.Counter + result.HalsteadTime.Sum += chunk.HalsteadTime.Sum + result.HalsteadTime.Counter += chunk.HalsteadTime.Counter + result.HalsteadBugs.Sum += chunk.HalsteadBugs.Sum + result.HalsteadBugs.Counter += chunk.HalsteadBugs.Counter + + result.MaintainabilityIndex.Sum += chunk.MaintainabilityIndex.Sum + result.MaintainabilityIndex.Counter += chunk.MaintainabilityIndex.Counter + result.MaintainabilityIndexWithoutComments.Sum += chunk.MaintainabilityIndexWithoutComments.Sum + result.MaintainabilityIndexWithoutComments.Counter += chunk.MaintainabilityIndexWithoutComments.Counter + result.MaintainabilityCommentWeight.Sum += chunk.MaintainabilityCommentWeight.Sum + result.MaintainabilityCommentWeight.Counter += chunk.MaintainabilityCommentWeight.Counter + + result.Instability.Sum += chunk.Instability.Sum + result.Instability.Counter += chunk.Instability.Counter + + result.EfferentCoupling.Sum += chunk.EfferentCoupling.Sum + result.EfferentCoupling.Counter += chunk.EfferentCoupling.Counter + result.AfferentCoupling.Sum += chunk.AfferentCoupling.Sum + result.AfferentCoupling.Counter += chunk.AfferentCoupling.Counter + + result.MaintainabilityPerMethod.Sum += chunk.MaintainabilityPerMethod.Sum + result.MaintainabilityPerMethod.Counter += chunk.MaintainabilityPerMethod.Counter + result.MaintainabilityPerMethodWithoutComments.Sum += chunk.MaintainabilityPerMethodWithoutComments.Sum + result.MaintainabilityPerMethodWithoutComments.Counter += chunk.MaintainabilityPerMethodWithoutComments.Counter + result.MaintainabilityCommentWeightPerMethod.Sum += chunk.MaintainabilityCommentWeightPerMethod.Sum + result.MaintainabilityCommentWeightPerMethod.Counter += chunk.MaintainabilityCommentWeightPerMethod.Counter + + result.CommitCountForPeriod += chunk.CommitCountForPeriod + result.CommittedFilesCountForPeriod += chunk.CommittedFilesCountForPeriod + + result.PackageRelations = make(map[string]map[string]int) + for k, v := range chunk.PackageRelations { + result.PackageRelations[k] = v + } + + return result +} + +func (r *Aggregator) reduceMetrics(aggregated 
Aggregated) Aggregated { + // here we reduce metrics by averaging them + result := aggregated + if result.Loc.Counter > 0 { + result.Loc.Avg = result.Loc.Sum / float64(result.Loc.Counter) + } + if result.Cloc.Counter > 0 { + result.Cloc.Avg = result.Cloc.Sum / float64(result.Cloc.Counter) + } + if result.Lloc.Counter > 0 { + result.Lloc.Avg = result.Lloc.Sum / float64(result.Lloc.Counter) + } + if result.MethodsPerClass.Counter > 0 { + result.MethodsPerClass.Avg = result.MethodsPerClass.Sum / float64(result.MethodsPerClass.Counter) + } + if result.LocPerClass.Counter > 0 { + result.LocPerClass.Avg = result.LocPerClass.Sum / float64(result.LocPerClass.Counter) + } + if result.LocPerMethod.Counter > 0 { + result.LocPerMethod.Avg = result.LocPerMethod.Sum / float64(result.LocPerMethod.Counter) + } + if result.CyclomaticComplexityPerMethod.Counter > 0 { + result.CyclomaticComplexityPerMethod.Avg = result.CyclomaticComplexityPerMethod.Sum / float64(result.CyclomaticComplexityPerMethod.Counter) + } + if result.CyclomaticComplexityPerClass.Counter > 0 { + result.CyclomaticComplexityPerClass.Avg = result.CyclomaticComplexityPerClass.Sum / float64(result.CyclomaticComplexityPerClass.Counter) + } + if result.HalsteadDifficulty.Counter > 0 { + result.HalsteadDifficulty.Avg = result.HalsteadDifficulty.Sum / float64(result.HalsteadDifficulty.Counter) + } + if result.HalsteadEffort.Counter > 0 { + result.HalsteadEffort.Avg = result.HalsteadEffort.Sum / float64(result.HalsteadEffort.Counter) + } + if result.HalsteadVolume.Counter > 0 { + result.HalsteadVolume.Avg = result.HalsteadVolume.Sum / float64(result.HalsteadVolume.Counter) + } + if result.HalsteadTime.Counter > 0 { + result.HalsteadTime.Avg = result.HalsteadTime.Sum / float64(result.HalsteadTime.Counter) + } + if result.MaintainabilityIndex.Counter > 0 { + result.MaintainabilityIndex.Avg = result.MaintainabilityIndex.Sum / float64(result.MaintainabilityIndex.Counter) + } + if result.MaintainabilityIndexWithoutComments.Counter > 0 { + result.MaintainabilityIndexWithoutComments.Avg = result.MaintainabilityIndexWithoutComments.Sum / float64(result.MaintainabilityIndexWithoutComments.Counter) + } + if result.MaintainabilityCommentWeight.Counter > 0 { + result.MaintainabilityCommentWeight.Avg = result.MaintainabilityCommentWeight.Sum / float64(result.MaintainabilityCommentWeight.Counter) + } + + // afferent coupling + if result.Instability.Counter > 0 { + result.Instability.Avg = result.Instability.Sum / float64(result.Instability.Counter) + } + if result.EfferentCoupling.Counter > 0 { + result.EfferentCoupling.Avg = result.EfferentCoupling.Sum / float64(result.EfferentCoupling.Counter) + } + if result.AfferentCoupling.Counter > 0 { + result.AfferentCoupling.Avg = result.AfferentCoupling.Sum / float64(result.AfferentCoupling.Counter) + } + + // Count commits for the period based on `ResultOfGitAnalysis` data + result.ResultOfGitAnalysis = r.gitSummaries + if result.ResultOfGitAnalysis != nil { + for _, gitAnalysis := range result.ResultOfGitAnalysis { + result.CommitCountForPeriod += gitAnalysis.CountCommitsForLanguage + } + } + + // Bus factor and other metrics based on aggregated data + for _, analyzer := range r.analyzers { + analyzer.Calculate(&result) + } + + return result +} + +func (r *Aggregator) mapCoupling(aggregated *Aggregated) Aggregated { + result := *aggregated + reg := regexp.MustCompile("[^A-Za-z0-9.]+") + + for _, file := range aggregated.ConcernedFiles { + classes := Engine.GetClassesInFile(file) + + for _, class := range classes { 
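Aside: the dependency loop in the rest of this hunk normalizes both ends of a relation to at most two namespace levels before counting it in PackageRelations. Taken on its own, that truncation looks like the sketch below; the helper name is illustrative, while the regexp is the one compiled at the top of mapCoupling.

package main

import (
	"fmt"
	"regexp"
)

// shortenNamespace keeps only the first two levels of a namespace, using the
// same separator-detection trick as mapCoupling: split on any run of characters
// that is not a letter, digit or dot, then re-join the first two parts with the
// separator found in the original string.
func shortenNamespace(ns string, reg *regexp.Regexp) string {
	separator := reg.FindString(ns)
	parts := reg.Split(ns, -1)
	if len(parts) > 2 {
		return parts[0] + separator + parts[1]
	}
	return ns
}

func main() {
	reg := regexp.MustCompile("[^A-Za-z0-9.]+")
	fmt.Println(shortenNamespace(`App\Domain\Model\User`, reg))              // App\Domain
	fmt.Println(shortenNamespace("github.com/halleck45/ast-metrics", reg))   // github.com/halleck45
}
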
+ + // dependencies + dependencies := file.Stmts.StmtExternalDependencies + + for _, dependency := range dependencies { + if dependency == nil { + continue + } + + namespaceTo := dependency.Namespace + namespaceFrom := dependency.From + + if namespaceFrom == "" || namespaceTo == "" { + continue + } + + // Keep only 2 levels in namespace + separator := reg.FindString(namespaceFrom) + parts := reg.Split(namespaceTo, -1) + if len(parts) > 2 { + namespaceTo = parts[0] + separator + parts[1] + } + + if namespaceFrom == "" || namespaceTo == "" { + continue + } + + parts = reg.Split(namespaceFrom, -1) + if len(parts) > 2 { + namespaceFrom = parts[0] + separator + parts[1] + } + + // if same, continue + if namespaceFrom == namespaceTo { + continue + } + + // if root namespace, continue + if namespaceFrom == "" || namespaceTo == "" { + continue + } + + // create the map if not exists + if _, ok := result.PackageRelations[namespaceFrom]; !ok { + result.PackageRelations[namespaceFrom] = make(map[string]int) + } + + if _, ok := result.PackageRelations[namespaceFrom][namespaceTo]; !ok { + result.PackageRelations[namespaceFrom][namespaceTo] = 0 + } + + // increment the counter + result.PackageRelations[namespaceFrom][namespaceTo]++ + } + + class.Stmts.Analyze.Coupling.Efferent = 0 + uniqueDependencies := make(map[string]bool) + for _, dependency := range class.Stmts.StmtExternalDependencies { + dependencyName := dependency.ClassName + + // check if dependency is already in hashmap + if _, ok := result.ClassesAfferentCoupling[dependencyName]; !ok { + result.ClassesAfferentCoupling[dependencyName] = 0 + } + result.ClassesAfferentCoupling[dependencyName]++ + + // check if dependency is unique + if _, ok := uniqueDependencies[dependencyName]; !ok { + uniqueDependencies[dependencyName] = true + } + } + class.Stmts.Analyze.Coupling.Efferent = int32(len(uniqueDependencies)) + } + } + return result } diff --git a/src/Analyzer/Aggregator_test.go b/src/Analyzer/Aggregator_test.go index ec4c27a..9926485 100644 --- a/src/Analyzer/Aggregator_test.go +++ b/src/Analyzer/Aggregator_test.go @@ -153,109 +153,111 @@ func TestCalculate(t *testing.T) { } func TestAggregates(t *testing.T) { - // Create a new Aggregator with some dummy data - aggregator := Aggregator{ - files: []*pb.File{ - // file 1 - { - ProgrammingLanguage: "Go", - Stmts: &pb.Stmts{ - StmtFunction: []*pb.StmtFunction{ - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Complexity: &pb.Complexity{ - Cyclomatic: proto.Int32(10), + t.Run("TestAggregates", func(t *testing.T) { + // Create a new Aggregator with some dummy data + aggregator := Aggregator{ + files: []*pb.File{ + // file 1 + { + ProgrammingLanguage: "Go", + Stmts: &pb.Stmts{ + StmtFunction: []*pb.StmtFunction{ + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Complexity: &pb.Complexity{ + Cyclomatic: proto.Int32(10), + }, }, }, }, }, - }, - StmtClass: []*pb.StmtClass{ - // class - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float64(120), - MaintainabilityIndexWithoutComments: proto.Float64(48), - CommentWeight: proto.Float64(40), + StmtClass: []*pb.StmtClass{ + // class + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Maintainability: &pb.Maintainability{ + MaintainabilityIndex: proto.Float64(120), + MaintainabilityIndexWithoutComments: proto.Float64(48), + CommentWeight: proto.Float64(40), + }, }, }, }, - }, - // class - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Maintainability: &pb.Maintainability{ - 
MaintainabilityIndex: proto.Float64(85), - MaintainabilityIndexWithoutComments: proto.Float64(48), - CommentWeight: proto.Float64(40), + // class + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Maintainability: &pb.Maintainability{ + MaintainabilityIndex: proto.Float64(85), + MaintainabilityIndexWithoutComments: proto.Float64(48), + CommentWeight: proto.Float64(40), + }, }, }, }, - }, - // class - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float64(65), - MaintainabilityIndexWithoutComments: proto.Float64(48), - CommentWeight: proto.Float64(40), + // class + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Maintainability: &pb.Maintainability{ + MaintainabilityIndex: proto.Float64(65), + MaintainabilityIndexWithoutComments: proto.Float64(48), + CommentWeight: proto.Float64(40), + }, }, }, }, - }, - // class - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float64(100), - MaintainabilityIndexWithoutComments: proto.Float64(48), - CommentWeight: proto.Float64(40), + // class + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Maintainability: &pb.Maintainability{ + MaintainabilityIndex: proto.Float64(100), + MaintainabilityIndexWithoutComments: proto.Float64(48), + CommentWeight: proto.Float64(40), + }, }, }, }, }, - }, - StmtNamespace: []*pb.StmtNamespace{ - { - Stmts: &pb.Stmts{ - StmtFunction: []*pb.StmtFunction{ - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Complexity: &pb.Complexity{ - Cyclomatic: proto.Int32(20), + StmtNamespace: []*pb.StmtNamespace{ + { + Stmts: &pb.Stmts{ + StmtFunction: []*pb.StmtFunction{ + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Complexity: &pb.Complexity{ + Cyclomatic: proto.Int32(20), + }, }, }, }, }, - }, - StmtClass: []*pb.StmtClass{ - // class - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float64(70), - MaintainabilityIndexWithoutComments: proto.Float64(48), - CommentWeight: proto.Float64(40), + StmtClass: []*pb.StmtClass{ + // class + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Maintainability: &pb.Maintainability{ + MaintainabilityIndex: proto.Float64(70), + MaintainabilityIndexWithoutComments: proto.Float64(48), + CommentWeight: proto.Float64(40), + }, }, }, }, - }, - // class - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float64(100), - MaintainabilityIndexWithoutComments: proto.Float64(48), - CommentWeight: proto.Float64(40), + // class + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Maintainability: &pb.Maintainability{ + MaintainabilityIndex: proto.Float64(100), + MaintainabilityIndexWithoutComments: proto.Float64(48), + CommentWeight: proto.Float64(40), + }, }, }, }, @@ -263,80 +265,80 @@ func TestAggregates(t *testing.T) { }, }, }, - }, - Analyze: &pb.Analyze{ - Volume: &pb.Volume{ - Loc: proto.Int32(100), - Cloc: proto.Int32(200), - Lloc: proto.Int32(50), + Analyze: &pb.Analyze{ + Volume: &pb.Volume{ + Loc: proto.Int32(100), + Cloc: proto.Int32(200), + Lloc: proto.Int32(50), + }, }, }, }, - }, - // file 2 - { - ProgrammingLanguage: "Go", - Stmts: &pb.Stmts{ - StmtFunction: []*pb.StmtFunction{ - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Complexity: &pb.Complexity{ - Cyclomatic: proto.Int32(60), + // file 2 + { + ProgrammingLanguage: "Go", + Stmts: &pb.Stmts{ + StmtFunction: []*pb.StmtFunction{ + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + 
Complexity: &pb.Complexity{ + Cyclomatic: proto.Int32(60), + }, }, }, }, }, - }, - StmtClass: []*pb.StmtClass{ - // class - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float64(75), - MaintainabilityIndexWithoutComments: proto.Float64(48), - CommentWeight: proto.Float64(40), + StmtClass: []*pb.StmtClass{ + // class + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Maintainability: &pb.Maintainability{ + MaintainabilityIndex: proto.Float64(75), + MaintainabilityIndexWithoutComments: proto.Float64(48), + CommentWeight: proto.Float64(40), + }, }, }, }, - }, - // class - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float64(120), - MaintainabilityIndexWithoutComments: proto.Float64(48), - CommentWeight: proto.Float64(40), + // class + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Maintainability: &pb.Maintainability{ + MaintainabilityIndex: proto.Float64(120), + MaintainabilityIndexWithoutComments: proto.Float64(48), + CommentWeight: proto.Float64(40), + }, }, }, }, }, - }, - StmtNamespace: []*pb.StmtNamespace{ - { - Stmts: &pb.Stmts{ - StmtFunction: []*pb.StmtFunction{ - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Complexity: &pb.Complexity{ - Cyclomatic: proto.Int32(30), + StmtNamespace: []*pb.StmtNamespace{ + { + Stmts: &pb.Stmts{ + StmtFunction: []*pb.StmtFunction{ + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Complexity: &pb.Complexity{ + Cyclomatic: proto.Int32(30), + }, }, }, }, }, - }, - StmtClass: []*pb.StmtClass{ - // class - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float64(90), - MaintainabilityIndexWithoutComments: proto.Float64(48), - CommentWeight: proto.Float64(40), + StmtClass: []*pb.StmtClass{ + // class + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Maintainability: &pb.Maintainability{ + MaintainabilityIndex: proto.Float64(90), + MaintainabilityIndexWithoutComments: proto.Float64(48), + CommentWeight: proto.Float64(40), + }, }, }, }, @@ -344,43 +346,43 @@ func TestAggregates(t *testing.T) { }, }, }, - }, - Analyze: &pb.Analyze{ - Volume: &pb.Volume{ - Loc: proto.Int32(200), - Cloc: proto.Int32(300), - Lloc: proto.Int32(150), + Analyze: &pb.Analyze{ + Volume: &pb.Volume{ + Loc: proto.Int32(200), + Cloc: proto.Int32(300), + Lloc: proto.Int32(150), + }, }, }, }, - }, - // file 3 - { - ProgrammingLanguage: "Php", - Stmts: &pb.Stmts{ - StmtNamespace: []*pb.StmtNamespace{ - { - Stmts: &pb.Stmts{ - StmtFunction: []*pb.StmtFunction{ - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Complexity: &pb.Complexity{ - Cyclomatic: proto.Int32(30), + // file 3 + { + ProgrammingLanguage: "Php", + Stmts: &pb.Stmts{ + StmtNamespace: []*pb.StmtNamespace{ + { + Stmts: &pb.Stmts{ + StmtFunction: []*pb.StmtFunction{ + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Complexity: &pb.Complexity{ + Cyclomatic: proto.Int32(30), + }, }, }, }, }, - }, - StmtClass: []*pb.StmtClass{ - // class - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float64(120), - MaintainabilityIndexWithoutComments: proto.Float64(48), - CommentWeight: proto.Float64(40), + StmtClass: []*pb.StmtClass{ + // class + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Maintainability: &pb.Maintainability{ + MaintainabilityIndex: proto.Float64(120), + MaintainabilityIndexWithoutComments: proto.Float64(48), + CommentWeight: proto.Float64(40), + }, 
}, }, }, @@ -388,154 +390,157 @@ func TestAggregates(t *testing.T) { }, }, }, - }, - Analyze: &pb.Analyze{ - Volume: &pb.Volume{ - Loc: proto.Int32(600), - Cloc: proto.Int32(100), - Lloc: proto.Int32(400), + Analyze: &pb.Analyze{ + Volume: &pb.Volume{ + Loc: proto.Int32(600), + Cloc: proto.Int32(100), + Lloc: proto.Int32(400), + }, }, }, }, }, - }, - } - - // Call the Aggregates method - projectAggregated := aggregator.Aggregates() + } - // Check that the returned ProjectAggregated struct has the expected values - if projectAggregated.ByFile.NbFiles != 3 { - t.Errorf("Expected 3 files, got %d", projectAggregated.ByFile.NbFiles) - } + // Call the Aggregates method + projectAggregated := aggregator.Aggregates() - // Checks on Combined aggregate - if projectAggregated.ByClass.NbClasses != 10 { - t.Errorf("Expected 10 classes, got %d", projectAggregated.ByClass.NbClasses) - } + // Check that the returned ProjectAggregated struct has the expected values + if projectAggregated.ByFile.NbFiles != 3 { + t.Errorf("Expected 3 files, got %d", projectAggregated.ByFile.NbFiles) + } - if projectAggregated.Combined.NbClasses != 10 { - t.Errorf("Expected 10 classes, got %d", projectAggregated.ByClass.NbClasses) - } + // Checks on Combined aggregate + if projectAggregated.ByClass.NbClasses != 10 { + t.Errorf("Expected 10 classes, got %d", projectAggregated.ByClass.NbClasses) + } - if projectAggregated.Combined.NbMethods != 5 { - t.Errorf("Expected 5 methods, got %d", projectAggregated.Combined.NbMethods) - } + if projectAggregated.Combined.NbClasses != 10 { + t.Errorf("Expected 10 classes, got %d", projectAggregated.ByClass.NbClasses) + } - if projectAggregated.Combined.AverageCyclomaticComplexityPerMethod != 30 { - t.Errorf("Expected AverageCyclomaticComplexityPerMethod 30, got %f", projectAggregated.Combined.AverageCyclomaticComplexityPerMethod) - } + if projectAggregated.Combined.NbMethods != 5 { + t.Errorf("Expected 5 methods, got %d", projectAggregated.Combined.NbMethods) + } - if int(projectAggregated.Combined.AverageMI) != 94 { - t.Errorf("Expected MI of 94 for all files, got %v", int(projectAggregated.Combined.AverageMI)) - } + if projectAggregated.Combined.AverageCyclomaticComplexityPerMethod != 30 { + t.Errorf("Expected AverageCyclomaticComplexityPerMethod 30, got %f", projectAggregated.Combined.AverageCyclomaticComplexityPerMethod) + } - // Check on Go aggregate - if projectAggregated.ByProgrammingLanguage["Go"].NbClasses != 9 { - t.Errorf("Expected 9 classes, got %d", projectAggregated.ByProgrammingLanguage["Go"].NbClasses) - } + if int(projectAggregated.Combined.AverageMI) != 94 { + t.Errorf("Expected MI of 94 for all files, got %v", int(projectAggregated.Combined.AverageMI)) + } - if projectAggregated.ByProgrammingLanguage["Go"].NbMethods != 4 { - t.Errorf("Expected 4 methods in Go, got %d", projectAggregated.ByProgrammingLanguage["Go"].NbMethods) - } + // Check on Go aggregate + if projectAggregated.ByProgrammingLanguage["Go"].NbClasses != 9 { + t.Errorf("Expected 9 classes, got %d", projectAggregated.ByProgrammingLanguage["Go"].NbClasses) + } - if projectAggregated.ByProgrammingLanguage["Go"].NbFiles != 2 { - t.Errorf("Expected 2 Go files, got %d", projectAggregated.ByProgrammingLanguage["Go"].NbFiles) - } + if projectAggregated.ByProgrammingLanguage["Go"].NbMethods != 4 { + t.Errorf("Expected 4 methods in Go, got %d", projectAggregated.ByProgrammingLanguage["Go"].NbMethods) + } - if int(projectAggregated.ByProgrammingLanguage["Go"].AverageMI) != 91 { - t.Errorf("Expected MI of 91 for Go 
files, got %v", int(projectAggregated.ByProgrammingLanguage["Go"].AverageMI)) - } + if projectAggregated.ByProgrammingLanguage["Go"].NbFiles != 2 { + t.Errorf("Expected 2 Go files, got %d", projectAggregated.ByProgrammingLanguage["Go"].NbFiles) + } - // Check on Php aggregate - if projectAggregated.ByProgrammingLanguage["Php"].NbClasses != 1 { - t.Errorf("Expected 1 class, got %d", projectAggregated.ByProgrammingLanguage["Php"].NbClasses) - } + if int(projectAggregated.ByProgrammingLanguage["Go"].AverageMI) != 91 { + t.Errorf("Expected MI of 91 for Go files, got %v", int(projectAggregated.ByProgrammingLanguage["Go"].AverageMI)) + } - if projectAggregated.ByProgrammingLanguage["Php"].NbMethods != 1 { - t.Errorf("Expected 1 methods in PHP, got %d", projectAggregated.ByProgrammingLanguage["Php"].NbMethods) - } + // Check on Php aggregate + if projectAggregated.ByProgrammingLanguage["Php"].NbClasses != 1 { + t.Errorf("Expected 1 class, got %d", projectAggregated.ByProgrammingLanguage["Php"].NbClasses) + } - if projectAggregated.ByProgrammingLanguage["Php"].NbFiles != 1 { - t.Errorf("Expected 1 PHP files, got %d", projectAggregated.ByProgrammingLanguage["Go"].NbFiles) - } + if projectAggregated.ByProgrammingLanguage["Php"].NbMethods != 1 { + t.Errorf("Expected 1 methods in PHP, got %d", projectAggregated.ByProgrammingLanguage["Php"].NbMethods) + } - if projectAggregated.ByProgrammingLanguage["Php"].AverageMI != 120 { - t.Errorf("Expected MI of 120 for PHP files, got %f", projectAggregated.ByProgrammingLanguage["Php"].AverageMI) - } + if projectAggregated.ByProgrammingLanguage["Php"].NbFiles != 1 { + t.Errorf("Expected 1 PHP files, got %d", projectAggregated.ByProgrammingLanguage["Go"].NbFiles) + } - if int(projectAggregated.ByProgrammingLanguage["Php"].AverageMI) != 120 { - t.Errorf("Expected MI of 120 for PHP files, got %v", int(projectAggregated.ByProgrammingLanguage["Go"].AverageMI)) - } + if projectAggregated.ByProgrammingLanguage["Php"].AverageMI != 120 { + t.Errorf("Expected MI of 120 for PHP files, got %f", projectAggregated.ByProgrammingLanguage["Php"].AverageMI) + } + if int(projectAggregated.ByProgrammingLanguage["Php"].AverageMI) != 120 { + t.Errorf("Expected MI of 120 for PHP files, got %v", int(projectAggregated.ByProgrammingLanguage["Go"].AverageMI)) + } + }) } func TestCalculateMaintainabilityIndex(t *testing.T) { - aggregator := Aggregator{} - file := pb.File{ - Stmts: &pb.Stmts{ - StmtFunction: []*pb.StmtFunction{ - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float64(15), - MaintainabilityIndexWithoutComments: proto.Float64(20), - CommentWeight: proto.Float64(25), + t.Run("TestCalculateMaintainabilityIndex", func(t *testing.T) { + aggregator := Aggregator{} + file := pb.File{ + Stmts: &pb.Stmts{ + StmtFunction: []*pb.StmtFunction{ + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Maintainability: &pb.Maintainability{ + MaintainabilityIndex: proto.Float64(15), + MaintainabilityIndexWithoutComments: proto.Float64(20), + CommentWeight: proto.Float64(25), + }, }, }, }, - }, - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float64(30), - MaintainabilityIndexWithoutComments: proto.Float64(35), - CommentWeight: proto.Float64(40), + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Maintainability: &pb.Maintainability{ + MaintainabilityIndex: proto.Float64(30), + MaintainabilityIndexWithoutComments: proto.Float64(35), + CommentWeight: 
proto.Float64(40), + }, }, }, }, }, }, - }, - } - aggregated := Aggregated{} + } + aggregated := Aggregated{} - aggregator.calculateSums(&file, &aggregated) - aggregator.consolidate(&aggregated) + aggregator.calculateSums(&file, &aggregated) + aggregator.consolidate(&aggregated) - if aggregated.AverageMI != 22.5 { - t.Errorf("Expected 22.5, got %f", aggregated.AverageMI) - } + if aggregated.AverageMI != 22.5 { + t.Errorf("Expected 22.5, got %f", aggregated.AverageMI) + } - if aggregated.AverageMIwoc != 27.5 { - t.Errorf("Expected 27.5, got %f", aggregated.AverageMIwoc) - } + if aggregated.AverageMIwoc != 27.5 { + t.Errorf("Expected 27.5, got %f", aggregated.AverageMIwoc) + } - if aggregated.AverageMIcw != 32.5 { - t.Errorf("Expected 32.5, got %f", aggregated.AverageMIcw) - } + if aggregated.AverageMIcw != 32.5 { + t.Errorf("Expected 32.5, got %f", aggregated.AverageMIcw) + } - // Average per method - if aggregated.AverageMIPerMethod != 22.5 { - t.Errorf("Expected AverageMIPerMethod, got %f", aggregated.AverageMIPerMethod) - } + // Average per method + if aggregated.AverageMIPerMethod != 22.5 { + t.Errorf("Expected AverageMIPerMethod, got %f", aggregated.AverageMIPerMethod) + } + }) } func TestFIlesWithErrorAreDetected(t *testing.T) { - aggregator := Aggregator{} - files := []*pb.File{ - &pb.File{ - Stmts: &pb.Stmts{}, - }, - &pb.File{ - Errors: []string{"Error1", "Error2"}, - }, - } - aggregator.files = files - aggregated := aggregator.Aggregates() + t.Run("TestFilesWithErrorAreDetected", func(t *testing.T) { + aggregator := Aggregator{} + files := []*pb.File{ + &pb.File{ + Stmts: &pb.Stmts{}, + }, + &pb.File{ + Errors: []string{"Error1", "Error2"}, + }, + } + aggregator.files = files + aggregated := aggregator.Aggregates() - assert.Equal(t, 2, aggregated.ByFile.NbFiles) - assert.Equal(t, 1, len(aggregated.ErroredFiles)) + assert.Equal(t, 2, aggregated.ByFile.NbFiles) + assert.Equal(t, 1, len(aggregated.ErroredFiles)) + }) } diff --git a/src/Analyzer/AstAnalyzer.go b/src/Analyzer/AstAnalyzer.go index cc98748..a634553 100644 --- a/src/Analyzer/AstAnalyzer.go +++ b/src/Analyzer/AstAnalyzer.go @@ -9,6 +9,7 @@ import ( Complexity "github.com/halleck45/ast-metrics/src/Analyzer/Complexity" Component "github.com/halleck45/ast-metrics/src/Analyzer/Component" Volume "github.com/halleck45/ast-metrics/src/Analyzer/Volume" + "github.com/halleck45/ast-metrics/src/Engine" pb "github.com/halleck45/ast-metrics/src/NodeType" "github.com/halleck45/ast-metrics/src/Storage" "github.com/pterm/pterm" @@ -26,7 +27,6 @@ func Start(workdir *Storage.Workdir, progressbar *pterm.SpinnerPrinter) []*pb.Fi // Wait for end of all goroutines var wg sync.WaitGroup - var wgByCpu sync.WaitGroup // store results // channel should have value @@ -40,32 +40,34 @@ func Start(workdir *Storage.Workdir, progressbar *pterm.SpinnerPrinter) []*pb.Fi filesChan := make(chan string, numWorkers) for i := 0; i < numWorkers; i++ { - wgByCpu.Add(1) go func() { - defer wgByCpu.Done() for file := range filesChan { - mu.Lock() - nbParsingFiles++ - mu.Unlock() + go func(file string) { + defer wg.Done() + mu.Lock() + nbParsingFiles++ + mu.Unlock() - executeFileAnalysis(file, channelResult) + executeFileAnalysis(file, channelResult) - details := strconv.Itoa(nbParsingFiles) + "/" + strconv.Itoa(len(astFiles)) + details := strconv.Itoa(nbParsingFiles) + "/" + strconv.Itoa(len(astFiles)) - if progressbar != nil { - progressbar.UpdateText("Analyzing (" + details + ")") - } + if progressbar != nil { + progressbar.UpdateText("Analyzing (" + details + 
")") + } + }(file) } }() } for _, file := range astFiles { + wg.Add(1) filesChan <- file } wg.Wait() close(filesChan) - wgByCpu.Wait() + if progressbar != nil { progressbar.Info("AST Analysis finished") } @@ -130,6 +132,10 @@ func executeFileAnalysis(file string, channelResult chan<- *pb.File) error { // visit AST root.Visit() + + // Ensure structure is complete + Engine.EnsureNodeTypeIsComplete(pbFile) + channelResult <- pbFile return nil } diff --git a/src/Analyzer/Comparator.go b/src/Analyzer/Comparator.go index fabb819..8550618 100644 --- a/src/Analyzer/Comparator.go +++ b/src/Analyzer/Comparator.go @@ -88,36 +88,35 @@ func (c *Comparator) Compare(first Aggregated, second Aggregated) Comparaison { comparaison.NbClasses = first.NbClasses - second.NbClasses comparaison.NbClassesWithCode = first.NbClassesWithCode - second.NbClassesWithCode comparaison.NbMethods = first.NbMethods - second.NbMethods - comparaison.Loc = first.Loc - second.Loc - comparaison.Cloc = first.Cloc - second.Cloc - comparaison.Lloc = first.Lloc - second.Lloc - comparaison.AverageMethodsPerClass = first.AverageMethodsPerClass - second.AverageMethodsPerClass - comparaison.AverageLocPerMethod = first.AverageLocPerMethod - second.AverageLocPerMethod - comparaison.AverageLlocPerMethod = first.AverageLlocPerMethod - second.AverageLlocPerMethod - comparaison.AverageClocPerMethod = first.AverageClocPerMethod - second.AverageClocPerMethod - comparaison.AverageCyclomaticComplexityPerMethod = first.AverageCyclomaticComplexityPerMethod - second.AverageCyclomaticComplexityPerMethod - comparaison.AverageCyclomaticComplexityPerClass = first.AverageCyclomaticComplexityPerClass - second.AverageCyclomaticComplexityPerClass - comparaison.MinCyclomaticComplexity = first.MinCyclomaticComplexity - second.MinCyclomaticComplexity - comparaison.MaxCyclomaticComplexity = first.MaxCyclomaticComplexity - second.MaxCyclomaticComplexity - comparaison.AverageHalsteadDifficulty = first.AverageHalsteadDifficulty - second.AverageHalsteadDifficulty - comparaison.AverageHalsteadEffort = first.AverageHalsteadEffort - second.AverageHalsteadEffort - comparaison.AverageHalsteadVolume = first.AverageHalsteadVolume - second.AverageHalsteadVolume - comparaison.AverageHalsteadTime = first.AverageHalsteadTime - second.AverageHalsteadTime - comparaison.AverageHalsteadBugs = first.AverageHalsteadBugs - second.AverageHalsteadBugs - comparaison.SumHalsteadDifficulty = first.SumHalsteadDifficulty - second.SumHalsteadDifficulty - comparaison.SumHalsteadEffort = first.SumHalsteadEffort - second.SumHalsteadEffort - comparaison.SumHalsteadVolume = first.SumHalsteadVolume - second.SumHalsteadVolume - comparaison.SumHalsteadTime = first.SumHalsteadTime - second.SumHalsteadTime - comparaison.SumHalsteadBugs = first.SumHalsteadBugs - second.SumHalsteadBugs - comparaison.AverageMI = first.AverageMI - second.AverageMI - comparaison.AverageMIwoc = first.AverageMIwoc - second.AverageMIwoc - comparaison.AverageMIcw = first.AverageMIcw - second.AverageMIcw - comparaison.AverageMIPerMethod = first.AverageMIPerMethod - second.AverageMIPerMethod - comparaison.AverageMIwocPerMethod = first.AverageMIwocPerMethod - second.AverageMIwocPerMethod - comparaison.AverageMIcwPerMethod = first.AverageMIcwPerMethod - second.AverageMIcwPerMethod - comparaison.AverageAfferentCoupling = first.AverageAfferentCoupling - second.AverageAfferentCoupling - comparaison.AverageEfferentCoupling = first.AverageEfferentCoupling - second.AverageEfferentCoupling - comparaison.AverageInstability = 
first.AverageInstability - second.AverageInstability + comparaison.Loc = int(first.Loc.Sum - second.Loc.Sum) + comparaison.Cloc = int(first.Cloc.Sum - second.Cloc.Sum) + comparaison.Lloc = int(first.Lloc.Sum - second.Lloc.Sum) + comparaison.AverageMethodsPerClass = first.MethodsPerClass.Avg - second.MethodsPerClass.Avg + comparaison.AverageLocPerMethod = first.LocPerMethod.Avg - second.LocPerMethod.Avg + comparaison.AverageLlocPerMethod = first.LlocPerMethod.Avg - second.LlocPerMethod.Avg + comparaison.AverageClocPerMethod = first.ClocPerMethod.Avg - second.ClocPerMethod.Avg + comparaison.AverageCyclomaticComplexityPerMethod = first.CyclomaticComplexityPerMethod.Avg - second.CyclomaticComplexityPerMethod.Avg + comparaison.AverageCyclomaticComplexityPerClass = first.CyclomaticComplexityPerClass.Avg - second.CyclomaticComplexityPerClass.Avg + comparaison.MinCyclomaticComplexity = int(first.CyclomaticComplexityPerMethod.Min - second.CyclomaticComplexityPerMethod.Min) + comparaison.MaxCyclomaticComplexity = int(first.CyclomaticComplexityPerMethod.Max - second.CyclomaticComplexityPerMethod.Max) + comparaison.AverageHalsteadDifficulty = first.HalsteadDifficulty.Avg - second.HalsteadDifficulty.Avg + comparaison.AverageHalsteadEffort = first.HalsteadEffort.Avg - second.HalsteadEffort.Avg + comparaison.AverageHalsteadVolume = first.HalsteadVolume.Avg - second.HalsteadVolume.Avg + comparaison.AverageHalsteadTime = first.HalsteadTime.Avg - second.HalsteadTime.Avg + comparaison.AverageHalsteadBugs = first.HalsteadBugs.Avg - second.HalsteadBugs.Avg + comparaison.SumHalsteadDifficulty = first.HalsteadDifficulty.Sum - second.HalsteadDifficulty.Sum + comparaison.SumHalsteadEffort = first.HalsteadEffort.Sum - second.HalsteadEffort.Sum + comparaison.SumHalsteadVolume = first.HalsteadVolume.Sum - second.HalsteadVolume.Sum + comparaison.SumHalsteadTime = first.HalsteadTime.Sum - second.HalsteadTime.Sum + comparaison.SumHalsteadBugs = first.HalsteadBugs.Sum - second.HalsteadBugs.Sum + comparaison.AverageMI = first.MaintainabilityIndex.Avg - second.MaintainabilityIndex.Avg + comparaison.AverageMIwoc = first.MaintainabilityIndexWithoutComments.Avg - second.MaintainabilityIndexWithoutComments.Avg + comparaison.AverageMIPerMethod = first.MaintainabilityPerMethod.Avg - second.MaintainabilityPerMethod.Avg + comparaison.AverageMIwocPerMethod = first.MaintainabilityCommentWeightPerMethod.Avg - second.MaintainabilityCommentWeightPerMethod.Avg + comparaison.AverageMIcwPerMethod = first.MaintainabilityCommentWeightPerMethod.Avg - second.MaintainabilityCommentWeightPerMethod.Avg + comparaison.AverageAfferentCoupling = first.AfferentCoupling.Avg - second.AfferentCoupling.Avg + comparaison.AverageEfferentCoupling = first.EfferentCoupling.Avg - second.EfferentCoupling.Avg + comparaison.AverageInstability = first.Instability.Avg - second.Instability.Avg comparaison.CommitCountForPeriod = first.CommitCountForPeriod - second.CommitCountForPeriod comparaison.CommittedFilesCountForPeriod = first.CommittedFilesCountForPeriod - second.CommittedFilesCountForPeriod comparaison.BusFactor = first.BusFactor - second.BusFactor diff --git a/src/Analyzer/Volume/HalsteadMetricsVisitor.go b/src/Analyzer/Volume/HalsteadMetricsVisitor.go index d11af04..d23c8b2 100644 --- a/src/Analyzer/Volume/HalsteadMetricsVisitor.go +++ b/src/Analyzer/Volume/HalsteadMetricsVisitor.go @@ -97,13 +97,6 @@ func (v *HalsteadMetricsVisitor) Visit(stmts *pb.Stmts, parents *pb.Stmts) { // Calculate time required to program (T) T = E / 18 - // convert float to 
float64 - V32 := float64(V) - hatN32 := float64(hatN) - D32 := float64(D) - E32 := float64(E) - T32 := float64(T) - // Assign to result if stmt.Stmts.Analyze == nil { stmt.Stmts.Analyze = &pb.Analyze{} @@ -112,11 +105,11 @@ func (v *HalsteadMetricsVisitor) Visit(stmts *pb.Stmts, parents *pb.Stmts) { stmt.Stmts.Analyze.Volume.HalsteadVocabulary = &n stmt.Stmts.Analyze.Volume.HalsteadLength = &N - stmt.Stmts.Analyze.Volume.HalsteadEstimatedLength = &hatN32 - stmt.Stmts.Analyze.Volume.HalsteadVolume = &V32 - stmt.Stmts.Analyze.Volume.HalsteadDifficulty = &D32 - stmt.Stmts.Analyze.Volume.HalsteadEffort = &E32 - stmt.Stmts.Analyze.Volume.HalsteadTime = &T32 + stmt.Stmts.Analyze.Volume.HalsteadEstimatedLength = &hatN + stmt.Stmts.Analyze.Volume.HalsteadVolume = &V + stmt.Stmts.Analyze.Volume.HalsteadDifficulty = &D + stmt.Stmts.Analyze.Volume.HalsteadEffort = &E + stmt.Stmts.Analyze.Volume.HalsteadTime = &T } } @@ -174,13 +167,6 @@ func (v *HalsteadMetricsVisitor) LeaveNode(stmts *pb.Stmts) { T = T / float64(len(stmt.Stmts.StmtFunction)) } - // convert float to float64 - V32 := float64(V) - hatN32 := float64(hatN) - D32 := float64(D) - E32 := float64(E) - T32 := float64(T) - // Assign to result if stmt.Stmts.Analyze == nil { stmt.Stmts.Analyze = &pb.Analyze{} @@ -191,11 +177,11 @@ func (v *HalsteadMetricsVisitor) LeaveNode(stmts *pb.Stmts) { stmt.Stmts.Analyze.Volume.HalsteadVocabulary = &n stmt.Stmts.Analyze.Volume.HalsteadLength = &N - stmt.Stmts.Analyze.Volume.HalsteadEstimatedLength = &hatN32 - stmt.Stmts.Analyze.Volume.HalsteadVolume = &V32 - stmt.Stmts.Analyze.Volume.HalsteadDifficulty = &D32 - stmt.Stmts.Analyze.Volume.HalsteadEffort = &E32 - stmt.Stmts.Analyze.Volume.HalsteadTime = &T32 + stmt.Stmts.Analyze.Volume.HalsteadEstimatedLength = &hatN + stmt.Stmts.Analyze.Volume.HalsteadVolume = &V + stmt.Stmts.Analyze.Volume.HalsteadDifficulty = &D + stmt.Stmts.Analyze.Volume.HalsteadEffort = &E + stmt.Stmts.Analyze.Volume.HalsteadTime = &T } } } diff --git a/src/Cli/ComponentStatisticsOverview.go b/src/Cli/ComponentStatisticsOverview.go index 8cdf2a4..26b1c2a 100644 --- a/src/Cli/ComponentStatisticsOverview.go +++ b/src/Cli/ComponentStatisticsOverview.go @@ -31,7 +31,7 @@ func (v *ComponentStatisticsOverview) Render() string { Files: v.files, } boxCcn := StyleNumberBox( - fmt.Sprintf("%.2f", v.aggregated.AverageCyclomaticComplexityPerMethod), + fmt.Sprintf("%.2f", v.aggregated.CyclomaticComplexityPerMethod.Avg), "Cycl. 
complexity per method", chartRepartitionCyclomatic.AsTerminalElement(), ) @@ -42,7 +42,7 @@ func (v *ComponentStatisticsOverview) Render() string { Files: v.files, } boxMethods := StyleNumberBox( - fmt.Sprintf("%.2f", v.aggregated.AverageLocPerMethod), + fmt.Sprintf("%.2f", v.aggregated.LocPerMethod.Avg), "Average LOC per method", chartRepartitionLocByMethod.AsTerminalElement()+" ", ) @@ -53,7 +53,7 @@ func (v *ComponentStatisticsOverview) Render() string { Files: v.files, } boxMaintainability := StyleNumberBox( - DecorateMaintainabilityIndex(int(v.aggregated.AverageMI), nil), + DecorateMaintainabilityIndex(int(v.aggregated.MaintainabilityIndex.Avg), nil), "Maintainability index", chartRepartitionMI.AsTerminalElement(), ) diff --git a/src/Cli/ScreenSummary.go b/src/Cli/ScreenSummary.go index c1e5343..256598a 100644 --- a/src/Cli/ScreenSummary.go +++ b/src/Cli/ScreenSummary.go @@ -82,10 +82,10 @@ func (m modelScreenSummary) View() string { | Min | Max | Average per class | Average per method | | --- | --- | --- | --- | | ` + - strconv.Itoa(combined.MinCyclomaticComplexity) + - ` | ` + strconv.Itoa(combined.MaxCyclomaticComplexity) + - ` | ` + fmt.Sprintf("%.2f", combined.AverageCyclomaticComplexityPerClass) + - ` | ` + fmt.Sprintf("%.2f", combined.AverageCyclomaticComplexityPerMethod) + + strconv.Itoa(int(combined.CyclomaticComplexityPerMethod.Min)) + + ` | ` + strconv.Itoa(int(combined.CyclomaticComplexityPerMethod.Max)) + + ` | ` + fmt.Sprintf("%.2f", combined.CyclomaticComplexityPerClass.Avg) + + ` | ` + fmt.Sprintf("%.2f", combined.CyclomaticComplexityPerMethod.Avg) + ` | ### Classes and methods @@ -94,8 +94,8 @@ func (m modelScreenSummary) View() string { | --- | --- | --- | --- |` + "\n" + ` | ` + strconv.Itoa(aggregatedByClass.NbClasses) + ` | ` + strconv.Itoa(combined.NbMethods) + - ` | ` + fmt.Sprintf("%.2f", aggregatedByClass.AverageMethodsPerClass) + - ` | ` + fmt.Sprintf("%.2f", combined.AverageLocPerMethod) + + ` | ` + fmt.Sprintf("%.2f", aggregatedByClass.MethodsPerClass.Avg) + + ` | ` + fmt.Sprintf("%.2f", combined.LocPerMethod.Avg) + ` | ## Maintainability @@ -105,7 +105,7 @@ func (m modelScreenSummary) View() string { | Maintainability index | MI without comments | Comment weight | | --- | --- | --- | - | ` + DecorateMaintainabilityIndex(int(aggregatedByClass.AverageMI), nil) + ` | ` + fmt.Sprintf("%.2f", aggregatedByClass.AverageMIwoc) + ` | ` + fmt.Sprintf("%.2f", aggregatedByClass.AverageMIcw) + ` | + | ` + DecorateMaintainabilityIndex(int(aggregatedByClass.MaintainabilityIndex.Avg), nil) + ` | ` + fmt.Sprintf("%.2f", aggregatedByClass.MaintainabilityIndexWithoutComments.Avg) + ` | ` + fmt.Sprintf("%.2f", aggregatedByClass.MaintainabilityCommentWeight.Avg) + ` | ` out, _ := glamour.Render(in, "dark") diff --git a/src/Engine/NodeTypeEnsurer.go b/src/Engine/NodeTypeEnsurer.go new file mode 100644 index 0000000..1140a05 --- /dev/null +++ b/src/Engine/NodeTypeEnsurer.go @@ -0,0 +1,38 @@ +package Engine + +import ( + pb "github.com/halleck45/ast-metrics/src/NodeType" +) + +func EnsureNodeTypeIsComplete(file *pb.File) { + + if file.Stmts.Analyze == nil { + file.Stmts.Analyze = &pb.Analyze{} + } + + if file.LinesOfCode == nil && file.Stmts.Analyze.Volume != nil { + file.LinesOfCode = &pb.LinesOfCode{ + LinesOfCode: *file.Stmts.Analyze.Volume.Loc, + CommentLinesOfCode: *file.Stmts.Analyze.Volume.Cloc, + LogicalLinesOfCode: *file.Stmts.Analyze.Volume.Lloc, + } + } + + if file.Stmts.Analyze == nil { + file.Stmts.Analyze = &pb.Analyze{} + } + + if file.Stmts.Analyze.Complexity == 
nil { + zero := int32(0) + file.Stmts.Analyze.Complexity = &pb.Complexity{ + Cyclomatic: &zero, + } + } + + if file.Stmts.Analyze.Coupling == nil { + file.Stmts.Analyze.Coupling = &pb.Coupling{ + Afferent: 0, + Efferent: 0, + } + } +} diff --git a/src/NodeType/NodeType.pb.go b/src/NodeType/NodeType.pb.go index dd1bab4..b6043ea 100644 --- a/src/NodeType/NodeType.pb.go +++ b/src/NodeType/NodeType.pb.go @@ -1767,11 +1767,11 @@ type Volume struct { Cloc *int32 `protobuf:"varint,3,opt,name=cloc,proto3,oneof" json:"cloc,omitempty"` HalsteadVocabulary *int32 `protobuf:"varint,4,opt,name=halsteadVocabulary,proto3,oneof" json:"halsteadVocabulary,omitempty"` HalsteadLength *int32 `protobuf:"varint,5,opt,name=halsteadLength,proto3,oneof" json:"halsteadLength,omitempty"` - HalsteadVolume *float64 `protobuf:"fixed32,6,opt,name=halsteadVolume,proto3,oneof" json:"halsteadVolume,omitempty"` - HalsteadDifficulty *float64 `protobuf:"fixed32,7,opt,name=halsteadDifficulty,proto3,oneof" json:"halsteadDifficulty,omitempty"` - HalsteadEffort *float64 `protobuf:"fixed32,8,opt,name=halsteadEffort,proto3,oneof" json:"halsteadEffort,omitempty"` - HalsteadTime *float64 `protobuf:"fixed32,9,opt,name=halsteadTime,proto3,oneof" json:"halsteadTime,omitempty"` - HalsteadEstimatedLength *float64 `protobuf:"fixed32,10,opt,name=halsteadEstimatedLength,proto3,oneof" json:"halsteadEstimatedLength,omitempty"` + HalsteadVolume *float64 `protobuf:"fixed64,6,opt,name=halsteadVolume,proto3,oneof" json:"halsteadVolume,omitempty"` + HalsteadDifficulty *float64 `protobuf:"fixed64,7,opt,name=halsteadDifficulty,proto3,oneof" json:"halsteadDifficulty,omitempty"` + HalsteadEffort *float64 `protobuf:"fixed64,8,opt,name=halsteadEffort,proto3,oneof" json:"halsteadEffort,omitempty"` + HalsteadTime *float64 `protobuf:"fixed64,9,opt,name=halsteadTime,proto3,oneof" json:"halsteadTime,omitempty"` + HalsteadEstimatedLength *float64 `protobuf:"fixed64,10,opt,name=halsteadEstimatedLength,proto3,oneof" json:"halsteadEstimatedLength,omitempty"` } func (x *Volume) Reset() { @@ -1881,9 +1881,9 @@ type Maintainability struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - MaintainabilityIndex *float64 `protobuf:"fixed32,1,opt,name=maintainabilityIndex,proto3,oneof" json:"maintainabilityIndex,omitempty"` - MaintainabilityIndexWithoutComments *float64 `protobuf:"fixed32,2,opt,name=maintainabilityIndexWithoutComments,proto3,oneof" json:"maintainabilityIndexWithoutComments,omitempty"` - CommentWeight *float64 `protobuf:"fixed32,3,opt,name=commentWeight,proto3,oneof" json:"commentWeight,omitempty"` + MaintainabilityIndex *float64 `protobuf:"fixed64,1,opt,name=maintainabilityIndex,proto3,oneof" json:"maintainabilityIndex,omitempty"` + MaintainabilityIndexWithoutComments *float64 `protobuf:"fixed64,2,opt,name=maintainabilityIndexWithoutComments,proto3,oneof" json:"maintainabilityIndexWithoutComments,omitempty"` + CommentWeight *float64 `protobuf:"fixed64,3,opt,name=commentWeight,proto3,oneof" json:"commentWeight,omitempty"` } func (x *Maintainability) Reset() { @@ -2076,7 +2076,7 @@ type Risk struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - Score float64 `protobuf:"fixed32,1,opt,name=score,proto3" json:"score,omitempty"` // score of risk. Lower is better + Score float64 `protobuf:"fixed64,1,opt,name=score,proto3" json:"score,omitempty"` // score of risk. 
Lower is better } func (x *Risk) Reset() { @@ -2128,7 +2128,7 @@ type Coupling struct { Afferent int32 `protobuf:"varint,1,opt,name=afferent,proto3" json:"afferent,omitempty"` // number of classes that depends on this class Efferent int32 `protobuf:"varint,2,opt,name=efferent,proto3" json:"efferent,omitempty"` // number of classes that this class depends on - Instability float64 `protobuf:"fixed32,3,opt,name=instability,proto3" json:"instability,omitempty"` // instability of the class + Instability float64 `protobuf:"fixed64,3,opt,name=instability,proto3" json:"instability,omitempty"` // instability of the class } func (x *Coupling) Reset() { @@ -2497,19 +2497,19 @@ var file_proto_NodeType_proto_rawDesc = []byte{ 0x18, 0x05, 0x20, 0x01, 0x28, 0x05, 0x48, 0x04, 0x52, 0x0e, 0x68, 0x61, 0x6c, 0x73, 0x74, 0x65, 0x61, 0x64, 0x4c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x88, 0x01, 0x01, 0x12, 0x2b, 0x0a, 0x0e, 0x68, 0x61, 0x6c, 0x73, 0x74, 0x65, 0x61, 0x64, 0x56, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x18, 0x06, 0x20, - 0x01, 0x28, 0x02, 0x48, 0x05, 0x52, 0x0e, 0x68, 0x61, 0x6c, 0x73, 0x74, 0x65, 0x61, 0x64, 0x56, + 0x01, 0x28, 0x01, 0x48, 0x05, 0x52, 0x0e, 0x68, 0x61, 0x6c, 0x73, 0x74, 0x65, 0x61, 0x64, 0x56, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x88, 0x01, 0x01, 0x12, 0x33, 0x0a, 0x12, 0x68, 0x61, 0x6c, 0x73, 0x74, 0x65, 0x61, 0x64, 0x44, 0x69, 0x66, 0x66, 0x69, 0x63, 0x75, 0x6c, 0x74, 0x79, 0x18, 0x07, - 0x20, 0x01, 0x28, 0x02, 0x48, 0x06, 0x52, 0x12, 0x68, 0x61, 0x6c, 0x73, 0x74, 0x65, 0x61, 0x64, + 0x20, 0x01, 0x28, 0x01, 0x48, 0x06, 0x52, 0x12, 0x68, 0x61, 0x6c, 0x73, 0x74, 0x65, 0x61, 0x64, 0x44, 0x69, 0x66, 0x66, 0x69, 0x63, 0x75, 0x6c, 0x74, 0x79, 0x88, 0x01, 0x01, 0x12, 0x2b, 0x0a, 0x0e, 0x68, 0x61, 0x6c, 0x73, 0x74, 0x65, 0x61, 0x64, 0x45, 0x66, 0x66, 0x6f, 0x72, 0x74, 0x18, - 0x08, 0x20, 0x01, 0x28, 0x02, 0x48, 0x07, 0x52, 0x0e, 0x68, 0x61, 0x6c, 0x73, 0x74, 0x65, 0x61, + 0x08, 0x20, 0x01, 0x28, 0x01, 0x48, 0x07, 0x52, 0x0e, 0x68, 0x61, 0x6c, 0x73, 0x74, 0x65, 0x61, 0x64, 0x45, 0x66, 0x66, 0x6f, 0x72, 0x74, 0x88, 0x01, 0x01, 0x12, 0x27, 0x0a, 0x0c, 0x68, 0x61, - 0x6c, 0x73, 0x74, 0x65, 0x61, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x02, + 0x6c, 0x73, 0x74, 0x65, 0x61, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x01, 0x48, 0x08, 0x52, 0x0c, 0x68, 0x61, 0x6c, 0x73, 0x74, 0x65, 0x61, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x88, 0x01, 0x01, 0x12, 0x3d, 0x0a, 0x17, 0x68, 0x61, 0x6c, 0x73, 0x74, 0x65, 0x61, 0x64, 0x45, 0x73, 0x74, 0x69, 0x6d, 0x61, 0x74, 0x65, 0x64, 0x4c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x18, 0x0a, - 0x20, 0x01, 0x28, 0x02, 0x48, 0x09, 0x52, 0x17, 0x68, 0x61, 0x6c, 0x73, 0x74, 0x65, 0x61, 0x64, + 0x20, 0x01, 0x28, 0x01, 0x48, 0x09, 0x52, 0x17, 0x68, 0x61, 0x6c, 0x73, 0x74, 0x65, 0x61, 0x64, 0x45, 0x73, 0x74, 0x69, 0x6d, 0x61, 0x74, 0x65, 0x64, 0x4c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x88, 0x01, 0x01, 0x42, 0x06, 0x0a, 0x04, 0x5f, 0x6c, 0x6f, 0x63, 0x42, 0x07, 0x0a, 0x05, 0x5f, 0x6c, 0x6c, 0x6f, 0x63, 0x42, 0x07, 0x0a, 0x05, 0x5f, 0x63, 0x6c, 0x6f, 0x63, 0x42, 0x15, 0x0a, 0x13, @@ -2525,15 +2525,15 @@ var file_proto_NodeType_proto_rawDesc = []byte{ 0x22, 0x9f, 0x02, 0x0a, 0x0f, 0x4d, 0x61, 0x69, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x12, 0x37, 0x0a, 0x14, 0x6d, 0x61, 0x69, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x02, 0x48, 0x00, 0x52, 0x14, 0x6d, 0x61, 0x69, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x61, 0x62, + 0x28, 0x01, 0x48, 0x00, 0x52, 0x14, 0x6d, 0x61, 
0x69, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x88, 0x01, 0x01, 0x12, 0x55, 0x0a, 0x23, 0x6d, 0x61, 0x69, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x57, 0x69, 0x74, 0x68, 0x6f, 0x75, 0x74, 0x43, 0x6f, 0x6d, 0x6d, - 0x65, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x02, 0x48, 0x01, 0x52, 0x23, 0x6d, 0x61, + 0x65, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x48, 0x01, 0x52, 0x23, 0x6d, 0x61, 0x69, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x57, 0x69, 0x74, 0x68, 0x6f, 0x75, 0x74, 0x43, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x88, 0x01, 0x01, 0x12, 0x29, 0x0a, 0x0d, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x57, - 0x65, 0x69, 0x67, 0x68, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x02, 0x48, 0x02, 0x52, 0x0d, 0x63, + 0x65, 0x69, 0x67, 0x68, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x01, 0x48, 0x02, 0x52, 0x0d, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x57, 0x65, 0x69, 0x67, 0x68, 0x74, 0x88, 0x01, 0x01, 0x42, 0x17, 0x0a, 0x15, 0x5f, 0x6d, 0x61, 0x69, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x42, 0x26, 0x0a, 0x24, 0x5f, 0x6d, 0x61, 0x69, @@ -2553,13 +2553,13 @@ var file_proto_NodeType_proto_rawDesc = []byte{ 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x64, 0x61, 0x74, 0x65, 0x22, 0x1c, 0x0a, 0x04, 0x52, 0x69, 0x73, 0x6b, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x63, 0x6f, - 0x72, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x02, 0x52, 0x05, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x22, + 0x72, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x01, 0x52, 0x05, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x22, 0x64, 0x0a, 0x08, 0x43, 0x6f, 0x75, 0x70, 0x6c, 0x69, 0x6e, 0x67, 0x12, 0x1a, 0x0a, 0x08, 0x61, 0x66, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, 0x61, 0x66, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x65, 0x66, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, 0x65, 0x66, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x74, 0x12, 0x20, 0x0a, 0x0b, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x62, 0x69, 0x6c, 0x69, - 0x74, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x02, 0x52, 0x0b, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x62, + 0x74, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x01, 0x52, 0x0b, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x42, 0x2b, 0x5a, 0x29, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x68, 0x61, 0x6c, 0x6c, 0x65, 0x63, 0x6b, 0x34, 0x35, 0x2f, 0x61, 0x73, 0x74, 0x2d, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2f, 0x4e, 0x6f, 0x64, 0x65, 0x54, 0x79, diff --git a/src/Report/HtmlReportGenerator.go b/src/Report/HtmlReportGenerator.go index e4c1614..917a1b2 100644 --- a/src/Report/HtmlReportGenerator.go +++ b/src/Report/HtmlReportGenerator.go @@ -262,6 +262,10 @@ func (v *HtmlReportGenerator) RegisterFilters() { files := in.Interface().([]*pb.File) sort.Slice(files, func(i, j int) bool { + if files[i].Stmts.Analyze.Risk == nil && files[j].Stmts.Analyze.Risk == nil { + return false + } + if files[i].Stmts.Analyze.Risk == nil { return false } diff --git a/src/Report/JsonReportGenerator.go b/src/Report/JsonReportGenerator.go index a28de11..2e0e9db 100644 --- a/src/Report/JsonReportGenerator.go +++ b/src/Report/JsonReportGenerator.go @@ -143,36 +143,36 @@ func (j 
*JsonReportGenerator) buildReport(projectAggregated Analyzer.ProjectAggr r.NbClasses = combined.NbClasses r.NbClassesWithCode = combined.NbClassesWithCode r.NbMethods = combined.NbMethods - r.Loc = combined.Loc - r.Cloc = combined.Cloc - r.Lloc = combined.Lloc - r.AverageMethodsPerClass = combined.AverageMethodsPerClass - r.AverageLocPerMethod = combined.AverageLocPerMethod - r.AverageLlocPerMethod = combined.AverageLlocPerMethod - r.AverageClocPerMethod = combined.AverageClocPerMethod - r.AverageCyclomaticComplexityPerMethod = combined.AverageCyclomaticComplexityPerMethod - r.AverageCyclomaticComplexityPerClass = combined.AverageCyclomaticComplexityPerClass - r.MinCyclomaticComplexity = combined.MinCyclomaticComplexity - r.MaxCyclomaticComplexity = combined.MaxCyclomaticComplexity - r.AverageHalsteadDifficulty = combined.AverageHalsteadDifficulty - r.AverageHalsteadEffort = combined.AverageHalsteadEffort - r.AverageHalsteadVolume = combined.AverageHalsteadVolume - r.AverageHalsteadTime = combined.AverageHalsteadTime - r.AverageHalsteadBugs = combined.AverageHalsteadBugs - r.SumHalsteadDifficulty = combined.SumHalsteadDifficulty - r.SumHalsteadEffort = combined.SumHalsteadEffort - r.SumHalsteadVolume = combined.SumHalsteadVolume - r.SumHalsteadTime = combined.SumHalsteadTime - r.SumHalsteadBugs = combined.SumHalsteadBugs - r.AverageMI = combined.AverageMI - r.AverageMIwoc = combined.AverageMIwoc - r.AverageMIcw = combined.AverageMIcw - r.AverageMIPerMethod = combined.AverageMIPerMethod - r.AverageMIwocPerMethod = combined.AverageMIwocPerMethod - r.AverageMIcwPerMethod = combined.AverageMIcwPerMethod - r.AverageAfferentCoupling = combined.AverageAfferentCoupling - r.AverageEfferentCoupling = combined.AverageEfferentCoupling - r.AverageInstability = combined.AverageInstability + r.Loc = int(combined.Loc.Sum) + r.Cloc = int(combined.Cloc.Sum) + r.Lloc = int(combined.Lloc.Sum) + r.AverageMethodsPerClass = combined.MethodsPerClass.Avg + r.AverageLocPerMethod = combined.LocPerMethod.Avg + r.AverageLlocPerMethod = combined.LlocPerMethod.Avg + r.AverageClocPerMethod = combined.ClocPerMethod.Avg + r.AverageCyclomaticComplexityPerMethod = combined.CyclomaticComplexityPerMethod.Avg + r.AverageCyclomaticComplexityPerClass = combined.CyclomaticComplexityPerClass.Avg + r.MinCyclomaticComplexity = int(combined.CyclomaticComplexityPerMethod.Min) + r.MaxCyclomaticComplexity = int(combined.CyclomaticComplexityPerMethod.Max) + r.AverageHalsteadDifficulty = combined.HalsteadDifficulty.Avg + r.AverageHalsteadEffort = combined.HalsteadEffort.Avg + r.AverageHalsteadVolume = combined.HalsteadVolume.Avg + r.AverageHalsteadTime = combined.HalsteadTime.Avg + r.AverageHalsteadBugs = combined.HalsteadBugs.Avg + r.SumHalsteadDifficulty = combined.HalsteadDifficulty.Sum + r.SumHalsteadEffort = combined.HalsteadEffort.Sum + r.SumHalsteadVolume = combined.HalsteadVolume.Sum + r.SumHalsteadTime = combined.HalsteadTime.Sum + r.SumHalsteadBugs = combined.HalsteadBugs.Sum + r.AverageMI = combined.MaintainabilityIndex.Avg + r.AverageMIwoc = combined.MaintainabilityIndexWithoutComments.Avg + r.AverageMIcw = combined.MaintainabilityCommentWeight.Avg + r.AverageMIPerMethod = combined.MaintainabilityPerMethod.Avg + r.AverageMIwocPerMethod = combined.MaintainabilityCommentWeightPerMethod.Avg + r.AverageMIcwPerMethod = combined.MaintainabilityCommentWeightPerMethod.Avg + r.AverageAfferentCoupling = combined.AfferentCoupling.Avg + r.AverageEfferentCoupling = combined.EfferentCoupling.Avg + r.AverageInstability = 
combined.Instability.Avg r.CommitCountForPeriod = combined.CommitCountForPeriod r.CommittedFilesCountForPeriod = combined.CommittedFilesCountForPeriod r.BusFactor = combined.BusFactor diff --git a/src/Report/templates/html/index.html b/src/Report/templates/html/index.html index 1e4ec89..fef2da1 100644 --- a/src/Report/templates/html/index.html +++ b/src/Report/templates/html/index.html @@ -78,7 +78,7 @@
- {{ currentView.Loc|stringifyNumber }} + {{ currentView.Loc.Sum|stringifyNumber }}

 {% if currentView.Comparaison %}
@@ -102,7 +102,7 @@
- {{ currentView.AverageCyclomaticComplexityPerMethod | floatformat:2 }}
+ {{ currentView.CyclomaticComplexityPerMethod.Avg | floatformat:2 }}
 {% if currentView.Comparaison %}
@@ -128,12 +128,12 @@
 {% set color="red" %}
- {% if currentView.AverageMI > 84 %}
+ {% if currentView.MaintainabilityIndex.Avg > 84 %}
 {% set color="green" %}
- {% elif currentView.AverageMI > 64 %}
+ {% elif currentView.MaintainabilityIndex.Avg > 64 %}
 {% set color="yellow" %}
 {% endif %}
- {{ currentView.AverageMI | floatformat:0 }}
+ {{ currentView.MaintainabilityIndex.Avg | floatformat:0 }}
 {% if currentView.Comparaison %}
@@ -158,7 +158,7 @@
- {{ currentView.AverageLocPerMethod | floatformat:0 }}
+ {{ currentView.LocPerMethod.Avg | floatformat:0 }}
 {% if currentView.Comparaison %}
@@ -354,7 +354,7 @@
- {{ currentView.AverageInstability | floatformat:2 }}
+ {{ currentView.Instability.Avg | floatformat:2 }}
Average From 9c16c01914e2a2f59aff777e093c2282f3485bf0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20L=C3=A9pine?= Date: Sat, 30 Nov 2024 06:50:09 +0100 Subject: [PATCH 10/16] fixes reference issue --- src/Analyzer/Aggregator.go | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/src/Analyzer/Aggregator.go b/src/Analyzer/Aggregator.go index 97e0ce4..a2ee81e 100644 --- a/src/Analyzer/Aggregator.go +++ b/src/Analyzer/Aggregator.go @@ -1,6 +1,7 @@ package Analyzer import ( + "fmt" "math" "regexp" "runtime" @@ -233,10 +234,6 @@ func (r *Aggregator) executeAggregationOnFiles(files []*pb.File) ProjectAggregat chunks[i] = files[start:end] } - // Prepare results - aggregateByFileChunk := newAggregated() - aggregateByClassChunk := newAggregated() - // for each programming language, we create a separeted result aggregateByLanguageChunk := make(map[string]Aggregated) for _, file := range files { @@ -253,6 +250,7 @@ func (r *Aggregator) executeAggregationOnFiles(files []*pb.File) ProjectAggregat resultsByFile := make(chan *Aggregated, numberOfProcessors) resultsByProgrammingLanguage := make(chan *map[string]Aggregated, numberOfProcessors) + fmt.Println("Chunks:", len(chunks)) // Deadlock prevention mu := sync.Mutex{} @@ -267,6 +265,14 @@ func (r *Aggregator) executeAggregationOnFiles(files []*pb.File) ProjectAggregat go func(files []*pb.File) { defer wg.Done() + if len(files) == 0 { + return + } + + // Prepare results + aggregateByFileChunk := newAggregated() + aggregateByClassChunk := newAggregated() + // the process deal with its own chunk for _, file := range files { localFile := file From dbdacead45e2b9230e5fa1cfbcdb0545811ea5fe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20L=C3=A9pine?= Date: Sat, 30 Nov 2024 07:43:04 +0100 Subject: [PATCH 11/16] fixed tests --- src/Analyzer/Aggregator.go | 121 ++++++++++++++++--- src/Analyzer/Aggregator_test.go | 204 +++++++++----------------------- src/Analyzer/AstAnalyzer.go | 11 +- 3 files changed, 165 insertions(+), 171 deletions(-) diff --git a/src/Analyzer/Aggregator.go b/src/Analyzer/Aggregator.go index a2ee81e..bcbbc6d 100644 --- a/src/Analyzer/Aggregator.go +++ b/src/Analyzer/Aggregator.go @@ -1,7 +1,6 @@ package Analyzer import ( - "fmt" "math" "regexp" "runtime" @@ -43,6 +42,7 @@ func NewAggregateResult() AggregateResult { type Aggregated struct { ProgrammingLanguages map[string]int ConcernedFiles []*pb.File + ErroredFiles []*pb.File Comparaison *Comparaison // hashmap of classes, just with the qualified name, used for afferent coupling calculation ClassesAfferentCoupling map[string]int @@ -130,6 +130,7 @@ func newAggregated() Aggregated { ProgrammingLanguages: make(map[string]int), ConcernedFiles: make([]*pb.File, 0), ClassesAfferentCoupling: make(map[string]int), + ErroredFiles: make([]*pb.File, 0), NbClasses: 0, NbClassesWithCode: 0, NbMethods: 0, @@ -250,7 +251,6 @@ func (r *Aggregator) executeAggregationOnFiles(files []*pb.File) ProjectAggregat resultsByFile := make(chan *Aggregated, numberOfProcessors) resultsByProgrammingLanguage := make(chan *map[string]Aggregated, numberOfProcessors) - fmt.Println("Chunks:", len(chunks)) // Deadlock prevention mu := sync.Mutex{} @@ -277,10 +277,6 @@ func (r *Aggregator) executeAggregationOnFiles(files []*pb.File) ProjectAggregat for _, file := range files { localFile := file - if file.Stmts == nil { - continue - } - // by file result := r.mapSums(localFile, aggregateByFileChunk) result.ConcernedFiles = append(result.ConcernedFiles, localFile) @@ 
-365,6 +361,7 @@ func (r *Aggregator) executeAggregationOnFiles(files []*pb.File) ProjectAggregat // For all languages projectAggregated.Combined = projectAggregated.ByFile + projectAggregated.ErroredFiles = projectAggregated.ByFile.ErroredFiles return projectAggregated } @@ -381,21 +378,40 @@ func (r *Aggregator) WithComparaison(allResultsCloned []*pb.File, comparedBranch } func (r *Aggregator) mapSums(file *pb.File, specificAggregation Aggregated) Aggregated { - classes := Engine.GetClassesInFile(file) - functions := Engine.GetFunctionsInFile(file) - // copy the specific aggregation to new object to avoid side effects result := specificAggregation + result.NbFiles++ + + // deal with errors + if len(file.Errors) > 0 { + result.ErroredFiles = append(result.ErroredFiles, file) + return result + } + + if file.Stmts == nil { + return result + } + + classes := Engine.GetClassesInFile(file) + functions := Engine.GetFunctionsInFile(file) // Number of classes result.NbClasses += len(classes) // Ensure LOC is set - if file.LinesOfCode == nil && file.Stmts.Analyze.Volume != nil { - file.LinesOfCode = &pb.LinesOfCode{ - LinesOfCode: *file.Stmts.Analyze.Volume.Loc, - CommentLinesOfCode: *file.Stmts.Analyze.Volume.Cloc, - LogicalLinesOfCode: *file.Stmts.Analyze.Volume.Lloc, + if file.LinesOfCode == nil { + if file.Stmts != nil && file.Stmts.Analyze != nil && file.Stmts.Analyze.Volume != nil { + file.LinesOfCode = &pb.LinesOfCode{ + LinesOfCode: *file.Stmts.Analyze.Volume.Loc, + CommentLinesOfCode: *file.Stmts.Analyze.Volume.Cloc, + LogicalLinesOfCode: *file.Stmts.Analyze.Volume.Lloc, + } + } else { + file.LinesOfCode = &pb.LinesOfCode{ + LinesOfCode: 0, + CommentLinesOfCode: 0, + LogicalLinesOfCode: 0, + } } } @@ -445,6 +461,66 @@ func (r *Aggregator) mapSums(file *pb.File, specificAggregation Aggregated) Aggr result.MaintainabilityIndex.Max = *function.Stmts.Analyze.Maintainability.MaintainabilityIndex } } + + // Maintainability index without comments + if function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments != nil && !math.IsNaN(float64(*function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments)) { + result.MaintainabilityIndexWithoutComments.Sum += *function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments + result.MaintainabilityIndexWithoutComments.Counter++ + if specificAggregation.MaintainabilityIndexWithoutComments.Min == 0 || *function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments < specificAggregation.MaintainabilityIndexWithoutComments.Min { + result.MaintainabilityIndexWithoutComments.Min = *function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments + } + if specificAggregation.MaintainabilityIndexWithoutComments.Max == 0 || *function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments > specificAggregation.MaintainabilityIndexWithoutComments.Max { + result.MaintainabilityIndexWithoutComments.Max = *function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments + } + } + + // Comment weight + if function.Stmts.Analyze.Maintainability.CommentWeight != nil && !math.IsNaN(float64(*function.Stmts.Analyze.Maintainability.CommentWeight)) { + result.MaintainabilityCommentWeight.Sum += *function.Stmts.Analyze.Maintainability.CommentWeight + result.MaintainabilityCommentWeight.Counter++ + if specificAggregation.MaintainabilityCommentWeight.Min == 0 || *function.Stmts.Analyze.Maintainability.CommentWeight < specificAggregation.MaintainabilityCommentWeight.Min { + 
result.MaintainabilityCommentWeight.Min = *function.Stmts.Analyze.Maintainability.CommentWeight + } + if specificAggregation.MaintainabilityCommentWeight.Max == 0 || *function.Stmts.Analyze.Maintainability.CommentWeight > specificAggregation.MaintainabilityCommentWeight.Max { + result.MaintainabilityCommentWeight.Max = *function.Stmts.Analyze.Maintainability.CommentWeight + } + } + + // Maintainability index per method + if function.Stmts.Analyze.Maintainability.MaintainabilityIndex != nil && !math.IsNaN(float64(*function.Stmts.Analyze.Maintainability.MaintainabilityIndex)) { + result.MaintainabilityPerMethod.Sum += *function.Stmts.Analyze.Maintainability.MaintainabilityIndex + result.MaintainabilityPerMethod.Counter++ + if specificAggregation.MaintainabilityPerMethod.Min == 0 || *function.Stmts.Analyze.Maintainability.MaintainabilityIndex < specificAggregation.MaintainabilityPerMethod.Min { + result.MaintainabilityPerMethod.Min = *function.Stmts.Analyze.Maintainability.MaintainabilityIndex + } + if specificAggregation.MaintainabilityPerMethod.Max == 0 || *function.Stmts.Analyze.Maintainability.MaintainabilityIndex > specificAggregation.MaintainabilityPerMethod.Max { + result.MaintainabilityPerMethod.Max = *function.Stmts.Analyze.Maintainability.MaintainabilityIndex + } + } + + // Maintainability index per method without comments + if function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments != nil && !math.IsNaN(float64(*function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments)) { + result.MaintainabilityPerMethodWithoutComments.Sum += *function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments + result.MaintainabilityPerMethodWithoutComments.Counter++ + if specificAggregation.MaintainabilityPerMethodWithoutComments.Min == 0 || *function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments < specificAggregation.MaintainabilityPerMethodWithoutComments.Min { + result.MaintainabilityPerMethodWithoutComments.Min = *function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments + } + if specificAggregation.MaintainabilityPerMethodWithoutComments.Max == 0 || *function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments > specificAggregation.MaintainabilityPerMethodWithoutComments.Max { + result.MaintainabilityPerMethodWithoutComments.Max = *function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments + } + } + + // Comment weight per method + if function.Stmts.Analyze.Maintainability.CommentWeight != nil && !math.IsNaN(float64(*function.Stmts.Analyze.Maintainability.CommentWeight)) { + result.MaintainabilityCommentWeightPerMethod.Sum += *function.Stmts.Analyze.Maintainability.CommentWeight + result.MaintainabilityCommentWeightPerMethod.Counter++ + if specificAggregation.MaintainabilityCommentWeightPerMethod.Min == 0 || *function.Stmts.Analyze.Maintainability.CommentWeight < specificAggregation.MaintainabilityCommentWeightPerMethod.Min { + result.MaintainabilityCommentWeightPerMethod.Min = *function.Stmts.Analyze.Maintainability.CommentWeight + } + if specificAggregation.MaintainabilityCommentWeightPerMethod.Max == 0 || *function.Stmts.Analyze.Maintainability.CommentWeight > specificAggregation.MaintainabilityCommentWeightPerMethod.Max { + result.MaintainabilityCommentWeightPerMethod.Max = *function.Stmts.Analyze.Maintainability.CommentWeight + } + } } // average lines of code per method if function.Stmts.Analyze != nil && function.Stmts.Analyze.Volume != nil { @@ -643,6 +719,8 @@ func (r 
*Aggregator) mergeChunks(aggregated Aggregated, chunk *Aggregated) Aggre result.PackageRelations[k] = v } + result.ErroredFiles = append(result.ErroredFiles, chunk.ErroredFiles...) + return result } @@ -664,6 +742,12 @@ func (r *Aggregator) reduceMetrics(aggregated Aggregated) Aggregated { if result.LocPerClass.Counter > 0 { result.LocPerClass.Avg = result.LocPerClass.Sum / float64(result.LocPerClass.Counter) } + if result.ClocPerMethod.Counter > 0 { + result.ClocPerMethod.Avg = result.ClocPerMethod.Sum / float64(result.ClocPerMethod.Counter) + } + if result.LlocPerMethod.Counter > 0 { + result.LlocPerMethod.Avg = result.LlocPerMethod.Sum / float64(result.LlocPerMethod.Counter) + } if result.LocPerMethod.Counter > 0 { result.LocPerMethod.Avg = result.LocPerMethod.Sum / float64(result.LocPerMethod.Counter) } @@ -694,6 +778,15 @@ func (r *Aggregator) reduceMetrics(aggregated Aggregated) Aggregated { if result.MaintainabilityCommentWeight.Counter > 0 { result.MaintainabilityCommentWeight.Avg = result.MaintainabilityCommentWeight.Sum / float64(result.MaintainabilityCommentWeight.Counter) } + if result.MaintainabilityPerMethod.Counter > 0 { + result.MaintainabilityPerMethod.Avg = result.MaintainabilityPerMethod.Sum / float64(result.MaintainabilityPerMethod.Counter) + } + if result.MaintainabilityPerMethodWithoutComments.Counter > 0 { + result.MaintainabilityPerMethodWithoutComments.Avg = result.MaintainabilityPerMethodWithoutComments.Sum / float64(result.MaintainabilityPerMethodWithoutComments.Counter) + } + if result.MaintainabilityCommentWeightPerMethod.Counter > 0 { + result.MaintainabilityCommentWeightPerMethod.Avg = result.MaintainabilityCommentWeightPerMethod.Sum / float64(result.MaintainabilityCommentWeightPerMethod.Counter) + } // afferent coupling if result.Instability.Counter > 0 { diff --git a/src/Analyzer/Aggregator_test.go b/src/Analyzer/Aggregator_test.go index 9926485..b4ca2c5 100644 --- a/src/Analyzer/Aggregator_test.go +++ b/src/Analyzer/Aggregator_test.go @@ -12,71 +12,36 @@ func TestConsolidate(t *testing.T) { aggregator := Aggregator{} aggregated := Aggregated{ - NbMethods: 10, + MethodsPerClass: AggregateResult{Sum: 10, Counter: 5}, NbClasses: 5, NbClassesWithCode: 5, - AverageCyclomaticComplexityPerClass: 20, - AverageHalsteadDifficulty: 30, - AverageHalsteadEffort: 40, - AverageHalsteadVolume: 50, - AverageHalsteadTime: 60, - AverageLocPerMethod: 70, - AverageClocPerMethod: 80, - AverageLlocPerMethod: 90, - AverageMI: 100, - AverageMIwoc: 110, - AverageMIcw: 120, + CyclomaticComplexityPerClass: AggregateResult{Sum: 20, Counter: 5}, + HalsteadDifficulty: AggregateResult{Sum: 30, Counter: 5}, + HalsteadEffort: AggregateResult{Sum: 40, Counter: 5}, + HalsteadVolume: AggregateResult{Sum: 50, Counter: 5}, + HalsteadTime: AggregateResult{Sum: 60, Counter: 5}, + LocPerMethod: AggregateResult{Sum: 70, Counter: 10}, + ClocPerMethod: AggregateResult{Sum: 80, Counter: 10}, + LlocPerMethod: AggregateResult{Sum: 90, Counter: 10}, + MaintainabilityIndex: AggregateResult{Sum: 100, Counter: 5}, + MaintainabilityIndexWithoutComments: AggregateResult{Sum: 110, Counter: 5}, } - aggregator.consolidate(&aggregated) + aggregated = aggregator.reduceMetrics(aggregated) + + assert.Equal(t, float64(2), aggregated.MethodsPerClass.Avg, "Should have 2 methods per class") + assert.Equal(t, float64(10), aggregated.MethodsPerClass.Sum, "Should have 10 methods per class sum") + assert.Equal(t, float64(4), aggregated.CyclomaticComplexityPerClass.Avg, "Should have 4 cyclomatic complexity per class") + 
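The mergeChunks and reduceMetrics hunks above follow a map/merge/reduce shape: each observation is folded into a Sum/Counter pair with a running Min and Max, partial results from different chunks are merged by adding sums and counters, and averages are only derived at the end. Below is a minimal, self-contained sketch of that pattern; the type and helper names are illustrative stand-ins, not the project's real API.

package main

import "fmt"

// aggregateResult mirrors the Sum/Min/Max/Avg/Counter shape used in the patch.
// It is a stand-in for illustration, not the project's AggregateResult type.
type aggregateResult struct {
	Sum, Min, Max, Avg float64
	Counter            int
}

// add folds one observation into the partial result, tracking min and max.
func (a *aggregateResult) add(v float64) {
	if a.Counter == 0 || v < a.Min {
		a.Min = v
	}
	if a.Counter == 0 || v > a.Max {
		a.Max = v
	}
	a.Sum += v
	a.Counter++
}

// merge combines two partial results produced by different chunks.
func merge(x, y aggregateResult) aggregateResult {
	out := aggregateResult{Sum: x.Sum + y.Sum, Counter: x.Counter + y.Counter, Min: x.Min, Max: x.Max}
	if y.Counter > 0 && (x.Counter == 0 || y.Min < out.Min) {
		out.Min = y.Min
	}
	if y.Counter > 0 && y.Max > out.Max {
		out.Max = y.Max
	}
	return out
}

// reduce turns sums into averages, guarding the empty counter case.
func reduce(a aggregateResult) aggregateResult {
	if a.Counter > 0 {
		a.Avg = a.Sum / float64(a.Counter)
	}
	return a
}

func main() {
	var chunk1, chunk2 aggregateResult
	chunk1.add(70)
	chunk1.add(30)
	chunk2.add(20)

	total := reduce(merge(chunk1, chunk2))
	fmt.Printf("avg=%.1f min=%.0f max=%.0f n=%d\n", total.Avg, total.Min, total.Max, total.Counter)
	// Output: avg=40.0 min=20 max=70 n=3
}

Computing averages only in the reduce step keeps the map and merge steps order independent, which is what allows the chunks to be processed by concurrent goroutines.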
assert.Equal(t, float64(6), aggregated.HalsteadDifficulty.Avg, "Should have 6 halstead difficulty") + assert.Equal(t, float64(8), aggregated.HalsteadEffort.Avg, "Should have 8 halstead effort") + assert.Equal(t, float64(10), aggregated.HalsteadVolume.Avg, "Should have 10 halstead volume") + assert.Equal(t, float64(12), aggregated.HalsteadTime.Avg, "Should have 12 halstead time") + assert.Equal(t, float64(7), aggregated.LocPerMethod.Avg, "Should have 7 loc per method") + assert.Equal(t, float64(8), aggregated.ClocPerMethod.Avg, "Should have 8 cloc per method") + assert.Equal(t, float64(9), aggregated.LlocPerMethod.Avg, "Should have 9 lloc per method") + assert.Equal(t, float64(20), aggregated.MaintainabilityIndex.Avg, "Should have 20 maintainability index") + assert.Equal(t, float64(22), aggregated.MaintainabilityIndexWithoutComments.Avg, "Should have 22 maintainability index without comments") - if aggregated.AverageMethodsPerClass != 2 { - t.Errorf("Expected 2, got %f", aggregated.AverageMethodsPerClass) - } - - if aggregated.AverageCyclomaticComplexityPerClass != 4 { - t.Errorf("Expected 4, got %f", aggregated.AverageCyclomaticComplexityPerClass) - } - - if aggregated.AverageHalsteadDifficulty != 6 { - t.Errorf("Expected 6, got %f", aggregated.AverageHalsteadDifficulty) - } - - if aggregated.AverageHalsteadEffort != 8 { - t.Errorf("Expected 8, got %f", aggregated.AverageHalsteadEffort) - } - - if aggregated.AverageHalsteadVolume != 10 { - t.Errorf("Expected 10, got %f", aggregated.AverageHalsteadVolume) - } - - if aggregated.AverageHalsteadTime != 12 { - t.Errorf("Expected 12, got %f", aggregated.AverageHalsteadTime) - } - - if aggregated.AverageLocPerMethod != 7 { - t.Errorf("Expected 7, got %f", aggregated.AverageLocPerMethod) - } - - if aggregated.AverageClocPerMethod != 8 { - t.Errorf("Expected 8, got %f", aggregated.AverageClocPerMethod) - } - - if aggregated.AverageLlocPerMethod != 9 { - t.Errorf("Expected 9, got %f", aggregated.AverageLlocPerMethod) - } - - if aggregated.AverageMI != 20 { - t.Errorf("Expected 20, got %f", aggregated.AverageMI) - } - - if aggregated.AverageMIwoc != 22 { - t.Errorf("Expected 22, got %f", aggregated.AverageMIwoc) - } - - if aggregated.AverageMIcw != 24 { - t.Errorf("Expected 24, got %f", aggregated.AverageMIcw) - } } func TestCalculate(t *testing.T) { @@ -120,35 +85,18 @@ func TestCalculate(t *testing.T) { Path: "test.foo", } aggregated := Aggregated{} - aggregator.calculateSums(&file, &aggregated) + aggregated = aggregator.mapSums(&file, aggregated) aggregated.ConcernedFiles = []*pb.File{ &file, } - aggregator.consolidate(&aggregated) - - if aggregated.NbMethods != 2 { - t.Errorf("Expected 2, got %d", aggregated.NbMethods) - } - - if aggregated.NbClasses != 3 { - t.Errorf("Expected 3 classes, got %d", aggregated.NbClasses) - } - - if aggregated.AverageCyclomaticComplexityPerMethod != 15 { - t.Errorf("Expected AverageCyclomaticComplexityPerMethod, got %f", aggregated.AverageCyclomaticComplexityPerMethod) - } - - if aggregated.Loc != 100 { - t.Errorf("Expected 100, got %d", aggregated.Loc) - } - - if aggregated.Cloc != 200 { - t.Errorf("Expected 200, got %d", aggregated.Cloc) - } - - if aggregated.Lloc != 300 { - t.Errorf("Expected 300, got %d", aggregated.Lloc) - } + aggregated = aggregator.reduceMetrics(aggregated) + + assert.Equal(t, 2, aggregated.NbMethods, "Should have 2 methods") + assert.Equal(t, 3, aggregated.NbClasses, "Should have 3 classes") + assert.Equal(t, float64(15), aggregated.CyclomaticComplexityPerMethod.Avg, "Should have 15 average 
cyclomatic complexity per method") + assert.Equal(t, float64(100), aggregated.Loc.Avg, "Should have 100 loc") + assert.Equal(t, float64(200), aggregated.Cloc.Avg, "Should have 200 cloc") + assert.Equal(t, float64(300), aggregated.Lloc.Avg, "Should have 300 lloc") }) } @@ -404,70 +352,37 @@ func TestAggregates(t *testing.T) { // Call the Aggregates method projectAggregated := aggregator.Aggregates() + result := projectAggregated.Combined // Check that the returned ProjectAggregated struct has the expected values - if projectAggregated.ByFile.NbFiles != 3 { - t.Errorf("Expected 3 files, got %d", projectAggregated.ByFile.NbFiles) - } + assert.Equal(t, 3, result.NbFiles, "Should have 3 files") // Checks on Combined aggregate - if projectAggregated.ByClass.NbClasses != 10 { - t.Errorf("Expected 10 classes, got %d", projectAggregated.ByClass.NbClasses) - } + assert.Equal(t, 10, projectAggregated.ByClass.NbClasses, "Should have 10 classes") - if projectAggregated.Combined.NbClasses != 10 { - t.Errorf("Expected 10 classes, got %d", projectAggregated.ByClass.NbClasses) - } + assert.Equal(t, 5, result.NbMethods, "Should have 5 methods") - if projectAggregated.Combined.NbMethods != 5 { - t.Errorf("Expected 5 methods, got %d", projectAggregated.Combined.NbMethods) - } - - if projectAggregated.Combined.AverageCyclomaticComplexityPerMethod != 30 { - t.Errorf("Expected AverageCyclomaticComplexityPerMethod 30, got %f", projectAggregated.Combined.AverageCyclomaticComplexityPerMethod) - } + assert.Equal(t, float64(30), result.CyclomaticComplexityPerMethod.Avg, "Should have 30 average cyclomatic complexity per method") - if int(projectAggregated.Combined.AverageMI) != 94 { - t.Errorf("Expected MI of 94 for all files, got %v", int(projectAggregated.Combined.AverageMI)) - } + assert.Equal(t, 94, int(result.MaintainabilityIndex.Avg), "Should have 94 average maintainability index") // Check on Go aggregate - if projectAggregated.ByProgrammingLanguage["Go"].NbClasses != 9 { - t.Errorf("Expected 9 classes, got %d", projectAggregated.ByProgrammingLanguage["Go"].NbClasses) - } + assert.Equal(t, 9, projectAggregated.ByProgrammingLanguage["Go"].NbClasses, "Should have 9 classes") - if projectAggregated.ByProgrammingLanguage["Go"].NbMethods != 4 { - t.Errorf("Expected 4 methods in Go, got %d", projectAggregated.ByProgrammingLanguage["Go"].NbMethods) - } + assert.Equal(t, 4, projectAggregated.ByProgrammingLanguage["Go"].NbMethods, "Should have 4 methods in Go") - if projectAggregated.ByProgrammingLanguage["Go"].NbFiles != 2 { - t.Errorf("Expected 2 Go files, got %d", projectAggregated.ByProgrammingLanguage["Go"].NbFiles) - } + assert.Equal(t, 2, projectAggregated.ByProgrammingLanguage["Go"].NbFiles, "Should have 2 Go files") - if int(projectAggregated.ByProgrammingLanguage["Go"].AverageMI) != 91 { - t.Errorf("Expected MI of 91 for Go files, got %v", int(projectAggregated.ByProgrammingLanguage["Go"].AverageMI)) - } + assert.Equal(t, 91, int(projectAggregated.ByProgrammingLanguage["Go"].MaintainabilityIndex.Avg), "Should have 91 average maintainability index for Go files") // Check on Php aggregate - if projectAggregated.ByProgrammingLanguage["Php"].NbClasses != 1 { - t.Errorf("Expected 1 class, got %d", projectAggregated.ByProgrammingLanguage["Php"].NbClasses) - } + assert.Equal(t, 1, projectAggregated.ByProgrammingLanguage["Php"].NbClasses, "Should have 1 class") - if projectAggregated.ByProgrammingLanguage["Php"].NbMethods != 1 { - t.Errorf("Expected 1 methods in PHP, got %d", 
projectAggregated.ByProgrammingLanguage["Php"].NbMethods) - } + assert.Equal(t, 1, projectAggregated.ByProgrammingLanguage["Php"].NbMethods, "Should have 1 methods in PHP") - if projectAggregated.ByProgrammingLanguage["Php"].NbFiles != 1 { - t.Errorf("Expected 1 PHP files, got %d", projectAggregated.ByProgrammingLanguage["Go"].NbFiles) - } + assert.Equal(t, 1, projectAggregated.ByProgrammingLanguage["Php"].NbFiles, "Should have 1 PHP files") - if projectAggregated.ByProgrammingLanguage["Php"].AverageMI != 120 { - t.Errorf("Expected MI of 120 for PHP files, got %f", projectAggregated.ByProgrammingLanguage["Php"].AverageMI) - } - - if int(projectAggregated.ByProgrammingLanguage["Php"].AverageMI) != 120 { - t.Errorf("Expected MI of 120 for PHP files, got %v", int(projectAggregated.ByProgrammingLanguage["Go"].AverageMI)) - } + assert.Equal(t, 120, int(projectAggregated.ByProgrammingLanguage["Php"].MaintainabilityIndex.Avg), "Should have 120 average maintainability index for PHP files") }) } @@ -504,25 +419,12 @@ func TestCalculateMaintainabilityIndex(t *testing.T) { } aggregated := Aggregated{} - aggregator.calculateSums(&file, &aggregated) - aggregator.consolidate(&aggregated) + aggregated = aggregator.mapSums(&file, aggregated) + aggregated = aggregator.reduceMetrics(aggregated) - if aggregated.AverageMI != 22.5 { - t.Errorf("Expected 22.5, got %f", aggregated.AverageMI) - } - - if aggregated.AverageMIwoc != 27.5 { - t.Errorf("Expected 27.5, got %f", aggregated.AverageMIwoc) - } - - if aggregated.AverageMIcw != 32.5 { - t.Errorf("Expected 32.5, got %f", aggregated.AverageMIcw) - } - - // Average per method - if aggregated.AverageMIPerMethod != 22.5 { - t.Errorf("Expected AverageMIPerMethod, got %f", aggregated.AverageMIPerMethod) - } + assert.Equal(t, float64(22.5), aggregated.MaintainabilityIndex.Avg, "Should have 22.5 average maintainability index") + assert.Equal(t, float64(27.5), aggregated.MaintainabilityIndexWithoutComments.Avg, "Should have 27.5 average maintainability index without comments") + assert.Equal(t, float64(22.5), aggregated.MaintainabilityPerMethod.Avg, "Should have 22.5 average maintainability index per method") }) } diff --git a/src/Analyzer/AstAnalyzer.go b/src/Analyzer/AstAnalyzer.go index a634553..5d80372 100644 --- a/src/Analyzer/AstAnalyzer.go +++ b/src/Analyzer/AstAnalyzer.go @@ -5,6 +5,7 @@ import ( "runtime" "strconv" "sync" + "sync/atomic" Complexity "github.com/halleck45/ast-metrics/src/Analyzer/Complexity" Component "github.com/halleck45/ast-metrics/src/Analyzer/Component" @@ -33,10 +34,10 @@ func Start(workdir *Storage.Workdir, progressbar *pterm.SpinnerPrinter) []*pb.Fi // https://stackoverflow.com/questions/58743038/why-does-this-goroutine-not-call-wg-done channelResult := make(chan *pb.File, len(astFiles)) - nbParsingFiles := 0 + var nbParsingFiles atomic.Uint64 + // analyze each AST file running the runAnalysis function numWorkers := runtime.NumCPU() - mu := sync.Mutex{} filesChan := make(chan string, numWorkers) for i := 0; i < numWorkers; i++ { @@ -44,13 +45,11 @@ func Start(workdir *Storage.Workdir, progressbar *pterm.SpinnerPrinter) []*pb.Fi for file := range filesChan { go func(file string) { defer wg.Done() - mu.Lock() - nbParsingFiles++ - mu.Unlock() + nbParsingFiles.Add(1) executeFileAnalysis(file, channelResult) - details := strconv.Itoa(nbParsingFiles) + "/" + strconv.Itoa(len(astFiles)) + details := strconv.Itoa(int(nbParsingFiles.Load())) + "/" + strconv.Itoa(len(astFiles)) if progressbar != nil { progressbar.UpdateText("Analyzing (" + 
details + ")") From 176f491bee02d167838bf1036435295f83e1b44e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20L=C3=A9pine?= Date: Sat, 30 Nov 2024 07:49:54 +0100 Subject: [PATCH 12/16] fixed nil conditions --- src/Analyzer/Aggregator copy.go.php | 996 ---------------------------- src/Analyzer/Aggregator.go | 12 +- src/Report/HtmlReportGenerator.go | 3 + 3 files changed, 14 insertions(+), 997 deletions(-) delete mode 100644 src/Analyzer/Aggregator copy.go.php diff --git a/src/Analyzer/Aggregator copy.go.php b/src/Analyzer/Aggregator copy.go.php deleted file mode 100644 index 77473f6..0000000 --- a/src/Analyzer/Aggregator copy.go.php +++ /dev/null @@ -1,996 +0,0 @@ -package AnalyzerOld - -import ( - "math" - "os" - "regexp" - "runtime" - "sync" - - "github.com/halleck45/ast-metrics/src/Engine" - pb "github.com/halleck45/ast-metrics/src/NodeType" - "github.com/halleck45/ast-metrics/src/Scm" -) - -type ProjectAggregated struct { - ByFile Aggregated - ByClass Aggregated - Combined Aggregated - ByProgrammingLanguage map[string]Aggregated - ErroredFiles []*pb.File - Evaluation *EvaluationResult - Comparaison *ProjectComparaison -} - -type AggregateResult struct { - Sum float64 - Min float64 - Max float64 - Avg float64 - Counter int -} - -func NewAggregateResult() AggregateResult { - return AggregateResult{ - Sum: 0, - Min: 0, - Max: 0, - Avg: 0, - Counter: 0, - } -} - -type Aggregated struct { - ProgrammingLanguages map[string]int - ConcernedFiles []*pb.File - Comparaison *Comparaison - // hashmap of classes, just with the qualified name, used for afferent coupling calculation - ClassesAfferentCoupling map[string]int - NbFiles int - NbFunctions int - NbClasses int - NbClassesWithCode int - NbMethods int - Loc AggregateResult - Cloc AggregateResult - Lloc AggregateResult - MethodsPerClass AggregateResult - LocPerClass AggregateResult - LocPerMethod AggregateResult - ClocPerMethod AggregateResult - CyclomaticComplexityPerMethod AggregateResult - CyclomaticComplexityPerClass AggregateResult - HalsteadEffort AggregateResult - HalsteadVolume AggregateResult - HalsteadTime AggregateResult - HalsteadBugs AggregateResult - MaintainabilityIndex AggregateResult - MaintainabilityIndexWithoutComments AggregateResult - MaintainabilityCommentWeight AggregateResult - Instability AggregateResult - EfferentCoupling AggregateResult - AfferentCoupling AggregateResult - MaintainabilityPerMethod AggregateResult - MaintainabilityPerMethodWithoutComments AggregateResult - MaintainabilityCommentWeightPerMethod AggregateResult - CommitCountForPeriod int - CommittedFilesCountForPeriod int - BusFactor int - TopCommitters []TopCommitter - ResultOfGitAnalysis []ResultOfGitAnalysis - PackageRelations map[string]map[string]int // counter of dependencies. 
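The AstAnalyzer hunk earlier in this patch replaces a mutex-guarded progress counter with sync/atomic. A stand-alone sketch of that counter pattern follows; the workload size and messages are invented for illustration.

package main

import (
	"fmt"
	"sync"
	"sync/atomic"
)

func main() {
	const totalFiles = 100 // hypothetical number of AST files to parse

	var parsed atomic.Uint64 // replaces the mutex-guarded "n++" pattern
	var wg sync.WaitGroup

	for i := 0; i < totalFiles; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			done := parsed.Add(1) // atomic increment, returns the new value
			if done == totalFiles {
				fmt.Printf("progress: %d/%d files analyzed\n", done, totalFiles)
			}
		}()
	}

	wg.Wait()
	fmt.Println("final:", parsed.Load(), "/", totalFiles)
}

Reading the value returned by Add gives each goroutine the count it just produced, without needing a separate Load.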
Ex: A -> B -> 2 -} - -type ProjectComparaison struct { - ByFile Comparaison - ByClass Comparaison - Combined Comparaison - ByProgrammingLanguage map[string]Comparaison -} - -type Aggregator struct { - files []*pb.File - projectAggregated ProjectAggregated - analyzers []AggregateAnalyzer - gitSummaries []ResultOfGitAnalysis - ComparedFiles []*pb.File - ComparedBranch string -} - -type TopCommitter struct { - Name string - Count int -} - -type ResultOfGitAnalysis struct { - ProgrammingLanguage string - ReportRootDir string - CountCommits int - CountCommiters int - CountCommitsForLanguage int - CountCommitsIgnored int - GitRepository Scm.GitRepository -} - -func NewAggregator(files []*pb.File, gitSummaries []ResultOfGitAnalysis) *Aggregator { - return &Aggregator{ - files: files, - gitSummaries: gitSummaries, - } -} - -type AggregateAnalyzer interface { - Calculate(aggregate *Aggregated) -} - -func newAggregated() Aggregated { - return Aggregated{ - ProgrammingLanguages: make(map[string]int), - ConcernedFiles: make([]*pb.File, 0), - ClassesAfferentCoupling: make(map[string]int), - NbClasses: 0, - NbClassesWithCode: 0, - NbMethods: 0, - NbFunctions: 0, - Loc: NewAggregateResult(), - MethodsPerClass: NewAggregateResult(), - LocPerClass: NewAggregateResult(), - LocPerMethod: NewAggregateResult(), - ClocPerMethod: NewAggregateResult(), - CyclomaticComplexityPerMethod: NewAggregateResult(), - CyclomaticComplexityPerClass: NewAggregateResult(), - HalsteadEffort: NewAggregateResult(), - HalsteadVolume: NewAggregateResult(), - HalsteadTime: NewAggregateResult(), - HalsteadBugs: NewAggregateResult(), - MaintainabilityIndex: NewAggregateResult(), - MaintainabilityIndexWithoutComments: NewAggregateResult(), - MaintainabilityCommentWeight: NewAggregateResult(), - Instability: NewAggregateResult(), - EfferentCoupling: NewAggregateResult(), - AfferentCoupling: NewAggregateResult(), - MaintainabilityPerMethod: NewAggregateResult(), - MaintainabilityPerMethodWithoutComments: NewAggregateResult(), - MaintainabilityCommentWeightPerMethod: NewAggregateResult(), - CommitCountForPeriod: 0, - CommittedFilesCountForPeriod: 0, - BusFactor: 0, - TopCommitters: make([]TopCommitter, 0), - ResultOfGitAnalysis: nil, - PackageRelations: make(map[string]map[string]int), - } -} - -func (r *Aggregator) Aggregates() ProjectAggregated { - - // We create a new aggregated object for each type of aggregation - r.projectAggregated = r.executeAggregationOnFiles(r.files) - - // Do the same for the comparaison files (if needed) - if r.ComparedFiles != nil { - comparaidAggregated := r.executeAggregationOnFiles(r.ComparedFiles) - - // Compare - comparaison := ProjectComparaison{} - comparator := NewComparator(r.ComparedBranch) - comparaison.Combined = comparator.Compare(r.projectAggregated.Combined, comparaidAggregated.Combined) - r.projectAggregated.Combined.Comparaison = &comparaison.Combined - - comparaison.ByClass = comparator.Compare(r.projectAggregated.ByClass, comparaidAggregated.ByClass) - r.projectAggregated.ByClass.Comparaison = &comparaison.ByClass - - comparaison.ByFile = comparator.Compare(r.projectAggregated.ByFile, comparaidAggregated.ByFile) - r.projectAggregated.ByFile.Comparaison = &comparaison.ByFile - - // By language - comparaison.ByProgrammingLanguage = make(map[string]Comparaison) - for lng, byLanguage := range r.projectAggregated.ByProgrammingLanguage { - if _, ok := comparaidAggregated.ByProgrammingLanguage[lng]; !ok { - continue - } - c := comparator.Compare(byLanguage, 
comparaidAggregated.ByProgrammingLanguage[lng]) - comparaison.ByProgrammingLanguage[lng] = c - - // assign to the original object (slow, but otherwise we need to change the whole structure ByProgrammingLanguage map) - // @see https://stackoverflow.com/questions/42605337/cannot-assign-to-struct-field-in-a-map - // Feel free to change this - entry := r.projectAggregated.ByProgrammingLanguage[lng] - entry.Comparaison = &c - r.projectAggregated.ByProgrammingLanguage[lng] = entry - } - r.projectAggregated.Comparaison = &comparaison - } - - return r.projectAggregated -} - -func (r *Aggregator) executeAggregationOnFiles(files []*pb.File) ProjectAggregated { - - // do the sums. Group files by number of processors - var wg sync.WaitGroup - numberOfProcessors := runtime.NumCPU() - - // Split the files into chunks - chunkSize := len(files) / numberOfProcessors - chunks := make([][]*pb.File, numberOfProcessors) - for i := 0; i < numberOfProcessors; i++ { - start := i * chunkSize - end := start + chunkSize - if i == numberOfProcessors-1 { - end = len(files) - } - chunks[i] = files[start:end] - } - - // Prepare results - aggregateByFileChunk := newAggregated() - aggregateByClassChunk := newAggregated() - - // for each programming language, we create a separeted result - aggregateByLanguageChunk := make(map[string]Aggregated) - for _, file := range files { - if file.ProgrammingLanguage == "" { - continue - } - if _, ok := aggregateByLanguageChunk[file.ProgrammingLanguage]; !ok { - aggregateByLanguageChunk[file.ProgrammingLanguage] = newAggregated() - } - } - - // Create channels for the results - resultsByClass := make(chan *Aggregated, numberOfProcessors) - resultsByFile := make(chan *Aggregated, numberOfProcessors) - resultsByProgrammingLanguage := make(chan map[string]Aggregated, numberOfProcessors) - - // Process each chunk of files - chunkIndex := 0 - for i := 0; i < numberOfProcessors; i++ { - - wg.Add(1) - - // Reduce results : we want to get sums, and to count calculated values into a AggregateResult - go func(files []*pb.File) { - defer wg.Done() - - // the process deal with its own chunk - for _, file := range files { - aggregateByFileChunk = r.mapSums(file, aggregateByFileChunk) - aggregateByClassChunk = r.mapSums(file, aggregateByClassChunk) - aggregateByLanguageChunk[file.ProgrammingLanguage] = r.mapSums(file, aggregateByLanguageChunk[file.ProgrammingLanguage]) - } - - // Send the result to the channels - resultsByClass <- aggregateByClassChunk - resultsByFile <- aggregateByFileChunk - resultsByProgrammingLanguage <- aggregateByLanguageChunk - - }(chunks[chunkIndex]) - chunkIndex++ - } - - wg.Wait() - close(resultsByClass) - close(resultsByFile) - close(resultsByProgrammingLanguage) - - // Now we have chunk of sums. We want to reduce its into a single object - aggregatedByClass := newAggregated() - for chunk := range resultsByClass { - aggregatedByClass = r.calculateSums(aggregatedByClass, chunk) - } - - // @todo : en parallèle, et traiter les par fichier, par programming language - - // Now we have sums. 
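The prototype being removed here, like the final aggregator, splits the file list into one chunk per CPU, maps each chunk to partial sums in its own goroutine, and merges the partial results before reducing. The generic sketch below shows that chunk, map and merge flow with invented data and types, independent of the project's structures.

package main

import (
	"fmt"
	"runtime"
	"sync"
)

// partial is a stand-in for a per-chunk aggregate holding sums and counters only.
type partial struct {
	loc, files int
}

// mapChunk computes the partial sums for one chunk of "files".
func mapChunk(chunk []int) partial {
	p := partial{}
	for _, loc := range chunk {
		p.loc += loc
		p.files++
	}
	return p
}

func main() {
	// Fake workload: each file is represented only by its line count.
	files := []int{120, 40, 300, 75, 10, 220, 90, 55}

	workers := runtime.NumCPU()
	if workers > len(files) {
		workers = len(files)
	}
	chunkSize := (len(files) + workers - 1) / workers

	results := make(chan partial, workers) // at most one result per worker
	var wg sync.WaitGroup

	for start := 0; start < len(files); start += chunkSize {
		end := start + chunkSize
		if end > len(files) {
			end = len(files)
		}
		wg.Add(1)
		go func(chunk []int) {
			defer wg.Done()
			results <- mapChunk(chunk) // map step: each goroutine owns its chunk
		}(files[start:end])
	}

	wg.Wait()
	close(results)

	// Merge step: a single goroutine folds the partial results together.
	total := partial{}
	for p := range results {
		total.loc += p.loc
		total.files += p.files
	}

	// Reduce step: averages are derived once, at the end.
	fmt.Printf("files=%d, average loc per file=%.1f\n", total.files, float64(total.loc)/float64(total.files))
}

Merging in a single goroutine after wg.Wait sidesteps concurrent writes to shared state, and the buffered channel is sized so that no worker blocks on send.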
We want to reduce metrics and get the averages - - // en parallèle, on traite le cas la complexité cyclomatique - // group 1: 4, 4, 5 - // group 2: 6, 1, 1 - - // sum = 0 - // for each group - // sum += group - - wg.Wait() - os.Exit(0) - - // We create a new aggregated object for each type of aggregation - // ByFile, ByClass, Combined - projectAggregated := ProjectAggregated{} - projectAggregated.ByFile = newAggregated() - projectAggregated.ByClass = newAggregated() - projectAggregated.Combined = newAggregated() - - // Count files - projectAggregated.ByClass.NbFiles = len(files) - projectAggregated.ByFile.NbFiles = len(files) - projectAggregated.Combined.NbFiles = len(files) - - // Prepare errors - projectAggregated.ErroredFiles = make([]*pb.File, 0) - - for _, file := range files { - - // Files with errors - if file.Errors != nil && len(file.Errors) > 0 { - projectAggregated.ErroredFiles = append(projectAggregated.ErroredFiles, file) - } - - if file.Stmts == nil { - continue - } - - // By language - if projectAggregated.ByProgrammingLanguage == nil { - projectAggregated.ByProgrammingLanguage = make(map[string]Aggregated) - } - if _, ok := projectAggregated.ByProgrammingLanguage[file.ProgrammingLanguage]; !ok { - projectAggregated.ByProgrammingLanguage[file.ProgrammingLanguage] = newAggregated() - - } - byLanguage := projectAggregated.ByProgrammingLanguage[file.ProgrammingLanguage] - byLanguage.NbFiles++ - - // Prepare structure of file, in orger to avoid to checking its type in the future - - // Make calculations: sums of metrics, etc. - var wg sync.WaitGroup - wg.Add(4) - - go func() { - defer wg.Done() - localFile := file - r.calculateSums(localFile, &projectAggregated.ByFile) - }() - - go func() { - defer wg.Done() - localFile := file - r.calculateSums(localFile, &projectAggregated.ByClass) - }() - - go func() { - defer wg.Done() - localFile := file - r.calculateSums(localFile, &projectAggregated.Combined) - }() - - go func() { - defer wg.Done() - localFile := file - r.calculateSums(localFile, &byLanguage) - }() - - wg.Wait() - projectAggregated.ByProgrammingLanguage[file.ProgrammingLanguage] = byLanguage - } - - // Consolidate averages - var wg sync.WaitGroup - wg.Add(3) - - go func() { - defer wg.Done() - r.consolidate(&projectAggregated.ByFile) - }() - - go func() { - defer wg.Done() - r.consolidate(&projectAggregated.ByClass) - }() - - go func() { - defer wg.Done() - r.consolidate(&projectAggregated.Combined) - }() - - // by language - wg.Add(len(projectAggregated.ByProgrammingLanguage)) - for lng, byLanguage := range projectAggregated.ByProgrammingLanguage { - go func(language string, langAggregated Aggregated) { - defer wg.Done() - r.consolidate(&langAggregated) - projectAggregated.ByProgrammingLanguage[language] = langAggregated - }(lng, byLanguage) - } - wg.Wait() - - // Risks - riskAnalyzer := NewRiskAnalyzer() - riskAnalyzer.Analyze(projectAggregated) - - return projectAggregated -} - -// Consolidate the aggregated data -func (r *Aggregator) consolidate(aggregated *Aggregated) { - - if aggregated.NbClasses > 0 { - aggregated.AverageMethodsPerClass = float64(aggregated.NbMethods) / float64(aggregated.NbClasses) - aggregated.AverageCyclomaticComplexityPerClass = aggregated.AverageCyclomaticComplexityPerClass / float64(aggregated.NbClasses) - } else { - aggregated.AverageMethodsPerClass = 0 - aggregated.AverageCyclomaticComplexityPerClass = 0 - } - - if aggregated.AverageMI > 0 { - aggregated.AverageMI = aggregated.AverageMI / float64(aggregated.NbClasses) - 
aggregated.AverageMIwoc = aggregated.AverageMIwoc / float64(aggregated.NbClasses) - aggregated.AverageMIcw = aggregated.AverageMIcw / float64(aggregated.NbClasses) - } - - if aggregated.AverageInstability > 0 { - aggregated.AverageEfferentCoupling = aggregated.AverageEfferentCoupling / float64(aggregated.NbClasses) - aggregated.AverageAfferentCoupling = aggregated.AverageAfferentCoupling / float64(aggregated.NbClasses) - } - - if aggregated.NbMethods > 0 { - aggregated.AverageLocPerMethod = aggregated.AverageLocPerMethod / float64(aggregated.NbMethods) - aggregated.AverageClocPerMethod = aggregated.AverageClocPerMethod / float64(aggregated.NbMethods) - aggregated.AverageLlocPerMethod = aggregated.AverageLlocPerMethod / float64(aggregated.NbMethods) - aggregated.AverageCyclomaticComplexityPerMethod = aggregated.AverageCyclomaticComplexityPerMethod / float64(aggregated.NbMethods) - aggregated.AverageMIPerMethod = aggregated.AverageMIPerMethod / float64(aggregated.NbMethods) - aggregated.AverageMIwocPerMethod = aggregated.AverageMIwocPerMethod / float64(aggregated.NbMethods) - aggregated.AverageMIcwPerMethod = aggregated.AverageMIcwPerMethod / float64(aggregated.NbMethods) - aggregated.AverageHalsteadDifficulty = aggregated.AverageHalsteadDifficulty / float64(aggregated.NbClasses) - aggregated.AverageHalsteadEffort = aggregated.AverageHalsteadEffort / float64(aggregated.NbClasses) - aggregated.AverageHalsteadVolume = aggregated.AverageHalsteadVolume / float64(aggregated.NbClasses) - aggregated.AverageHalsteadTime = aggregated.AverageHalsteadTime / float64(aggregated.NbClasses) - aggregated.AverageHalsteadBugs = aggregated.AverageHalsteadBugs / float64(aggregated.NbClasses) - } - - // if langage without classes - if aggregated.NbClasses == 0 { - aggregated.AverageMI = aggregated.AverageMIPerMethod - aggregated.AverageMIwoc = aggregated.AverageMIwocPerMethod - aggregated.AverageMIcw = aggregated.AverageMIcwPerMethod - aggregated.AverageInstability = 0 - aggregated.AverageEfferentCoupling = 0 - aggregated.AverageAfferentCoupling = 0 - } - - // Total locs: increment loc of each file - aggregated.Loc = 0 - aggregated.Cloc = 0 - aggregated.Lloc = 0 - - reg := regexp.MustCompile("[^A-Za-z0-9.]+") - - for _, file := range aggregated.ConcernedFiles { - - if file.LinesOfCode == nil { - return - } - - aggregated.Loc += int(file.LinesOfCode.LinesOfCode) - aggregated.Cloc += int(file.LinesOfCode.CommentLinesOfCode) - aggregated.Lloc += int(file.LinesOfCode.LogicalLinesOfCode) - - // Create local variables for file processing - localFile := &pb.File{ - Stmts: file.Stmts, - } - - // Calculate alternate MI using average MI per method when file has no class - if len(localFile.Stmts.StmtClass) == 0 { - if localFile.Stmts.Analyze.Maintainability == nil { - localFile.Stmts.Analyze.Maintainability = &pb.Maintainability{} - } - - methods := file.Stmts.StmtFunction - if len(methods) == 0 { - return - } - averageForFile := float64(0) - for _, method := range methods { - if method.Stmts.Analyze == nil || method.Stmts.Analyze.Maintainability == nil { - continue - } - averageForFile += float64(*method.Stmts.Analyze.Maintainability.MaintainabilityIndex) - } - averageForFile = averageForFile / float64(len(methods)) - localFile.Stmts.Analyze.Maintainability.MaintainabilityIndex = &averageForFile - - // Update the original file with processed data - file.Stmts = localFile.Stmts - } - - // LOC of file is the sum of all classes and methods - // That's useful when we navigate over the files instead of the classes - zero := 
int32(0) - loc := int32(0) - lloc := int32(0) - cloc := int32(0) - - if file.Stmts.Analyze.Volume == nil { - file.Stmts.Analyze.Volume = &pb.Volume{ - Lloc: &zero, - Cloc: &zero, - Loc: &zero, - } - } - - classes := Engine.GetClassesInFile(file) - functions := file.Stmts.StmtFunction - - // Initialize file complexity if needed - if file.Stmts.Analyze.Complexity.Cyclomatic == nil { - file.Stmts.Analyze.Complexity.Cyclomatic = &zero - } - - // Process functions - for _, function := range functions { - // Handle LOC - if function.LinesOfCode != nil { - loc += function.LinesOfCode.LinesOfCode - lloc += function.LinesOfCode.LogicalLinesOfCode - cloc += function.LinesOfCode.CommentLinesOfCode - } - - // Handle complexity - if function.Stmts.Analyze != nil && function.Stmts.Analyze.Complexity != nil { - *file.Stmts.Analyze.Complexity.Cyclomatic += *function.Stmts.Analyze.Complexity.Cyclomatic - } - } - - // Process classes - for _, class := range classes { - // Handle LOC - if class.LinesOfCode != nil { - loc += class.LinesOfCode.LinesOfCode - lloc += class.LinesOfCode.LogicalLinesOfCode - cloc += class.LinesOfCode.CommentLinesOfCode - } - - // Handle coupling - if class.Stmts != nil && class.Stmts.Analyze != nil { - if class.Stmts.Analyze.Coupling == nil { - class.Stmts.Analyze.Coupling = &pb.Coupling{ - Efferent: 0, - Afferent: 0, - } - } - class.Stmts.Analyze.Coupling.Afferent = 0 - - if class.Name != nil { - // if in hashmap - if _, ok := aggregated.ClassesAfferentCoupling[class.Name.Qualified]; ok { - class.Stmts.Analyze.Coupling.Afferent = int32(aggregated.ClassesAfferentCoupling[class.Name.Qualified]) - file.Stmts.Analyze.Coupling.Afferent += class.Stmts.Analyze.Coupling.Afferent - } - - // instability - if class.Stmts.Analyze.Coupling.Afferent > 0 || class.Stmts.Analyze.Coupling.Efferent > 0 { - instability := float64(class.Stmts.Analyze.Coupling.Efferent) / float64(class.Stmts.Analyze.Coupling.Efferent+class.Stmts.Analyze.Coupling.Afferent) - class.Stmts.Analyze.Coupling.Instability = instability - aggregated.AverageInstability += instability - } - } - } - } - - file.Stmts.Analyze.Volume.Loc = &loc - file.Stmts.Analyze.Volume.Lloc = &lloc - file.Stmts.Analyze.Volume.Cloc = &cloc - - dependencies := file.Stmts.StmtExternalDependencies - - for _, dependency := range dependencies { - if dependency == nil { - continue - } - - namespaceTo := dependency.Namespace - namespaceFrom := dependency.From - - if namespaceFrom == "" || namespaceTo == "" { - continue - } - - // Keep only 2 levels in namespace - separator := reg.FindString(namespaceFrom) - parts := reg.Split(namespaceTo, -1) - if len(parts) > 2 { - namespaceTo = parts[0] + separator + parts[1] - } - - if namespaceFrom == "" || namespaceTo == "" { - continue - } - - parts = reg.Split(namespaceFrom, -1) - if len(parts) > 2 { - namespaceFrom = parts[0] + separator + parts[1] - } - - // if same, continue - if namespaceFrom == namespaceTo { - continue - } - - // if root namespace, continue - if namespaceFrom == "" || namespaceTo == "" { - continue - } - - // create the map if not exists - if _, ok := aggregated.PackageRelations[namespaceFrom]; !ok { - aggregated.PackageRelations[namespaceFrom] = make(map[string]int) - } - - if _, ok := aggregated.PackageRelations[namespaceFrom][namespaceTo]; !ok { - aggregated.PackageRelations[namespaceFrom][namespaceTo] = 0 - } - - // increment the counter - aggregated.PackageRelations[namespaceFrom][namespaceTo]++ - } - } - - // Consolidate - aggregated.AverageInstability = aggregated.AverageInstability / 
float64(aggregated.NbClasses) - - // Count commits for the period based on `ResultOfGitAnalysis` data - aggregated.ResultOfGitAnalysis = r.gitSummaries - if aggregated.ResultOfGitAnalysis != nil { - for _, result := range aggregated.ResultOfGitAnalysis { - aggregated.CommitCountForPeriod += result.CountCommitsForLanguage - } - } - - // Bus factor and other metrics based on aggregated data - for _, analyzer := range r.analyzers { - analyzer.Calculate(aggregated) - } -} - -// Add an analyzer to the aggregator -// You can add multiple analyzers. See the example of RiskAnalyzer -func (r *Aggregator) WithAggregateAnalyzer(analyzer AggregateAnalyzer) { - r.analyzers = append(r.analyzers, analyzer) -} - -func (r *Aggregator) WithComparaison(allResultsCloned []*pb.File, comparedBranch string) { - r.ComparedFiles = allResultsCloned - r.ComparedBranch = comparedBranch -} - -// Calculate the aggregated data -func (r *Aggregator) calculateSums(file *pb.File, specificAggregation *Aggregated) { - classes := Engine.GetClassesInFile(file) - functions := Engine.GetFunctionsInFile(file) - - if specificAggregation.ConcernedFiles == nil { - specificAggregation.ConcernedFiles = make([]*pb.File, 0) - } - - specificAggregation.ConcernedFiles = append(specificAggregation.ConcernedFiles, file) - - // Number of classes - specificAggregation.NbClasses += len(classes) - - // Prepare the file for analysis - if file.Stmts == nil { - return - } - - if file.LinesOfCode == nil && file.Stmts.Analyze.Volume != nil { - file.LinesOfCode = &pb.LinesOfCode{ - LinesOfCode: *file.Stmts.Analyze.Volume.Loc, - CommentLinesOfCode: *file.Stmts.Analyze.Volume.Cloc, - LogicalLinesOfCode: *file.Stmts.Analyze.Volume.Lloc, - } - } - - // Functions - for _, function := range functions { - - if function == nil || function.Stmts == nil { - continue - } - - specificAggregation.NbMethods++ - - // Average cyclomatic complexity per method - if function.Stmts.Analyze != nil && function.Stmts.Analyze.Complexity != nil { - if function.Stmts.Analyze.Complexity.Cyclomatic != nil { - specificAggregation.AverageCyclomaticComplexityPerMethod += float64(*function.Stmts.Analyze.Complexity.Cyclomatic) - } - } - - // Average maintainability index per method - if function.Stmts.Analyze != nil && function.Stmts.Analyze.Maintainability != nil { - if function.Stmts.Analyze.Maintainability.MaintainabilityIndex != nil && !math.IsNaN(float64(*function.Stmts.Analyze.Maintainability.MaintainabilityIndex)) { - specificAggregation.AverageMIPerMethod += *function.Stmts.Analyze.Maintainability.MaintainabilityIndex - specificAggregation.AverageMIwocPerMethod += *function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments - specificAggregation.AverageMIcwPerMethod += *function.Stmts.Analyze.Maintainability.CommentWeight - } - } - // average lines of code per method - if function.Stmts.Analyze != nil && function.Stmts.Analyze.Volume != nil { - if function.Stmts.Analyze.Volume.Loc != nil { - specificAggregation.AverageLocPerMethod += float64(*function.Stmts.Analyze.Volume.Loc) - } - if function.Stmts.Analyze.Volume.Cloc != nil { - specificAggregation.AverageClocPerMethod += float64(*function.Stmts.Analyze.Volume.Cloc) - } - if function.Stmts.Analyze.Volume.Lloc != nil { - specificAggregation.AverageLlocPerMethod += float64(*function.Stmts.Analyze.Volume.Lloc) - } - } - } - - for _, class := range classes { - - if class == nil || class.Stmts == nil { - continue - } - - // Number of classes with code - //if class.LinesOfCode != nil && 
class.LinesOfCode.LinesOfCode > 0 { - specificAggregation.NbClassesWithCode++ - //} - - // Maintainability Index - if class.Stmts.Analyze.Maintainability != nil { - if class.Stmts.Analyze.Maintainability.MaintainabilityIndex != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Maintainability.MaintainabilityIndex)) { - specificAggregation.AverageMI += *class.Stmts.Analyze.Maintainability.MaintainabilityIndex - specificAggregation.AverageMIwoc += *class.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments - specificAggregation.AverageMIcw += *class.Stmts.Analyze.Maintainability.CommentWeight - } - } - - // Coupling - if class.Stmts.Analyze.Coupling != nil { - specificAggregation.AverageInstability += class.Stmts.Analyze.Coupling.Instability - specificAggregation.AverageEfferentCoupling += float64(class.Stmts.Analyze.Coupling.Efferent) - specificAggregation.AverageAfferentCoupling += float64(class.Stmts.Analyze.Coupling.Afferent) - } - - // cyclomatic complexity per class - if class.Stmts.Analyze.Complexity != nil && class.Stmts.Analyze.Complexity.Cyclomatic != nil { - specificAggregation.AverageCyclomaticComplexityPerClass += float64(*class.Stmts.Analyze.Complexity.Cyclomatic) - if specificAggregation.MinCyclomaticComplexity == 0 || int(*class.Stmts.Analyze.Complexity.Cyclomatic) < specificAggregation.MinCyclomaticComplexity { - specificAggregation.MinCyclomaticComplexity = int(*class.Stmts.Analyze.Complexity.Cyclomatic) - } - if specificAggregation.MaxCyclomaticComplexity == 0 || int(*class.Stmts.Analyze.Complexity.Cyclomatic) > specificAggregation.MaxCyclomaticComplexity { - specificAggregation.MaxCyclomaticComplexity = int(*class.Stmts.Analyze.Complexity.Cyclomatic) - } - } - - // Halstead - if class.Stmts.Analyze.Volume != nil { - if class.Stmts.Analyze.Volume.HalsteadDifficulty != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Volume.HalsteadDifficulty)) { - specificAggregation.AverageHalsteadDifficulty += *class.Stmts.Analyze.Volume.HalsteadDifficulty - specificAggregation.SumHalsteadDifficulty += *class.Stmts.Analyze.Volume.HalsteadDifficulty - } - if class.Stmts.Analyze.Volume.HalsteadEffort != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Volume.HalsteadEffort)) { - specificAggregation.AverageHalsteadEffort += *class.Stmts.Analyze.Volume.HalsteadEffort - specificAggregation.SumHalsteadEffort += *class.Stmts.Analyze.Volume.HalsteadEffort - } - if class.Stmts.Analyze.Volume.HalsteadVolume != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Volume.HalsteadVolume)) { - specificAggregation.AverageHalsteadVolume += *class.Stmts.Analyze.Volume.HalsteadVolume - specificAggregation.SumHalsteadVolume += *class.Stmts.Analyze.Volume.HalsteadVolume - } - if class.Stmts.Analyze.Volume.HalsteadTime != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Volume.HalsteadTime)) { - specificAggregation.AverageHalsteadTime += *class.Stmts.Analyze.Volume.HalsteadTime - specificAggregation.SumHalsteadTime += *class.Stmts.Analyze.Volume.HalsteadTime - } - } - - // Coupling - if class.Stmts.Analyze.Coupling == nil { - class.Stmts.Analyze.Coupling = &pb.Coupling{ - Efferent: 0, - Afferent: 0, - } - } - class.Stmts.Analyze.Coupling.Efferent = 0 - uniqueDependencies := make(map[string]bool) - for _, dependency := range class.Stmts.StmtExternalDependencies { - dependencyName := dependency.ClassName - - // check if dependency is already in hashmap - if _, ok := specificAggregation.ClassesAfferentCoupling[dependencyName]; !ok { - specificAggregation.ClassesAfferentCoupling[dependencyName] = 0 - } - 
specificAggregation.ClassesAfferentCoupling[dependencyName]++ - - // check if dependency is unique - if _, ok := uniqueDependencies[dependencyName]; !ok { - uniqueDependencies[dependencyName] = true - } - } - - class.Stmts.Analyze.Coupling.Efferent = int32(len(uniqueDependencies)) - - // Add dependencies to file - if file.Stmts.Analyze.Coupling == nil { - file.Stmts.Analyze.Coupling = &pb.Coupling{ - Efferent: 0, - Afferent: 0, - } - } - if file.Stmts.StmtExternalDependencies == nil { - file.Stmts.StmtExternalDependencies = make([]*pb.StmtExternalDependency, 0) - } - - file.Stmts.Analyze.Coupling.Efferent += class.Stmts.Analyze.Coupling.Efferent - file.Stmts.Analyze.Coupling.Afferent += class.Stmts.Analyze.Coupling.Afferent - file.Stmts.StmtExternalDependencies = append(file.Stmts.StmtExternalDependencies, class.Stmts.StmtExternalDependencies...) - } - - // consolidate coupling for file - if len(classes) > 0 && file.Stmts.Analyze.Coupling != nil { - file.Stmts.Analyze.Coupling.Efferent = file.Stmts.Analyze.Coupling.Efferent / int32(len(classes)) - file.Stmts.Analyze.Coupling.Afferent = file.Stmts.Analyze.Coupling.Afferent / int32(len(classes)) - } - -} - -func (r *Aggregator) mapSums(file *pb.File, specificAggregation *Aggregated) *Aggregated { - classes := Engine.GetClassesInFile(file) - functions := Engine.GetFunctionsInFile(file) - - specificAggregation.ConcernedFiles = append(specificAggregation.ConcernedFiles, file) - - // Number of classes - specificAggregation.NbClasses += len(classes) - - // Prepare the file for analysis - if file.Stmts == nil { - return - } - - if file.LinesOfCode == nil && file.Stmts.Analyze.Volume != nil { - file.LinesOfCode = &pb.LinesOfCode{ - LinesOfCode: *file.Stmts.Analyze.Volume.Loc, - CommentLinesOfCode: *file.Stmts.Analyze.Volume.Cloc, - LogicalLinesOfCode: *file.Stmts.Analyze.Volume.Lloc, - } - } - - // Functions - for _, function := range functions { - - if function == nil || function.Stmts == nil { - continue - } - - specificAggregation.NbMethods++ - - // Average cyclomatic complexity per method - if function.Stmts.Analyze != nil && function.Stmts.Analyze.Complexity != nil { - if function.Stmts.Analyze.Complexity.Cyclomatic != nil { - specificAggregation.AverageCyclomaticComplexityPerMethod += float64(*function.Stmts.Analyze.Complexity.Cyclomatic) - } - } - - // Average maintainability index per method - if function.Stmts.Analyze != nil && function.Stmts.Analyze.Maintainability != nil { - if function.Stmts.Analyze.Maintainability.MaintainabilityIndex != nil && !math.IsNaN(float64(*function.Stmts.Analyze.Maintainability.MaintainabilityIndex)) { - specificAggregation.AverageMIPerMethod += *function.Stmts.Analyze.Maintainability.MaintainabilityIndex - specificAggregation.AverageMIwocPerMethod += *function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments - specificAggregation.AverageMIcwPerMethod += *function.Stmts.Analyze.Maintainability.CommentWeight - } - } - // average lines of code per method - if function.Stmts.Analyze != nil && function.Stmts.Analyze.Volume != nil { - if function.Stmts.Analyze.Volume.Loc != nil { - specificAggregation.AverageLocPerMethod += float64(*function.Stmts.Analyze.Volume.Loc) - } - if function.Stmts.Analyze.Volume.Cloc != nil { - specificAggregation.AverageClocPerMethod += float64(*function.Stmts.Analyze.Volume.Cloc) - } - if function.Stmts.Analyze.Volume.Lloc != nil { - specificAggregation.AverageLlocPerMethod += float64(*function.Stmts.Analyze.Volume.Lloc) - } - } - } - - for _, class := range classes { - 
- if class == nil || class.Stmts == nil { - continue - } - - // Number of classes with code - //if class.LinesOfCode != nil && class.LinesOfCode.LinesOfCode > 0 { - specificAggregation.NbClassesWithCode++ - //} - - // Maintainability Index - if class.Stmts.Analyze.Maintainability != nil { - if class.Stmts.Analyze.Maintainability.MaintainabilityIndex != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Maintainability.MaintainabilityIndex)) { - specificAggregation.AverageMI += *class.Stmts.Analyze.Maintainability.MaintainabilityIndex - specificAggregation.AverageMIwoc += *class.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments - specificAggregation.AverageMIcw += *class.Stmts.Analyze.Maintainability.CommentWeight - } - } - - // Coupling - if class.Stmts.Analyze.Coupling != nil { - specificAggregation.AverageInstability += class.Stmts.Analyze.Coupling.Instability - specificAggregation.AverageEfferentCoupling += float64(class.Stmts.Analyze.Coupling.Efferent) - specificAggregation.AverageAfferentCoupling += float64(class.Stmts.Analyze.Coupling.Afferent) - } - - // cyclomatic complexity per class - if class.Stmts.Analyze.Complexity != nil && class.Stmts.Analyze.Complexity.Cyclomatic != nil { - specificAggregation.AverageCyclomaticComplexityPerClass += float64(*class.Stmts.Analyze.Complexity.Cyclomatic) - if specificAggregation.MinCyclomaticComplexity == 0 || int(*class.Stmts.Analyze.Complexity.Cyclomatic) < specificAggregation.MinCyclomaticComplexity { - specificAggregation.MinCyclomaticComplexity = int(*class.Stmts.Analyze.Complexity.Cyclomatic) - } - if specificAggregation.MaxCyclomaticComplexity == 0 || int(*class.Stmts.Analyze.Complexity.Cyclomatic) > specificAggregation.MaxCyclomaticComplexity { - specificAggregation.MaxCyclomaticComplexity = int(*class.Stmts.Analyze.Complexity.Cyclomatic) - } - } - - // Halstead - if class.Stmts.Analyze.Volume != nil { - if class.Stmts.Analyze.Volume.HalsteadDifficulty != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Volume.HalsteadDifficulty)) { - specificAggregation.AverageHalsteadDifficulty += *class.Stmts.Analyze.Volume.HalsteadDifficulty - specificAggregation.SumHalsteadDifficulty += *class.Stmts.Analyze.Volume.HalsteadDifficulty - } - if class.Stmts.Analyze.Volume.HalsteadEffort != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Volume.HalsteadEffort)) { - specificAggregation.AverageHalsteadEffort += *class.Stmts.Analyze.Volume.HalsteadEffort - specificAggregation.SumHalsteadEffort += *class.Stmts.Analyze.Volume.HalsteadEffort - } - if class.Stmts.Analyze.Volume.HalsteadVolume != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Volume.HalsteadVolume)) { - specificAggregation.AverageHalsteadVolume += *class.Stmts.Analyze.Volume.HalsteadVolume - specificAggregation.SumHalsteadVolume += *class.Stmts.Analyze.Volume.HalsteadVolume - } - if class.Stmts.Analyze.Volume.HalsteadTime != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Volume.HalsteadTime)) { - specificAggregation.AverageHalsteadTime += *class.Stmts.Analyze.Volume.HalsteadTime - specificAggregation.SumHalsteadTime += *class.Stmts.Analyze.Volume.HalsteadTime - } - } - - // Coupling - if class.Stmts.Analyze.Coupling == nil { - class.Stmts.Analyze.Coupling = &pb.Coupling{ - Efferent: 0, - Afferent: 0, - } - } - class.Stmts.Analyze.Coupling.Efferent = 0 - uniqueDependencies := make(map[string]bool) - for _, dependency := range class.Stmts.StmtExternalDependencies { - dependencyName := dependency.ClassName - - // check if dependency is already in hashmap - if _, ok := 
specificAggregation.ClassesAfferentCoupling[dependencyName]; !ok { - specificAggregation.ClassesAfferentCoupling[dependencyName] = 0 - } - specificAggregation.ClassesAfferentCoupling[dependencyName]++ - - // check if dependency is unique - if _, ok := uniqueDependencies[dependencyName]; !ok { - uniqueDependencies[dependencyName] = true - } - } - - class.Stmts.Analyze.Coupling.Efferent = int32(len(uniqueDependencies)) - - // Add dependencies to file - if file.Stmts.Analyze.Coupling == nil { - file.Stmts.Analyze.Coupling = &pb.Coupling{ - Efferent: 0, - Afferent: 0, - } - } - if file.Stmts.StmtExternalDependencies == nil { - file.Stmts.StmtExternalDependencies = make([]*pb.StmtExternalDependency, 0) - } - - file.Stmts.Analyze.Coupling.Efferent += class.Stmts.Analyze.Coupling.Efferent - file.Stmts.Analyze.Coupling.Afferent += class.Stmts.Analyze.Coupling.Afferent - file.Stmts.StmtExternalDependencies = append(file.Stmts.StmtExternalDependencies, class.Stmts.StmtExternalDependencies...) - } - - // consolidate coupling for file - if len(classes) > 0 && file.Stmts.Analyze.Coupling != nil { - file.Stmts.Analyze.Coupling.Efferent = file.Stmts.Analyze.Coupling.Efferent / int32(len(classes)) - file.Stmts.Analyze.Coupling.Afferent = file.Stmts.Analyze.Coupling.Afferent / int32(len(classes)) - } -} diff --git a/src/Analyzer/Aggregator.go b/src/Analyzer/Aggregator.go index bcbbc6d..9c12fee 100644 --- a/src/Analyzer/Aggregator.go +++ b/src/Analyzer/Aggregator.go @@ -824,6 +824,10 @@ func (r *Aggregator) mapCoupling(aggregated *Aggregated) Aggregated { for _, class := range classes { + if class == nil { + continue + } + // dependencies dependencies := file.Stmts.StmtExternalDependencies @@ -878,7 +882,6 @@ func (r *Aggregator) mapCoupling(aggregated *Aggregated) Aggregated { result.PackageRelations[namespaceFrom][namespaceTo]++ } - class.Stmts.Analyze.Coupling.Efferent = 0 uniqueDependencies := make(map[string]bool) for _, dependency := range class.Stmts.StmtExternalDependencies { dependencyName := dependency.ClassName @@ -894,6 +897,13 @@ func (r *Aggregator) mapCoupling(aggregated *Aggregated) Aggregated { uniqueDependencies[dependencyName] = true } } + + if class.Stmts.Analyze.Coupling == nil { + class.Stmts.Analyze.Coupling = &pb.Coupling{ + Efferent: 0, + Afferent: 0, + } + } class.Stmts.Analyze.Coupling.Efferent = int32(len(uniqueDependencies)) } } diff --git a/src/Report/HtmlReportGenerator.go b/src/Report/HtmlReportGenerator.go index 917a1b2..d43eb33 100644 --- a/src/Report/HtmlReportGenerator.go +++ b/src/Report/HtmlReportGenerator.go @@ -261,6 +261,9 @@ func (v *HtmlReportGenerator) RegisterFilters() { // Sort by risk of file files := in.Interface().([]*pb.File) sort.Slice(files, func(i, j int) bool { + if files[i].Stmts == nil && files[j].Stmts == nil { + return false + } if files[i].Stmts.Analyze.Risk == nil && files[j].Stmts.Analyze.Risk == nil { return false From 655b2bc71bff4b6a721fc42105870753c531dc8a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20L=C3=A9pine?= Date: Sat, 30 Nov 2024 08:04:36 +0100 Subject: [PATCH 13/16] fixes coupling --- src/Analyzer/Aggregator.go | 52 +++++++++++++++++++++++++++++++------- 1 file changed, 43 insertions(+), 9 deletions(-) diff --git a/src/Analyzer/Aggregator.go b/src/Analyzer/Aggregator.go index 9c12fee..11f3429 100644 --- a/src/Analyzer/Aggregator.go +++ b/src/Analyzer/Aggregator.go @@ -164,6 +164,15 @@ func newAggregated() Aggregated { } } +// This method is the main entry point to get the aggregated data +// It will: +// - chunk the 
files by number of processors, to speed up the process +// - map the files to the aggregated object with sums +// - reduce the sums to get the averages +// - map the coupling +// - run the risk analysis +// +// it also computes the comparaison if the compared files are set func (r *Aggregator) Aggregates() ProjectAggregated { // We create a new aggregated object for each type of aggregation @@ -372,11 +381,13 @@ func (r *Aggregator) WithAggregateAnalyzer(analyzer AggregateAnalyzer) { r.analyzers = append(r.analyzers, analyzer) } +// Set the files and branch to compare with func (r *Aggregator) WithComparaison(allResultsCloned []*pb.File, comparedBranch string) { r.ComparedFiles = allResultsCloned r.ComparedBranch = comparedBranch } +// Map the sums of a file to the aggregated object func (r *Aggregator) mapSums(file *pb.File, specificAggregation Aggregated) Aggregated { // copy the specific aggregation to new object to avoid side effects result := specificAggregation @@ -580,8 +591,11 @@ func (r *Aggregator) mapSums(file *pb.File, specificAggregation Aggregated) Aggr result.EfferentCoupling.Counter++ result.AfferentCoupling.Sum += float64(class.Stmts.Analyze.Coupling.Afferent) result.AfferentCoupling.Counter++ - result.Instability.Sum += float64(class.Stmts.Analyze.Coupling.Instability) - result.Instability.Counter++ + + // Instability for class + if class.Stmts.Analyze.Coupling.Efferent > 0 { + class.Stmts.Analyze.Coupling.Instability = float64(class.Stmts.Analyze.Coupling.Efferent) / float64(class.Stmts.Analyze.Coupling.Efferent+class.Stmts.Analyze.Coupling.Afferent) + } } // cyclomatic complexity per class @@ -650,6 +664,7 @@ func (r *Aggregator) mapSums(file *pb.File, specificAggregation Aggregated) Aggr return result } +// Merge the chunks of files to get the aggregated data (sums) func (r *Aggregator) mergeChunks(aggregated Aggregated, chunk *Aggregated) Aggregated { result := aggregated @@ -696,9 +711,6 @@ func (r *Aggregator) mergeChunks(aggregated Aggregated, chunk *Aggregated) Aggre result.MaintainabilityCommentWeight.Sum += chunk.MaintainabilityCommentWeight.Sum result.MaintainabilityCommentWeight.Counter += chunk.MaintainabilityCommentWeight.Counter - result.Instability.Sum += chunk.Instability.Sum - result.Instability.Counter += chunk.Instability.Counter - result.EfferentCoupling.Sum += chunk.EfferentCoupling.Sum result.EfferentCoupling.Counter += chunk.EfferentCoupling.Counter result.AfferentCoupling.Sum += chunk.AfferentCoupling.Sum @@ -724,6 +736,7 @@ func (r *Aggregator) mergeChunks(aggregated Aggregated, chunk *Aggregated) Aggre return result } +// Reduce the sums to get the averages func (r *Aggregator) reduceMetrics(aggregated Aggregated) Aggregated { // here we reduce metrics by averaging them result := aggregated @@ -788,10 +801,6 @@ func (r *Aggregator) reduceMetrics(aggregated Aggregated) Aggregated { result.MaintainabilityCommentWeightPerMethod.Avg = result.MaintainabilityCommentWeightPerMethod.Sum / float64(result.MaintainabilityCommentWeightPerMethod.Counter) } - // afferent coupling - if result.Instability.Counter > 0 { - result.Instability.Avg = result.Instability.Sum / float64(result.Instability.Counter) - } if result.EfferentCoupling.Counter > 0 { result.EfferentCoupling.Avg = result.EfferentCoupling.Sum / float64(result.EfferentCoupling.Counter) } @@ -799,6 +808,12 @@ func (r *Aggregator) reduceMetrics(aggregated Aggregated) Aggregated { result.AfferentCoupling.Avg = result.AfferentCoupling.Sum / float64(result.AfferentCoupling.Counter) } + // afferent 
coupling + // Ce / (Ce + Ca) + if result.AfferentCoupling.Counter > 0 { + result.Instability.Avg = result.EfferentCoupling.Sum / result.AfferentCoupling.Sum + } + // Count commits for the period based on `ResultOfGitAnalysis` data result.ResultOfGitAnalysis = r.gitSummaries if result.ResultOfGitAnalysis != nil { @@ -815,6 +830,7 @@ func (r *Aggregator) reduceMetrics(aggregated Aggregated) Aggregated { return result } +// Map the coupling to get the package relations and the afferent coupling func (r *Aggregator) mapCoupling(aggregated *Aggregated) Aggregated { result := *aggregated reg := regexp.MustCompile("[^A-Za-z0-9.]+") @@ -905,7 +921,25 @@ func (r *Aggregator) mapCoupling(aggregated *Aggregated) Aggregated { } } class.Stmts.Analyze.Coupling.Efferent = int32(len(uniqueDependencies)) + + // Afferent coupling + class.Stmts.Analyze.Coupling.Afferent = int32(len(class.Stmts.StmtExternalDependencies)) + + // Increment result + result.EfferentCoupling.Sum += float64(class.Stmts.Analyze.Coupling.Efferent) + result.EfferentCoupling.Counter++ + result.AfferentCoupling.Sum += float64(class.Stmts.Analyze.Coupling.Afferent) + result.AfferentCoupling.Counter++ } } + + // Afferent coupling + // Ce / (Ce + Ca) + if result.AfferentCoupling.Counter > 0 { + result.Instability.Avg = result.EfferentCoupling.Sum / result.AfferentCoupling.Sum + } + result.EfferentCoupling.Avg = result.EfferentCoupling.Sum / float64(result.EfferentCoupling.Counter) + result.AfferentCoupling.Avg = result.AfferentCoupling.Sum / float64(result.AfferentCoupling.Counter) + return result } From d2694babc5071d97185675b073df08ea7cf11eeb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20L=C3=A9pine?= Date: Sat, 30 Nov 2024 08:16:26 +0100 Subject: [PATCH 14/16] fix loc diagram for no oop languages --- src/Report/HtmlReportGenerator.go | 7 ++++++- src/Report/templates/html/componentChartRadiusBarLoc.html | 2 +- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/src/Report/HtmlReportGenerator.go b/src/Report/HtmlReportGenerator.go index d43eb33..778611e 100644 --- a/src/Report/HtmlReportGenerator.go +++ b/src/Report/HtmlReportGenerator.go @@ -248,6 +248,11 @@ func (v *HtmlReportGenerator) RegisterFilters() { } json = json[:len(json)-1] + "]" + if json == "]" { + // occurs when no relations are found + json = "[]" + } + return pongo2.AsSafeValue(json), nil }) @@ -261,7 +266,7 @@ func (v *HtmlReportGenerator) RegisterFilters() { // Sort by risk of file files := in.Interface().([]*pb.File) sort.Slice(files, func(i, j int) bool { - if files[i].Stmts == nil && files[j].Stmts == nil { + if files[i].Stmts == nil && files[j].Stmts == nil || files[i].Stmts.Analyze == nil || files[j].Stmts.Analyze == nil { return false } diff --git a/src/Report/templates/html/componentChartRadiusBarLoc.html b/src/Report/templates/html/componentChartRadiusBarLoc.html index a289c40..bffe7a2 100644 --- a/src/Report/templates/html/componentChartRadiusBarLoc.html +++ b/src/Report/templates/html/componentChartRadiusBarLoc.html @@ -5,7 +5,7 @@ {%- if not fileHasClasses(file) -%} {% set elements = file|convertOneFileToCollection -%} {% set name = file.Path %} - {% set value = file.Stmts.Analyze.Volume.Loc %} + {% set value = file.LinesOfCode.LinesOfCode %} {%- else %} {% set elements = file.Stmts.StmtClass -%} {% set name = class.Name.Qualified -%} From 83097db0e8b9a24e423f1cc6751be381f2cad66d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jean-Fran=C3=A7ois=20L=C3=A9pine?= Date: Sat, 30 Nov 2024 08:29:48 +0100 Subject: [PATCH 15/16] fixes coupling 
chart for programming languages view --- src/Analyzer/Aggregator.go | 3 ++- .../templates/html/componentChartRadiusBarAfferent.html | 6 +++++- .../templates/html/componentChartRadiusBarEfferent.html | 8 ++++++-- 3 files changed, 13 insertions(+), 4 deletions(-) diff --git a/src/Analyzer/Aggregator.go b/src/Analyzer/Aggregator.go index 11f3429..0040a8c 100644 --- a/src/Analyzer/Aggregator.go +++ b/src/Analyzer/Aggregator.go @@ -357,7 +357,8 @@ func (r *Aggregator) executeAggregationOnFiles(files []*pb.File) ProjectAggregat projectAggregated.ByFile = r.reduceMetrics(projectAggregated.ByFile) for k, v := range projectAggregated.ByProgrammingLanguage { v = r.reduceMetrics(v) - projectAggregated.ByProgrammingLanguage[k] = v + f := r.mapCoupling(&v) + projectAggregated.ByProgrammingLanguage[k] = f } // Coupling (should be done separately, to avoid race condition) diff --git a/src/Report/templates/html/componentChartRadiusBarAfferent.html b/src/Report/templates/html/componentChartRadiusBarAfferent.html index 898679d..5fd6030 100644 --- a/src/Report/templates/html/componentChartRadiusBarAfferent.html +++ b/src/Report/templates/html/componentChartRadiusBarAfferent.html @@ -1,5 +1,6 @@
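The coupling patches above derive efferent coupling from the set of distinct outgoing dependencies, afferent coupling from a counter of incoming references, and instability from the ratio given in the code comments as Ce / (Ce + Ca). The sketch below shows those three steps with invented class names; it illustrates the textbook formula for reference and is not a drop-in for the project's aggregation code.

package main

import "fmt"

// instability computes I = Ce / (Ce + Ca), guarding the zero denominator.
func instability(efferent, afferent int) float64 {
	if efferent+afferent == 0 {
		return 0
	}
	return float64(efferent) / float64(efferent+afferent)
}

func main() {
	// Hypothetical dependency data: each class lists the classes it references.
	uses := map[string][]string{
		"App\\Order":    {"App\\Customer", "App\\Invoice", "App\\Customer"},
		"App\\Invoice":  {"App\\Customer"},
		"App\\Customer": {},
	}

	ce := map[string]int{} // efferent: distinct classes a class depends on
	ca := map[string]int{} // afferent: references pointing at a class
	for from, deps := range uses {
		unique := map[string]bool{}
		for _, to := range deps {
			unique[to] = true
			ca[to]++ // counted per reference, like the ClassesAfferentCoupling map
		}
		ce[from] = len(unique)
	}

	for _, name := range []string{"App\\Order", "App\\Invoice", "App\\Customer"} {
		fmt.Printf("%-14s Ce=%d Ca=%d I=%.2f\n", name, ce[name], ca[name], instability(ce[name], ca[name]))
	}
}

Guarding the Ce + Ca == 0 case keeps instability at zero for classes with no recorded coupling instead of producing NaN.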