diff --git a/Makefile b/Makefile index 9183ae6..e304a79 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,4 @@ -.PHONY: install build +.PHONY: install build monkey-test PROTOC_VERSION=24.4 ARCHITECTURE=linux-x86_64 @@ -41,4 +41,10 @@ test: go clean -testcache find . -type d -iname ".ast-metrics-cache" -exec rm -rf "{}" \; || true go test ./... - @echo "\e[34m\033[1mDONE \033[0m\e[39m\n" \ No newline at end of file + @echo "\e[34m\033[1mDONE \033[0m\e[39m\n" + +# monkey test: download random PHP and Go packages from top 100 and analyze them +monkey-test: + @echo "\e[34m\033[1m-> Monkey testing\033[0m\e[39m\n" + bash scripts/monkey-test.sh + @echo "\e[34m\033[1mDONE \033[0m\e[39m\n" diff --git a/proto/NodeType.proto b/proto/NodeType.proto index 759bc1b..b7e4e68 100644 --- a/proto/NodeType.proto +++ b/proto/NodeType.proto @@ -209,16 +209,16 @@ message Volume { optional int32 cloc = 3; optional int32 halsteadVocabulary = 4; optional int32 halsteadLength = 5; - optional float halsteadVolume = 6; - optional float halsteadDifficulty = 7; - optional float halsteadEffort = 8; - optional float halsteadTime = 9; - optional float halsteadEstimatedLength = 10; + optional double halsteadVolume = 6; + optional double halsteadDifficulty = 7; + optional double halsteadEffort = 8; + optional double halsteadTime = 9; + optional double halsteadEstimatedLength = 10; } message Maintainability { - optional float maintainabilityIndex = 1; - optional float maintainabilityIndexWithoutComments = 2; - optional float commentWeight = 3; + optional double maintainabilityIndex = 1; + optional double maintainabilityIndexWithoutComments = 2; + optional double commentWeight = 3; } // ------------------------------------ @@ -239,7 +239,7 @@ message Commit { // -- Risk // ------------------------------------ message Risk { - float score = 1; // score of risk. Lower is better + double score = 1; // score of risk. 
Lower is better } // ------------------------------------ @@ -248,5 +248,5 @@ message Risk { message Coupling { int32 afferent = 1; // number of classes that depends on this class int32 efferent = 2; // number of classes that this class depends on - float instability = 3; // instability of the class + double instability = 3; // instability of the class } \ No newline at end of file diff --git a/scripts/monkey-test.sh b/scripts/monkey-test.sh new file mode 100644 index 0000000..7f5ae27 --- /dev/null +++ b/scripts/monkey-test.sh @@ -0,0 +1,72 @@ +set -e + +# number of packages to download +PACKAGES_COUNT=100 + +workdir=$(mktemp -d) +echo "Working in $workdir" +if [ -z "$workdir" ]; then + echo "Workdir not found" + exit 1 +fi + +# cleanup reports +rm -f ast-metrics-report.json + + +# sort TOP packages randomly +url="https://packagist.org/explore/popular.json?per_page=100" +# shuffle 100 packages +packages=$(curl -s $url | jq -r '.packages[].name' | shuf) +# take only $PACKAGES_COUNT packages +packages=$(echo "$packages" | head -n $PACKAGES_COUNT) + +echo "Downloading $PACKAGES_COUNT packages" +for package in $packages; +do + echo " Downloading $package" + repository=$(curl -s https://packagist.org/packages/$package.json | jq -r '.package.repository') + zipUrl="$repository/archive/refs/heads/master.zip" + # generate random name for destination + name=$(uuidgen) + destination="$workdir/$name" + echo " Downloading $zipUrl to $destination" + curl -s -L -o $destination.zip $zipUrl + + # if zip contains HTML, like "Just a moment...", then skip + if grep -q " /dev/null + rm $destination.zip +done + +echo "Analyzing $workdir" +time go run . analyze --ci $workdir + +# Ensure that report is generated +if [ ! 
-f ast-metrics-report.json ]; then + echo "Report not generated" + exit 1 +else + echo "Report generated" +fi + + +# Count number of analyzed files +# | **PHP** | 122.0 K | 🟢 112 | 1.21 | 12 | +line=$(cat build/report.md |grep '**PHP**'|head -n 1) +separator="|" +linesOfCode=$(echo $line | awk -F "$separator" '{print $3}') +echo "Analyzed $linesOfCode lines of code" + + +echo "Done" \ No newline at end of file diff --git a/src/Analyzer/Aggregator.go b/src/Analyzer/Aggregator.go index 4b29659..574eeb0 100644 --- a/src/Analyzer/Aggregator.go +++ b/src/Analyzer/Aggregator.go @@ -3,6 +3,8 @@ package Analyzer import ( "math" "regexp" + "runtime" + "sync" "github.com/halleck45/ast-metrics/src/Engine" pb "github.com/halleck45/ast-metrics/src/NodeType" @@ -19,52 +21,67 @@ type ProjectAggregated struct { Comparaison *ProjectComparaison } +type AggregateResult struct { + Sum float64 + Min float64 + Max float64 + Avg float64 + Counter int +} + +func NewAggregateResult() AggregateResult { + return AggregateResult{ + Sum: 0, + Min: 0, + Max: 0, + Avg: 0, + Counter: 0, + } +} + type Aggregated struct { - ConcernedFiles []*pb.File - Comparaison *Comparaison + ProgrammingLanguages map[string]int + ConcernedFiles []*pb.File + ErroredFiles []*pb.File + Comparaison *Comparaison // hashmap of classes, just with the qualified name, used for afferent coupling calculation - ClassesAfferentCoupling map[string]int - NbFiles int - NbFunctions int - NbClasses int - NbClassesWithCode int - NbMethods int - Loc int - Cloc int - Lloc int - AverageMethodsPerClass float64 - AverageLocPerMethod float64 - AverageLlocPerMethod float64 - AverageClocPerMethod float64 - AverageCyclomaticComplexityPerMethod float64 - AverageCyclomaticComplexityPerClass float64 - MinCyclomaticComplexity int - MaxCyclomaticComplexity int - AverageHalsteadDifficulty float64 - AverageHalsteadEffort float64 - AverageHalsteadVolume float64 - AverageHalsteadTime float64 - AverageHalsteadBugs float64 - SumHalsteadDifficulty 
float64 - SumHalsteadEffort float64 - SumHalsteadVolume float64 - SumHalsteadTime float64 - SumHalsteadBugs float64 - AverageMI float64 - AverageMIwoc float64 - AverageMIcw float64 - AverageMIPerMethod float64 - AverageMIwocPerMethod float64 - AverageMIcwPerMethod float64 - AverageAfferentCoupling float64 - AverageEfferentCoupling float64 - AverageInstability float64 - CommitCountForPeriod int - CommittedFilesCountForPeriod int // for example if one commit concerns 10 files, it will be 10 - BusFactor int - TopCommitters []TopCommitter - ResultOfGitAnalysis []ResultOfGitAnalysis - PackageRelations map[string]map[string]int // counter of dependencies. Ex: A -> B -> 2 + ClassesAfferentCoupling map[string]int + NbFiles int + NbFunctions int + NbClasses int + NbClassesWithCode int + NbMethods int + Loc AggregateResult + Cloc AggregateResult + Lloc AggregateResult + MethodsPerClass AggregateResult + LocPerClass AggregateResult + LocPerMethod AggregateResult + LlocPerMethod AggregateResult + ClocPerMethod AggregateResult + CyclomaticComplexity AggregateResult + CyclomaticComplexityPerMethod AggregateResult + CyclomaticComplexityPerClass AggregateResult + HalsteadDifficulty AggregateResult + HalsteadEffort AggregateResult + HalsteadVolume AggregateResult + HalsteadTime AggregateResult + HalsteadBugs AggregateResult + MaintainabilityIndex AggregateResult + MaintainabilityIndexWithoutComments AggregateResult + MaintainabilityCommentWeight AggregateResult + Instability AggregateResult + EfferentCoupling AggregateResult + AfferentCoupling AggregateResult + MaintainabilityPerMethod AggregateResult + MaintainabilityPerMethodWithoutComments AggregateResult + MaintainabilityCommentWeightPerMethod AggregateResult + CommitCountForPeriod int + CommittedFilesCountForPeriod int + BusFactor int + TopCommitters []TopCommitter + ResultOfGitAnalysis []ResultOfGitAnalysis + PackageRelations map[string]map[string]int // counter of dependencies. 
Ex: A -> B -> 2 } type ProjectComparaison struct { @@ -111,47 +128,53 @@ type AggregateAnalyzer interface { func newAggregated() Aggregated { return Aggregated{ - ConcernedFiles: make([]*pb.File, 0), - ClassesAfferentCoupling: make(map[string]int), - NbClasses: 0, - NbClassesWithCode: 0, - NbMethods: 0, - NbFunctions: 0, - Loc: 0, - Cloc: 0, - Lloc: 0, - AverageLocPerMethod: 0, - AverageLlocPerMethod: 0, - AverageClocPerMethod: 0, - AverageCyclomaticComplexityPerMethod: 0, - AverageCyclomaticComplexityPerClass: 0, - MinCyclomaticComplexity: 0, - MaxCyclomaticComplexity: 0, - AverageHalsteadDifficulty: 0, - AverageHalsteadEffort: 0, - AverageHalsteadVolume: 0, - AverageHalsteadTime: 0, - AverageHalsteadBugs: 0, - SumHalsteadDifficulty: 0, - SumHalsteadEffort: 0, - SumHalsteadVolume: 0, - SumHalsteadTime: 0, - SumHalsteadBugs: 0, - AverageMI: 0, - AverageMIwoc: 0, - AverageMIcw: 0, - AverageMIPerMethod: 0, - AverageMIwocPerMethod: 0, - AverageAfferentCoupling: 0, - AverageEfferentCoupling: 0, - AverageInstability: 0, - AverageMIcwPerMethod: 0, - CommitCountForPeriod: 0, - ResultOfGitAnalysis: nil, - PackageRelations: make(map[string]map[string]int), + ProgrammingLanguages: make(map[string]int), + ConcernedFiles: make([]*pb.File, 0), + ClassesAfferentCoupling: make(map[string]int), + ErroredFiles: make([]*pb.File, 0), + NbClasses: 0, + NbClassesWithCode: 0, + NbMethods: 0, + NbFunctions: 0, + Loc: NewAggregateResult(), + MethodsPerClass: NewAggregateResult(), + LocPerClass: NewAggregateResult(), + LocPerMethod: NewAggregateResult(), + ClocPerMethod: NewAggregateResult(), + CyclomaticComplexity: NewAggregateResult(), + CyclomaticComplexityPerMethod: NewAggregateResult(), + CyclomaticComplexityPerClass: NewAggregateResult(), + HalsteadEffort: NewAggregateResult(), + HalsteadVolume: NewAggregateResult(), + HalsteadTime: NewAggregateResult(), + HalsteadBugs: NewAggregateResult(), + MaintainabilityIndex: NewAggregateResult(), + MaintainabilityIndexWithoutComments: 
NewAggregateResult(), + MaintainabilityCommentWeight: NewAggregateResult(), + Instability: NewAggregateResult(), + EfferentCoupling: NewAggregateResult(), + AfferentCoupling: NewAggregateResult(), + MaintainabilityPerMethod: NewAggregateResult(), + MaintainabilityPerMethodWithoutComments: NewAggregateResult(), + MaintainabilityCommentWeightPerMethod: NewAggregateResult(), + CommitCountForPeriod: 0, + CommittedFilesCountForPeriod: 0, + BusFactor: 0, + TopCommitters: make([]TopCommitter, 0), + ResultOfGitAnalysis: nil, + PackageRelations: make(map[string]map[string]int), } } +// This method is the main entry point to get the aggregated data +// It will: +// - chunk the files by number of processors, to speed up the process +// - map the files to the aggregated object with sums +// - reduce the sums to get the averages +// - map the coupling +// - run the risk analysis +// +// it also computes the comparaison if the compared files are set func (r *Aggregator) Aggregates() ProjectAggregated { // We create a new aggregated object for each type of aggregation @@ -197,303 +220,162 @@ func (r *Aggregator) Aggregates() ProjectAggregated { func (r *Aggregator) executeAggregationOnFiles(files []*pb.File) ProjectAggregated { - // We create a new aggregated object for each type of aggregation - // ByFile, ByClass, Combined - projectAggregated := ProjectAggregated{} - projectAggregated.ByFile = newAggregated() - projectAggregated.ByClass = newAggregated() - projectAggregated.Combined = newAggregated() - - // Count files - projectAggregated.ByClass.NbFiles = len(files) - projectAggregated.ByFile.NbFiles = len(files) - projectAggregated.Combined.NbFiles = len(files) - - // Prepare errors - projectAggregated.ErroredFiles = make([]*pb.File, 0) - - for _, file := range files { + projectAggregated := ProjectAggregated{ + ByFile: newAggregated(), + ByClass: newAggregated(), + Combined: newAggregated(), + ByProgrammingLanguage: make(map[string]Aggregated), + ErroredFiles: 
make([]*pb.File, 0), + Evaluation: nil, + Comparaison: nil, + } - // Files with errors - if file.Errors != nil && len(file.Errors) > 0 { - projectAggregated.ErroredFiles = append(projectAggregated.ErroredFiles, file) + // do the sums. Group files by number of processors + var wg sync.WaitGroup + numberOfProcessors := runtime.NumCPU() + + // Split the files into chunks + chunkSize := len(files) / numberOfProcessors + chunks := make([][]*pb.File, numberOfProcessors) + for i := 0; i < numberOfProcessors; i++ { + start := i * chunkSize + end := start + chunkSize + if i == numberOfProcessors-1 { + end = len(files) } + chunks[i] = files[start:end] + } - if file.Stmts == nil { + // for each programming language, we create a separated result + aggregateByLanguageChunk := make(map[string]Aggregated) + for _, file := range files { + if file.ProgrammingLanguage == "" { continue } - - // By language - if projectAggregated.ByProgrammingLanguage == nil { - projectAggregated.ByProgrammingLanguage = make(map[string]Aggregated) + if _, ok := aggregateByLanguageChunk[file.ProgrammingLanguage]; !ok { + aggregateByLanguageChunk[file.ProgrammingLanguage] = newAggregated() } - if _, ok := projectAggregated.ByProgrammingLanguage[file.ProgrammingLanguage]; !ok { - projectAggregated.ByProgrammingLanguage[file.ProgrammingLanguage] = newAggregated() - - } - byLanguage := projectAggregated.ByProgrammingLanguage[file.ProgrammingLanguage] - byLanguage.NbFiles++ - - // Make calculations: sums of metrics, etc. 
- r.calculateSums(file, &projectAggregated.ByFile) - r.calculateSums(file, &projectAggregated.ByClass) - r.calculateSums(file, &projectAggregated.Combined) - r.calculateSums(file, &byLanguage) - projectAggregated.ByProgrammingLanguage[file.ProgrammingLanguage] = byLanguage - } - - // Consolidate averages - r.consolidate(&projectAggregated.ByFile) - r.consolidate(&projectAggregated.ByClass) - r.consolidate(&projectAggregated.Combined) - - // by language - for lng, byLanguage := range projectAggregated.ByProgrammingLanguage { - r.consolidate(&byLanguage) - projectAggregated.ByProgrammingLanguage[lng] = byLanguage - } - - // Risks - riskAnalyzer := NewRiskAnalyzer() - riskAnalyzer.Analyze(projectAggregated) - - return projectAggregated -} - -// Consolidate the aggregated data -func (r *Aggregator) consolidate(aggregated *Aggregated) { - - if aggregated.NbClasses > 0 { - aggregated.AverageMethodsPerClass = float64(aggregated.NbMethods) / float64(aggregated.NbClasses) - aggregated.AverageCyclomaticComplexityPerClass = aggregated.AverageCyclomaticComplexityPerClass / float64(aggregated.NbClasses) - } else { - aggregated.AverageMethodsPerClass = 0 - aggregated.AverageCyclomaticComplexityPerClass = 0 - } - - if aggregated.AverageMI > 0 { - aggregated.AverageMI = aggregated.AverageMI / float64(aggregated.NbClasses) - aggregated.AverageMIwoc = aggregated.AverageMIwoc / float64(aggregated.NbClasses) - aggregated.AverageMIcw = aggregated.AverageMIcw / float64(aggregated.NbClasses) - } - - if aggregated.AverageInstability > 0 { - aggregated.AverageEfferentCoupling = aggregated.AverageEfferentCoupling / float64(aggregated.NbClasses) - aggregated.AverageAfferentCoupling = aggregated.AverageAfferentCoupling / float64(aggregated.NbClasses) - } - - if aggregated.NbMethods > 0 { - aggregated.AverageLocPerMethod = aggregated.AverageLocPerMethod / float64(aggregated.NbMethods) - aggregated.AverageClocPerMethod = aggregated.AverageClocPerMethod / float64(aggregated.NbMethods) - 
aggregated.AverageLlocPerMethod = aggregated.AverageLlocPerMethod / float64(aggregated.NbMethods) - aggregated.AverageCyclomaticComplexityPerMethod = aggregated.AverageCyclomaticComplexityPerMethod / float64(aggregated.NbMethods) - aggregated.AverageMIPerMethod = aggregated.AverageMIPerMethod / float64(aggregated.NbMethods) - aggregated.AverageMIwocPerMethod = aggregated.AverageMIwocPerMethod / float64(aggregated.NbMethods) - aggregated.AverageMIcwPerMethod = aggregated.AverageMIcwPerMethod / float64(aggregated.NbMethods) - aggregated.AverageHalsteadDifficulty = aggregated.AverageHalsteadDifficulty / float64(aggregated.NbClasses) - aggregated.AverageHalsteadEffort = aggregated.AverageHalsteadEffort / float64(aggregated.NbClasses) - aggregated.AverageHalsteadVolume = aggregated.AverageHalsteadVolume / float64(aggregated.NbClasses) - aggregated.AverageHalsteadTime = aggregated.AverageHalsteadTime / float64(aggregated.NbClasses) - aggregated.AverageHalsteadBugs = aggregated.AverageHalsteadBugs / float64(aggregated.NbClasses) - } - - // if langage without classes - if aggregated.NbClasses == 0 { - aggregated.AverageMI = aggregated.AverageMIPerMethod - aggregated.AverageMIwoc = aggregated.AverageMIwocPerMethod - aggregated.AverageMIcw = aggregated.AverageMIcwPerMethod - aggregated.AverageInstability = 0 - aggregated.AverageEfferentCoupling = 0 - aggregated.AverageAfferentCoupling = 0 } - // Total locs: increment loc of each file - aggregated.Loc = 0 - aggregated.Cloc = 0 - aggregated.Lloc = 0 + // Create channels for the results + resultsByClass := make(chan *Aggregated, numberOfProcessors) + resultsByFile := make(chan *Aggregated, numberOfProcessors) + resultsByProgrammingLanguage := make(chan *map[string]Aggregated, numberOfProcessors) - for _, file := range aggregated.ConcernedFiles { + // Deadlock prevention + mu := sync.Mutex{} - if file.LinesOfCode == nil { - continue - } + // Process each chunk of files + // Please ensure that there is no data race here. 
If needed, use the mutex + chunkIndex := 0 + for i := 0; i < numberOfProcessors; i++ { - aggregated.Loc += int(file.LinesOfCode.LinesOfCode) - aggregated.Cloc += int(file.LinesOfCode.CommentLinesOfCode) - aggregated.Lloc += int(file.LinesOfCode.LogicalLinesOfCode) + wg.Add(1) - // Calculate alternate MI using average MI per method when file has no class - if file.Stmts.StmtClass == nil || len(file.Stmts.StmtClass) == 0 { - if file.Stmts.Analyze.Maintainability == nil { - file.Stmts.Analyze.Maintainability = &pb.Maintainability{} - } + // Reduce results : we want to get sums, and to count calculated values into a AggregateResult + go func(files []*pb.File) { + defer wg.Done() - methods := file.Stmts.StmtFunction - if methods == nil || len(methods) == 0 { - continue + if len(files) == 0 { + return } - averageForFile := float32(0) - for _, method := range methods { - if method.Stmts.Analyze == nil || method.Stmts.Analyze.Maintainability == nil { - continue - } - averageForFile += float32(*method.Stmts.Analyze.Maintainability.MaintainabilityIndex) - } - averageForFile = averageForFile / float32(len(methods)) - file.Stmts.Analyze.Maintainability.MaintainabilityIndex = &averageForFile - } - // LOC of file is the sum of all classes and methods - // That's useful when we navigate over the files instead of the classes - zero := int32(0) - loc := int32(0) - lloc := int32(0) - cloc := int32(0) - - if file.Stmts.Analyze.Volume == nil { - file.Stmts.Analyze.Volume = &pb.Volume{ - Lloc: &zero, - Cloc: &zero, - Loc: &zero, + // Prepare results + aggregateByFileChunk := newAggregated() + aggregateByClassChunk := newAggregated() + + // the process deal with its own chunk + for _, file := range files { + localFile := file + + // by file + result := r.mapSums(localFile, aggregateByFileChunk) + result.ConcernedFiles = append(result.ConcernedFiles, localFile) + aggregateByFileChunk = result + + // by class + result = r.mapSums(localFile, aggregateByClassChunk) + result.ConcernedFiles = 
append(result.ConcernedFiles, localFile) + aggregateByClassChunk = result + + // by language + mu.Lock() + byLanguage := r.mapSums(localFile, aggregateByLanguageChunk[localFile.ProgrammingLanguage]) + byLanguage.ConcernedFiles = append(byLanguage.ConcernedFiles, localFile) + aggregateByLanguageChunk[localFile.ProgrammingLanguage] = byLanguage + mu.Unlock() } - } - classes := Engine.GetClassesInFile(file) - functions := file.Stmts.StmtFunction - for _, class := range classes { - if class.LinesOfCode == nil { - continue - } - loc += class.LinesOfCode.LinesOfCode - lloc += class.LinesOfCode.LogicalLinesOfCode - cloc += class.LinesOfCode.CommentLinesOfCode - } - - for _, function := range functions { - if function.LinesOfCode == nil { - continue - } - loc += function.LinesOfCode.LinesOfCode - lloc += function.LinesOfCode.LogicalLinesOfCode - cloc += function.LinesOfCode.CommentLinesOfCode - } + // Send the result to the channels + resultsByClass <- &aggregateByClassChunk + resultsByFile <- &aggregateByFileChunk + resultsByProgrammingLanguage <- &aggregateByLanguageChunk - file.Stmts.Analyze.Volume.Loc = &loc - file.Stmts.Analyze.Volume.Lloc = &lloc - file.Stmts.Analyze.Volume.Cloc = &cloc + }(chunks[chunkIndex]) + chunkIndex++ + } - // File analysis should be the sum of all methods and classes in the file - // That's useful when we navigate over the files instead of the classes - if file.Stmts.Analyze.Complexity.Cyclomatic == nil { - file.Stmts.Analyze.Complexity.Cyclomatic = &zero + wg.Wait() + close(resultsByClass) + close(resultsByFile) + close(resultsByProgrammingLanguage) + + // Now we have chunk of sums. 
We want to reduce its into a single object + wg.Add(1) + go func() { + defer wg.Done() + for chunk := range resultsByClass { + r := r.mergeChunks(projectAggregated.ByClass, chunk) + projectAggregated.ByClass = r } - for _, function := range functions { - if function.Stmts.Analyze == nil || function.Stmts.Analyze.Complexity == nil { - continue - } - if function.Stmts.Analyze.Complexity != nil { - - *file.Stmts.Analyze.Complexity.Cyclomatic += *function.Stmts.Analyze.Complexity.Cyclomatic - } + }() + + wg.Add(1) + go func() { + defer wg.Done() + for chunk := range resultsByFile { + r := r.mergeChunks(projectAggregated.ByFile, chunk) + projectAggregated.ByFile = r } + }() - // Coupling - // Store relations, with counter - for _, class := range classes { - if class.Stmts == nil || class.Stmts.Analyze == nil { - continue - } - if class.Stmts.Analyze.Coupling == nil { - class.Stmts.Analyze.Coupling = &pb.Coupling{ - Efferent: 0, - Afferent: 0, - } - } - class.Stmts.Analyze.Coupling.Afferent = 0 - - if class.Name == nil { - // avoid nil pointer during tests - continue - } - - // if in hashmap - if _, ok := aggregated.ClassesAfferentCoupling[class.Name.Qualified]; ok { - class.Stmts.Analyze.Coupling.Afferent = int32(aggregated.ClassesAfferentCoupling[class.Name.Qualified]) - - file.Stmts.Analyze.Coupling.Afferent += class.Stmts.Analyze.Coupling.Afferent - } - - // instability - if class.Stmts.Analyze.Coupling.Afferent > 0 || class.Stmts.Analyze.Coupling.Efferent > 0 { - // Ce / (Ce + Ca) - instability := float32(class.Stmts.Analyze.Coupling.Efferent) / float32(class.Stmts.Analyze.Coupling.Efferent+class.Stmts.Analyze.Coupling.Afferent) - class.Stmts.Analyze.Coupling.Instability = instability + wg.Add(1) + go func() { + mu.Lock() + defer wg.Done() + defer mu.Unlock() - // to consolidate - aggregated.AverageInstability += float64(instability) + for chunk := range resultsByProgrammingLanguage { + for k, v := range *chunk { + projectAggregated.ByProgrammingLanguage[k] = v } } 
+ }() - dependencies := file.Stmts.StmtExternalDependencies - - if dependencies != nil { - for _, dependency := range dependencies { - namespaceTo := dependency.Namespace - namespaceFrom := dependency.From - - // Keep only 2 levels in namespace - reg := regexp.MustCompile("[^A-Za-z0-9.]+") - separator := reg.FindString(namespaceFrom) - parts := reg.Split(namespaceTo, -1) - if len(parts) > 2 { - namespaceTo = parts[0] + separator + parts[1] - } - - parts = reg.Split(namespaceFrom, -1) - if len(parts) > 2 { - namespaceFrom = parts[0] + separator + parts[1] - } - - // if same, continue - if namespaceFrom == namespaceTo { - continue - } + wg.Wait() - // if root namespace, continue - if namespaceFrom == "" || namespaceTo == "" { - continue - } - - // create the map if not exists - if _, ok := aggregated.PackageRelations[namespaceFrom]; !ok { - aggregated.PackageRelations[namespaceFrom] = make(map[string]int) - } - - if _, ok := aggregated.PackageRelations[namespaceFrom][namespaceTo]; !ok { - aggregated.PackageRelations[namespaceFrom][namespaceTo] = 0 - } - - // increment the counter - aggregated.PackageRelations[namespaceFrom][namespaceTo]++ - } - } + // Now we have sums. 
We want to reduce metrics and get the averages + projectAggregated.ByClass = r.reduceMetrics(projectAggregated.ByClass) + projectAggregated.ByFile = r.reduceMetrics(projectAggregated.ByFile) + for k, v := range projectAggregated.ByProgrammingLanguage { + v = r.reduceMetrics(v) + f := r.mapCoupling(&v) + projectAggregated.ByProgrammingLanguage[k] = f } - // Consolidate - aggregated.AverageInstability = aggregated.AverageInstability / float64(aggregated.NbClasses) + // Coupling (should be done separately, to avoid race condition) + projectAggregated.ByClass = r.mapCoupling(&projectAggregated.ByClass) + projectAggregated.ByFile = r.mapCoupling(&projectAggregated.ByFile) - // Count commits for the period based on `ResultOfGitAnalysis` data - aggregated.ResultOfGitAnalysis = r.gitSummaries - if aggregated.ResultOfGitAnalysis != nil { - for _, result := range aggregated.ResultOfGitAnalysis { - aggregated.CommitCountForPeriod += result.CountCommitsForLanguage - } - } + // Risks + riskAnalyzer := NewRiskAnalyzer() + riskAnalyzer.Analyze(projectAggregated) - // Bus factor and other metrics based on aggregated data - for _, analyzer := range r.analyzers { - analyzer.Calculate(aggregated) - } + // For all languages + projectAggregated.Combined = projectAggregated.ByFile + projectAggregated.ErroredFiles = projectAggregated.ByFile.ErroredFiles + + return projectAggregated } // Add an analyzer to the aggregator @@ -502,53 +384,57 @@ func (r *Aggregator) WithAggregateAnalyzer(analyzer AggregateAnalyzer) { r.analyzers = append(r.analyzers, analyzer) } +// Set the files and branch to compare with func (r *Aggregator) WithComparaison(allResultsCloned []*pb.File, comparedBranch string) { r.ComparedFiles = allResultsCloned r.ComparedBranch = comparedBranch } -// Calculate the aggregated data -func (r *Aggregator) calculateSums(file *pb.File, specificAggregation *Aggregated) { - classes := Engine.GetClassesInFile(file) - functions := Engine.GetFunctionsInFile(file) +// Map the sums of 
a file to the aggregated object +func (r *Aggregator) mapSums(file *pb.File, specificAggregation Aggregated) Aggregated { + // copy the specific aggregation to new object to avoid side effects + result := specificAggregation + result.NbFiles++ - if specificAggregation.ConcernedFiles == nil { - specificAggregation.ConcernedFiles = make([]*pb.File, 0) + // deal with errors + if len(file.Errors) > 0 { + result.ErroredFiles = append(result.ErroredFiles, file) + return result } - specificAggregation.ConcernedFiles = append(specificAggregation.ConcernedFiles, file) - - // Number of classes - specificAggregation.NbClasses += len(classes) - - // Prepare the file for analysis if file.Stmts == nil { - return + return result } - if file.Stmts.Analyze == nil { - file.Stmts.Analyze = &pb.Analyze{} - } + classes := Engine.GetClassesInFile(file) + functions := Engine.GetFunctionsInFile(file) - // lines of code (it should be done in the analayzer. This case occurs only in test, or when the analyzer has issue) - if file.LinesOfCode == nil && file.Stmts.Analyze.Volume != nil { - file.LinesOfCode = &pb.LinesOfCode{ - LinesOfCode: *file.Stmts.Analyze.Volume.Loc, - CommentLinesOfCode: *file.Stmts.Analyze.Volume.Cloc, - LogicalLinesOfCode: *file.Stmts.Analyze.Volume.Lloc, + // Number of classes + result.NbClasses += len(classes) + + // Ensure LOC is set + if file.LinesOfCode == nil { + if file.Stmts != nil && file.Stmts.Analyze != nil && file.Stmts.Analyze.Volume != nil { + file.LinesOfCode = &pb.LinesOfCode{ + LinesOfCode: *file.Stmts.Analyze.Volume.Loc, + CommentLinesOfCode: *file.Stmts.Analyze.Volume.Cloc, + LogicalLinesOfCode: *file.Stmts.Analyze.Volume.Lloc, + } + } else { + file.LinesOfCode = &pb.LinesOfCode{ + LinesOfCode: 0, + CommentLinesOfCode: 0, + LogicalLinesOfCode: 0, + } } } - // Prepare the file for analysis - if file.Stmts.Analyze == nil { - file.Stmts.Analyze = &pb.Analyze{} - } - if file.Stmts.Analyze.Complexity == nil { - zero := int32(0) - 
file.Stmts.Analyze.Complexity = &pb.Complexity{ - Cyclomatic: &zero, - } - } + result.Loc.Sum += float64(file.LinesOfCode.LinesOfCode) + result.Loc.Counter++ + result.Cloc.Sum += float64(file.LinesOfCode.CommentLinesOfCode) + result.Cloc.Counter++ + result.Lloc.Sum += float64(file.LinesOfCode.LogicalLinesOfCode) + result.Lloc.Counter++ // Functions for _, function := range functions { @@ -557,33 +443,121 @@ func (r *Aggregator) calculateSums(file *pb.File, specificAggregation *Aggregate continue } - specificAggregation.NbMethods++ + result.NbMethods++ // Average cyclomatic complexity per method if function.Stmts.Analyze != nil && function.Stmts.Analyze.Complexity != nil { if function.Stmts.Analyze.Complexity.Cyclomatic != nil { - specificAggregation.AverageCyclomaticComplexityPerMethod += float64(*function.Stmts.Analyze.Complexity.Cyclomatic) + + // @todo: only for functions and methods of classes (not interfaces) + // otherwise, average may be lower than 1 + ccn := float64(*function.Stmts.Analyze.Complexity.Cyclomatic) + result.CyclomaticComplexityPerMethod.Sum += ccn + result.CyclomaticComplexityPerMethod.Counter++ + if specificAggregation.CyclomaticComplexityPerMethod.Min == 0 || ccn < specificAggregation.CyclomaticComplexityPerMethod.Min { + result.CyclomaticComplexityPerMethod.Min = ccn + } + if specificAggregation.CyclomaticComplexityPerMethod.Max == 0 || ccn > specificAggregation.CyclomaticComplexityPerMethod.Max { + result.CyclomaticComplexityPerMethod.Max = ccn + } + + result.CyclomaticComplexity.Sum += ccn + result.CyclomaticComplexity.Counter++ + if specificAggregation.CyclomaticComplexity.Min == 0 || ccn < specificAggregation.CyclomaticComplexity.Min { + result.CyclomaticComplexity.Min = ccn + } + if specificAggregation.CyclomaticComplexity.Max == 0 || ccn > specificAggregation.CyclomaticComplexity.Max { + result.CyclomaticComplexity.Max = ccn + } } } // Average maintainability index per method if function.Stmts.Analyze != nil && 
function.Stmts.Analyze.Maintainability != nil { if function.Stmts.Analyze.Maintainability.MaintainabilityIndex != nil && !math.IsNaN(float64(*function.Stmts.Analyze.Maintainability.MaintainabilityIndex)) { - specificAggregation.AverageMIPerMethod += float64(*function.Stmts.Analyze.Maintainability.MaintainabilityIndex) - specificAggregation.AverageMIwocPerMethod += float64(*function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments) - specificAggregation.AverageMIcwPerMethod += float64(*function.Stmts.Analyze.Maintainability.CommentWeight) + result.MaintainabilityIndex.Sum += *function.Stmts.Analyze.Maintainability.MaintainabilityIndex + result.MaintainabilityIndex.Counter++ + if specificAggregation.MaintainabilityIndex.Min == 0 || *function.Stmts.Analyze.Maintainability.MaintainabilityIndex < specificAggregation.MaintainabilityIndex.Min { + result.MaintainabilityIndex.Min = *function.Stmts.Analyze.Maintainability.MaintainabilityIndex + } + if specificAggregation.MaintainabilityIndex.Max == 0 || *function.Stmts.Analyze.Maintainability.MaintainabilityIndex > specificAggregation.MaintainabilityIndex.Max { + result.MaintainabilityIndex.Max = *function.Stmts.Analyze.Maintainability.MaintainabilityIndex + } + } + + // Maintainability index without comments + if function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments != nil && !math.IsNaN(float64(*function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments)) { + result.MaintainabilityIndexWithoutComments.Sum += *function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments + result.MaintainabilityIndexWithoutComments.Counter++ + if specificAggregation.MaintainabilityIndexWithoutComments.Min == 0 || *function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments < specificAggregation.MaintainabilityIndexWithoutComments.Min { + result.MaintainabilityIndexWithoutComments.Min = *function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments 
+ } + if specificAggregation.MaintainabilityIndexWithoutComments.Max == 0 || *function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments > specificAggregation.MaintainabilityIndexWithoutComments.Max { + result.MaintainabilityIndexWithoutComments.Max = *function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments + } + } + + // Comment weight + if function.Stmts.Analyze.Maintainability.CommentWeight != nil && !math.IsNaN(float64(*function.Stmts.Analyze.Maintainability.CommentWeight)) { + result.MaintainabilityCommentWeight.Sum += *function.Stmts.Analyze.Maintainability.CommentWeight + result.MaintainabilityCommentWeight.Counter++ + if specificAggregation.MaintainabilityCommentWeight.Min == 0 || *function.Stmts.Analyze.Maintainability.CommentWeight < specificAggregation.MaintainabilityCommentWeight.Min { + result.MaintainabilityCommentWeight.Min = *function.Stmts.Analyze.Maintainability.CommentWeight + } + if specificAggregation.MaintainabilityCommentWeight.Max == 0 || *function.Stmts.Analyze.Maintainability.CommentWeight > specificAggregation.MaintainabilityCommentWeight.Max { + result.MaintainabilityCommentWeight.Max = *function.Stmts.Analyze.Maintainability.CommentWeight + } + } + + // Maintainability index per method + if function.Stmts.Analyze.Maintainability.MaintainabilityIndex != nil && !math.IsNaN(float64(*function.Stmts.Analyze.Maintainability.MaintainabilityIndex)) { + result.MaintainabilityPerMethod.Sum += *function.Stmts.Analyze.Maintainability.MaintainabilityIndex + result.MaintainabilityPerMethod.Counter++ + if specificAggregation.MaintainabilityPerMethod.Min == 0 || *function.Stmts.Analyze.Maintainability.MaintainabilityIndex < specificAggregation.MaintainabilityPerMethod.Min { + result.MaintainabilityPerMethod.Min = *function.Stmts.Analyze.Maintainability.MaintainabilityIndex + } + if specificAggregation.MaintainabilityPerMethod.Max == 0 || *function.Stmts.Analyze.Maintainability.MaintainabilityIndex > 
specificAggregation.MaintainabilityPerMethod.Max { + result.MaintainabilityPerMethod.Max = *function.Stmts.Analyze.Maintainability.MaintainabilityIndex + } + } + + // Maintainability index per method without comments + if function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments != nil && !math.IsNaN(float64(*function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments)) { + result.MaintainabilityPerMethodWithoutComments.Sum += *function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments + result.MaintainabilityPerMethodWithoutComments.Counter++ + if specificAggregation.MaintainabilityPerMethodWithoutComments.Min == 0 || *function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments < specificAggregation.MaintainabilityPerMethodWithoutComments.Min { + result.MaintainabilityPerMethodWithoutComments.Min = *function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments + } + if specificAggregation.MaintainabilityPerMethodWithoutComments.Max == 0 || *function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments > specificAggregation.MaintainabilityPerMethodWithoutComments.Max { + result.MaintainabilityPerMethodWithoutComments.Max = *function.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments + } + } + + // Comment weight per method + if function.Stmts.Analyze.Maintainability.CommentWeight != nil && !math.IsNaN(float64(*function.Stmts.Analyze.Maintainability.CommentWeight)) { + result.MaintainabilityCommentWeightPerMethod.Sum += *function.Stmts.Analyze.Maintainability.CommentWeight + result.MaintainabilityCommentWeightPerMethod.Counter++ + if specificAggregation.MaintainabilityCommentWeightPerMethod.Min == 0 || *function.Stmts.Analyze.Maintainability.CommentWeight < specificAggregation.MaintainabilityCommentWeightPerMethod.Min { + result.MaintainabilityCommentWeightPerMethod.Min = *function.Stmts.Analyze.Maintainability.CommentWeight + } + if 
specificAggregation.MaintainabilityCommentWeightPerMethod.Max == 0 || *function.Stmts.Analyze.Maintainability.CommentWeight > specificAggregation.MaintainabilityCommentWeightPerMethod.Max { + result.MaintainabilityCommentWeightPerMethod.Max = *function.Stmts.Analyze.Maintainability.CommentWeight + } } } // average lines of code per method if function.Stmts.Analyze != nil && function.Stmts.Analyze.Volume != nil { if function.Stmts.Analyze.Volume.Loc != nil { - specificAggregation.AverageLocPerMethod += float64(*function.Stmts.Analyze.Volume.Loc) + result.LocPerMethod.Sum += float64(*function.Stmts.Analyze.Volume.Loc) + result.LocPerMethod.Counter++ } if function.Stmts.Analyze.Volume.Cloc != nil { - specificAggregation.AverageClocPerMethod += float64(*function.Stmts.Analyze.Volume.Cloc) + result.ClocPerMethod.Sum += float64(*function.Stmts.Analyze.Volume.Cloc) + result.ClocPerMethod.Counter++ } if function.Stmts.Analyze.Volume.Lloc != nil { - specificAggregation.AverageLlocPerMethod += float64(*function.Stmts.Analyze.Volume.Lloc) + result.LlocPerMethod.Sum += float64(*function.Stmts.Analyze.Volume.Lloc) + result.LlocPerMethod.Counter++ } } } @@ -596,53 +570,85 @@ func (r *Aggregator) calculateSums(file *pb.File, specificAggregation *Aggregate // Number of classes with code //if class.LinesOfCode != nil && class.LinesOfCode.LinesOfCode > 0 { - specificAggregation.NbClassesWithCode++ + result.NbClassesWithCode++ //} // Maintainability Index if class.Stmts.Analyze.Maintainability != nil { if class.Stmts.Analyze.Maintainability.MaintainabilityIndex != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Maintainability.MaintainabilityIndex)) { - specificAggregation.AverageMI += float64(*class.Stmts.Analyze.Maintainability.MaintainabilityIndex) - specificAggregation.AverageMIwoc += float64(*class.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments) - specificAggregation.AverageMIcw += float64(*class.Stmts.Analyze.Maintainability.CommentWeight) + 
result.MaintainabilityIndex.Sum += *class.Stmts.Analyze.Maintainability.MaintainabilityIndex + result.MaintainabilityIndex.Counter++ + if specificAggregation.MaintainabilityIndex.Min == 0 || *class.Stmts.Analyze.Maintainability.MaintainabilityIndex < specificAggregation.MaintainabilityIndex.Min { + result.MaintainabilityIndex.Min = *class.Stmts.Analyze.Maintainability.MaintainabilityIndex + } + if specificAggregation.MaintainabilityIndex.Max == 0 || *class.Stmts.Analyze.Maintainability.MaintainabilityIndex > specificAggregation.MaintainabilityIndex.Max { + result.MaintainabilityIndex.Max = *class.Stmts.Analyze.Maintainability.MaintainabilityIndex + } + } + if class.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments)) { + result.MaintainabilityIndexWithoutComments.Sum += *class.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments + result.MaintainabilityIndexWithoutComments.Counter++ + if specificAggregation.MaintainabilityIndexWithoutComments.Min == 0 || *class.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments < specificAggregation.MaintainabilityIndexWithoutComments.Min { + result.MaintainabilityIndexWithoutComments.Min = *class.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments + } + if specificAggregation.MaintainabilityIndexWithoutComments.Max == 0 || *class.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments > specificAggregation.MaintainabilityIndexWithoutComments.Max { + result.MaintainabilityIndexWithoutComments.Max = *class.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments + } } } // Coupling if class.Stmts.Analyze.Coupling != nil { - specificAggregation.AverageInstability += float64(class.Stmts.Analyze.Coupling.Instability) - specificAggregation.AverageEfferentCoupling += float64(class.Stmts.Analyze.Coupling.Efferent) - specificAggregation.AverageAfferentCoupling += 
float64(class.Stmts.Analyze.Coupling.Afferent) + result.EfferentCoupling.Sum += float64(class.Stmts.Analyze.Coupling.Efferent) + result.EfferentCoupling.Counter++ + result.AfferentCoupling.Sum += float64(class.Stmts.Analyze.Coupling.Afferent) + result.AfferentCoupling.Counter++ + + // Instability for class + if class.Stmts.Analyze.Coupling.Efferent > 0 { + class.Stmts.Analyze.Coupling.Instability = float64(class.Stmts.Analyze.Coupling.Efferent) / float64(class.Stmts.Analyze.Coupling.Efferent+class.Stmts.Analyze.Coupling.Afferent) + } } // cyclomatic complexity per class if class.Stmts.Analyze.Complexity != nil && class.Stmts.Analyze.Complexity.Cyclomatic != nil { - specificAggregation.AverageCyclomaticComplexityPerClass += float64(*class.Stmts.Analyze.Complexity.Cyclomatic) - if specificAggregation.MinCyclomaticComplexity == 0 || int(*class.Stmts.Analyze.Complexity.Cyclomatic) < specificAggregation.MinCyclomaticComplexity { - specificAggregation.MinCyclomaticComplexity = int(*class.Stmts.Analyze.Complexity.Cyclomatic) + + result.CyclomaticComplexityPerClass.Sum += float64(*class.Stmts.Analyze.Complexity.Cyclomatic) + result.CyclomaticComplexityPerClass.Counter++ + if specificAggregation.CyclomaticComplexityPerClass.Min == 0 || float64(*class.Stmts.Analyze.Complexity.Cyclomatic) < specificAggregation.CyclomaticComplexityPerClass.Min { + result.CyclomaticComplexityPerClass.Min = float64(*class.Stmts.Analyze.Complexity.Cyclomatic) } - if specificAggregation.MaxCyclomaticComplexity == 0 || int(*class.Stmts.Analyze.Complexity.Cyclomatic) > specificAggregation.MaxCyclomaticComplexity { - specificAggregation.MaxCyclomaticComplexity = int(*class.Stmts.Analyze.Complexity.Cyclomatic) + if specificAggregation.CyclomaticComplexityPerClass.Max == 0 || float64(*class.Stmts.Analyze.Complexity.Cyclomatic) > specificAggregation.CyclomaticComplexityPerClass.Max { + result.CyclomaticComplexityPerClass.Max = float64(*class.Stmts.Analyze.Complexity.Cyclomatic) + } + + 
result.CyclomaticComplexity.Sum += float64(*class.Stmts.Analyze.Complexity.Cyclomatic) + result.CyclomaticComplexity.Counter++ + if specificAggregation.CyclomaticComplexity.Min == 0 || float64(*class.Stmts.Analyze.Complexity.Cyclomatic) < specificAggregation.CyclomaticComplexity.Min { + result.CyclomaticComplexity.Min = float64(*class.Stmts.Analyze.Complexity.Cyclomatic) + } + if specificAggregation.CyclomaticComplexity.Max == 0 || float64(*class.Stmts.Analyze.Complexity.Cyclomatic) > specificAggregation.CyclomaticComplexity.Max { + result.CyclomaticComplexity.Max = float64(*class.Stmts.Analyze.Complexity.Cyclomatic) } } // Halstead if class.Stmts.Analyze.Volume != nil { - if class.Stmts.Analyze.Volume.HalsteadDifficulty != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Volume.HalsteadDifficulty)) { - specificAggregation.AverageHalsteadDifficulty += float64(*class.Stmts.Analyze.Volume.HalsteadDifficulty) - specificAggregation.SumHalsteadDifficulty += float64(*class.Stmts.Analyze.Volume.HalsteadDifficulty) + if class.Stmts.Analyze.Volume.HalsteadDifficulty != nil && !math.IsNaN(*class.Stmts.Analyze.Volume.HalsteadDifficulty) { + result.HalsteadDifficulty.Sum += *class.Stmts.Analyze.Volume.HalsteadDifficulty + result.HalsteadDifficulty.Counter++ } - if class.Stmts.Analyze.Volume.HalsteadEffort != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Volume.HalsteadEffort)) { - specificAggregation.AverageHalsteadEffort += float64(*class.Stmts.Analyze.Volume.HalsteadEffort) - specificAggregation.SumHalsteadEffort += float64(*class.Stmts.Analyze.Volume.HalsteadEffort) + if class.Stmts.Analyze.Volume.HalsteadEffort != nil && !math.IsNaN(*class.Stmts.Analyze.Volume.HalsteadEffort) { + result.HalsteadEffort.Sum += *class.Stmts.Analyze.Volume.HalsteadEffort + result.HalsteadEffort.Counter++ } - if class.Stmts.Analyze.Volume.HalsteadVolume != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Volume.HalsteadVolume)) { - specificAggregation.AverageHalsteadVolume += 
float64(*class.Stmts.Analyze.Volume.HalsteadVolume) - specificAggregation.SumHalsteadVolume += float64(*class.Stmts.Analyze.Volume.HalsteadVolume) + if class.Stmts.Analyze.Volume.HalsteadVolume != nil && !math.IsNaN(*class.Stmts.Analyze.Volume.HalsteadVolume) { + result.HalsteadVolume.Sum += *class.Stmts.Analyze.Volume.HalsteadVolume + result.HalsteadVolume.Counter++ } - if class.Stmts.Analyze.Volume.HalsteadTime != nil && !math.IsNaN(float64(*class.Stmts.Analyze.Volume.HalsteadTime)) { - specificAggregation.AverageHalsteadTime += float64(*class.Stmts.Analyze.Volume.HalsteadTime) - specificAggregation.SumHalsteadTime += float64(*class.Stmts.Analyze.Volume.HalsteadTime) + if class.Stmts.Analyze.Volume.HalsteadTime != nil && !math.IsNaN(*class.Stmts.Analyze.Volume.HalsteadTime) { + result.HalsteadTime.Sum += *class.Stmts.Analyze.Volume.HalsteadTime + result.HalsteadTime.Counter++ } } @@ -653,24 +659,6 @@ func (r *Aggregator) calculateSums(file *pb.File, specificAggregation *Aggregate Afferent: 0, } } - class.Stmts.Analyze.Coupling.Efferent = 0 - uniqueDependencies := make(map[string]bool) - for _, dependency := range class.Stmts.StmtExternalDependencies { - dependencyName := dependency.ClassName - - // check if dependency is already in hashmap - if _, ok := specificAggregation.ClassesAfferentCoupling[dependencyName]; !ok { - specificAggregation.ClassesAfferentCoupling[dependencyName] = 0 - } - specificAggregation.ClassesAfferentCoupling[dependencyName]++ - - // check if dependency is unique - if _, ok := uniqueDependencies[dependencyName]; !ok { - uniqueDependencies[dependencyName] = true - } - } - - class.Stmts.Analyze.Coupling.Efferent = int32(len(uniqueDependencies)) // Add dependencies to file if file.Stmts.Analyze.Coupling == nil { @@ -689,9 +677,296 @@ func (r *Aggregator) calculateSums(file *pb.File, specificAggregation *Aggregate } // consolidate coupling for file - if file.Stmts.Analyze.Coupling != nil && len(classes) > 0 { + if len(classes) > 0 && 
file.Stmts.Analyze.Coupling != nil { file.Stmts.Analyze.Coupling.Efferent = file.Stmts.Analyze.Coupling.Efferent / int32(len(classes)) file.Stmts.Analyze.Coupling.Afferent = file.Stmts.Analyze.Coupling.Afferent / int32(len(classes)) } + return result +} + +// Merge the chunks of files to get the aggregated data (sums) +func (r *Aggregator) mergeChunks(aggregated Aggregated, chunk *Aggregated) Aggregated { + + result := aggregated + result.ConcernedFiles = append(result.ConcernedFiles, chunk.ConcernedFiles...) + result.NbFiles += chunk.NbFiles + result.NbClasses += chunk.NbClasses + result.NbClassesWithCode += chunk.NbClassesWithCode + result.NbMethods += chunk.NbMethods + + result.Loc.Sum += chunk.Loc.Sum + result.Loc.Counter += chunk.Loc.Counter + result.Cloc.Sum += chunk.Cloc.Sum + result.Cloc.Counter += chunk.Cloc.Counter + result.Lloc.Sum += chunk.Lloc.Sum + result.Lloc.Counter += chunk.Lloc.Counter + + result.MethodsPerClass.Sum += chunk.MethodsPerClass.Sum + result.MethodsPerClass.Counter += chunk.MethodsPerClass.Counter + result.LocPerClass.Sum += chunk.LocPerClass.Sum + result.LocPerClass.Counter += chunk.LocPerClass.Counter + result.LocPerMethod.Sum += chunk.LocPerMethod.Sum + result.LocPerMethod.Counter += chunk.LocPerMethod.Counter + result.CyclomaticComplexityPerMethod.Sum += chunk.CyclomaticComplexityPerMethod.Sum + result.CyclomaticComplexityPerMethod.Counter += chunk.CyclomaticComplexityPerMethod.Counter + + result.CyclomaticComplexityPerClass.Sum += chunk.CyclomaticComplexityPerClass.Sum + result.CyclomaticComplexityPerClass.Counter += chunk.CyclomaticComplexityPerClass.Counter + + result.CyclomaticComplexity.Sum += chunk.CyclomaticComplexity.Sum + result.CyclomaticComplexity.Counter += chunk.CyclomaticComplexity.Counter + + result.HalsteadDifficulty.Sum += chunk.HalsteadDifficulty.Sum + result.HalsteadDifficulty.Counter += chunk.HalsteadDifficulty.Counter + result.HalsteadEffort.Sum += chunk.HalsteadEffort.Sum + result.HalsteadEffort.Counter += 
chunk.HalsteadEffort.Counter + result.HalsteadVolume.Sum += chunk.HalsteadVolume.Sum + result.HalsteadVolume.Counter += chunk.HalsteadVolume.Counter + result.HalsteadTime.Sum += chunk.HalsteadTime.Sum + result.HalsteadTime.Counter += chunk.HalsteadTime.Counter + result.HalsteadBugs.Sum += chunk.HalsteadBugs.Sum + result.HalsteadBugs.Counter += chunk.HalsteadBugs.Counter + + result.MaintainabilityIndex.Sum += chunk.MaintainabilityIndex.Sum + result.MaintainabilityIndex.Counter += chunk.MaintainabilityIndex.Counter + result.MaintainabilityIndexWithoutComments.Sum += chunk.MaintainabilityIndexWithoutComments.Sum + result.MaintainabilityIndexWithoutComments.Counter += chunk.MaintainabilityIndexWithoutComments.Counter + result.MaintainabilityCommentWeight.Sum += chunk.MaintainabilityCommentWeight.Sum + result.MaintainabilityCommentWeight.Counter += chunk.MaintainabilityCommentWeight.Counter + + result.EfferentCoupling.Sum += chunk.EfferentCoupling.Sum + result.EfferentCoupling.Counter += chunk.EfferentCoupling.Counter + result.AfferentCoupling.Sum += chunk.AfferentCoupling.Sum + result.AfferentCoupling.Counter += chunk.AfferentCoupling.Counter + + result.MaintainabilityPerMethod.Sum += chunk.MaintainabilityPerMethod.Sum + result.MaintainabilityPerMethod.Counter += chunk.MaintainabilityPerMethod.Counter + result.MaintainabilityPerMethodWithoutComments.Sum += chunk.MaintainabilityPerMethodWithoutComments.Sum + result.MaintainabilityPerMethodWithoutComments.Counter += chunk.MaintainabilityPerMethodWithoutComments.Counter + result.MaintainabilityCommentWeightPerMethod.Sum += chunk.MaintainabilityCommentWeightPerMethod.Sum + result.MaintainabilityCommentWeightPerMethod.Counter += chunk.MaintainabilityCommentWeightPerMethod.Counter + + result.CommitCountForPeriod += chunk.CommitCountForPeriod + result.CommittedFilesCountForPeriod += chunk.CommittedFilesCountForPeriod + + result.PackageRelations = make(map[string]map[string]int) + for k, v := range chunk.PackageRelations { + 
result.PackageRelations[k] = v + } + + result.ErroredFiles = append(result.ErroredFiles, chunk.ErroredFiles...) + + return result +} + +// Reduce the sums to get the averages +func (r *Aggregator) reduceMetrics(aggregated Aggregated) Aggregated { + // here we reduce metrics by averaging them + result := aggregated + if result.Loc.Counter > 0 { + result.Loc.Avg = result.Loc.Sum / float64(result.Loc.Counter) + } + if result.Cloc.Counter > 0 { + result.Cloc.Avg = result.Cloc.Sum / float64(result.Cloc.Counter) + } + if result.Lloc.Counter > 0 { + result.Lloc.Avg = result.Lloc.Sum / float64(result.Lloc.Counter) + } + if result.MethodsPerClass.Counter > 0 { + result.MethodsPerClass.Avg = result.MethodsPerClass.Sum / float64(result.MethodsPerClass.Counter) + } + if result.LocPerClass.Counter > 0 { + result.LocPerClass.Avg = result.LocPerClass.Sum / float64(result.LocPerClass.Counter) + } + if result.ClocPerMethod.Counter > 0 { + result.ClocPerMethod.Avg = result.ClocPerMethod.Sum / float64(result.ClocPerMethod.Counter) + } + if result.LlocPerMethod.Counter > 0 { + result.LlocPerMethod.Avg = result.LlocPerMethod.Sum / float64(result.LlocPerMethod.Counter) + } + if result.LocPerMethod.Counter > 0 { + result.LocPerMethod.Avg = result.LocPerMethod.Sum / float64(result.LocPerMethod.Counter) + } + if result.CyclomaticComplexityPerMethod.Counter > 0 { + result.CyclomaticComplexityPerMethod.Avg = result.CyclomaticComplexityPerMethod.Sum / float64(result.CyclomaticComplexityPerMethod.Counter) + } + if result.CyclomaticComplexityPerClass.Counter > 0 { + result.CyclomaticComplexityPerClass.Avg = result.CyclomaticComplexityPerClass.Sum / float64(result.CyclomaticComplexityPerClass.Counter) + } + if result.CyclomaticComplexity.Counter > 0 { + result.CyclomaticComplexity.Avg = result.CyclomaticComplexity.Sum / float64(result.CyclomaticComplexity.Counter) + } + if result.HalsteadDifficulty.Counter > 0 { + result.HalsteadDifficulty.Avg = result.HalsteadDifficulty.Sum / 
float64(result.HalsteadDifficulty.Counter) + } + if result.HalsteadEffort.Counter > 0 { + result.HalsteadEffort.Avg = result.HalsteadEffort.Sum / float64(result.HalsteadEffort.Counter) + } + if result.HalsteadVolume.Counter > 0 { + result.HalsteadVolume.Avg = result.HalsteadVolume.Sum / float64(result.HalsteadVolume.Counter) + } + if result.HalsteadTime.Counter > 0 { + result.HalsteadTime.Avg = result.HalsteadTime.Sum / float64(result.HalsteadTime.Counter) + } + if result.MaintainabilityIndex.Counter > 0 { + result.MaintainabilityIndex.Avg = result.MaintainabilityIndex.Sum / float64(result.MaintainabilityIndex.Counter) + } + if result.MaintainabilityIndexWithoutComments.Counter > 0 { + result.MaintainabilityIndexWithoutComments.Avg = result.MaintainabilityIndexWithoutComments.Sum / float64(result.MaintainabilityIndexWithoutComments.Counter) + } + if result.MaintainabilityCommentWeight.Counter > 0 { + result.MaintainabilityCommentWeight.Avg = result.MaintainabilityCommentWeight.Sum / float64(result.MaintainabilityCommentWeight.Counter) + } + if result.MaintainabilityPerMethod.Counter > 0 { + result.MaintainabilityPerMethod.Avg = result.MaintainabilityPerMethod.Sum / float64(result.MaintainabilityPerMethod.Counter) + } + if result.MaintainabilityPerMethodWithoutComments.Counter > 0 { + result.MaintainabilityPerMethodWithoutComments.Avg = result.MaintainabilityPerMethodWithoutComments.Sum / float64(result.MaintainabilityPerMethodWithoutComments.Counter) + } + if result.MaintainabilityCommentWeightPerMethod.Counter > 0 { + result.MaintainabilityCommentWeightPerMethod.Avg = result.MaintainabilityCommentWeightPerMethod.Sum / float64(result.MaintainabilityCommentWeightPerMethod.Counter) + } + + if result.EfferentCoupling.Counter > 0 { + result.EfferentCoupling.Avg = result.EfferentCoupling.Sum / float64(result.EfferentCoupling.Counter) + } + if result.AfferentCoupling.Counter > 0 { + result.AfferentCoupling.Avg = result.AfferentCoupling.Sum / 
float64(result.AfferentCoupling.Counter) + } + + // afferent coupling + // Ce / (Ce + Ca) + if result.AfferentCoupling.Counter > 0 { + result.Instability.Avg = result.EfferentCoupling.Sum / result.AfferentCoupling.Sum + } + + // Count commits for the period based on `ResultOfGitAnalysis` data + result.ResultOfGitAnalysis = r.gitSummaries + if result.ResultOfGitAnalysis != nil { + for _, gitAnalysis := range result.ResultOfGitAnalysis { + result.CommitCountForPeriod += gitAnalysis.CountCommitsForLanguage + } + } + + // Bus factor and other metrics based on aggregated data + for _, analyzer := range r.analyzers { + analyzer.Calculate(&result) + } + + return result +} + +// Map the coupling to get the package relations and the afferent coupling +func (r *Aggregator) mapCoupling(aggregated *Aggregated) Aggregated { + result := *aggregated + reg := regexp.MustCompile("[^A-Za-z0-9.]+") + + for _, file := range aggregated.ConcernedFiles { + classes := Engine.GetClassesInFile(file) + + for _, class := range classes { + + if class == nil { + continue + } + + // dependencies + dependencies := file.Stmts.StmtExternalDependencies + + for _, dependency := range dependencies { + if dependency == nil { + continue + } + + namespaceTo := dependency.Namespace + namespaceFrom := dependency.From + + if namespaceFrom == "" || namespaceTo == "" { + continue + } + + // Keep only 2 levels in namespace + separator := reg.FindString(namespaceFrom) + parts := reg.Split(namespaceTo, -1) + if len(parts) > 2 { + namespaceTo = parts[0] + separator + parts[1] + } + + if namespaceFrom == "" || namespaceTo == "" { + continue + } + + parts = reg.Split(namespaceFrom, -1) + if len(parts) > 2 { + namespaceFrom = parts[0] + separator + parts[1] + } + + // if same, continue + if namespaceFrom == namespaceTo { + continue + } + + // if root namespace, continue + if namespaceFrom == "" || namespaceTo == "" { + continue + } + + // create the map if not exists + if _, ok := 
result.PackageRelations[namespaceFrom]; !ok { + result.PackageRelations[namespaceFrom] = make(map[string]int) + } + + if _, ok := result.PackageRelations[namespaceFrom][namespaceTo]; !ok { + result.PackageRelations[namespaceFrom][namespaceTo] = 0 + } + + // increment the counter + result.PackageRelations[namespaceFrom][namespaceTo]++ + } + + uniqueDependencies := make(map[string]bool) + for _, dependency := range class.Stmts.StmtExternalDependencies { + dependencyName := dependency.ClassName + + // check if dependency is already in hashmap + if _, ok := result.ClassesAfferentCoupling[dependencyName]; !ok { + result.ClassesAfferentCoupling[dependencyName] = 0 + } + result.ClassesAfferentCoupling[dependencyName]++ + + // check if dependency is unique + if _, ok := uniqueDependencies[dependencyName]; !ok { + uniqueDependencies[dependencyName] = true + } + } + + if class.Stmts.Analyze.Coupling == nil { + class.Stmts.Analyze.Coupling = &pb.Coupling{ + Efferent: 0, + Afferent: 0, + } + } + class.Stmts.Analyze.Coupling.Efferent = int32(len(uniqueDependencies)) + + // Afferent coupling + class.Stmts.Analyze.Coupling.Afferent = int32(len(class.Stmts.StmtExternalDependencies)) + + // Increment result + result.EfferentCoupling.Sum += float64(class.Stmts.Analyze.Coupling.Efferent) + result.EfferentCoupling.Counter++ + result.AfferentCoupling.Sum += float64(class.Stmts.Analyze.Coupling.Afferent) + result.AfferentCoupling.Counter++ + } + } + + // Afferent coupling + // Ce / (Ce + Ca) + if result.AfferentCoupling.Counter > 0 { + result.Instability.Avg = result.EfferentCoupling.Sum / result.AfferentCoupling.Sum + } + result.EfferentCoupling.Avg = result.EfferentCoupling.Sum / float64(result.EfferentCoupling.Counter) + result.AfferentCoupling.Avg = result.AfferentCoupling.Sum / float64(result.AfferentCoupling.Counter) + + return result } diff --git a/src/Analyzer/Aggregator_test.go b/src/Analyzer/Aggregator_test.go index 7f850d0..b4ca2c5 100644 --- a/src/Analyzer/Aggregator_test.go 
+++ b/src/Analyzer/Aggregator_test.go @@ -12,243 +12,200 @@ func TestConsolidate(t *testing.T) { aggregator := Aggregator{} aggregated := Aggregated{ - NbMethods: 10, + MethodsPerClass: AggregateResult{Sum: 10, Counter: 5}, NbClasses: 5, NbClassesWithCode: 5, - AverageCyclomaticComplexityPerClass: 20, - AverageHalsteadDifficulty: 30, - AverageHalsteadEffort: 40, - AverageHalsteadVolume: 50, - AverageHalsteadTime: 60, - AverageLocPerMethod: 70, - AverageClocPerMethod: 80, - AverageLlocPerMethod: 90, - AverageMI: 100, - AverageMIwoc: 110, - AverageMIcw: 120, - } - - aggregator.consolidate(&aggregated) + CyclomaticComplexityPerClass: AggregateResult{Sum: 20, Counter: 5}, + HalsteadDifficulty: AggregateResult{Sum: 30, Counter: 5}, + HalsteadEffort: AggregateResult{Sum: 40, Counter: 5}, + HalsteadVolume: AggregateResult{Sum: 50, Counter: 5}, + HalsteadTime: AggregateResult{Sum: 60, Counter: 5}, + LocPerMethod: AggregateResult{Sum: 70, Counter: 10}, + ClocPerMethod: AggregateResult{Sum: 80, Counter: 10}, + LlocPerMethod: AggregateResult{Sum: 90, Counter: 10}, + MaintainabilityIndex: AggregateResult{Sum: 100, Counter: 5}, + MaintainabilityIndexWithoutComments: AggregateResult{Sum: 110, Counter: 5}, + } + + aggregated = aggregator.reduceMetrics(aggregated) + + assert.Equal(t, float64(2), aggregated.MethodsPerClass.Avg, "Should have 2 methods per class") + assert.Equal(t, float64(10), aggregated.MethodsPerClass.Sum, "Should have 10 methods per class sum") + assert.Equal(t, float64(4), aggregated.CyclomaticComplexityPerClass.Avg, "Should have 4 cyclomatic complexity per class") + assert.Equal(t, float64(6), aggregated.HalsteadDifficulty.Avg, "Should have 6 halstead difficulty") + assert.Equal(t, float64(8), aggregated.HalsteadEffort.Avg, "Should have 8 halstead effort") + assert.Equal(t, float64(10), aggregated.HalsteadVolume.Avg, "Should have 10 halstead volume") + assert.Equal(t, float64(12), aggregated.HalsteadTime.Avg, "Should have 12 halstead time") + assert.Equal(t, 
float64(7), aggregated.LocPerMethod.Avg, "Should have 7 loc per method") + assert.Equal(t, float64(8), aggregated.ClocPerMethod.Avg, "Should have 8 cloc per method") + assert.Equal(t, float64(9), aggregated.LlocPerMethod.Avg, "Should have 9 lloc per method") + assert.Equal(t, float64(20), aggregated.MaintainabilityIndex.Avg, "Should have 20 maintainability index") + assert.Equal(t, float64(22), aggregated.MaintainabilityIndexWithoutComments.Avg, "Should have 22 maintainability index without comments") - if aggregated.AverageMethodsPerClass != 2 { - t.Errorf("Expected 2, got %f", aggregated.AverageMethodsPerClass) - } - - if aggregated.AverageCyclomaticComplexityPerClass != 4 { - t.Errorf("Expected 4, got %f", aggregated.AverageCyclomaticComplexityPerClass) - } - - if aggregated.AverageHalsteadDifficulty != 6 { - t.Errorf("Expected 6, got %f", aggregated.AverageHalsteadDifficulty) - } - - if aggregated.AverageHalsteadEffort != 8 { - t.Errorf("Expected 8, got %f", aggregated.AverageHalsteadEffort) - } - - if aggregated.AverageHalsteadVolume != 10 { - t.Errorf("Expected 10, got %f", aggregated.AverageHalsteadVolume) - } - - if aggregated.AverageHalsteadTime != 12 { - t.Errorf("Expected 12, got %f", aggregated.AverageHalsteadTime) - } - - if aggregated.AverageLocPerMethod != 7 { - t.Errorf("Expected 7, got %f", aggregated.AverageLocPerMethod) - } - - if aggregated.AverageClocPerMethod != 8 { - t.Errorf("Expected 8, got %f", aggregated.AverageClocPerMethod) - } - - if aggregated.AverageLlocPerMethod != 9 { - t.Errorf("Expected 9, got %f", aggregated.AverageLlocPerMethod) - } - - if aggregated.AverageMI != 20 { - t.Errorf("Expected 20, got %f", aggregated.AverageMI) - } - - if aggregated.AverageMIwoc != 22 { - t.Errorf("Expected 22, got %f", aggregated.AverageMIwoc) - } - - if aggregated.AverageMIcw != 24 { - t.Errorf("Expected 24, got %f", aggregated.AverageMIcw) - } } func TestCalculate(t *testing.T) { - aggregator := Aggregator{} - stmts := pb.Stmts{ - StmtFunction: 
[]*pb.StmtFunction{ - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Complexity: &pb.Complexity{ - Cyclomatic: proto.Int32(10), + + t.Run("TestCalculate", func(t *testing.T) { + aggregator := Aggregator{} + stmts := pb.Stmts{ + StmtFunction: []*pb.StmtFunction{ + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Complexity: &pb.Complexity{ + Cyclomatic: proto.Int32(10), + }, }, }, }, - }, - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Complexity: &pb.Complexity{ - Cyclomatic: proto.Int32(20), + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Complexity: &pb.Complexity{ + Cyclomatic: proto.Int32(20), + }, }, }, }, }, - }, - StmtClass: []*pb.StmtClass{ - {}, {}, {}, - }, - Analyze: &pb.Analyze{ - Volume: &pb.Volume{ - Loc: proto.Int32(100), - Cloc: proto.Int32(200), - Lloc: proto.Int32(300), + StmtClass: []*pb.StmtClass{ + {}, {}, {}, }, - }, - } - file := pb.File{ - Stmts: &stmts, - } - aggregated := Aggregated{} - aggregator.calculateSums(&file, &aggregated) - aggregator.consolidate(&aggregated) - - if aggregated.NbMethods != 2 { - t.Errorf("Expected 2, got %d", aggregated.NbMethods) - } - - if aggregated.NbClasses != 3 { - t.Errorf("Expected 3 classes, got %d", aggregated.NbClasses) - } - - if aggregated.AverageCyclomaticComplexityPerMethod != 15 { - t.Errorf("Expected AverageCyclomaticComplexityPerMethod, got %f", aggregated.AverageCyclomaticComplexityPerMethod) - } - - if aggregated.Loc != 100 { - t.Errorf("Expected 100, got %d", aggregated.Loc) - } - - if aggregated.Cloc != 200 { - t.Errorf("Expected 200, got %d", aggregated.Cloc) - } - - if aggregated.Lloc != 300 { - t.Errorf("Expected 300, got %d", aggregated.Lloc) - } + Analyze: &pb.Analyze{ + Volume: &pb.Volume{ + Loc: proto.Int32(100), + Cloc: proto.Int32(200), + Lloc: proto.Int32(300), + }, + }, + } + file := pb.File{ + Stmts: &stmts, + Path: "test.foo", + } + aggregated := Aggregated{} + aggregated = aggregator.mapSums(&file, aggregated) + aggregated.ConcernedFiles = []*pb.File{ + &file, + } + 
aggregated = aggregator.reduceMetrics(aggregated) + + assert.Equal(t, 2, aggregated.NbMethods, "Should have 2 methods") + assert.Equal(t, 3, aggregated.NbClasses, "Should have 3 classes") + assert.Equal(t, float64(15), aggregated.CyclomaticComplexityPerMethod.Avg, "Should have 15 average cyclomatic complexity per method") + assert.Equal(t, float64(100), aggregated.Loc.Avg, "Should have 100 loc") + assert.Equal(t, float64(200), aggregated.Cloc.Avg, "Should have 200 cloc") + assert.Equal(t, float64(300), aggregated.Lloc.Avg, "Should have 300 lloc") + }) } func TestAggregates(t *testing.T) { - // Create a new Aggregator with some dummy data - aggregator := Aggregator{ - files: []*pb.File{ - // file 1 - { - ProgrammingLanguage: "Go", - Stmts: &pb.Stmts{ - StmtFunction: []*pb.StmtFunction{ - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Complexity: &pb.Complexity{ - Cyclomatic: proto.Int32(10), + t.Run("TestAggregates", func(t *testing.T) { + // Create a new Aggregator with some dummy data + aggregator := Aggregator{ + files: []*pb.File{ + // file 1 + { + ProgrammingLanguage: "Go", + Stmts: &pb.Stmts{ + StmtFunction: []*pb.StmtFunction{ + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Complexity: &pb.Complexity{ + Cyclomatic: proto.Int32(10), + }, }, }, }, }, - }, - StmtClass: []*pb.StmtClass{ - // class - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float32(120), - MaintainabilityIndexWithoutComments: proto.Float32(48), - CommentWeight: proto.Float32(40), + StmtClass: []*pb.StmtClass{ + // class + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Maintainability: &pb.Maintainability{ + MaintainabilityIndex: proto.Float64(120), + MaintainabilityIndexWithoutComments: proto.Float64(48), + CommentWeight: proto.Float64(40), + }, }, }, }, - }, - // class - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float32(85), - 
MaintainabilityIndexWithoutComments: proto.Float32(48), - CommentWeight: proto.Float32(40), + // class + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Maintainability: &pb.Maintainability{ + MaintainabilityIndex: proto.Float64(85), + MaintainabilityIndexWithoutComments: proto.Float64(48), + CommentWeight: proto.Float64(40), + }, }, }, }, - }, - // class - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float32(65), - MaintainabilityIndexWithoutComments: proto.Float32(48), - CommentWeight: proto.Float32(40), + // class + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Maintainability: &pb.Maintainability{ + MaintainabilityIndex: proto.Float64(65), + MaintainabilityIndexWithoutComments: proto.Float64(48), + CommentWeight: proto.Float64(40), + }, }, }, }, - }, - // class - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float32(100), - MaintainabilityIndexWithoutComments: proto.Float32(48), - CommentWeight: proto.Float32(40), + // class + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Maintainability: &pb.Maintainability{ + MaintainabilityIndex: proto.Float64(100), + MaintainabilityIndexWithoutComments: proto.Float64(48), + CommentWeight: proto.Float64(40), + }, }, }, }, }, - }, - StmtNamespace: []*pb.StmtNamespace{ - { - Stmts: &pb.Stmts{ - StmtFunction: []*pb.StmtFunction{ - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Complexity: &pb.Complexity{ - Cyclomatic: proto.Int32(20), + StmtNamespace: []*pb.StmtNamespace{ + { + Stmts: &pb.Stmts{ + StmtFunction: []*pb.StmtFunction{ + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Complexity: &pb.Complexity{ + Cyclomatic: proto.Int32(20), + }, }, }, }, }, - }, - StmtClass: []*pb.StmtClass{ - // class - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float32(70), - MaintainabilityIndexWithoutComments: proto.Float32(48), 
- CommentWeight: proto.Float32(40), + StmtClass: []*pb.StmtClass{ + // class + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Maintainability: &pb.Maintainability{ + MaintainabilityIndex: proto.Float64(70), + MaintainabilityIndexWithoutComments: proto.Float64(48), + CommentWeight: proto.Float64(40), + }, }, }, }, - }, - // class - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float32(100), - MaintainabilityIndexWithoutComments: proto.Float32(48), - CommentWeight: proto.Float32(40), + // class + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Maintainability: &pb.Maintainability{ + MaintainabilityIndex: proto.Float64(100), + MaintainabilityIndexWithoutComments: proto.Float64(48), + CommentWeight: proto.Float64(40), + }, }, }, }, @@ -256,80 +213,80 @@ func TestAggregates(t *testing.T) { }, }, }, - }, - Analyze: &pb.Analyze{ - Volume: &pb.Volume{ - Loc: proto.Int32(100), - Cloc: proto.Int32(200), - Lloc: proto.Int32(50), + Analyze: &pb.Analyze{ + Volume: &pb.Volume{ + Loc: proto.Int32(100), + Cloc: proto.Int32(200), + Lloc: proto.Int32(50), + }, }, }, }, - }, - // file 2 - { - ProgrammingLanguage: "Go", - Stmts: &pb.Stmts{ - StmtFunction: []*pb.StmtFunction{ - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Complexity: &pb.Complexity{ - Cyclomatic: proto.Int32(60), + // file 2 + { + ProgrammingLanguage: "Go", + Stmts: &pb.Stmts{ + StmtFunction: []*pb.StmtFunction{ + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Complexity: &pb.Complexity{ + Cyclomatic: proto.Int32(60), + }, }, }, }, }, - }, - StmtClass: []*pb.StmtClass{ - // class - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float32(75), - MaintainabilityIndexWithoutComments: proto.Float32(48), - CommentWeight: proto.Float32(40), + StmtClass: []*pb.StmtClass{ + // class + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Maintainability: &pb.Maintainability{ + 
MaintainabilityIndex: proto.Float64(75), + MaintainabilityIndexWithoutComments: proto.Float64(48), + CommentWeight: proto.Float64(40), + }, }, }, }, - }, - // class - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float32(120), - MaintainabilityIndexWithoutComments: proto.Float32(48), - CommentWeight: proto.Float32(40), + // class + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Maintainability: &pb.Maintainability{ + MaintainabilityIndex: proto.Float64(120), + MaintainabilityIndexWithoutComments: proto.Float64(48), + CommentWeight: proto.Float64(40), + }, }, }, }, }, - }, - StmtNamespace: []*pb.StmtNamespace{ - { - Stmts: &pb.Stmts{ - StmtFunction: []*pb.StmtFunction{ - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Complexity: &pb.Complexity{ - Cyclomatic: proto.Int32(30), + StmtNamespace: []*pb.StmtNamespace{ + { + Stmts: &pb.Stmts{ + StmtFunction: []*pb.StmtFunction{ + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Complexity: &pb.Complexity{ + Cyclomatic: proto.Int32(30), + }, }, }, }, }, - }, - StmtClass: []*pb.StmtClass{ - // class - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float32(90), - MaintainabilityIndexWithoutComments: proto.Float32(48), - CommentWeight: proto.Float32(40), + StmtClass: []*pb.StmtClass{ + // class + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Maintainability: &pb.Maintainability{ + MaintainabilityIndex: proto.Float64(90), + MaintainabilityIndexWithoutComments: proto.Float64(48), + CommentWeight: proto.Float64(40), + }, }, }, }, @@ -337,43 +294,43 @@ func TestAggregates(t *testing.T) { }, }, }, - }, - Analyze: &pb.Analyze{ - Volume: &pb.Volume{ - Loc: proto.Int32(200), - Cloc: proto.Int32(300), - Lloc: proto.Int32(150), + Analyze: &pb.Analyze{ + Volume: &pb.Volume{ + Loc: proto.Int32(200), + Cloc: proto.Int32(300), + Lloc: proto.Int32(150), + }, }, }, }, - }, - // file 3 - { - 
ProgrammingLanguage: "Php", - Stmts: &pb.Stmts{ - StmtNamespace: []*pb.StmtNamespace{ - { - Stmts: &pb.Stmts{ - StmtFunction: []*pb.StmtFunction{ - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Complexity: &pb.Complexity{ - Cyclomatic: proto.Int32(30), + // file 3 + { + ProgrammingLanguage: "Php", + Stmts: &pb.Stmts{ + StmtNamespace: []*pb.StmtNamespace{ + { + Stmts: &pb.Stmts{ + StmtFunction: []*pb.StmtFunction{ + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Complexity: &pb.Complexity{ + Cyclomatic: proto.Int32(30), + }, }, }, }, }, - }, - StmtClass: []*pb.StmtClass{ - // class - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float32(120), - MaintainabilityIndexWithoutComments: proto.Float32(48), - CommentWeight: proto.Float32(40), + StmtClass: []*pb.StmtClass{ + // class + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Maintainability: &pb.Maintainability{ + MaintainabilityIndex: proto.Float64(120), + MaintainabilityIndexWithoutComments: proto.Float64(48), + CommentWeight: proto.Float64(40), + }, }, }, }, @@ -381,154 +338,111 @@ func TestAggregates(t *testing.T) { }, }, }, - }, - Analyze: &pb.Analyze{ - Volume: &pb.Volume{ - Loc: proto.Int32(600), - Cloc: proto.Int32(100), - Lloc: proto.Int32(400), + Analyze: &pb.Analyze{ + Volume: &pb.Volume{ + Loc: proto.Int32(600), + Cloc: proto.Int32(100), + Lloc: proto.Int32(400), + }, }, }, }, }, - }, - } + } - // Call the Aggregates method - projectAggregated := aggregator.Aggregates() + // Call the Aggregates method + projectAggregated := aggregator.Aggregates() + result := projectAggregated.Combined - // Check that the returned ProjectAggregated struct has the expected values - if projectAggregated.ByFile.NbFiles != 3 { - t.Errorf("Expected 3 files, got %d", projectAggregated.ByFile.NbFiles) - } - - // Checks on Combined aggregate - if projectAggregated.ByClass.NbClasses != 10 { - t.Errorf("Expected 10 classes, got %d", 
projectAggregated.ByClass.NbClasses) - } - - if projectAggregated.Combined.NbClasses != 10 { - t.Errorf("Expected 10 classes, got %d", projectAggregated.ByClass.NbClasses) - } - - if projectAggregated.Combined.NbMethods != 5 { - t.Errorf("Expected 5 methods, got %d", projectAggregated.Combined.NbMethods) - } + // Check that the returned ProjectAggregated struct has the expected values + assert.Equal(t, 3, result.NbFiles, "Should have 3 files") - if projectAggregated.Combined.AverageCyclomaticComplexityPerMethod != 30 { - t.Errorf("Expected AverageCyclomaticComplexityPerMethod 30, got %f", projectAggregated.Combined.AverageCyclomaticComplexityPerMethod) - } + // Checks on Combined aggregate + assert.Equal(t, 10, projectAggregated.ByClass.NbClasses, "Should have 10 classes") - if int(projectAggregated.Combined.AverageMI) != 94 { - t.Errorf("Expected MI of 94 for all files, got %v", int(projectAggregated.Combined.AverageMI)) - } + assert.Equal(t, 5, result.NbMethods, "Should have 5 methods") - // Check on Go aggregate - if projectAggregated.ByProgrammingLanguage["Go"].NbClasses != 9 { - t.Errorf("Expected 9 classes, got %d", projectAggregated.ByProgrammingLanguage["Go"].NbClasses) - } + assert.Equal(t, float64(30), result.CyclomaticComplexityPerMethod.Avg, "Should have 30 average cyclomatic complexity per method") - if projectAggregated.ByProgrammingLanguage["Go"].NbMethods != 4 { - t.Errorf("Expected 4 methods in Go, got %d", projectAggregated.ByProgrammingLanguage["Go"].NbMethods) - } + assert.Equal(t, 94, int(result.MaintainabilityIndex.Avg), "Should have 94 average maintainability index") - if projectAggregated.ByProgrammingLanguage["Go"].NbFiles != 2 { - t.Errorf("Expected 2 Go files, got %d", projectAggregated.ByProgrammingLanguage["Go"].NbFiles) - } + // Check on Go aggregate + assert.Equal(t, 9, projectAggregated.ByProgrammingLanguage["Go"].NbClasses, "Should have 9 classes") - if int(projectAggregated.ByProgrammingLanguage["Go"].AverageMI) != 91 { - 
t.Errorf("Expected MI of 91 for Go files, got %v", int(projectAggregated.ByProgrammingLanguage["Go"].AverageMI)) - } + assert.Equal(t, 4, projectAggregated.ByProgrammingLanguage["Go"].NbMethods, "Should have 4 methods in Go") - // Check on Php aggregate - if projectAggregated.ByProgrammingLanguage["Php"].NbClasses != 1 { - t.Errorf("Expected 1 class, got %d", projectAggregated.ByProgrammingLanguage["Php"].NbClasses) - } + assert.Equal(t, 2, projectAggregated.ByProgrammingLanguage["Go"].NbFiles, "Should have 2 Go files") - if projectAggregated.ByProgrammingLanguage["Php"].NbMethods != 1 { - t.Errorf("Expected 1 methods in PHP, got %d", projectAggregated.ByProgrammingLanguage["Php"].NbMethods) - } + assert.Equal(t, 91, int(projectAggregated.ByProgrammingLanguage["Go"].MaintainabilityIndex.Avg), "Should have 91 average maintainability index for Go files") - if projectAggregated.ByProgrammingLanguage["Php"].NbFiles != 1 { - t.Errorf("Expected 1 PHP files, got %d", projectAggregated.ByProgrammingLanguage["Go"].NbFiles) - } + // Check on Php aggregate + assert.Equal(t, 1, projectAggregated.ByProgrammingLanguage["Php"].NbClasses, "Should have 1 class") - if projectAggregated.ByProgrammingLanguage["Php"].AverageMI != 120 { - t.Errorf("Expected MI of 120 for PHP files, got %f", projectAggregated.ByProgrammingLanguage["Php"].AverageMI) - } + assert.Equal(t, 1, projectAggregated.ByProgrammingLanguage["Php"].NbMethods, "Should have 1 methods in PHP") - if int(projectAggregated.ByProgrammingLanguage["Php"].AverageMI) != 120 { - t.Errorf("Expected MI of 120 for PHP files, got %v", int(projectAggregated.ByProgrammingLanguage["Go"].AverageMI)) - } + assert.Equal(t, 1, projectAggregated.ByProgrammingLanguage["Php"].NbFiles, "Should have 1 PHP files") + assert.Equal(t, 120, int(projectAggregated.ByProgrammingLanguage["Php"].MaintainabilityIndex.Avg), "Should have 120 average maintainability index for PHP files") + }) } func TestCalculateMaintainabilityIndex(t *testing.T) { - 
aggregator := Aggregator{} - file := pb.File{ - Stmts: &pb.Stmts{ - StmtFunction: []*pb.StmtFunction{ - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float32(15), - MaintainabilityIndexWithoutComments: proto.Float32(20), - CommentWeight: proto.Float32(25), + t.Run("TestCalculateMaintainabilityIndex", func(t *testing.T) { + aggregator := Aggregator{} + file := pb.File{ + Stmts: &pb.Stmts{ + StmtFunction: []*pb.StmtFunction{ + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Maintainability: &pb.Maintainability{ + MaintainabilityIndex: proto.Float64(15), + MaintainabilityIndexWithoutComments: proto.Float64(20), + CommentWeight: proto.Float64(25), + }, }, }, }, - }, - { - Stmts: &pb.Stmts{ - Analyze: &pb.Analyze{ - Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float32(30), - MaintainabilityIndexWithoutComments: proto.Float32(35), - CommentWeight: proto.Float32(40), + { + Stmts: &pb.Stmts{ + Analyze: &pb.Analyze{ + Maintainability: &pb.Maintainability{ + MaintainabilityIndex: proto.Float64(30), + MaintainabilityIndexWithoutComments: proto.Float64(35), + CommentWeight: proto.Float64(40), + }, }, }, }, }, }, - }, - } - aggregated := Aggregated{} - - aggregator.calculateSums(&file, &aggregated) - aggregator.consolidate(&aggregated) - - if aggregated.AverageMI != 22.5 { - t.Errorf("Expected 22.5, got %f", aggregated.AverageMI) - } + } + aggregated := Aggregated{} - if aggregated.AverageMIwoc != 27.5 { - t.Errorf("Expected 27.5, got %f", aggregated.AverageMIwoc) - } - - if aggregated.AverageMIcw != 32.5 { - t.Errorf("Expected 32.5, got %f", aggregated.AverageMIcw) - } + aggregated = aggregator.mapSums(&file, aggregated) + aggregated = aggregator.reduceMetrics(aggregated) - // Average per method - if aggregated.AverageMIPerMethod != 22.5 { - t.Errorf("Expected AverageMIPerMethod, got %f", aggregated.AverageMIPerMethod) - } + assert.Equal(t, float64(22.5), 
aggregated.MaintainabilityIndex.Avg, "Should have 22.5 average maintainability index") + assert.Equal(t, float64(27.5), aggregated.MaintainabilityIndexWithoutComments.Avg, "Should have 27.5 average maintainability index without comments") + assert.Equal(t, float64(22.5), aggregated.MaintainabilityPerMethod.Avg, "Should have 22.5 average maintainability index per method") + }) } func TestFIlesWithErrorAreDetected(t *testing.T) { - aggregator := Aggregator{} - files := []*pb.File{ - &pb.File{ - Stmts: &pb.Stmts{}, - }, - &pb.File{ - Errors: []string{"Error1", "Error2"}, - }, - } - aggregator.files = files - aggregated := aggregator.Aggregates() + t.Run("TestFilesWithErrorAreDetected", func(t *testing.T) { + aggregator := Aggregator{} + files := []*pb.File{ + &pb.File{ + Stmts: &pb.Stmts{}, + }, + &pb.File{ + Errors: []string{"Error1", "Error2"}, + }, + } + aggregator.files = files + aggregated := aggregator.Aggregates() - assert.Equal(t, 2, aggregated.ByFile.NbFiles) - assert.Equal(t, 1, len(aggregated.ErroredFiles)) + assert.Equal(t, 2, aggregated.ByFile.NbFiles) + assert.Equal(t, 1, len(aggregated.ErroredFiles)) + }) } diff --git a/src/Analyzer/AstAnalyzer.go b/src/Analyzer/AstAnalyzer.go index 1b83374..5d80372 100644 --- a/src/Analyzer/AstAnalyzer.go +++ b/src/Analyzer/AstAnalyzer.go @@ -2,12 +2,15 @@ package Analyzer import ( "io/ioutil" + "runtime" "strconv" "sync" + "sync/atomic" Complexity "github.com/halleck45/ast-metrics/src/Analyzer/Complexity" Component "github.com/halleck45/ast-metrics/src/Analyzer/Component" Volume "github.com/halleck45/ast-metrics/src/Analyzer/Volume" + "github.com/halleck45/ast-metrics/src/Engine" pb "github.com/halleck45/ast-metrics/src/NodeType" "github.com/halleck45/ast-metrics/src/Storage" "github.com/pterm/pterm" @@ -31,25 +34,39 @@ func Start(workdir *Storage.Workdir, progressbar *pterm.SpinnerPrinter) []*pb.Fi // https://stackoverflow.com/questions/58743038/why-does-this-goroutine-not-call-wg-done channelResult := make(chan 
*pb.File, len(astFiles)) - nbParsingFiles := 0 - // in parallel, 8 process max, analyze each AST file running the runAnalysis function - for _, file := range astFiles { - wg.Add(1) - nbParsingFiles++ - go func(file string) { - defer wg.Done() - executeFileAnalysis(file, channelResult) - // details is the number of files processed / total number of files - details := strconv.Itoa(nbParsingFiles) + "/" + strconv.Itoa(len(astFiles)) - - if progressbar != nil { - progressbar.UpdateText("Analyzing (" + details + ")") + var nbParsingFiles atomic.Uint64 + + // analyze each AST file running the runAnalysis function + numWorkers := runtime.NumCPU() + filesChan := make(chan string, numWorkers) + + for i := 0; i < numWorkers; i++ { + go func() { + for file := range filesChan { + go func(file string) { + defer wg.Done() + nbParsingFiles.Add(1) + + executeFileAnalysis(file, channelResult) + + details := strconv.Itoa(int(nbParsingFiles.Load())) + "/" + strconv.Itoa(len(astFiles)) + + if progressbar != nil { + progressbar.UpdateText("Analyzing (" + details + ")") + } + }(file) } + }() + } - }(file) + for _, file := range astFiles { + wg.Add(1) + filesChan <- file } wg.Wait() + close(filesChan) + if progressbar != nil { progressbar.Info("AST Analysis finished") } @@ -114,6 +131,10 @@ func executeFileAnalysis(file string, channelResult chan<- *pb.File) error { // visit AST root.Visit() + + // Ensure structure is complete + Engine.EnsureNodeTypeIsComplete(pbFile) + channelResult <- pbFile return nil } diff --git a/src/Analyzer/Comparator.go b/src/Analyzer/Comparator.go index eb4057c..8550618 100644 --- a/src/Analyzer/Comparator.go +++ b/src/Analyzer/Comparator.go @@ -9,9 +9,9 @@ type Comparator struct { } const ( - ADDED = "added" - DELETED = "deleted" - MODIFIED = "modified" + ADDED = "added" + DELETED = "deleted" + MODIFIED = "modified" UNCHANGED = "unchanged" ) @@ -88,36 +88,35 @@ func (c *Comparator) Compare(first Aggregated, second Aggregated) Comparaison { 
comparaison.NbClasses = first.NbClasses - second.NbClasses comparaison.NbClassesWithCode = first.NbClassesWithCode - second.NbClassesWithCode comparaison.NbMethods = first.NbMethods - second.NbMethods - comparaison.Loc = first.Loc - second.Loc - comparaison.Cloc = first.Cloc - second.Cloc - comparaison.Lloc = first.Lloc - second.Lloc - comparaison.AverageMethodsPerClass = first.AverageMethodsPerClass - second.AverageMethodsPerClass - comparaison.AverageLocPerMethod = first.AverageLocPerMethod - second.AverageLocPerMethod - comparaison.AverageLlocPerMethod = first.AverageLlocPerMethod - second.AverageLlocPerMethod - comparaison.AverageClocPerMethod = first.AverageClocPerMethod - second.AverageClocPerMethod - comparaison.AverageCyclomaticComplexityPerMethod = first.AverageCyclomaticComplexityPerMethod - second.AverageCyclomaticComplexityPerMethod - comparaison.AverageCyclomaticComplexityPerClass = first.AverageCyclomaticComplexityPerClass - second.AverageCyclomaticComplexityPerClass - comparaison.MinCyclomaticComplexity = first.MinCyclomaticComplexity - second.MinCyclomaticComplexity - comparaison.MaxCyclomaticComplexity = first.MaxCyclomaticComplexity - second.MaxCyclomaticComplexity - comparaison.AverageHalsteadDifficulty = first.AverageHalsteadDifficulty - second.AverageHalsteadDifficulty - comparaison.AverageHalsteadEffort = first.AverageHalsteadEffort - second.AverageHalsteadEffort - comparaison.AverageHalsteadVolume = first.AverageHalsteadVolume - second.AverageHalsteadVolume - comparaison.AverageHalsteadTime = first.AverageHalsteadTime - second.AverageHalsteadTime - comparaison.AverageHalsteadBugs = first.AverageHalsteadBugs - second.AverageHalsteadBugs - comparaison.SumHalsteadDifficulty = first.SumHalsteadDifficulty - second.SumHalsteadDifficulty - comparaison.SumHalsteadEffort = first.SumHalsteadEffort - second.SumHalsteadEffort - comparaison.SumHalsteadVolume = first.SumHalsteadVolume - second.SumHalsteadVolume - comparaison.SumHalsteadTime = 
first.SumHalsteadTime - second.SumHalsteadTime - comparaison.SumHalsteadBugs = first.SumHalsteadBugs - second.SumHalsteadBugs - comparaison.AverageMI = first.AverageMI - second.AverageMI - comparaison.AverageMIwoc = first.AverageMIwoc - second.AverageMIwoc - comparaison.AverageMIcw = first.AverageMIcw - second.AverageMIcw - comparaison.AverageMIPerMethod = first.AverageMIPerMethod - second.AverageMIPerMethod - comparaison.AverageMIwocPerMethod = first.AverageMIwocPerMethod - second.AverageMIwocPerMethod - comparaison.AverageMIcwPerMethod = first.AverageMIcwPerMethod - second.AverageMIcwPerMethod - comparaison.AverageAfferentCoupling = first.AverageAfferentCoupling - second.AverageAfferentCoupling - comparaison.AverageEfferentCoupling = first.AverageEfferentCoupling - second.AverageEfferentCoupling - comparaison.AverageInstability = first.AverageInstability - second.AverageInstability + comparaison.Loc = int(first.Loc.Sum - second.Loc.Sum) + comparaison.Cloc = int(first.Cloc.Sum - second.Cloc.Sum) + comparaison.Lloc = int(first.Lloc.Sum - second.Lloc.Sum) + comparaison.AverageMethodsPerClass = first.MethodsPerClass.Avg - second.MethodsPerClass.Avg + comparaison.AverageLocPerMethod = first.LocPerMethod.Avg - second.LocPerMethod.Avg + comparaison.AverageLlocPerMethod = first.LlocPerMethod.Avg - second.LlocPerMethod.Avg + comparaison.AverageClocPerMethod = first.ClocPerMethod.Avg - second.ClocPerMethod.Avg + comparaison.AverageCyclomaticComplexityPerMethod = first.CyclomaticComplexityPerMethod.Avg - second.CyclomaticComplexityPerMethod.Avg + comparaison.AverageCyclomaticComplexityPerClass = first.CyclomaticComplexityPerClass.Avg - second.CyclomaticComplexityPerClass.Avg + comparaison.MinCyclomaticComplexity = int(first.CyclomaticComplexityPerMethod.Min - second.CyclomaticComplexityPerMethod.Min) + comparaison.MaxCyclomaticComplexity = int(first.CyclomaticComplexityPerMethod.Max - second.CyclomaticComplexityPerMethod.Max) + comparaison.AverageHalsteadDifficulty = 
first.HalsteadDifficulty.Avg - second.HalsteadDifficulty.Avg + comparaison.AverageHalsteadEffort = first.HalsteadEffort.Avg - second.HalsteadEffort.Avg + comparaison.AverageHalsteadVolume = first.HalsteadVolume.Avg - second.HalsteadVolume.Avg + comparaison.AverageHalsteadTime = first.HalsteadTime.Avg - second.HalsteadTime.Avg + comparaison.AverageHalsteadBugs = first.HalsteadBugs.Avg - second.HalsteadBugs.Avg + comparaison.SumHalsteadDifficulty = first.HalsteadDifficulty.Sum - second.HalsteadDifficulty.Sum + comparaison.SumHalsteadEffort = first.HalsteadEffort.Sum - second.HalsteadEffort.Sum + comparaison.SumHalsteadVolume = first.HalsteadVolume.Sum - second.HalsteadVolume.Sum + comparaison.SumHalsteadTime = first.HalsteadTime.Sum - second.HalsteadTime.Sum + comparaison.SumHalsteadBugs = first.HalsteadBugs.Sum - second.HalsteadBugs.Sum + comparaison.AverageMI = first.MaintainabilityIndex.Avg - second.MaintainabilityIndex.Avg + comparaison.AverageMIwoc = first.MaintainabilityIndexWithoutComments.Avg - second.MaintainabilityIndexWithoutComments.Avg + comparaison.AverageMIPerMethod = first.MaintainabilityPerMethod.Avg - second.MaintainabilityPerMethod.Avg + comparaison.AverageMIwocPerMethod = first.MaintainabilityCommentWeightPerMethod.Avg - second.MaintainabilityCommentWeightPerMethod.Avg + comparaison.AverageMIcwPerMethod = first.MaintainabilityCommentWeightPerMethod.Avg - second.MaintainabilityCommentWeightPerMethod.Avg + comparaison.AverageAfferentCoupling = first.AfferentCoupling.Avg - second.AfferentCoupling.Avg + comparaison.AverageEfferentCoupling = first.EfferentCoupling.Avg - second.EfferentCoupling.Avg + comparaison.AverageInstability = first.Instability.Avg - second.Instability.Avg comparaison.CommitCountForPeriod = first.CommitCountForPeriod - second.CommitCountForPeriod comparaison.CommittedFilesCountForPeriod = first.CommittedFilesCountForPeriod - second.CommittedFilesCountForPeriod comparaison.BusFactor = first.BusFactor - second.BusFactor @@ -224,30 
+223,30 @@ func (c *Comparator) Compare(first Aggregated, second Aggregated) Comparaison { // Halstead if file.Stmts.Analyze.Volume != nil && file.Stmts.Analyze.Volume.HalsteadDifficulty != nil && file2.Stmts.Analyze.Volume != nil && file2.Stmts.Analyze.Volume.HalsteadDifficulty != nil { - change.Comparaison.AverageHalsteadDifficulty = float64(*file.Stmts.Analyze.Volume.HalsteadDifficulty) - float64(*file2.Stmts.Analyze.Volume.HalsteadDifficulty) - change.Comparaison.AverageHalsteadEffort = float64(*file.Stmts.Analyze.Volume.HalsteadEffort) - float64(*file2.Stmts.Analyze.Volume.HalsteadEffort) - change.Comparaison.AverageHalsteadVolume = float64(*file.Stmts.Analyze.Volume.HalsteadVolume) - float64(*file2.Stmts.Analyze.Volume.HalsteadVolume) - change.Comparaison.AverageHalsteadTime = float64(*file.Stmts.Analyze.Volume.HalsteadTime) - float64(*file2.Stmts.Analyze.Volume.HalsteadTime) + change.Comparaison.AverageHalsteadDifficulty = *file.Stmts.Analyze.Volume.HalsteadDifficulty - *file2.Stmts.Analyze.Volume.HalsteadDifficulty + change.Comparaison.AverageHalsteadEffort = *file.Stmts.Analyze.Volume.HalsteadEffort - *file2.Stmts.Analyze.Volume.HalsteadEffort + change.Comparaison.AverageHalsteadVolume = *file.Stmts.Analyze.Volume.HalsteadVolume - *file2.Stmts.Analyze.Volume.HalsteadVolume + change.Comparaison.AverageHalsteadTime = *file.Stmts.Analyze.Volume.HalsteadTime - *file2.Stmts.Analyze.Volume.HalsteadTime } // Maintainability index if file.Stmts.Analyze.Maintainability != nil && file2.Stmts.Analyze.Maintainability != nil && file.Stmts.Analyze.Maintainability.MaintainabilityIndex != nil && file2.Stmts.Analyze.Maintainability.MaintainabilityIndex != nil { - change.Comparaison.AverageMI = float64(*file.Stmts.Analyze.Maintainability.MaintainabilityIndex) - float64(*file2.Stmts.Analyze.Maintainability.MaintainabilityIndex) + change.Comparaison.AverageMI = *file.Stmts.Analyze.Maintainability.MaintainabilityIndex - *file2.Stmts.Analyze.Maintainability.MaintainabilityIndex 
} if file.Stmts.Analyze.Maintainability != nil && file2.Stmts.Analyze.Maintainability != nil && file.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments != nil && file2.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments != nil { - change.Comparaison.AverageMIwoc = float64(*file.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments) - float64(*file2.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments) + change.Comparaison.AverageMIwoc = *file.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments - *file2.Stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments } // Coupling if file.Stmts.Analyze.Coupling != nil && file2.Stmts.Analyze.Coupling != nil { change.Comparaison.AverageAfferentCoupling = float64(file.Stmts.Analyze.Coupling.Afferent) - float64(file2.Stmts.Analyze.Coupling.Afferent) change.Comparaison.AverageEfferentCoupling = float64(file.Stmts.Analyze.Coupling.Efferent) - float64(file2.Stmts.Analyze.Coupling.Efferent) - change.Comparaison.AverageInstability = float64(file.Stmts.Analyze.Coupling.Instability) - float64(file2.Stmts.Analyze.Coupling.Instability) + change.Comparaison.AverageInstability = file.Stmts.Analyze.Coupling.Instability - file2.Stmts.Analyze.Coupling.Instability } // Risk if file.Stmts.Analyze.Risk != nil && file2.Stmts.Analyze.Risk != nil { - change.Comparaison.Risk = float64(file.Stmts.Analyze.Risk.Score) - float64(file2.Stmts.Analyze.Risk.Score) + change.Comparaison.Risk = file.Stmts.Analyze.Risk.Score - file2.Stmts.Analyze.Risk.Score // check if not NaN if change.Comparaison.Risk != change.Comparaison.Risk { change.Comparaison.Risk = 0 diff --git a/src/Analyzer/Component/MaintainabilityIndexVisitor.go b/src/Analyzer/Component/MaintainabilityIndexVisitor.go index 67406b0..9435978 100644 --- a/src/Analyzer/Component/MaintainabilityIndexVisitor.go +++ b/src/Analyzer/Component/MaintainabilityIndexVisitor.go @@ -65,24 +65,24 @@ func (v *MaintainabilityIndexVisitor) 
Calculate(stmts *pb.Stmts) { var lloc int32 = *stmts.Analyze.Volume.Lloc var cloc int32 = *stmts.Analyze.Volume.Cloc var cyclomatic int32 = *stmts.Analyze.Complexity.Cyclomatic - var halsteadVolume float32 = *stmts.Analyze.Volume.HalsteadVolume + var halsteadVolume float64 = *stmts.Analyze.Volume.HalsteadVolume var MIwoC float64 = 0 var MI float64 = 0 var commentWeight float64 = 0 // // maintainability index without comment - MIwoC = max((171- + MIwoC = float64(math.Max((171- (5.2*math.Log(float64(halsteadVolume)))- (0.23*float64(cyclomatic))- - (16.2*math.Log(float64(lloc))))*100/171, 0) + (16.2*math.Log(float64(lloc))))*100/171, 0)) - if math.IsInf(MIwoC, 0) { + if math.IsInf(float64(MIwoC), 0) { MIwoC = 171 } if loc > 0 { CM := float64(cloc) / float64(loc) - commentWeight = 50 * math.Sin(math.Sqrt(2.4*CM)) + commentWeight = float64(50 * math.Sin(math.Sqrt(2.4*CM))) } MI = MIwoC + commentWeight @@ -94,9 +94,9 @@ func (v *MaintainabilityIndexVisitor) Calculate(stmts *pb.Stmts) { commentWeight = 0 } - MI32 := float32(MI) - MIwoC32 := float32(MIwoC) - commentWeight32 := float32(commentWeight) + MI32 := float64(MI) + MIwoC32 := float64(MIwoC) + commentWeight32 := float64(commentWeight) if stmts.Analyze.Maintainability == nil { stmts.Analyze.Maintainability = &pb.Maintainability{} @@ -104,7 +104,7 @@ func (v *MaintainabilityIndexVisitor) Calculate(stmts *pb.Stmts) { if loc == 0 { // when class has no code - MI32 = float32(171) + MI32 = float64(171) } stmts.Analyze.Maintainability.MaintainabilityIndex = &MI32 diff --git a/src/Analyzer/Component/MaintainabilityIndexVisitor_test.go b/src/Analyzer/Component/MaintainabilityIndexVisitor_test.go index 444c44f..fbfdad3 100644 --- a/src/Analyzer/Component/MaintainabilityIndexVisitor_test.go +++ b/src/Analyzer/Component/MaintainabilityIndexVisitor_test.go @@ -1,50 +1,51 @@ package Analyzer import ( - "testing" - pb "github.com/halleck45/ast-metrics/src/NodeType" + "testing" + + pb "github.com/halleck45/ast-metrics/src/NodeType" 
) func TestItCalculateMaintainabilityIndex(t *testing.T) { - visitor := MaintainabilityIndexVisitor{} + visitor := MaintainabilityIndexVisitor{} - stmts := pb.Stmts{} - class1 := pb.StmtClass{} - class1.Stmts = &pb.Stmts{} - stmts.StmtClass = append(stmts.StmtClass, &class1) + stmts := pb.Stmts{} + class1 := pb.StmtClass{} + class1.Stmts = &pb.Stmts{} + stmts.StmtClass = append(stmts.StmtClass, &class1) - stmts.Analyze = &pb.Analyze{} - stmts.Analyze.Volume = &pb.Volume{} + stmts.Analyze = &pb.Analyze{} + stmts.Analyze.Volume = &pb.Volume{} - loc := int32(10) - lloc := int32(8) - cloc := int32(2) - cyclomatic := int32(3) - halsteadVolume := float32(10) + loc := int32(10) + lloc := int32(8) + cloc := int32(2) + cyclomatic := int32(3) + halsteadVolume := float64(10) - stmts.Analyze.Volume.Loc = &loc - stmts.Analyze.Volume.Lloc = &lloc - stmts.Analyze.Volume.Cloc = &cloc - stmts.Analyze.Complexity = &pb.Complexity{} - stmts.Analyze.Complexity.Cyclomatic = &cyclomatic - stmts.Analyze.Volume.HalsteadVolume = &halsteadVolume + stmts.Analyze.Volume.Loc = &loc + stmts.Analyze.Volume.Lloc = &lloc + stmts.Analyze.Volume.Cloc = &cloc + stmts.Analyze.Complexity = &pb.Complexity{} + stmts.Analyze.Complexity.Cyclomatic = &cyclomatic + stmts.Analyze.Volume.HalsteadVolume = &halsteadVolume - visitor.Calculate(&stmts) + visitor.Calculate(&stmts) - MI := int(*stmts.Analyze.Maintainability.MaintainabilityIndex) - MIwoc := int(*stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments) - commentWeight := int(*stmts.Analyze.Maintainability.CommentWeight) + MI := int(*stmts.Analyze.Maintainability.MaintainabilityIndex) + MIwoc := int(*stmts.Analyze.Maintainability.MaintainabilityIndexWithoutComments) + commentWeight := int(*stmts.Analyze.Maintainability.CommentWeight) - if MI != 104 { - t.Error("Expected 104, got ", MI) - } + if MI != 104 { + t.Error("Expected 104, got ", MI) + } - if MIwoc != 72 { - t.Error("Expected 72, got ", MIwoc) - } + if MIwoc != 72 { + t.Error("Expected 
72, got ", MIwoc) + } - if commentWeight != 31 { - t.Error("Expected 31, got ", commentWeight) - } -} \ No newline at end of file + if commentWeight != 31 { + t.Error("Expected 31, got ", commentWeight) + } +} diff --git a/src/Analyzer/RiskAnalyzer.go b/src/Analyzer/RiskAnalyzer.go index 8f04049..94cab36 100644 --- a/src/Analyzer/RiskAnalyzer.go +++ b/src/Analyzer/RiskAnalyzer.go @@ -16,9 +16,9 @@ func NewRiskAnalyzer() *RiskAnalyzer { func (v *RiskAnalyzer) Analyze(project ProjectAggregated) { - maxComplexity := 0.0 - maxCyclomatic := 0.0 - maxCommits := 0.0 + var maxComplexity float64 = 0 + var maxCyclomatic int32 = 0 + var maxCommits int = 0 // get bounds for _, file := range project.Combined.ConcernedFiles { @@ -32,20 +32,20 @@ func (v *RiskAnalyzer) Analyze(project ProjectAggregated) { // OOP file for _, class := range classes { - maintainability := float64(128 - *class.Stmts.Analyze.Maintainability.MaintainabilityIndex) + maintainability := 128 - *class.Stmts.Analyze.Maintainability.MaintainabilityIndex if maintainability > maxComplexity { maxComplexity = maintainability } } // all files (procedural and OOP) - cyclomatic := float64(*file.Stmts.Analyze.Complexity.Cyclomatic) + cyclomatic := *file.Stmts.Analyze.Complexity.Cyclomatic if cyclomatic > maxCyclomatic { maxCyclomatic = cyclomatic } - if float64(len(commits)) > maxCommits { - maxCommits = float64(len(commits)) + if len(commits) > maxCommits { + maxCommits = len(commits) } } @@ -53,7 +53,7 @@ func (v *RiskAnalyzer) Analyze(project ProjectAggregated) { for _, file := range project.Combined.ConcernedFiles { if file.Stmts.Analyze.Risk == nil { - file.Stmts.Analyze.Risk = &pb.Risk{Score: float32(0)} + file.Stmts.Analyze.Risk = &pb.Risk{Score: float64(0)} } nbCommits := 0 @@ -68,8 +68,8 @@ func (v *RiskAnalyzer) Analyze(project ProjectAggregated) { continue } - risk := v.GetRisk(maxCommits, maxComplexity, nbCommits, int(128-*class.Stmts.Analyze.Maintainability.MaintainabilityIndex)) - 
file.Stmts.Analyze.Risk.Score += float32(risk) + risk := v.GetRisk(int32(maxCommits), maxComplexity, nbCommits, int(128-*class.Stmts.Analyze.Maintainability.MaintainabilityIndex)) + file.Stmts.Analyze.Risk.Score += float64(risk) } // Procedural file. We put risk on the file itself, according to the cyclomatic complexity. @@ -77,27 +77,27 @@ func (v *RiskAnalyzer) Analyze(project ProjectAggregated) { continue } - cyclo := float64(*file.Stmts.Analyze.Complexity.Cyclomatic) - risk := v.GetRisk(maxCommits, maxCyclomatic, nbCommits, int(cyclo)) - file.Stmts.Analyze.Risk.Score += float32(risk) + cyclo := *file.Stmts.Analyze.Complexity.Cyclomatic + risk := v.GetRisk(int32(maxCommits), float64(maxCyclomatic), nbCommits, int(cyclo)) + file.Stmts.Analyze.Risk.Score += float64(risk) } } -func (v *RiskAnalyzer) GetRisk(maxCommits float64, maxComplexity float64, nbCommits int, complexity int) float32 { +func (v *RiskAnalyzer) GetRisk(maxCommits int32, maxComplexity float64, nbCommits int, complexity int) float64 { // Calculate the horizontal and vertical distance from the "top right" corner. - horizontalDistance := maxCommits - float64(nbCommits) + horizontalDistance := float64(maxCommits) - float64(nbCommits) verticalDistance := maxComplexity - float64(complexity) // Normalize these values over time, we first divide by the maximum values, to always end up with distances between 0 and 1. - normalizedHorizontalDistance := horizontalDistance / maxCommits + normalizedHorizontalDistance := horizontalDistance / float64(maxCommits) normalizedVerticalDistance := verticalDistance / maxComplexity // Calculate the distance of this class from the "top right" corner, using the simple formula A^2 + B^2 = C^2; or: C = sqrt(A^2 + B^2)). 
- distanceFromTopRightCorner := math.Sqrt(math.Pow(normalizedHorizontalDistance, 2) + math.Pow(normalizedVerticalDistance, 2)) + distanceFromTopRightCorner := math.Sqrt(math.Pow(float64(normalizedHorizontalDistance), 2) + math.Pow(float64(normalizedVerticalDistance), 2)) // The resulting value will be between 0 and sqrt(2). A short distance is bad, so in order to end up with a high score, we invert the value by subtracting it from 1. risk := 1 - distanceFromTopRightCorner - return float32(risk) + return float64(risk) } diff --git a/src/Analyzer/Volume/HalsteadMetricsVisitor.go b/src/Analyzer/Volume/HalsteadMetricsVisitor.go index c76a13e..d23c8b2 100644 --- a/src/Analyzer/Volume/HalsteadMetricsVisitor.go +++ b/src/Analyzer/Volume/HalsteadMetricsVisitor.go @@ -74,20 +74,20 @@ func (v *HalsteadMetricsVisitor) Visit(stmts *pb.Stmts, parents *pb.Stmts) { N = int32(N1 + N2) // Calculate estimated program length (𝑁̂) - hatN = float64(n1)*math.Log2(float64(n1)) + float64(n2)*math.Log2(float64(n2)) - if math.IsNaN(hatN) { + hatN = float64(n1)*float64(math.Log2(float64(n1))) + float64(n2)*float64(math.Log2(float64(n2))) + if math.IsNaN(float64(hatN)) { hatN = 0 } // Calculate volume (V) - V = float64(N) * math.Log2(float64(n)) - if math.IsNaN(V) { + V = float64(N) * float64(math.Log2(float64(n))) + if math.IsNaN(float64(V)) { V = 0 } // Calculate difficulty (D) D = float64(n1) / 2 * float64(N2) / float64(n2) - if math.IsNaN(D) { + if math.IsNaN(float64(D)) { D = 0 } @@ -97,13 +97,6 @@ func (v *HalsteadMetricsVisitor) Visit(stmts *pb.Stmts, parents *pb.Stmts) { // Calculate time required to program (T) T = E / 18 - // convert float to float32 - V32 := float32(V) - hatN32 := float32(hatN) - D32 := float32(D) - E32 := float32(E) - T32 := float32(T) - // Assign to result if stmt.Stmts.Analyze == nil { stmt.Stmts.Analyze = &pb.Analyze{} @@ -112,11 +105,11 @@ func (v *HalsteadMetricsVisitor) Visit(stmts *pb.Stmts, parents *pb.Stmts) { stmt.Stmts.Analyze.Volume.HalsteadVocabulary 
= &n stmt.Stmts.Analyze.Volume.HalsteadLength = &N - stmt.Stmts.Analyze.Volume.HalsteadEstimatedLength = &hatN32 - stmt.Stmts.Analyze.Volume.HalsteadVolume = &V32 - stmt.Stmts.Analyze.Volume.HalsteadDifficulty = &D32 - stmt.Stmts.Analyze.Volume.HalsteadEffort = &E32 - stmt.Stmts.Analyze.Volume.HalsteadTime = &T32 + stmt.Stmts.Analyze.Volume.HalsteadEstimatedLength = &hatN + stmt.Stmts.Analyze.Volume.HalsteadVolume = &V + stmt.Stmts.Analyze.Volume.HalsteadDifficulty = &D + stmt.Stmts.Analyze.Volume.HalsteadEffort = &E + stmt.Stmts.Analyze.Volume.HalsteadTime = &T } } @@ -155,11 +148,11 @@ func (v *HalsteadMetricsVisitor) LeaveNode(stmts *pb.Stmts) { } n += int32(*method.Stmts.Analyze.Volume.HalsteadVocabulary) N += int32(*method.Stmts.Analyze.Volume.HalsteadLength) - hatN += float64(*method.Stmts.Analyze.Volume.HalsteadEstimatedLength) - V += float64(*method.Stmts.Analyze.Volume.HalsteadVolume) - D += float64(*method.Stmts.Analyze.Volume.HalsteadDifficulty) - E += float64(*method.Stmts.Analyze.Volume.HalsteadEffort) - T += float64(*method.Stmts.Analyze.Volume.HalsteadTime) + hatN += *method.Stmts.Analyze.Volume.HalsteadEstimatedLength + V += *method.Stmts.Analyze.Volume.HalsteadVolume + D += *method.Stmts.Analyze.Volume.HalsteadDifficulty + E += *method.Stmts.Analyze.Volume.HalsteadEffort + T += *method.Stmts.Analyze.Volume.HalsteadTime } } @@ -174,13 +167,6 @@ func (v *HalsteadMetricsVisitor) LeaveNode(stmts *pb.Stmts) { T = T / float64(len(stmt.Stmts.StmtFunction)) } - // convert float to float32 - V32 := float32(V) - hatN32 := float32(hatN) - D32 := float32(D) - E32 := float32(E) - T32 := float32(T) - // Assign to result if stmt.Stmts.Analyze == nil { stmt.Stmts.Analyze = &pb.Analyze{} @@ -191,11 +177,11 @@ func (v *HalsteadMetricsVisitor) LeaveNode(stmts *pb.Stmts) { stmt.Stmts.Analyze.Volume.HalsteadVocabulary = &n stmt.Stmts.Analyze.Volume.HalsteadLength = &N - stmt.Stmts.Analyze.Volume.HalsteadEstimatedLength = &hatN32 - 
stmt.Stmts.Analyze.Volume.HalsteadVolume = &V32 - stmt.Stmts.Analyze.Volume.HalsteadDifficulty = &D32 - stmt.Stmts.Analyze.Volume.HalsteadEffort = &E32 - stmt.Stmts.Analyze.Volume.HalsteadTime = &T32 + stmt.Stmts.Analyze.Volume.HalsteadEstimatedLength = &hatN + stmt.Stmts.Analyze.Volume.HalsteadVolume = &V + stmt.Stmts.Analyze.Volume.HalsteadDifficulty = &D + stmt.Stmts.Analyze.Volume.HalsteadEffort = &E + stmt.Stmts.Analyze.Volume.HalsteadTime = &T } } } diff --git a/src/Analyzer/Volume/HalsteadMetricsVisitor_test.go b/src/Analyzer/Volume/HalsteadMetricsVisitor_test.go index 43aa78d..affa79d 100644 --- a/src/Analyzer/Volume/HalsteadMetricsVisitor_test.go +++ b/src/Analyzer/Volume/HalsteadMetricsVisitor_test.go @@ -92,15 +92,15 @@ func TestHalsteadMetricsVisitor(t *testing.T) { t.Errorf("Expected 4, got %d", *pbFile.Stmts.StmtFunction[0].Stmts.Analyze.Volume.HalsteadLength) } - if *pbFile.Stmts.StmtFunction[0].Stmts.Analyze.Volume.HalsteadEstimatedLength != float32(4.754887502163469) { + if *pbFile.Stmts.StmtFunction[0].Stmts.Analyze.Volume.HalsteadEstimatedLength != float64(4.754887502163469) { t.Errorf("Expected 4.754887502163469, got %f", *pbFile.Stmts.StmtFunction[0].Stmts.Analyze.Volume.HalsteadEstimatedLength) } - if *pbFile.Stmts.StmtFunction[0].Stmts.Analyze.Volume.HalsteadVolume != float32(8) { + if *pbFile.Stmts.StmtFunction[0].Stmts.Analyze.Volume.HalsteadVolume != float64(8) { t.Errorf("Expected 8, got %f", *pbFile.Stmts.StmtFunction[0].Stmts.Analyze.Volume.HalsteadVolume) } - if *pbFile.Stmts.StmtFunction[0].Stmts.Analyze.Volume.HalsteadDifficulty != float32(1.5) { + if *pbFile.Stmts.StmtFunction[0].Stmts.Analyze.Volume.HalsteadDifficulty != float64(1.5) { t.Errorf("Expected 1.5, got %f", *pbFile.Stmts.StmtFunction[0].Stmts.Analyze.Volume.HalsteadDifficulty) } } @@ -119,11 +119,11 @@ func TestHalsteadMetricsVisitor_LeaveNode(t *testing.T) { Volume: &pb.Volume{ HalsteadVocabulary: proto.Int32(2), HalsteadLength: proto.Int32(2), - 
HalsteadEstimatedLength: proto.Float32(2.5), - HalsteadVolume: proto.Float32(2.5), - HalsteadDifficulty: proto.Float32(2.5), - HalsteadEffort: proto.Float32(2.5), - HalsteadTime: proto.Float32(2.5), + HalsteadEstimatedLength: proto.Float64(2.5), + HalsteadVolume: proto.Float64(2.5), + HalsteadDifficulty: proto.Float64(2.5), + HalsteadEffort: proto.Float64(2.5), + HalsteadTime: proto.Float64(2.5), }, }, }, @@ -134,11 +134,11 @@ func TestHalsteadMetricsVisitor_LeaveNode(t *testing.T) { Volume: &pb.Volume{ HalsteadVocabulary: proto.Int32(4), HalsteadLength: proto.Int32(4), - HalsteadEstimatedLength: proto.Float32(4.5), - HalsteadVolume: proto.Float32(4.5), - HalsteadDifficulty: proto.Float32(4.5), - HalsteadEffort: proto.Float32(4.5), - HalsteadTime: proto.Float32(4.5), + HalsteadEstimatedLength: proto.Float64(4.5), + HalsteadVolume: proto.Float64(4.5), + HalsteadDifficulty: proto.Float64(4.5), + HalsteadEffort: proto.Float64(4.5), + HalsteadTime: proto.Float64(4.5), }, }, }, @@ -159,23 +159,23 @@ func TestHalsteadMetricsVisitor_LeaveNode(t *testing.T) { t.Errorf("Expected 3, got %d", *stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadLength) } - if *stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadEstimatedLength != float32(3.5) { + if *stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadEstimatedLength != float64(3.5) { t.Errorf("Expected 3.5, got %f", *stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadEstimatedLength) } - if *stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadVolume != float32(3.5) { + if *stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadVolume != float64(3.5) { t.Errorf("Expected 3.5, got %f", *stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadVolume) } - if *stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadDifficulty != float32(3.5) { + if *stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadDifficulty != float64(3.5) { t.Errorf("Expected 3.5, got %f", *stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadDifficulty) } - if 
*stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadEffort != float32(3.5) { + if *stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadEffort != float64(3.5) { t.Errorf("Expected 3.5, got %f", *stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadEffort) } - if *stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadTime != float32(3.5) { + if *stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadTime != float64(3.5) { t.Errorf("Expected 3.5, got %f", *stmts.StmtClass[0].Stmts.Analyze.Volume.HalsteadTime) } } diff --git a/src/Cli/ComponentClassTable.go b/src/Cli/ComponentClassTable.go index bcc84d7..e5aeab2 100644 --- a/src/Cli/ComponentClassTable.go +++ b/src/Cli/ComponentClassTable.go @@ -150,7 +150,7 @@ func (v *ComponentTableClass) Init() { strconv.Itoa(int(*class.Stmts.Analyze.Volume.Loc)), strconv.Itoa(int(*class.Stmts.Analyze.Complexity.Cyclomatic)), strconv.Itoa(int(*class.Stmts.Analyze.Volume.HalsteadLength)), - fmt.Sprintf("%.2f", ToFixed(float64(*class.Stmts.Analyze.Volume.HalsteadVolume), 2)), + fmt.Sprintf("%.2f", ToFixed(*class.Stmts.Analyze.Volume.HalsteadVolume, 2)), DecorateMaintainabilityIndex(int(*class.Stmts.Analyze.Maintainability.MaintainabilityIndex), class.Stmts.Analyze), }) } @@ -171,7 +171,7 @@ func (v *ComponentTableClass) Init() { strconv.Itoa(int(*class.Stmts.Analyze.Volume.Loc)), strconv.Itoa(int(*class.Stmts.Analyze.Complexity.Cyclomatic)), strconv.Itoa(int(*class.Stmts.Analyze.Volume.HalsteadLength)), - fmt.Sprintf("%.2f", ToFixed(float64(*class.Stmts.Analyze.Volume.HalsteadVolume), 2)), + fmt.Sprintf("%.2f", ToFixed(*class.Stmts.Analyze.Volume.HalsteadVolume, 2)), DecorateMaintainabilityIndex(int(*class.Stmts.Analyze.Maintainability.MaintainabilityIndex), class.Stmts.Analyze), }) } diff --git a/src/Cli/ComponentFileTable.go b/src/Cli/ComponentFileTable.go index 98b7c07..bfebd09 100644 --- a/src/Cli/ComponentFileTable.go +++ b/src/Cli/ComponentFileTable.go @@ -116,9 +116,9 @@ func (v *ComponentFileTable) Init() { cyclo = 
int(*file.Stmts.Analyze.Complexity.Cyclomatic) } - risk := float32(0.0) + risk := float64(0.0) if file.Stmts != nil && file.Stmts.Analyze != nil && file.Stmts.Analyze.Risk != nil { - risk = float32(file.Stmts.Analyze.Risk.Score) + risk = float64(file.Stmts.Analyze.Risk.Score) } // truncate filename, but to the left diff --git a/src/Cli/ComponentStatisticsOverview.go b/src/Cli/ComponentStatisticsOverview.go index 8cdf2a4..26b1c2a 100644 --- a/src/Cli/ComponentStatisticsOverview.go +++ b/src/Cli/ComponentStatisticsOverview.go @@ -31,7 +31,7 @@ func (v *ComponentStatisticsOverview) Render() string { Files: v.files, } boxCcn := StyleNumberBox( - fmt.Sprintf("%.2f", v.aggregated.AverageCyclomaticComplexityPerMethod), + fmt.Sprintf("%.2f", v.aggregated.CyclomaticComplexityPerMethod.Avg), "Cycl. complexity per method", chartRepartitionCyclomatic.AsTerminalElement(), ) @@ -42,7 +42,7 @@ func (v *ComponentStatisticsOverview) Render() string { Files: v.files, } boxMethods := StyleNumberBox( - fmt.Sprintf("%.2f", v.aggregated.AverageLocPerMethod), + fmt.Sprintf("%.2f", v.aggregated.LocPerMethod.Avg), "Average LOC per method", chartRepartitionLocByMethod.AsTerminalElement()+" ", ) @@ -53,7 +53,7 @@ func (v *ComponentStatisticsOverview) Render() string { Files: v.files, } boxMaintainability := StyleNumberBox( - DecorateMaintainabilityIndex(int(v.aggregated.AverageMI), nil), + DecorateMaintainabilityIndex(int(v.aggregated.MaintainabilityIndex.Avg), nil), "Maintainability index", chartRepartitionMI.AsTerminalElement(), ) diff --git a/src/Cli/ComponentTableClass_test.go b/src/Cli/ComponentTableClass_test.go index 50f1b76..39ce6f2 100644 --- a/src/Cli/ComponentTableClass_test.go +++ b/src/Cli/ComponentTableClass_test.go @@ -41,11 +41,11 @@ func TestNewComponentTableClass(t *testing.T) { func TestComponentTableClass_Render(t *testing.T) { - mi := float32(120) + mi := float64(120) ccn := int32(5) loc := int32(100) halsteadLength := int32(100) - halsteadVolume := float32(100) + 
halsteadVolume := float64(100) files := []*pb.File{ { @@ -89,14 +89,14 @@ func TestComponentTableClass_Render(t *testing.T) { func TestComponentTableClass_Sort(t *testing.T) { // class 1 - mi1 := float32(120) + mi1 := float64(120) ccn1 := int32(5) loc1 := int32(100) halsteadLength1 := int32(5) - halsteadVolume := float32(7) + halsteadVolume := float64(7) // class 2 - mi2 := float32(110) + mi2 := float64(110) ccn2 := int32(10) loc2 := int32(80) halsteadLength2 := int32(7) diff --git a/src/Cli/ScreenSummary.go b/src/Cli/ScreenSummary.go index c1e5343..256598a 100644 --- a/src/Cli/ScreenSummary.go +++ b/src/Cli/ScreenSummary.go @@ -82,10 +82,10 @@ func (m modelScreenSummary) View() string { | Min | Max | Average per class | Average per method | | --- | --- | --- | --- | | ` + - strconv.Itoa(combined.MinCyclomaticComplexity) + - ` | ` + strconv.Itoa(combined.MaxCyclomaticComplexity) + - ` | ` + fmt.Sprintf("%.2f", combined.AverageCyclomaticComplexityPerClass) + - ` | ` + fmt.Sprintf("%.2f", combined.AverageCyclomaticComplexityPerMethod) + + strconv.Itoa(int(combined.CyclomaticComplexityPerMethod.Min)) + + ` | ` + strconv.Itoa(int(combined.CyclomaticComplexityPerMethod.Max)) + + ` | ` + fmt.Sprintf("%.2f", combined.CyclomaticComplexityPerClass.Avg) + + ` | ` + fmt.Sprintf("%.2f", combined.CyclomaticComplexityPerMethod.Avg) + ` | ### Classes and methods @@ -94,8 +94,8 @@ func (m modelScreenSummary) View() string { | --- | --- | --- | --- |` + "\n" + ` | ` + strconv.Itoa(aggregatedByClass.NbClasses) + ` | ` + strconv.Itoa(combined.NbMethods) + - ` | ` + fmt.Sprintf("%.2f", aggregatedByClass.AverageMethodsPerClass) + - ` | ` + fmt.Sprintf("%.2f", combined.AverageLocPerMethod) + + ` | ` + fmt.Sprintf("%.2f", aggregatedByClass.MethodsPerClass.Avg) + + ` | ` + fmt.Sprintf("%.2f", combined.LocPerMethod.Avg) + ` | ## Maintainability @@ -105,7 +105,7 @@ func (m modelScreenSummary) View() string { | Maintainability index | MI without comments | Comment weight | | --- | --- | --- 
| - | ` + DecorateMaintainabilityIndex(int(aggregatedByClass.AverageMI), nil) + ` | ` + fmt.Sprintf("%.2f", aggregatedByClass.AverageMIwoc) + ` | ` + fmt.Sprintf("%.2f", aggregatedByClass.AverageMIcw) + ` | + | ` + DecorateMaintainabilityIndex(int(aggregatedByClass.MaintainabilityIndex.Avg), nil) + ` | ` + fmt.Sprintf("%.2f", aggregatedByClass.MaintainabilityIndexWithoutComments.Avg) + ` | ` + fmt.Sprintf("%.2f", aggregatedByClass.MaintainabilityCommentWeight.Avg) + ` | ` out, _ := glamour.Render(in, "dark") diff --git a/src/Cli/Styles.go b/src/Cli/Styles.go index 72e3d5b..f476476 100644 --- a/src/Cli/Styles.go +++ b/src/Cli/Styles.go @@ -126,10 +126,10 @@ func DecorateMaintainabilityIndex(mi int, analyze *pb.Analyze) string { } func Round(num float64) int { - return int(num + math.Copysign(0.5, num)) + return int(num + float64(math.Copysign(0.5, float64(num)))) } func ToFixed(num float64, precision int) float64 { output := math.Pow(10, float64(precision)) - return float64(Round(num*output)) / output + return float64(Round(num*float64(output))) / float64(output) } diff --git a/src/Engine/NodeTypeEnsurer.go b/src/Engine/NodeTypeEnsurer.go new file mode 100644 index 0000000..1140a05 --- /dev/null +++ b/src/Engine/NodeTypeEnsurer.go @@ -0,0 +1,38 @@ +package Engine + +import ( + pb "github.com/halleck45/ast-metrics/src/NodeType" +) + +func EnsureNodeTypeIsComplete(file *pb.File) { + + if file.Stmts.Analyze == nil { + file.Stmts.Analyze = &pb.Analyze{} + } + + if file.LinesOfCode == nil && file.Stmts.Analyze.Volume != nil { + file.LinesOfCode = &pb.LinesOfCode{ + LinesOfCode: *file.Stmts.Analyze.Volume.Loc, + CommentLinesOfCode: *file.Stmts.Analyze.Volume.Cloc, + LogicalLinesOfCode: *file.Stmts.Analyze.Volume.Lloc, + } + } + + if file.Stmts.Analyze == nil { + file.Stmts.Analyze = &pb.Analyze{} + } + + if file.Stmts.Analyze.Complexity == nil { + zero := int32(0) + file.Stmts.Analyze.Complexity = &pb.Complexity{ + Cyclomatic: &zero, + } + } + + if 
file.Stmts.Analyze.Coupling == nil { + file.Stmts.Analyze.Coupling = &pb.Coupling{ + Afferent: 0, + Efferent: 0, + } + } +} diff --git a/src/NodeType/NodeType.pb.go b/src/NodeType/NodeType.pb.go index 514b32a..b6043ea 100644 --- a/src/NodeType/NodeType.pb.go +++ b/src/NodeType/NodeType.pb.go @@ -1767,11 +1767,11 @@ type Volume struct { Cloc *int32 `protobuf:"varint,3,opt,name=cloc,proto3,oneof" json:"cloc,omitempty"` HalsteadVocabulary *int32 `protobuf:"varint,4,opt,name=halsteadVocabulary,proto3,oneof" json:"halsteadVocabulary,omitempty"` HalsteadLength *int32 `protobuf:"varint,5,opt,name=halsteadLength,proto3,oneof" json:"halsteadLength,omitempty"` - HalsteadVolume *float32 `protobuf:"fixed32,6,opt,name=halsteadVolume,proto3,oneof" json:"halsteadVolume,omitempty"` - HalsteadDifficulty *float32 `protobuf:"fixed32,7,opt,name=halsteadDifficulty,proto3,oneof" json:"halsteadDifficulty,omitempty"` - HalsteadEffort *float32 `protobuf:"fixed32,8,opt,name=halsteadEffort,proto3,oneof" json:"halsteadEffort,omitempty"` - HalsteadTime *float32 `protobuf:"fixed32,9,opt,name=halsteadTime,proto3,oneof" json:"halsteadTime,omitempty"` - HalsteadEstimatedLength *float32 `protobuf:"fixed32,10,opt,name=halsteadEstimatedLength,proto3,oneof" json:"halsteadEstimatedLength,omitempty"` + HalsteadVolume *float64 `protobuf:"fixed64,6,opt,name=halsteadVolume,proto3,oneof" json:"halsteadVolume,omitempty"` + HalsteadDifficulty *float64 `protobuf:"fixed64,7,opt,name=halsteadDifficulty,proto3,oneof" json:"halsteadDifficulty,omitempty"` + HalsteadEffort *float64 `protobuf:"fixed64,8,opt,name=halsteadEffort,proto3,oneof" json:"halsteadEffort,omitempty"` + HalsteadTime *float64 `protobuf:"fixed64,9,opt,name=halsteadTime,proto3,oneof" json:"halsteadTime,omitempty"` + HalsteadEstimatedLength *float64 `protobuf:"fixed64,10,opt,name=halsteadEstimatedLength,proto3,oneof" json:"halsteadEstimatedLength,omitempty"` } func (x *Volume) Reset() { @@ -1841,35 +1841,35 @@ func (x *Volume) GetHalsteadLength() 
int32 { return 0 } -func (x *Volume) GetHalsteadVolume() float32 { +func (x *Volume) GetHalsteadVolume() float64 { if x != nil && x.HalsteadVolume != nil { return *x.HalsteadVolume } return 0 } -func (x *Volume) GetHalsteadDifficulty() float32 { +func (x *Volume) GetHalsteadDifficulty() float64 { if x != nil && x.HalsteadDifficulty != nil { return *x.HalsteadDifficulty } return 0 } -func (x *Volume) GetHalsteadEffort() float32 { +func (x *Volume) GetHalsteadEffort() float64 { if x != nil && x.HalsteadEffort != nil { return *x.HalsteadEffort } return 0 } -func (x *Volume) GetHalsteadTime() float32 { +func (x *Volume) GetHalsteadTime() float64 { if x != nil && x.HalsteadTime != nil { return *x.HalsteadTime } return 0 } -func (x *Volume) GetHalsteadEstimatedLength() float32 { +func (x *Volume) GetHalsteadEstimatedLength() float64 { if x != nil && x.HalsteadEstimatedLength != nil { return *x.HalsteadEstimatedLength } @@ -1881,9 +1881,9 @@ type Maintainability struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - MaintainabilityIndex *float32 `protobuf:"fixed32,1,opt,name=maintainabilityIndex,proto3,oneof" json:"maintainabilityIndex,omitempty"` - MaintainabilityIndexWithoutComments *float32 `protobuf:"fixed32,2,opt,name=maintainabilityIndexWithoutComments,proto3,oneof" json:"maintainabilityIndexWithoutComments,omitempty"` - CommentWeight *float32 `protobuf:"fixed32,3,opt,name=commentWeight,proto3,oneof" json:"commentWeight,omitempty"` + MaintainabilityIndex *float64 `protobuf:"fixed64,1,opt,name=maintainabilityIndex,proto3,oneof" json:"maintainabilityIndex,omitempty"` + MaintainabilityIndexWithoutComments *float64 `protobuf:"fixed64,2,opt,name=maintainabilityIndexWithoutComments,proto3,oneof" json:"maintainabilityIndexWithoutComments,omitempty"` + CommentWeight *float64 `protobuf:"fixed64,3,opt,name=commentWeight,proto3,oneof" json:"commentWeight,omitempty"` } func (x *Maintainability) Reset() { @@ -1918,21 +1918,21 @@ func (*Maintainability) 
Descriptor() ([]byte, []int) { return file_proto_NodeType_proto_rawDescGZIP(), []int{25} } -func (x *Maintainability) GetMaintainabilityIndex() float32 { +func (x *Maintainability) GetMaintainabilityIndex() float64 { if x != nil && x.MaintainabilityIndex != nil { return *x.MaintainabilityIndex } return 0 } -func (x *Maintainability) GetMaintainabilityIndexWithoutComments() float32 { +func (x *Maintainability) GetMaintainabilityIndexWithoutComments() float64 { if x != nil && x.MaintainabilityIndexWithoutComments != nil { return *x.MaintainabilityIndexWithoutComments } return 0 } -func (x *Maintainability) GetCommentWeight() float32 { +func (x *Maintainability) GetCommentWeight() float64 { if x != nil && x.CommentWeight != nil { return *x.CommentWeight } @@ -2076,7 +2076,7 @@ type Risk struct { sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields - Score float32 `protobuf:"fixed32,1,opt,name=score,proto3" json:"score,omitempty"` // score of risk. Lower is better + Score float64 `protobuf:"fixed64,1,opt,name=score,proto3" json:"score,omitempty"` // score of risk. 
Lower is better } func (x *Risk) Reset() { @@ -2111,7 +2111,7 @@ func (*Risk) Descriptor() ([]byte, []int) { return file_proto_NodeType_proto_rawDescGZIP(), []int{28} } -func (x *Risk) GetScore() float32 { +func (x *Risk) GetScore() float64 { if x != nil { return x.Score } @@ -2128,7 +2128,7 @@ type Coupling struct { Afferent int32 `protobuf:"varint,1,opt,name=afferent,proto3" json:"afferent,omitempty"` // number of classes that depends on this class Efferent int32 `protobuf:"varint,2,opt,name=efferent,proto3" json:"efferent,omitempty"` // number of classes that this class depends on - Instability float32 `protobuf:"fixed32,3,opt,name=instability,proto3" json:"instability,omitempty"` // instability of the class + Instability float64 `protobuf:"fixed64,3,opt,name=instability,proto3" json:"instability,omitempty"` // instability of the class } func (x *Coupling) Reset() { @@ -2177,7 +2177,7 @@ func (x *Coupling) GetEfferent() int32 { return 0 } -func (x *Coupling) GetInstability() float32 { +func (x *Coupling) GetInstability() float64 { if x != nil { return x.Instability } @@ -2497,19 +2497,19 @@ var file_proto_NodeType_proto_rawDesc = []byte{ 0x18, 0x05, 0x20, 0x01, 0x28, 0x05, 0x48, 0x04, 0x52, 0x0e, 0x68, 0x61, 0x6c, 0x73, 0x74, 0x65, 0x61, 0x64, 0x4c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x88, 0x01, 0x01, 0x12, 0x2b, 0x0a, 0x0e, 0x68, 0x61, 0x6c, 0x73, 0x74, 0x65, 0x61, 0x64, 0x56, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x18, 0x06, 0x20, - 0x01, 0x28, 0x02, 0x48, 0x05, 0x52, 0x0e, 0x68, 0x61, 0x6c, 0x73, 0x74, 0x65, 0x61, 0x64, 0x56, + 0x01, 0x28, 0x01, 0x48, 0x05, 0x52, 0x0e, 0x68, 0x61, 0x6c, 0x73, 0x74, 0x65, 0x61, 0x64, 0x56, 0x6f, 0x6c, 0x75, 0x6d, 0x65, 0x88, 0x01, 0x01, 0x12, 0x33, 0x0a, 0x12, 0x68, 0x61, 0x6c, 0x73, 0x74, 0x65, 0x61, 0x64, 0x44, 0x69, 0x66, 0x66, 0x69, 0x63, 0x75, 0x6c, 0x74, 0x79, 0x18, 0x07, - 0x20, 0x01, 0x28, 0x02, 0x48, 0x06, 0x52, 0x12, 0x68, 0x61, 0x6c, 0x73, 0x74, 0x65, 0x61, 0x64, + 0x20, 0x01, 0x28, 0x01, 0x48, 0x06, 0x52, 0x12, 0x68, 0x61, 
0x6c, 0x73, 0x74, 0x65, 0x61, 0x64, 0x44, 0x69, 0x66, 0x66, 0x69, 0x63, 0x75, 0x6c, 0x74, 0x79, 0x88, 0x01, 0x01, 0x12, 0x2b, 0x0a, 0x0e, 0x68, 0x61, 0x6c, 0x73, 0x74, 0x65, 0x61, 0x64, 0x45, 0x66, 0x66, 0x6f, 0x72, 0x74, 0x18, - 0x08, 0x20, 0x01, 0x28, 0x02, 0x48, 0x07, 0x52, 0x0e, 0x68, 0x61, 0x6c, 0x73, 0x74, 0x65, 0x61, + 0x08, 0x20, 0x01, 0x28, 0x01, 0x48, 0x07, 0x52, 0x0e, 0x68, 0x61, 0x6c, 0x73, 0x74, 0x65, 0x61, 0x64, 0x45, 0x66, 0x66, 0x6f, 0x72, 0x74, 0x88, 0x01, 0x01, 0x12, 0x27, 0x0a, 0x0c, 0x68, 0x61, - 0x6c, 0x73, 0x74, 0x65, 0x61, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x02, + 0x6c, 0x73, 0x74, 0x65, 0x61, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x18, 0x09, 0x20, 0x01, 0x28, 0x01, 0x48, 0x08, 0x52, 0x0c, 0x68, 0x61, 0x6c, 0x73, 0x74, 0x65, 0x61, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x88, 0x01, 0x01, 0x12, 0x3d, 0x0a, 0x17, 0x68, 0x61, 0x6c, 0x73, 0x74, 0x65, 0x61, 0x64, 0x45, 0x73, 0x74, 0x69, 0x6d, 0x61, 0x74, 0x65, 0x64, 0x4c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x18, 0x0a, - 0x20, 0x01, 0x28, 0x02, 0x48, 0x09, 0x52, 0x17, 0x68, 0x61, 0x6c, 0x73, 0x74, 0x65, 0x61, 0x64, + 0x20, 0x01, 0x28, 0x01, 0x48, 0x09, 0x52, 0x17, 0x68, 0x61, 0x6c, 0x73, 0x74, 0x65, 0x61, 0x64, 0x45, 0x73, 0x74, 0x69, 0x6d, 0x61, 0x74, 0x65, 0x64, 0x4c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x88, 0x01, 0x01, 0x42, 0x06, 0x0a, 0x04, 0x5f, 0x6c, 0x6f, 0x63, 0x42, 0x07, 0x0a, 0x05, 0x5f, 0x6c, 0x6c, 0x6f, 0x63, 0x42, 0x07, 0x0a, 0x05, 0x5f, 0x63, 0x6c, 0x6f, 0x63, 0x42, 0x15, 0x0a, 0x13, @@ -2525,15 +2525,15 @@ var file_proto_NodeType_proto_rawDesc = []byte{ 0x22, 0x9f, 0x02, 0x0a, 0x0f, 0x4d, 0x61, 0x69, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x12, 0x37, 0x0a, 0x14, 0x6d, 0x61, 0x69, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x02, 0x48, 0x00, 0x52, 0x14, 0x6d, 0x61, 0x69, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x61, 0x62, + 0x28, 0x01, 0x48, 0x00, 0x52, 0x14, 0x6d, 0x61, 
0x69, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x88, 0x01, 0x01, 0x12, 0x55, 0x0a, 0x23, 0x6d, 0x61, 0x69, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x57, 0x69, 0x74, 0x68, 0x6f, 0x75, 0x74, 0x43, 0x6f, 0x6d, 0x6d, - 0x65, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x02, 0x48, 0x01, 0x52, 0x23, 0x6d, 0x61, + 0x65, 0x6e, 0x74, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x01, 0x48, 0x01, 0x52, 0x23, 0x6d, 0x61, 0x69, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x57, 0x69, 0x74, 0x68, 0x6f, 0x75, 0x74, 0x43, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x73, 0x88, 0x01, 0x01, 0x12, 0x29, 0x0a, 0x0d, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x57, - 0x65, 0x69, 0x67, 0x68, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x02, 0x48, 0x02, 0x52, 0x0d, 0x63, + 0x65, 0x69, 0x67, 0x68, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x01, 0x48, 0x02, 0x52, 0x0d, 0x63, 0x6f, 0x6d, 0x6d, 0x65, 0x6e, 0x74, 0x57, 0x65, 0x69, 0x67, 0x68, 0x74, 0x88, 0x01, 0x01, 0x42, 0x17, 0x0a, 0x15, 0x5f, 0x6d, 0x61, 0x69, 0x6e, 0x74, 0x61, 0x69, 0x6e, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x42, 0x26, 0x0a, 0x24, 0x5f, 0x6d, 0x61, 0x69, @@ -2553,13 +2553,13 @@ var file_proto_NodeType_proto_rawDesc = []byte{ 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x64, 0x61, 0x74, 0x65, 0x22, 0x1c, 0x0a, 0x04, 0x52, 0x69, 0x73, 0x6b, 0x12, 0x14, 0x0a, 0x05, 0x73, 0x63, 0x6f, - 0x72, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x02, 0x52, 0x05, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x22, + 0x72, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x01, 0x52, 0x05, 0x73, 0x63, 0x6f, 0x72, 0x65, 0x22, 0x64, 0x0a, 0x08, 0x43, 0x6f, 0x75, 0x70, 0x6c, 0x69, 0x6e, 0x67, 0x12, 0x1a, 0x0a, 0x08, 0x61, 0x66, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x74, 
0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, 0x61, 0x66, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x74, 0x12, 0x1a, 0x0a, 0x08, 0x65, 0x66, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x52, 0x08, 0x65, 0x66, 0x66, 0x65, 0x72, 0x65, 0x6e, 0x74, 0x12, 0x20, 0x0a, 0x0b, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x62, 0x69, 0x6c, 0x69, - 0x74, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x02, 0x52, 0x0b, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x62, + 0x74, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x01, 0x52, 0x0b, 0x69, 0x6e, 0x73, 0x74, 0x61, 0x62, 0x69, 0x6c, 0x69, 0x74, 0x79, 0x42, 0x2b, 0x5a, 0x29, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x68, 0x61, 0x6c, 0x6c, 0x65, 0x63, 0x6b, 0x34, 0x35, 0x2f, 0x61, 0x73, 0x74, 0x2d, 0x6d, 0x65, 0x74, 0x72, 0x69, 0x63, 0x73, 0x2f, 0x4e, 0x6f, 0x64, 0x65, 0x54, 0x79, diff --git a/src/Pkg/Cleaner/cleaner.go b/src/Pkg/Cleaner/cleaner.go index 9f386a6..77a8209 100644 --- a/src/Pkg/Cleaner/cleaner.go +++ b/src/Pkg/Cleaner/cleaner.go @@ -7,13 +7,13 @@ import ( ) var ( + defaultfloat64 float64 = 0 defaultFloat64 float64 = 0 - defaultFloat32 float32 = 0 ) // The CleanVal removes all NaN values from any value // and sets them to the default float64 value, which is 0. -// For float32 values, it also sets them to 0. +// For float64 values, it also sets them to 0. // // This function accepts a pointer because it needs // to modify the provided value. 
@@ -60,18 +60,13 @@ func cleanSlice(v reflect.Value) { func cleanField(field reflect.Value) { switch field.Kind() { - case reflect.Float32, reflect.Float64: + case reflect.Float64: f := field.Float() isInvalidAndCanSet := field.CanSet() && (math.IsNaN(f) || math.IsInf(f, 0)) if !isInvalidAndCanSet { return } - switch field.Kind() { - case reflect.Float64: - field.Set(reflect.ValueOf(defaultFloat64)) - case reflect.Float32: - field.Set(reflect.ValueOf(defaultFloat32)) - } + field.Set(reflect.ValueOf(defaultFloat64)) } } diff --git a/src/Report/HtmlReportGenerator.go b/src/Report/HtmlReportGenerator.go index e4c1614..778611e 100644 --- a/src/Report/HtmlReportGenerator.go +++ b/src/Report/HtmlReportGenerator.go @@ -248,6 +248,11 @@ func (v *HtmlReportGenerator) RegisterFilters() { } json = json[:len(json)-1] + "]" + if json == "]" { + // occurs when no relations are found + json = "[]" + } + return pongo2.AsSafeValue(json), nil }) @@ -261,6 +266,13 @@ func (v *HtmlReportGenerator) RegisterFilters() { // Sort by risk of file files := in.Interface().([]*pb.File) sort.Slice(files, func(i, j int) bool { + if files[i].Stmts == nil && files[j].Stmts == nil || files[i].Stmts.Analyze == nil || files[j].Stmts.Analyze == nil { + return false + } + + if files[i].Stmts.Analyze.Risk == nil && files[j].Stmts.Analyze.Risk == nil { + return false + } if files[i].Stmts.Analyze.Risk == nil { return false diff --git a/src/Report/JsonReportGenerator.go b/src/Report/JsonReportGenerator.go index a28de11..2e0e9db 100644 --- a/src/Report/JsonReportGenerator.go +++ b/src/Report/JsonReportGenerator.go @@ -143,36 +143,36 @@ func (j *JsonReportGenerator) buildReport(projectAggregated Analyzer.ProjectAggr r.NbClasses = combined.NbClasses r.NbClassesWithCode = combined.NbClassesWithCode r.NbMethods = combined.NbMethods - r.Loc = combined.Loc - r.Cloc = combined.Cloc - r.Lloc = combined.Lloc - r.AverageMethodsPerClass = combined.AverageMethodsPerClass - r.AverageLocPerMethod = 
combined.AverageLocPerMethod - r.AverageLlocPerMethod = combined.AverageLlocPerMethod - r.AverageClocPerMethod = combined.AverageClocPerMethod - r.AverageCyclomaticComplexityPerMethod = combined.AverageCyclomaticComplexityPerMethod - r.AverageCyclomaticComplexityPerClass = combined.AverageCyclomaticComplexityPerClass - r.MinCyclomaticComplexity = combined.MinCyclomaticComplexity - r.MaxCyclomaticComplexity = combined.MaxCyclomaticComplexity - r.AverageHalsteadDifficulty = combined.AverageHalsteadDifficulty - r.AverageHalsteadEffort = combined.AverageHalsteadEffort - r.AverageHalsteadVolume = combined.AverageHalsteadVolume - r.AverageHalsteadTime = combined.AverageHalsteadTime - r.AverageHalsteadBugs = combined.AverageHalsteadBugs - r.SumHalsteadDifficulty = combined.SumHalsteadDifficulty - r.SumHalsteadEffort = combined.SumHalsteadEffort - r.SumHalsteadVolume = combined.SumHalsteadVolume - r.SumHalsteadTime = combined.SumHalsteadTime - r.SumHalsteadBugs = combined.SumHalsteadBugs - r.AverageMI = combined.AverageMI - r.AverageMIwoc = combined.AverageMIwoc - r.AverageMIcw = combined.AverageMIcw - r.AverageMIPerMethod = combined.AverageMIPerMethod - r.AverageMIwocPerMethod = combined.AverageMIwocPerMethod - r.AverageMIcwPerMethod = combined.AverageMIcwPerMethod - r.AverageAfferentCoupling = combined.AverageAfferentCoupling - r.AverageEfferentCoupling = combined.AverageEfferentCoupling - r.AverageInstability = combined.AverageInstability + r.Loc = int(combined.Loc.Sum) + r.Cloc = int(combined.Cloc.Sum) + r.Lloc = int(combined.Lloc.Sum) + r.AverageMethodsPerClass = combined.MethodsPerClass.Avg + r.AverageLocPerMethod = combined.LocPerMethod.Avg + r.AverageLlocPerMethod = combined.LlocPerMethod.Avg + r.AverageClocPerMethod = combined.ClocPerMethod.Avg + r.AverageCyclomaticComplexityPerMethod = combined.CyclomaticComplexityPerMethod.Avg + r.AverageCyclomaticComplexityPerClass = combined.CyclomaticComplexityPerClass.Avg + r.MinCyclomaticComplexity = 
int(combined.CyclomaticComplexityPerMethod.Min) + r.MaxCyclomaticComplexity = int(combined.CyclomaticComplexityPerMethod.Max) + r.AverageHalsteadDifficulty = combined.HalsteadDifficulty.Avg + r.AverageHalsteadEffort = combined.HalsteadEffort.Avg + r.AverageHalsteadVolume = combined.HalsteadVolume.Avg + r.AverageHalsteadTime = combined.HalsteadTime.Avg + r.AverageHalsteadBugs = combined.HalsteadBugs.Avg + r.SumHalsteadDifficulty = combined.HalsteadDifficulty.Sum + r.SumHalsteadEffort = combined.HalsteadEffort.Sum + r.SumHalsteadVolume = combined.HalsteadVolume.Sum + r.SumHalsteadTime = combined.HalsteadTime.Sum + r.SumHalsteadBugs = combined.HalsteadBugs.Sum + r.AverageMI = combined.MaintainabilityIndex.Avg + r.AverageMIwoc = combined.MaintainabilityIndexWithoutComments.Avg + r.AverageMIcw = combined.MaintainabilityCommentWeight.Avg + r.AverageMIPerMethod = combined.MaintainabilityPerMethod.Avg + r.AverageMIwocPerMethod = combined.MaintainabilityCommentWeightPerMethod.Avg + r.AverageMIcwPerMethod = combined.MaintainabilityCommentWeightPerMethod.Avg + r.AverageAfferentCoupling = combined.AfferentCoupling.Avg + r.AverageEfferentCoupling = combined.EfferentCoupling.Avg + r.AverageInstability = combined.Instability.Avg r.CommitCountForPeriod = combined.CommitCountForPeriod r.CommittedFilesCountForPeriod = combined.CommittedFilesCountForPeriod r.BusFactor = combined.BusFactor diff --git a/src/Report/OpenMetricsGenerator_test.go b/src/Report/OpenMetricsGenerator_test.go index 6e785a9..b3d2956 100644 --- a/src/Report/OpenMetricsGenerator_test.go +++ b/src/Report/OpenMetricsGenerator_test.go @@ -46,8 +46,8 @@ func TestGenerateOpenMetricsReports(t *testing.T) { Cloc: proto.Int32(20), }, Maintainability: &pb.Maintainability{ - MaintainabilityIndex: proto.Float32(75.5), - MaintainabilityIndexWithoutComments: proto.Float32(70.0), + MaintainabilityIndex: proto.Float64(75.5), + MaintainabilityIndexWithoutComments: proto.Float64(70.0), }, Coupling: &pb.Coupling{ Afferent: 
*proto.Int32(5), diff --git a/src/Report/templates/html/componentChartRadiusBarAfferent.html b/src/Report/templates/html/componentChartRadiusBarAfferent.html index 898679d..5fd6030 100644 --- a/src/Report/templates/html/componentChartRadiusBarAfferent.html +++ b/src/Report/templates/html/componentChartRadiusBarAfferent.html @@ -1,5 +1,6 @@