mirror of
https://github.com/ivuorinen/gibidify.git
synced 2026-03-02 21:56:19 +00:00
chore: modernize workflows, security scanning, and linting configuration (#50)
* build: update Go 1.25, CI workflows, and build tooling - Upgrade to Go 1.25 - Add benchmark targets to Makefile - Implement parallel gosec execution - Lock tool versions for reproducibility - Add shellcheck directives to scripts - Update CI workflows with improved caching * refactor: migrate from golangci-lint to revive - Replace golangci-lint with revive for linting - Configure comprehensive revive rules - Fix all EditorConfig violations - Add yamllint and yamlfmt support - Remove deprecated .golangci.yml * refactor: rename utils to shared and deduplicate code - Rename utils package to shared - Add shared constants package - Deduplicate constants across packages - Address CodeRabbit review feedback * fix: resolve SonarQube issues and add safety guards - Fix all 73 SonarQube OPEN issues - Add nil guards for resourceMonitor, backpressure, metricsCollector - Implement io.Closer for headerFileReader - Propagate errors from processing helpers - Add metrics and templates packages - Improve error handling across codebase * test: improve test infrastructure and coverage - Add benchmarks for cli, fileproc, metrics - Improve test coverage for cli, fileproc, config - Refactor tests with helper functions - Add shared test constants - Fix test function naming conventions - Reduce cognitive complexity in benchmark tests * docs: update documentation and configuration examples - Update CLAUDE.md with current project state - Refresh README with new features - Add usage and configuration examples - Add SonarQube project configuration - Consolidate config.example.yaml * fix: resolve shellcheck warnings in scripts - Use ./*.go instead of *.go to prevent dash-prefixed filenames from being interpreted as options (SC2035) - Remove unreachable return statement after exit (SC2317) - Remove obsolete gibidiutils/ directory reference * chore(deps): upgrade go dependencies * chore(lint): megalinter fixes * fix: improve test coverage and fix file descriptor leaks - Add defer r.Close() to fix 
pipe file descriptor leaks in benchmark tests - Refactor TestProcessorConfigureFileTypes with helper functions and assertions - Refactor TestProcessorLogFinalStats with output capture and keyword verification - Use shared constants instead of literal strings (TestFilePNG, FormatMarkdown, etc.) - Reduce cognitive complexity by extracting helper functions * fix: align test comments with function names Remove underscores from test comments to match actual function names: - benchmark/benchmark_test.go (2 fixes) - fileproc/filetypes_config_test.go (4 fixes) - fileproc/filetypes_registry_test.go (6 fixes) - fileproc/processor_test.go (6 fixes) - fileproc/resource_monitor_types_test.go (4 fixes) - fileproc/writer_test.go (3 fixes) * fix: various test improvements and bug fixes - Remove duplicate maxCacheSize check in filetypes_registry_test.go - Shorten long comment in processor_test.go to stay under 120 chars - Remove flaky time.Sleep in collector_test.go, use >= 0 assertion - Close pipe reader in benchmark_test.go to fix file descriptor leak - Use ContinueOnError in flags_test.go to match ResetFlags behavior - Add nil check for p.ui in processor_workers.go before UpdateProgress - Fix resource_monitor_validation_test.go by setting hardMemoryLimitBytes directly * chore(yaml): add missing document start markers Add --- document start to YAML files to satisfy yamllint: - .github/workflows/codeql.yml - .github/workflows/build-test-publish.yml - .github/workflows/security.yml - .github/actions/setup/action.yml * fix: guard nil resourceMonitor and fix test deadlock - Guard resourceMonitor before CreateFileProcessingContext call - Add ui.UpdateProgress on emergency stop and path error returns - Fix potential deadlock in TestProcessFile using wg.Go with defer close
This commit is contained in:
134
metrics/types.go
Normal file
134
metrics/types.go
Normal file
@@ -0,0 +1,134 @@
|
||||
// Package metrics provides comprehensive processing statistics and profiling capabilities.
|
||||
package metrics
|
||||
|
||||
import (
|
||||
"sync"
|
||||
"time"
|
||||
)
|
||||
|
||||
// ProcessingMetrics provides comprehensive processing statistics.
// It is the JSON-serializable snapshot type; a Collector aggregates
// live counters into this struct.
type ProcessingMetrics struct {
	// File processing metrics: counts of files seen, completed,
	// skipped, and failed, plus the time of the last update.
	TotalFiles     int64     `json:"total_files"`
	ProcessedFiles int64     `json:"processed_files"`
	SkippedFiles   int64     `json:"skipped_files"`
	ErrorFiles     int64     `json:"error_files"`
	LastUpdated    time.Time `json:"last_updated"`

	// Size metrics, all in bytes (AverageFileSize may be fractional).
	TotalSize       int64   `json:"total_size_bytes"`
	ProcessedSize   int64   `json:"processed_size_bytes"`
	AverageFileSize float64 `json:"average_file_size_bytes"`
	LargestFile     int64   `json:"largest_file_bytes"`
	SmallestFile    int64   `json:"smallest_file_bytes"`

	// Performance metrics.
	// NOTE(review): `omitempty` on a time.Time has no effect with
	// encoding/json (the zero time is not "empty"); EndTime will be
	// emitted as "0001-01-01T00:00:00Z" when unset — confirm intent.
	StartTime      time.Time     `json:"start_time"`
	EndTime        time.Time     `json:"end_time,omitempty"`
	ProcessingTime time.Duration `json:"processing_duration"`
	FilesPerSecond float64       `json:"files_per_second"`
	BytesPerSecond float64       `json:"bytes_per_second"`

	// Memory and resource metrics.
	PeakMemoryMB    int64 `json:"peak_memory_mb"`
	CurrentMemoryMB int64 `json:"current_memory_mb"`
	GoroutineCount  int   `json:"goroutine_count"`

	// Format specific metrics: per-format and per-error-category counts.
	// Presumably keyed by output format name and error type string —
	// verify against the Collector's recording methods.
	FormatCounts map[string]int64 `json:"format_counts"`
	ErrorCounts  map[string]int64 `json:"error_counts"`

	// Concurrency metrics.
	MaxConcurrency     int   `json:"max_concurrency"`
	CurrentConcurrency int32 `json:"current_concurrency"`

	// Phase timings: total wall time spent in each named processing phase.
	PhaseTimings map[string]time.Duration `json:"phase_timings"`
}
|
||||
|
||||
// Collector collects and manages processing metrics.
//
// It contains a sync.RWMutex and therefore must not be copied after
// first use; pass it as *Collector.
type Collector struct {
	metrics    ProcessingMetrics // aggregated snapshot, guarded by mu
	mu         sync.RWMutex      // guards metrics and the maps below
	startTime  time.Time
	lastUpdate time.Time

	// Atomic counters for high-concurrency access.
	// NOTE(review): presumably updated via sync/atomic elsewhere in the
	// package — these must only be touched with atomic operations.
	totalFiles     int64
	processedFiles int64
	skippedFiles   int64
	errorFiles     int64
	totalSize      int64
	processedSize  int64
	largestFile    int64
	smallestFile   int64 // Using max int64 as initial value to track minimum

	// Concurrency tracking (current and high-water-mark worker counts).
	concurrency     int32
	peakConcurrency int32

	// Format and error tracking with mutex protection (guarded by mu).
	formatCounts map[string]int64
	errorCounts  map[string]int64

	// Phase timing tracking: accumulated duration per named phase.
	phaseTimings map[string]time.Duration
}
|
||||
|
||||
// FileProcessingResult represents the result of processing a single file.
type FileProcessingResult struct {
	FilePath       string        `json:"file_path"`
	FileSize       int64         `json:"file_size"` // size in bytes
	Format         string        `json:"format"`
	ProcessingTime time.Duration `json:"processing_time"`
	Success        bool          `json:"success"`
	// NOTE(review): an `error` interface value has no exported fields,
	// so encoding/json marshals a non-nil Error as "{}" (losing the
	// message) — consider a string field if this is serialized.
	Error      error  `json:"error,omitempty"`
	Skipped    bool   `json:"skipped"`
	SkipReason string `json:"skip_reason,omitempty"` // set only when Skipped is true — TODO confirm
}
|
||||
|
||||
// ProfileReport represents a comprehensive profiling report.
// It combines the summary metrics with breakdowns by format, error
// category, hour, and processing phase, plus derived recommendations.
type ProfileReport struct {
	Summary         ProcessingMetrics        `json:"summary"`
	TopLargestFiles []FileInfo               `json:"top_largest_files"`
	TopSlowestFiles []FileInfo               `json:"top_slowest_files"`
	FormatBreakdown map[string]FormatMetrics `json:"format_breakdown"`
	ErrorBreakdown  map[string]int64         `json:"error_breakdown"`
	HourlyStats     []HourlyProcessingStats  `json:"hourly_stats,omitempty"`
	PhaseBreakdown  map[string]PhaseMetrics  `json:"phase_breakdown"`
	// PerformanceIndex is a derived score; its scale/meaning is defined
	// by the report generator — see where the report is built.
	PerformanceIndex float64  `json:"performance_index"`
	Recommendations  []string `json:"recommendations"`
}
|
||||
|
||||
// FileInfo represents information about a processed file.
// Used in ProfileReport top-N listings (largest / slowest files).
type FileInfo struct {
	Path           string        `json:"path"`
	Size           int64         `json:"size"` // size in bytes
	ProcessingTime time.Duration `json:"processing_time"`
	Format         string        `json:"format"`
}
|
||||
|
||||
// FormatMetrics represents metrics for a specific file format.
// Average fields are derived from the totals and Count.
type FormatMetrics struct {
	Count                 int64         `json:"count"`
	TotalSize             int64         `json:"total_size"`   // bytes
	AverageSize           float64       `json:"average_size"` // bytes, TotalSize / Count
	TotalProcessingTime   time.Duration `json:"total_processing_time"`
	AverageProcessingTime time.Duration `json:"average_processing_time"`
}
|
||||
|
||||
// HourlyProcessingStats represents processing statistics for an hour.
type HourlyProcessingStats struct {
	// Hour identifies the bucket; presumably truncated to the start of
	// the hour — confirm against the aggregation code.
	Hour           time.Time `json:"hour"`
	FilesProcessed int64     `json:"files_processed"`
	BytesProcessed int64     `json:"bytes_processed"`
	AverageRate    float64   `json:"average_rate"` // units defined by the producer — TODO confirm
}
|
||||
|
||||
// PhaseMetrics represents timing metrics for processing phases.
type PhaseMetrics struct {
	TotalTime   time.Duration `json:"total_time"`
	Count       int64         `json:"count"`        // number of times the phase ran
	AverageTime time.Duration `json:"average_time"` // TotalTime / Count
	// Percentage is this phase's share of total processing time,
	// expressed per the json tag as a percentage of the total.
	Percentage float64 `json:"percentage_of_total"`
}
|
||||
Reference in New Issue
Block a user