gibidify/fileproc/json_writer.go
Ismo Vuorinen, commit 95b7ef6dd3 (2025-12-10 19:07:11 +02:00): chore: modernize workflows, security scanning, and linting configuration (#50)
* build: update Go 1.25, CI workflows, and build tooling

- Upgrade to Go 1.25
- Add benchmark targets to Makefile
- Implement parallel gosec execution
- Lock tool versions for reproducibility
- Add shellcheck directives to scripts
- Update CI workflows with improved caching

* refactor: migrate from golangci-lint to revive

- Replace golangci-lint with revive for linting
- Configure comprehensive revive rules
- Fix all EditorConfig violations
- Add yamllint and yamlfmt support
- Remove deprecated .golangci.yml

* refactor: rename utils to shared and deduplicate code

- Rename utils package to shared
- Add shared constants package
- Deduplicate constants across packages
- Address CodeRabbit review feedback

* fix: resolve SonarQube issues and add safety guards

- Fix all 73 SonarQube OPEN issues
- Add nil guards for resourceMonitor, backpressure, metricsCollector (see the sketch after this list)
- Implement io.Closer for headerFileReader
- Propagate errors from processing helpers
- Add metrics and templates packages
- Improve error handling across codebase
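
A minimal sketch of the nil-guard pattern referenced above. The Processor and ResourceMonitor shapes here are assumptions made for illustration; only the idea of guarding the optional resourceMonitor collaborator comes from the commit:

```go
package main

import "fmt"

// ResourceMonitor stands in for the optional collaborator; the real type in
// gibidify has a richer API.
type ResourceMonitor struct{}

func (m *ResourceMonitor) CreateFileProcessingContext(path string) {
	fmt.Println("monitoring", path)
}

// Processor may be constructed without a monitor (for example in tests), so
// call sites guard the pointer before using it.
type Processor struct {
	resourceMonitor *ResourceMonitor
}

func (p *Processor) processFile(path string) {
	if p.resourceMonitor != nil { // nil guard: monitoring is optional
		p.resourceMonitor.CreateFileProcessingContext(path)
	}
	// ... processing continues whether or not monitoring is enabled.
}

func main() {
	(&Processor{}).processFile("example.go") // safe even with a nil monitor
}
```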

* test: improve test infrastructure and coverage

- Add benchmarks for cli, fileproc, metrics
- Improve test coverage for cli, fileproc, config
- Refactor tests with helper functions
- Add shared test constants
- Fix test function naming conventions
- Reduce cognitive complexity in benchmark tests

* docs: update documentation and configuration examples

- Update CLAUDE.md with current project state
- Refresh README with new features
- Add usage and configuration examples
- Add SonarQube project configuration
- Consolidate config.example.yaml

* fix: resolve shellcheck warnings in scripts

- Use ./*.go instead of *.go to prevent dash-prefixed filenames
  from being interpreted as options (SC2035)
- Remove unreachable return statement after exit (SC2317)
- Remove obsolete gibidiutils/ directory reference

* chore(deps): upgrade go dependencies

* chore(lint): megalinter fixes

* fix: improve test coverage and fix file descriptor leaks

- Add defer r.Close() to fix pipe file descriptor leaks in benchmark tests (sketch after this list)
- Refactor TestProcessorConfigureFileTypes with helper functions and assertions
- Refactor TestProcessorLogFinalStats with output capture and keyword verification
- Use shared constants instead of literal strings (TestFilePNG, FormatMarkdown, etc.)
- Reduce cognitive complexity by extracting helper functions
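
A hedged sketch of the descriptor-leak fix above. The captureOutput helper is a hypothetical stand-in for the benchmark helpers; the load-bearing line is the deferred Close on the pipe's read end:

```go
package benchmark_test

import (
	"io"
	"os"
	"testing"
)

// captureOutput redirects output through a pipe and returns what was written.
// Without the deferred Close, every call leaks the read end's descriptor.
func captureOutput(b *testing.B, fn func(w *os.File)) string {
	b.Helper()

	r, w, err := os.Pipe()
	if err != nil {
		b.Fatal(err)
	}
	defer r.Close() // the fix: release the read end when the helper returns

	fn(w)
	_ = w.Close() // close the write end so ReadAll sees EOF

	data, err := io.ReadAll(r)
	if err != nil {
		b.Fatal(err)
	}
	return string(data)
}
```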

* fix: align test comments with function names

Remove underscores from test comments to match actual function names:
- benchmark/benchmark_test.go (2 fixes)
- fileproc/filetypes_config_test.go (4 fixes)
- fileproc/filetypes_registry_test.go (6 fixes)
- fileproc/processor_test.go (6 fixes)
- fileproc/resource_monitor_types_test.go (4 fixes)
- fileproc/writer_test.go (3 fixes)

* fix: various test improvements and bug fixes

- Remove duplicate maxCacheSize check in filetypes_registry_test.go
- Shorten long comment in processor_test.go to stay under 120 chars
- Remove flaky time.Sleep in collector_test.go, use >= 0 assertion
- Close pipe reader in benchmark_test.go to fix file descriptor leak
- Use ContinueOnError in flags_test.go to match ResetFlags behavior (see the sketch after this list)
- Add nil check for p.ui in processor_workers.go before UpdateProgress
- Fix resource_monitor_validation_test.go by setting hardMemoryLimitBytes directly
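
A small illustrative sketch for the ContinueOnError change. The flag names and the resetFlags helper are assumptions; only the error-handling mode comes from the commit:

```go
package cli_test

import (
	"flag"
	"io"
	"testing"
)

// resetFlags is a hypothetical stand-in for the project's ResetFlags helper.
// ContinueOnError makes Parse return an error instead of exiting the test
// binary, so tests can assert on bad input.
func resetFlags() *flag.FlagSet {
	fs := flag.NewFlagSet("gibidify", flag.ContinueOnError)
	fs.SetOutput(io.Discard) // keep usage text out of test logs
	fs.String("format", "markdown", "output format")
	return fs
}

func TestUnknownFlagIsReported(t *testing.T) {
	fs := resetFlags()
	if err := fs.Parse([]string{"--no-such-flag"}); err == nil {
		t.Fatal("expected an error for an unknown flag")
	}
}
```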

* chore(yaml): add missing document start markers

Add --- document start to YAML files to satisfy yamllint:
- .github/workflows/codeql.yml
- .github/workflows/build-test-publish.yml
- .github/workflows/security.yml
- .github/actions/setup/action.yml

* fix: guard nil resourceMonitor and fix test deadlock

- Guard resourceMonitor before CreateFileProcessingContext call
- Add ui.UpdateProgress on emergency stop and path error returns
- Fix potential deadlock in TestProcessFile using wg.Go with defer close
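
A hypothetical reconstruction of the deadlock fix. The channel and function names are illustrative; the two load-bearing pieces from the commit are sync.WaitGroup.Go (Go 1.25+) and closing the channel in a defer so the draining goroutine always exits:

```go
package fileproc_test

import "sync"

type writeRequest struct{ path string }

// runWithDrainer mirrors the test shape: a goroutine drains writeCh while
// process sends into it. Closing the channel in a defer guarantees the
// drainer finishes even if process returns early, so wg.Wait cannot hang.
func runWithDrainer(process func(chan<- writeRequest) error) error {
	writeCh := make(chan writeRequest)

	var wg sync.WaitGroup
	wg.Go(func() { // Go 1.25: Add(1), goroutine, and Done in one call
		for range writeCh {
			// consume requests until the channel is closed
		}
	})

	err := func() error {
		defer close(writeCh) // runs on every return path of process
		return process(writeCh)
	}()

	wg.Wait()
	return err
}
```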


// Package fileproc handles file processing, collection, and output formatting.
package fileproc

import (
	"encoding/json"
	"fmt"
	"io"
	"os"

	"github.com/ivuorinen/gibidify/shared"
)

// JSONWriter handles JSON format output with streaming support.
type JSONWriter struct {
	outFile   *os.File
	firstFile bool
}

// NewJSONWriter creates a new JSON writer.
func NewJSONWriter(outFile *os.File) *JSONWriter {
	return &JSONWriter{
		outFile:   outFile,
		firstFile: true,
	}
}
// Start writes the JSON header.
func (w *JSONWriter) Start(prefix, suffix string) error {
	// Start JSON structure
	if _, err := w.outFile.WriteString(`{"prefix":"`); err != nil {
		return shared.WrapError(err, shared.ErrorTypeIO, shared.CodeIOWrite, "failed to write JSON start")
	}

	// Write escaped prefix
	escapedPrefix := shared.EscapeForJSON(prefix)
	if err := shared.WriteWithErrorWrap(w.outFile, escapedPrefix, "failed to write JSON prefix", ""); err != nil {
		return fmt.Errorf("writing JSON prefix: %w", err)
	}

	if _, err := w.outFile.WriteString(`","suffix":"`); err != nil {
		return shared.WrapError(err, shared.ErrorTypeIO, shared.CodeIOWrite, "failed to write JSON middle")
	}

	// Write escaped suffix
	escapedSuffix := shared.EscapeForJSON(suffix)
	if err := shared.WriteWithErrorWrap(w.outFile, escapedSuffix, "failed to write JSON suffix", ""); err != nil {
		return fmt.Errorf("writing JSON suffix: %w", err)
	}

	if _, err := w.outFile.WriteString(`","files":[`); err != nil {
		return shared.WrapError(err, shared.ErrorTypeIO, shared.CodeIOWrite, "failed to write JSON files start")
	}

	return nil
}

// WriteFile writes a file entry in JSON format.
func (w *JSONWriter) WriteFile(req WriteRequest) error {
	if !w.firstFile {
		if _, err := w.outFile.WriteString(","); err != nil {
			return shared.WrapError(err, shared.ErrorTypeIO, shared.CodeIOWrite, "failed to write JSON separator")
		}
	}
	w.firstFile = false

	if req.IsStream {
		return w.writeStreaming(req)
	}

	return w.writeInline(req)
}

// Close writes the JSON footer.
func (w *JSONWriter) Close() error {
	// Close JSON structure
	if _, err := w.outFile.WriteString("]}"); err != nil {
		return shared.WrapError(err, shared.ErrorTypeIO, shared.CodeIOWrite, "failed to write JSON end")
	}

	return nil
}
// writeStreaming writes a large file as JSON in streaming chunks.
func (w *JSONWriter) writeStreaming(req WriteRequest) error {
	defer shared.SafeCloseReader(req.Reader, req.Path)

	language := detectLanguage(req.Path)

	// Write file start
	escapedPath := shared.EscapeForJSON(req.Path)
	if _, err := fmt.Fprintf(w.outFile, `{"path":"%s","language":"%s","content":"`, escapedPath, language); err != nil {
		return shared.WrapError(
			err,
			shared.ErrorTypeIO,
			shared.CodeIOWrite,
			"failed to write JSON file start",
		).WithFilePath(req.Path)
	}

	// Stream content with JSON escaping
	if err := w.streamJSONContent(req.Reader, req.Path); err != nil {
		return err
	}

	// Write file end
	if _, err := w.outFile.WriteString(`"}`); err != nil {
		return shared.WrapError(
			err,
			shared.ErrorTypeIO,
			shared.CodeIOWrite,
			"failed to write JSON file end",
		).WithFilePath(req.Path)
	}

	return nil
}

// writeInline writes a small file directly as JSON.
func (w *JSONWriter) writeInline(req WriteRequest) error {
	language := detectLanguage(req.Path)

	fileData := FileData{
		Path:     req.Path,
		Content:  req.Content,
		Language: language,
	}

	encoded, err := json.Marshal(fileData)
	if err != nil {
		return shared.WrapError(
			err,
			shared.ErrorTypeProcessing,
			shared.CodeProcessingEncode,
			"failed to marshal JSON",
		).WithFilePath(req.Path)
	}

	if _, err := w.outFile.Write(encoded); err != nil {
		return shared.WrapError(
			err,
			shared.ErrorTypeIO,
			shared.CodeIOWrite,
			"failed to write JSON file",
		).WithFilePath(req.Path)
	}

	return nil
}
// streamJSONContent streams content with JSON escaping.
func (w *JSONWriter) streamJSONContent(reader io.Reader, path string) error {
	if err := shared.StreamContent(
		reader, w.outFile, shared.FileProcessingStreamChunkSize, path, func(chunk []byte) []byte {
			escaped := shared.EscapeForJSON(string(chunk))
			return []byte(escaped)
		},
	); err != nil {
		return fmt.Errorf("streaming JSON content: %w", err)
	}

	return nil
}

// startJSONWriter starts the JSON format writer loop for writeCh using startFormatWriter.
func startJSONWriter(outFile *os.File, writeCh <-chan WriteRequest, done chan<- struct{}, prefix, suffix string) {
	startFormatWriter(outFile, writeCh, done, prefix, suffix, func(f *os.File) FormatWriter {
		return NewJSONWriter(f)
	})
}
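
For context, a minimal usage sketch of the writer above; it is not part of json_writer.go. It assumes WriteRequest exposes the Path, Content, and IsStream fields that WriteFile reads, and that output goes to a plain *os.File:

```go
package fileproc

import "os"

// writeBundle is a hypothetical caller shown for illustration only.
func writeBundle(files map[string]string) error {
	out, err := os.Create("bundle.json")
	if err != nil {
		return err
	}
	defer out.Close()

	w := NewJSONWriter(out)
	if err := w.Start("collected by gibidify", ""); err != nil {
		return err
	}
	for path, content := range files {
		// Small files go inline; large files would set IsStream and Reader instead.
		if err := w.WriteFile(WriteRequest{Path: path, Content: content}); err != nil {
			return err
		}
	}
	return w.Close() // emits the closing "]}"
}
```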