diff --git a/internal/linter/linter.go b/internal/linter/linter.go
new file mode 100644
index 0000000..4183b11
--- /dev/null
+++ b/internal/linter/linter.go
@@ -0,0 +1,60 @@
+package linter
+
+import (
+	"github.com/veggiemonk/awesome-docker/internal/parser"
+)
+
+// Result holds all lint issues found.
+type Result struct {
+	Issues   []Issue
+	Errors   int
+	Warnings int
+}
+
+// Lint checks an entire parsed document for issues.
+func Lint(doc parser.Document) Result {
+	var result Result
+
+	// Collect all entries for duplicate checking
+	allEntries := collectEntries(doc.Sections)
+	for _, issue := range CheckDuplicates(allEntries) {
+		addIssue(&result, issue)
+	}
+
+	// Check each section
+	lintSections(doc.Sections, &result)
+
+	return result
+}
+
+func lintSections(sections []parser.Section, result *Result) {
+	for _, s := range sections {
+		for _, e := range s.Entries {
+			for _, issue := range CheckEntry(e) {
+				addIssue(result, issue)
+			}
+		}
+		for _, issue := range CheckSorted(s.Entries) {
+			addIssue(result, issue)
+		}
+		lintSections(s.Children, result)
+	}
+}
+
+func collectEntries(sections []parser.Section) []parser.Entry {
+	var all []parser.Entry
+	for _, s := range sections {
+		all = append(all, s.Entries...)
+		all = append(all, collectEntries(s.Children)...)
+	}
+	return all
+}
+
+func addIssue(result *Result, issue Issue) {
+	result.Issues = append(result.Issues, issue)
+	if issue.Severity == SeverityError {
+		result.Errors++
+	} else {
+		result.Warnings++
+	}
+}
diff --git a/internal/linter/linter_test.go b/internal/linter/linter_test.go
new file mode 100644
index 0000000..bd4fd5e
--- /dev/null
+++ b/internal/linter/linter_test.go
@@ -0,0 +1,111 @@
+package linter
+
+import (
+	"testing"
+
+	"github.com/veggiemonk/awesome-docker/internal/parser"
+)
+
+func TestRuleDescriptionCapital(t *testing.T) {
+	entry := parser.Entry{Name: "Test", URL: "https://example.com", Description: "lowercase start.", Line: 10}
+	issues := CheckEntry(entry)
+	found := false
+	for _, issue := range issues {
+		if issue.Rule == RuleDescriptionCapital {
+			found = true
+		}
+	}
+	if !found {
+		t.Error("expected RuleDescriptionCapital issue for lowercase description")
+	}
+}
+
+func TestRuleDescriptionPeriod(t *testing.T) {
+	entry := parser.Entry{Name: "Test", URL: "https://example.com", Description: "No period at end", Line: 10}
+	issues := CheckEntry(entry)
+	found := false
+	for _, issue := range issues {
+		if issue.Rule == RuleDescriptionPeriod {
+			found = true
+		}
+	}
+	if !found {
+		t.Error("expected RuleDescriptionPeriod issue")
+	}
+}
+
+func TestRuleSorted(t *testing.T) {
+	entries := []parser.Entry{
+		{Name: "Zebra", URL: "https://z.com", Description: "Z.", Line: 1},
+		{Name: "Alpha", URL: "https://a.com", Description: "A.", Line: 2},
+	}
+	issues := CheckSorted(entries)
+	if len(issues) == 0 {
+		t.Error("expected sorting issue")
+	}
+}
+
+func TestRuleSortedOK(t *testing.T) {
+	entries := []parser.Entry{
+		{Name: "Alpha", URL: "https://a.com", Description: "A.", Line: 1},
+		{Name: "Zebra", URL: "https://z.com", Description: "Z.", Line: 2},
+	}
+	issues := CheckSorted(entries)
+	if len(issues) != 0 {
+		t.Errorf("expected no sorting issues, got %d", len(issues))
+	}
+}
+
+func TestRuleDuplicateURL(t *testing.T) {
+	entries := []parser.Entry{
+		{Name: "A", URL: "https://example.com/a", Description: "A.", Line: 1},
+		{Name: "B", URL: "https://example.com/a", Description: "B.", Line: 5},
+	}
+	issues := CheckDuplicates(entries)
+	if len(issues) == 0 {
+		t.Error("expected duplicate URL issue")
+	}
+}
+
+func TestValidEntry(t *testing.T) {
+	entry := parser.Entry{Name: "Good", URL: "https://example.com", Description: "A good project.", Line: 10}
+	issues := CheckEntry(entry)
+	if len(issues) != 0 {
+		t.Errorf("expected no issues, got %v", issues)
+	}
+}
+
+func TestFixDescriptionCapital(t *testing.T) {
+	entry := parser.Entry{Name: "Test", URL: "https://example.com", Description: "lowercase.", Line: 10}
+	fixed := FixEntry(entry)
+	if fixed.Description != "Lowercase." {
+		t.Errorf("description = %q, want %q", fixed.Description, "Lowercase.")
+	}
+}
+
+func TestFixDescriptionPeriod(t *testing.T) {
+	entry := parser.Entry{Name: "Test", URL: "https://example.com", Description: "No period", Line: 10}
+	fixed := FixEntry(entry)
+	if fixed.Description != "No period." {
+		t.Errorf("description = %q, want %q", fixed.Description, "No period.")
+	}
+}
+
+func TestLintDocument(t *testing.T) {
+	doc := parser.Document{
+		Sections: []parser.Section{
+			{
+				Title: "Tools",
+				Level: 2,
+				Entries: []parser.Entry{
+					{Name: "Zebra", URL: "https://z.com", Description: "Z tool.", Line: 1},
+					{Name: "Alpha", URL: "https://a.com", Description: "a tool", Line: 2},
+				},
+			},
+		},
+	}
+	result := Lint(doc)
+	if result.Errors == 0 {
+		t.Error("expected errors (unsorted, lowercase, no period)")
+	}
+}
diff --git a/internal/linter/rules.go b/internal/linter/rules.go
new file mode 100644
index 0000000..03bc1b2
--- /dev/null
+++ b/internal/linter/rules.go
@@ -0,0 +1,134 @@
+package linter
+
+import (
+	"fmt"
+	"sort"
+	"strings"
+	"unicode"
+
+	"github.com/veggiemonk/awesome-docker/internal/parser"
+)
+
+// Rule identifies a linting rule.
+type Rule string
+
+const (
+	RuleDescriptionCapital Rule = "description-capital"
+	RuleDescriptionPeriod  Rule = "description-period"
+	RuleSorted             Rule = "sorted"
+	RuleDuplicateURL       Rule = "duplicate-url"
+)
+
+// Severity of a lint issue.
+type Severity int
+
+const (
+	SeverityError Severity = iota
+	SeverityWarning
+)
+
+// Issue is a single lint problem found.
+type Issue struct {
+	Rule     Rule
+	Severity Severity
+	Line     int
+	Message  string
+}
+
+func (i Issue) String() string {
+	sev := "ERROR"
+	if i.Severity == SeverityWarning {
+		sev = "WARN"
+	}
+	return fmt.Sprintf("[%s] line %d: %s (%s)", sev, i.Line, i.Message, i.Rule)
+}
+
+// CheckEntry validates a single entry against formatting rules.
+func CheckEntry(e parser.Entry) []Issue {
+	var issues []Issue
+
+	if r := []rune(e.Description); len(r) > 0 && !unicode.IsUpper(r[0]) { // inspect the first rune, not the first byte, so multi-byte UTF-8 letters are judged correctly (matches FixEntry)
+		issues = append(issues, Issue{
+			Rule:     RuleDescriptionCapital,
+			Severity: SeverityError,
+			Line:     e.Line,
+			Message:  fmt.Sprintf("%q: description should start with a capital letter", e.Name),
+		})
+	}
+
+	if len(e.Description) > 0 && !strings.HasSuffix(e.Description, ".") {
+		issues = append(issues, Issue{
+			Rule:     RuleDescriptionPeriod,
+			Severity: SeverityError,
+			Line:     e.Line,
+			Message:  fmt.Sprintf("%q: description should end with a period", e.Name),
+		})
+	}
+
+	return issues
+}
+
+// CheckSorted verifies entries are in alphabetical order (case-insensitive).
+func CheckSorted(entries []parser.Entry) []Issue {
+	var issues []Issue
+	for i := 1; i < len(entries); i++ {
+		prev := strings.ToLower(entries[i-1].Name)
+		curr := strings.ToLower(entries[i].Name)
+		if prev > curr {
+			issues = append(issues, Issue{
+				Rule:     RuleSorted,
+				Severity: SeverityError,
+				Line:     entries[i].Line,
+				Message:  fmt.Sprintf("%q should come before %q (alphabetical order)", entries[i].Name, entries[i-1].Name),
+			})
+		}
+	}
+	return issues
+}
+
+// CheckDuplicates finds entries with the same URL across the entire document.
+func CheckDuplicates(entries []parser.Entry) []Issue {
+	var issues []Issue
+	seen := make(map[string]int) // URL -> first line number
+	for _, e := range entries {
+		url := strings.TrimRight(e.URL, "/")
+		if firstLine, exists := seen[url]; exists {
+			issues = append(issues, Issue{
+				Rule:     RuleDuplicateURL,
+				Severity: SeverityError,
+				Line:     e.Line,
+				Message:  fmt.Sprintf("duplicate URL %q (first seen at line %d)", e.URL, firstLine),
+			})
+		} else {
+			seen[url] = e.Line
+		}
+	}
+	return issues
+}
+
+// FixEntry returns a copy of the entry with auto-fixable issues corrected.
+func FixEntry(e parser.Entry) parser.Entry {
+	fixed := e
+	if len(fixed.Description) > 0 {
+		// Capitalize first letter
+		runes := []rune(fixed.Description)
+		runes[0] = unicode.ToUpper(runes[0])
+		fixed.Description = string(runes)
+
+		// Ensure period at end
+		if !strings.HasSuffix(fixed.Description, ".") {
+			fixed.Description += "."
+		}
+	}
+	return fixed
+}
+
+// SortEntries returns a sorted copy of entries (case-insensitive by Name).
+func SortEntries(entries []parser.Entry) []parser.Entry {
+	sorted := make([]parser.Entry, len(entries))
+	copy(sorted, entries)
+	sort.Slice(sorted, func(i, j int) bool {
+		return strings.ToLower(sorted[i].Name) < strings.ToLower(sorted[j].Name)
+	})
+	return sorted
+}