Commit

Upgrade golangci-lint, more linters (#194)
joeturki authored Jan 13, 2025
1 parent 3f77272 commit edabe23
Showing 15 changed files with 279 additions and 167 deletions.
47 changes: 30 additions & 17 deletions .golangci.yml
@@ -25,17 +25,32 @@ linters-settings:
- ^os.Exit$
- ^panic$
- ^print(ln)?$
varnamelen:
max-distance: 12
min-name-length: 2
ignore-type-assert-ok: true
ignore-map-index-ok: true
ignore-chan-recv-ok: true
ignore-decls:
- i int
- n int
- w io.Writer
- r io.Reader
- b []byte

linters:
enable:
- asciicheck # Simple linter to check that your code does not contain non-ASCII identifiers
- bidichk # Checks for dangerous unicode character sequences
- bodyclose # checks whether HTTP response body is closed successfully
- containedctx # containedctx is a linter that detects struct contained context.Context field
- contextcheck # check the function whether use a non-inherited context
- cyclop # checks function and package cyclomatic complexity
- decorder # check declaration order and count of types, constants, variables and functions
- dogsled # Checks assignments with too many blank identifiers (e.g. x, _, _, _, := f())
- dupl # Tool for code clone detection
- durationcheck # check for two durations multiplied together
- err113 # Golang linter to check the errors handling expressions
- errcheck # Errcheck is a program for checking for unchecked errors in go programs. These unchecked errors can be critical bugs in some cases
- errchkjson # Checks types passed to the json encoding functions. Reports unsupported types and optionally reports occasions where the check for the returned error can be omitted.
- errname # Checks that sentinel errors are prefixed with the `Err` and error types are suffixed with the `Error`.
@@ -46,66 +46,64 @@ linters:
- forcetypeassert # finds forced type assertions
- gci # Gci control golang package import order and make it always deterministic.
- gochecknoglobals # Checks that no globals are present in Go code
- gochecknoinits # Checks that no init functions are present in Go code
- gocognit # Computes and checks the cognitive complexity of functions
- goconst # Finds repeated strings that could be replaced by a constant
- gocritic # The most opinionated Go source code linter
- gocyclo # Computes and checks the cyclomatic complexity of functions
- godot # Check if comments end in a period
- godox # Tool for detection of FIXME, TODO and other comment keywords
- err113 # Golang linter to check the errors handling expressions
- gofmt # Gofmt checks whether code was gofmt-ed. By default this tool runs with -s option to check for code simplification
- gofumpt # Gofumpt checks whether code was gofumpt-ed.
- goheader # Checks is file header matches to pattern
- goimports # Goimports does everything that gofmt does. Additionally it checks unused imports
- gomoddirectives # Manage the use of 'replace', 'retract', and 'excludes' directives in go.mod.
- gomodguard # Allow and block list linter for direct Go module dependencies. This is different from depguard where there are different block types for example version constraints and module recommendations.
- goprintffuncname # Checks that printf-like functions are named with `f` at the end
- gosec # Inspects source code for security problems
- gosimple # Linter for Go source code that specializes in simplifying a code
- govet # Vet examines Go source code and reports suspicious constructs, such as Printf calls whose arguments do not align with the format string
- grouper # An analyzer to analyze expression groups.
- importas # Enforces consistent import aliases
- ineffassign # Detects when assignments to existing variables are not used
- lll # Reports long lines
- maintidx # maintidx measures the maintainability index of each function.
- makezero # Finds slice declarations with non-zero initial length
- misspell # Finds commonly misspelled English words in comments
- nakedret # Finds naked returns in functions greater than a specified function length
- nestif # Reports deeply nested if statements
- nilerr # Finds the code that returns nil even if it checks that the error is not nil.
- nilnil # Checks that there is no simultaneous return of `nil` error and an invalid value.
- nlreturn # nlreturn checks for a new line before return and branch statements to increase code clarity
- noctx # noctx finds sending http request without context.Context
- predeclared # find code that shadows one of Go's predeclared identifiers
- revive # golint replacement, finds style mistakes
- staticcheck # Staticcheck is a go vet on steroids, applying a ton of static analysis checks
- stylecheck # Stylecheck is a replacement for golint
- tagliatelle # Checks the struct tags.
- tenv # tenv is analyzer that detects using os.Setenv instead of t.Setenv since Go1.17
- tparallel # tparallel detects inappropriate usage of t.Parallel() method in your Go test codes
- thelper # thelper detects golang test helpers without t.Helper() call and checks the consistency of test helpers
- typecheck # Like the front-end of a Go compiler, parses and type-checks Go code
- unconvert # Remove unnecessary type conversions
- unparam # Reports unused function parameters
- unused # Checks Go code for unused constants, variables, functions and types
- varnamelen # checks that the length of a variable's name matches its scope
- wastedassign # wastedassign finds wasted assignment statements
- whitespace # Tool for detection of leading and trailing whitespace
disable:
- depguard # Go linter that checks if package imports are in a list of acceptable packages
- containedctx # containedctx is a linter that detects struct contained context.Context field
- cyclop # checks function and package cyclomatic complexity
- funlen # Tool for detection of long functions
- gocyclo # Computes and checks the cyclomatic complexity of functions
- godot # Check if comments end in a period
- gomnd # An analyzer to detect magic numbers.
- gochecknoinits # Checks that no init functions are present in Go code
- gomodguard # Allow and block list linter for direct Go module dependencies. This is different from depguard where there are different block types for example version constraints and module recommendations.
- interfacebloat # A linter that checks length of interface.
- ireturn # Accept Interfaces, Return Concrete Types
- lll # Reports long lines
- maintidx # maintidx measures the maintainability index of each function.
- makezero # Finds slice declarations with non-zero initial length
- nakedret # Finds naked returns in functions greater than a specified function length
- nestif # Reports deeply nested if statements
- nlreturn # nlreturn checks for a new line before return and branch statements to increase code clarity
- mnd # An analyzer to detect magic numbers
- nolintlint # Reports ill-formed or insufficient nolint directives
- paralleltest # paralleltest detects missing usage of t.Parallel() method in your Go test
- prealloc # Finds slice declarations that could potentially be preallocated
- promlinter # Check Prometheus metrics naming via promlint
- rowserrcheck # checks whether Err of rows is checked successfully
- sqlclosecheck # Checks that sql.Rows and sql.Stmt are closed.
- testpackage # linter that makes you use a separate _test package
- thelper # thelper detects golang test helpers without t.Helper() call and checks the consistency of test helpers
- varnamelen # checks that the length of a variable's name matches its scope
- tparallel # tparallel detects inappropriate usage of t.Parallel() method in your Go test codes
- wrapcheck # Checks that errors returned from external packages are wrapped
- wsl # Whitespace Linter - Forces you to use empty lines!

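
The varnamelen settings added above are what drive the identifier renames later in this commit. As a rough illustration, here is a hypothetical snippet (not part of this commit) that stays within the configured rules: i int and w io.Writer are whitelisted via ignore-decls, and every other name is at least two characters long.

package example

import (
	"fmt"
	"io"
)

// writeLines stays within the varnamelen rules above: "w io.Writer" and
// "i int" are listed in ignore-decls, so the one-letter names are accepted.
func writeLines(w io.Writer, lines []string) error {
	for i := 0; i < len(lines); i++ {
		if _, err := fmt.Fprintln(w, lines[i]); err != nil {
			return err
		}
	}

	return nil
}

A one-letter name outside that list is only reported once its uses stretch past max-distance (12 lines), which is why the lexer variable in base_lexer_test.go below is renamed from l to lex while short-lived locals are left alone.
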
17 changes: 13 additions & 4 deletions base_lexer.go
@@ -21,6 +21,7 @@ func (e syntaxError) Error() string {
if e.i < 0 {
e.i = 0
}

return fmt.Sprintf("sdp: syntax error at pos %d: %s", e.i, strconv.QuoteToASCII(e.s[e.i:e.i+1]))
}

@@ -38,6 +39,7 @@ func (l *baseLexer) unreadByte() error {
return errDocumentStart
}
l.pos--

return nil
}

@@ -47,6 +49,7 @@ func (l *baseLexer) readByte() (byte, error) {
}
ch := l.value[l.pos]
l.pos++

return ch, nil
}

@@ -64,7 +67,7 @@ func (l *baseLexer) nextLine() error {
}
}

func (l *baseLexer) readWhitespace() error {
func (l *baseLexer) readWhitespace() error { //nolint:cyclop
for {
ch, err := l.readByte()
if errors.Is(err, io.EOF) {
@@ -78,7 +81,7 @@ func (l *baseLexer) readWhitespace() error {
}
}

func (l *baseLexer) readUint64Field() (i uint64, err error) {
func (l *baseLexer) readUint64Field() (i uint64, err error) { //nolint:cyclop
for {
ch, err := l.readByte()
if errors.Is(err, io.EOF) && i > 0 {
@@ -91,13 +94,15 @@ func (l *baseLexer) readUint64Field() (i uint64, err error) {
if err := l.unreadByte(); err != nil {
return i, err
}

break
}

if isWhitespace(ch) {
if err := l.readWhitespace(); err != nil {
return i, err
}

break
}

@@ -130,7 +135,7 @@ func (l *baseLexer) readUint64Field() (i uint64, err error) {
return i, nil
}

// Returns next field on this line or empty string if no more fields on line
// Returns next field on this line or empty string if no more fields on line.
func (l *baseLexer) readField() (string, error) {
start := l.pos
var stop int
@@ -147,20 +152,23 @@ func (l *baseLexer) readField() (string, error) {
if err := l.unreadByte(); err != nil {
return "", err
}

break
}

if isWhitespace(ch) {
if err := l.readWhitespace(); err != nil {
return "", err
}

break
}
}

return l.value[start:stop], nil
}

// Returns symbols until line end
// Returns symbols until line end.
func (l *baseLexer) readLine() (string, error) {
start := l.pos
trim := 1
@@ -212,5 +220,6 @@ func anyOf(element string, data ...string) bool {
return true
}
}

return false
}
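
Two conventions recur through the base_lexer.go hunks above: a blank line is added before each return (the nlreturn / whitespace style), and a //nolint:<linter> directive, optionally with a reason, is attached where a finding such as cyclop's complexity threshold is deliberately accepted. The following is a minimal, hypothetical sketch of both, not code from this repository:

package example

import "errors"

// errEmpty is a sentinel error; errname expects the err prefix.
var errEmpty = errors.New("empty input")

// firstByte returns the first byte of s. The doc comment ends with a period
// to satisfy godot.
func firstByte(s string) (byte, error) { //nolint:cyclop // complexity accepted here
	if len(s) == 0 {
		return 0, errEmpty
	}

	return s[0], nil // the blank line above is what nlreturn asks for
}
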
20 changes: 10 additions & 10 deletions base_lexer_test.go
@@ -8,7 +8,7 @@ import (
"testing"
)

func TestLexer(t *testing.T) {
func TestLexer(t *testing.T) { //nolint:cyclop
t.Run("single field", func(t *testing.T) {
for k, value := range map[string]string{
"clean": "aaa",
@@ -38,62 +38,62 @@ func TestLexer(t *testing.T) {
})

t.Run("many fields", func(t *testing.T) {
l := &baseLexer{value: "aaa 123\nf1 f2\nlast"}
lex := &baseLexer{value: "aaa 123\nf1 f2\nlast"}

t.Run("first line", func(t *testing.T) {
field, err := l.readField()
field, err := lex.readField()
if err != nil {
t.Fatal(err)
}
if field != "aaa" {
t.Errorf("aaa not parsed, got: '%v'", field)
}

value, err := l.readUint64Field()
value, err := lex.readUint64Field()
if err != nil {
t.Fatal(err)
}
if value != 123 {
t.Errorf("aaa not parsed, got: '%v'", field)
}

if err := l.nextLine(); err != nil {
if err := lex.nextLine(); err != nil {
t.Fatal(err)
}
})

t.Run("second line", func(t *testing.T) {
field, err := l.readField()
field, err := lex.readField()
if err != nil {
t.Fatal(err)
}
if field != "f1" {
t.Errorf("value not parsed, got: '%v'", field)
}

field, err = l.readField()
field, err = lex.readField()
if err != nil {
t.Fatal(err)
}
if field != "f2" {
t.Errorf("value not parsed, got: '%v'", field)
}

field, err = l.readField()
field, err = lex.readField()
if err != nil {
t.Fatal(err)
}
if field != "" {
t.Errorf("value not parsed, got: '%v'", field)
}

if err := l.nextLine(); err != nil {
if err := lex.nextLine(); err != nil {
t.Fatal(err)
}
})

t.Run("last line", func(t *testing.T) {
field, err := l.readField()
field, err := lex.readField()
if err != nil {
t.Fatal(err)
}
10 changes: 6 additions & 4 deletions common_description.go
@@ -85,10 +85,10 @@ func (c *Address) marshalInto(b []byte) []byte {
func (c Address) marshalSize() (size int) {
size = len(c.Address)
if c.TTL != nil {
size += 1 + lenUint(uint64(*c.TTL))
size += 1 + lenUint(uint64(*c.TTL)) //nolint:gosec // G115
}
if c.Range != nil {
size += 1 + lenUint(uint64(*c.Range))
size += 1 + lenUint(uint64(*c.Range)) //nolint:gosec // G115
}

return
@@ -111,6 +111,7 @@ func (b Bandwidth) marshalInto(d []byte) []byte {
d = append(d, "X-"...)
}
d = append(append(d, b.Type...), ':')

return strconv.AppendUint(d, b.Bandwidth, 10)
}

@@ -120,6 +121,7 @@ func (b Bandwidth) marshalSize() (size int) {
}

size += len(b.Type) + 1 + lenUint(b.Bandwidth)

return
}

@@ -145,14 +147,14 @@ type Attribute struct {
Value string
}

// NewPropertyAttribute constructs a new attribute
// NewPropertyAttribute constructs a new attribute.
func NewPropertyAttribute(key string) Attribute {
return Attribute{
Key: key,
}
}

// NewAttribute constructs a new attribute
// NewAttribute constructs a new attribute.
func NewAttribute(key, value string) Attribute {
return Attribute{
Key: key,
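
The //nolint:gosec // G115 comments above silence gosec rule G115, which flags integer conversions that can overflow: TTL and Range are signed integers here, and converting a negative value to uint64 would wrap around. Below is a standalone, hypothetical sketch of the behaviour the rule warns about; the values are made up and none of this code is from the commit.

package main

import "fmt"

func main() {
	ttl := -1 // a negative TTL is invalid anyway, but the compiler cannot know that

	// gosec G115: a signed-to-unsigned conversion can wrap around.
	// -1 becomes 18446744073709551615 here, which is why marshalSize
	// suppresses the warning only where the value is known to be valid.
	fmt.Println(uint64(ttl)) //nolint:gosec // G115, intentional for the demo
}

Scoping the suppression to a single line, with the rule ID given as the reason, keeps gosec active for the rest of the package.
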
10 changes: 5 additions & 5 deletions direction.go
@@ -5,17 +5,17 @@ package sdp

import "errors"

// Direction is a marker for transmission direction of an endpoint
// Direction is a marker for transmission direction of an endpoint.
type Direction int

const (
// DirectionSendRecv is for bidirectional communication
// DirectionSendRecv is for bidirectional communication.
DirectionSendRecv Direction = iota + 1
// DirectionSendOnly is for outgoing communication
// DirectionSendOnly is for outgoing communication.
DirectionSendOnly
// DirectionRecvOnly is for incoming communication
// DirectionRecvOnly is for incoming communication.
DirectionRecvOnly
// DirectionInactive is for no communication
// DirectionInactive is for no communication.
DirectionInactive
)

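
The direction.go changes are pure godot fixes: every constant comment now ends with a period. Together with stylecheck and revive, the expected shape of doc comments on an iota-based constant block looks roughly like the hypothetical example below (generic names, not this package's API):

package example

// Mode is an example enum-style type. The comment starts with the identifier
// name (stylecheck/revive) and ends with a period (godot).
type Mode int

const (
	// ModeRead is the read-only mode.
	ModeRead Mode = iota + 1
	// ModeWrite is the read-write mode.
	ModeWrite
)
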
(Diff of the remaining 10 changed files not shown.)
