Add golangci-lint to workflows (#34)
* Add golangci-lint.yml file to workflows

* Make checkForNewVersion function not stop the code (#33)

* Add golangci-lint.yml file to workflows

* Add linting config file

* Add more configs for outputs.

Remove the config line in golangci-lint.yml

* Fix whitespace issues

* Add more linters and fix minor issues.

* Disable typecheck linter.

* Only run on new issues

* Remove on push to main
kavir1698 authored Apr 8, 2024
1 parent 817002b commit fe1bf21
Showing 36 changed files with 113 additions and 73 deletions.
16 changes: 16 additions & 0 deletions .github/workflows/golangci-lint.yml
@@ -0,0 +1,16 @@
name: golangci-lint
on:
  pull_request:
    branches: [ "*" ]

jobs:
  golangci-lint:
    name: golangci-lint
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: golangci-lint
        uses: golangci/golangci-lint-action@v3
        with:
          version: v1.50.1
          only-new-issues: true
71 changes: 71 additions & 0 deletions .golangci.yml
@@ -0,0 +1,71 @@
run:
  # Timeout for analysis, e.g. 30s, 5m, default is 1m
  deadline: 3m

  # Exit code when at least one issue was found, default is 1
  issues-exit-code: 1

  # Include test files or not, default is true
  tests: true

linters:
  # Enable specific linters
  enable:
    - whitespace
    - bodyclose
    - dupl
    - goprintffuncname
    - gosec
    - gosimple
    - govet
    - misspell
    - nakedret
    - prealloc
    - staticcheck
    - unparam
    - unused

  # Disable specific linters
  disable:
    - typecheck
    - gochecknoinits
    - goconst
    - gocritic
    - gocyclo
    - godot
    - gofmt
    - goimports
    - ineffassign
    - lll
    - stylecheck
    - unconvert
    - megacheck
    - gas
    - dogsled
    - errcheck

linters-settings:
  gocyclo:
    # Minimal code complexity to report
    min-complexity: 10
  golint:
    # Minimal confidence to report a problem, low by default
    min-confidence: 0.8
  unused:
    # Treat code as a program (not a library) and report unused exported identifiers; default is false.
    # The argument specifies the Go source directory to analyze.
    check-exported: false

output:
  formats:
    - format: colored-line-number
      path: stdout
  print-issued-lines: true
  print-linter-name: true
  uniq-by-line: true
  sort-results: true
  sort-order:
    - file
    - linter
    - severity
  show-stats: true
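As a rough illustration of what the newly enabled linters report: bodyclose flags any HTTP response whose body is never closed, which is relevant here because most commands in this repository issue HTTP requests. The sketch below is illustrative only and is not code from this commit; the package and function names are invented.

package lintexample

import (
	"io"
	"net/http"
)

// fetch demonstrates the rule that bodyclose enforces: every *http.Response
// body must be closed on all paths, otherwise the linter reports the call.
func fetch(client *http.Client, url string) ([]byte, error) {
	resp, err := client.Get(url)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close() // omitting this line is what bodyclose flags
	return io.ReadAll(resp.Body)
}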
2 changes: 0 additions & 2 deletions cmd/datasetArchiver/main.go
@@ -24,7 +24,6 @@ import (
var VERSION string

func main() {

var client = &http.Client{
Transport: &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: false}},
Timeout: 10 * time.Second}
@@ -125,5 +124,4 @@ func main() {
} else {
log.Fatalf("No archivable datasets remaining")
}

}
1 change: 0 additions & 1 deletion cmd/datasetCleaner/main.go
@@ -51,7 +51,6 @@ func isFlagPassed(name string) bool {
var VERSION string

func main() {

var client = &http.Client{
Transport: &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: false}},
Timeout: 10 * time.Second}
1 change: 0 additions & 1 deletion cmd/datasetGetProposal/main.go
@@ -23,7 +23,6 @@ import (
var VERSION string

func main() {

var client = &http.Client{
Transport: &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: false}},
Timeout: 10 * time.Second}
2 changes: 0 additions & 2 deletions cmd/datasetIngestor/main.go
@@ -370,7 +370,6 @@ func main() {
}
}
datasetIngestor.ResetUpdatedMetaData(originalMap, metaDataMap)

}
}

@@ -404,5 +403,4 @@ func main() {
for i := 0; i < len(datasetList); i++ {
fmt.Println(datasetList[i])
}

}
5 changes: 1 addition & 4 deletions cmd/datasetIngestor/main_test.go
@@ -6,14 +6,11 @@ import (
"testing"
)

func init() {
os.Setenv("TEST_MODE", "true")
}

// TestMainOutput is a test function that verifies the output of the main function.
// It captures the stdout, runs the main function, and checks if the output contains the expected strings.
// This just checks if the main function prints the help message.
func TestMainOutput(t *testing.T) {
os.Setenv("TEST_MODE", "true")
oldTestMode := "false"
defer os.Setenv("TEST_MODE", oldTestMode)
// Capture stdout
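The test change above replaces the init-based setup with an os.Setenv call inside the test plus a deferred restore. On Go 1.17 and later, t.Setenv covers both steps in one call: it sets the variable for the duration of the test and restores the previous value automatically. A minimal sketch of that alternative, not the code used in this commit:

package main

import "testing"

// TestMainOutputEnv sets TEST_MODE only for this test; the testing package
// restores the previous value when the test finishes, so no manual defer is needed.
func TestMainOutputEnv(t *testing.T) {
	t.Setenv("TEST_MODE", "true")
	// ... invoke the code under test here ...
}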
8 changes: 4 additions & 4 deletions cmd/datasetPublishData/main.go
@@ -1,7 +1,7 @@
/*
Purpose: copy all files from a publisheddata entry (list of datasets) to publication server
taking into account orginal sourceFolder names
taking into account original sourceFolder names
This script must be run on the retrieve servers (from root) and pushes data to the publication server
hosted in the DMZ. It requires that a previous retrieve job for the datasets, executed
@@ -79,9 +79,7 @@ func check(e error) {
}

func main() {

// check input parameters

publishFlag := flag.Bool("publish", false, "Defines if this command is meant to actually publish data (default nothing is done)")
publishedDataId := flag.String("publisheddata", "", "Defines to publish data froma given publishedData document ID")
// datasetId := flag.String("dataset", "", "Defines single datasetId to publish")
@@ -208,7 +206,6 @@ func findMinLength(arr []string) int {
// A Function that returns the longest common prefix path (runes)
// from the array of strings
func commonPrefix(arr []string) string {

n := len(arr)
if n == 1 {
return arr[0]
@@ -317,6 +314,9 @@ func createWebpage(urls []string, title string, doi string, datasetDetails []dat

myurl := APIServer + "/PublishedData/" + strings.Replace(publishedDataId, "/", "%2F", 1) + "?access_token=" + user["accessToken"]
req, err := http.NewRequest("PATCH", myurl, bytes.NewBuffer(cmm))
if err != nil {
log.Fatal(err)
}
req.Header.Set("Content-Type", "application/json")
// fmt.Printf("request to message broker:%v\n", req)
resp, err := client.Do(req)
2 changes: 0 additions & 2 deletions cmd/datasetPublishDataRetrieve/main.go
@@ -49,9 +49,7 @@ var scanner = bufio.NewScanner(os.Stdin)
var VERSION string

func main() {

// check input parameters

retrieveFlag := flag.Bool("retrieve", false, "Defines if this command is meant to actually retrieve data (default: retrieve actions are only displayed)")
publishedDataId := flag.String("publisheddata", "", "Defines to publish data from a given publishedData document ID")
userpass := flag.String("user", "", "Defines optional username:password string")
3 changes: 1 addition & 2 deletions cmd/datasetRetriever/main.go
@@ -1,6 +1,6 @@
/*
Purpose: retrieve datasets from intermediate cache, taking into account orginal sourceFolder names
Purpose: retrieve datasets from intermediate cache, taking into account original sourceFolder names
This script must be run on the machine having write access to the destination folder
@@ -147,7 +147,6 @@ func main() {
}
}
}

}

type Dataset struct {
1 change: 0 additions & 1 deletion cmd/waitForJobFinished/main.go
@@ -29,7 +29,6 @@ type Job struct {
var VERSION string

func main() {

var client = &http.Client{
Transport: &http.Transport{TLSClientConfig: &tls.Config{InsecureSkipVerify: false}},
Timeout: 10 * time.Second}
8 changes: 5 additions & 3 deletions datasetIngestor/addAttachment.go
@@ -12,7 +12,6 @@ import (
)

func AddAttachment(client *http.Client, APIServer string, datasetId string, metaDataDataset map[string]interface{}, accessToken string, attachmentFile string, caption string) {

// turn image into base64 structure:
imgFile, err := os.Open(attachmentFile)
if err != nil {
@@ -40,12 +39,12 @@ func AddAttachment(client *http.Client, APIServer string, datasetId string, meta
metaDataMap["caption"] = caption
metaDataMap["datasetId"] = datasetId
if ownerGroup, ok := metaDataDataset["ownerGroup"]; ok {
metaDataMap["ownerGroup"], ok = ownerGroup.(string)
metaDataMap["ownerGroup"], _ = ownerGroup.(string)
}
if accessGroups, ok := metaDataDataset["accessGroups"]; ok {
metaDataMap["accessGroups"], ok = accessGroups.([]string)
if !ok {
metaDataMap["accessGroups"], ok = accessGroups.([]interface{})
metaDataMap["accessGroups"], _ = accessGroups.([]interface{})
}
}

@@ -56,6 +55,9 @@
myurl := APIServer + "/Datasets/" + strings.Replace(datasetId, "/", "%2F", 1) + "/attachments?access_token=" + accessToken

req, err := http.NewRequest("POST", myurl, bytes.NewBuffer(bm))
if err != nil {
log.Fatal(err)
}
req.Header.Set("Content-Type", "application/json")
resp, err := client.Do(req)
if err != nil {
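The ownerGroup and accessGroups changes above assign the second result of the type assertion to the blank identifier instead of re-using ok: the two-value ("comma ok") assertion never panics, and storing a boolean that is never read afterwards is the kind of ineffectual assignment that linters such as staticcheck report. A minimal sketch of the idiom, with an invented helper name rather than code from this commit:

package lintexample

// asString converts an arbitrary metadata value to a string via the two-value
// type assertion; it returns "" when v does not hold a string.
func asString(v interface{}) string {
	s, _ := v.(string) // discarding the bool avoids an assignment that is never read
	return s
}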
9 changes: 4 additions & 5 deletions datasetIngestor/assembleFilelisting.go
@@ -26,7 +26,7 @@ type Datafile struct {
var skippedLinks = 0
var illegalFileNames = 0
var errorGroupIds = 0

const windows = "windows"
var scanner = bufio.NewScanner(os.Stdin)

// readLines reads a whole file into memory
@@ -47,7 +47,6 @@ func readLines(path string) ([]string, error) {
}

func AssembleFilelisting(sourceFolder string, filelistingPath string, skip *string) (fullFileArray []Datafile, startTime time.Time, endTime time.Time, owner string, numFiles int64, totalSize int64) {

// scan all lines
//fmt.Println("sourceFolder,listing:", sourceFolder, filelistingPath)
fullFileArray = make([]Datafile, 0)
@@ -82,7 +81,7 @@ func AssembleFilelisting(sourceFolder string, filelistingPath string, skip *stri
// for windows source path add colon in the leading drive character
// windowsSource := strings.Replace(sourceFolder, "/C/", "C:/", 1)
osSource := sourceFolder
if runtime.GOOS == "windows" {
if runtime.GOOS == windows {
re := regexp.MustCompile(`^\/([A-Z])\/`)
osSource = re.ReplaceAllString(sourceFolder, "$1:/")
}
@@ -130,7 +129,7 @@ func AssembleFilelisting(sourceFolder string, filelistingPath string, skip *stri
uidName, gidName := GetFileOwner(f)
// replace backslashes for windows path
modpath := path
if runtime.GOOS == "windows" {
if runtime.GOOS == windows {
modpath = strings.Replace(path, "\\", "/", -1)
}
fileStruct := Datafile{Path: modpath, User: uidName, Group: gidName, Perm: f.Mode().String(), Size: f.Size(), Time: f.ModTime().Format(time.RFC3339)}
@@ -145,6 +144,7 @@ func AssembleFilelisting(sourceFolder string, filelistingPath string, skip *stri
if err != nil {
log.Printf("Could not follow symlink for file:%v %v", pabs, err)
keep = false
log.Printf("keep variable set to %v", keep)
}
}
//fmt.Printf("Skip variable:%v\n", *skip)
@@ -187,7 +187,6 @@ Do you want to keep the link in dataset or skip it (D(efault)/k(eep)/s(kip) ?`,
log.Printf("You chose to remove the link %v -> %v.\n\n", modpath, pointee)
}
color.Unset()

}

// make sure that filenames do not contain characters like "\" or "*"
17 changes: 9 additions & 8 deletions datasetIngestor/checkMetadata.go
@@ -22,6 +22,8 @@ const (
DUMMY_OWNER = "x12345"
)

const unknown = "unknown"
const raw = "raw"

func CheckMetadata(client *http.Client, APIServer string, metadatafile string, user map[string]string, accessGroups []string) (metaDataMap map[string]interface{}, sourceFolder string, beamlineAccount bool, err error) {
metaDataMap, err = readMetadataFromFile(metadatafile)
@@ -171,7 +173,7 @@ func getHost() string {
// Try to get the hostname of the current machine.
hostname, err := os.Hostname()
if err != nil {
return "unknown"
return unknown
}

addrs, err := net.LookupIP(hostname)
@@ -197,8 +199,7 @@ func getHost() string {
}

// augmentMissingMetadata augments missing metadata fields.
func augmentMissingMetadata(user map[string]string, metaDataMap map[string]interface{}, client *http.Client, APIServer string, accessGroups []string) error {

func augmentMissingMetadata(user map[string]string, metaDataMap map[string]interface{}, client *http.Client, APIServer string, accessGroups []string) error {
color.Set(color.FgGreen)
// optionally augment missing owner metadata
if _, ok := metaDataMap["owner"]; !ok {
@@ -216,7 +217,7 @@ func augmentMissingMetadata(user map[string]string, metaDataMap map[string]inter
// and sourceFolderHost
if _, ok := metaDataMap["sourceFolderHost"]; !ok {
hostname := getHost()
if hostname == "unknown" {
if hostname == unknown {
log.Printf("sourceFolderHost is unknown")
} else {
metaDataMap["sourceFolderHost"] = hostname
@@ -229,7 +230,7 @@ func augmentMissingMetadata(user map[string]string, metaDataMap map[string]inter
if !ok {
return fmt.Errorf("type is not a string")
}
if dstype == "raw" {
if dstype == raw {
if _, ok := metaDataMap["principalInvestigator"]; !ok {
val, ok := metaDataMap["ownerGroup"]
if ok {
@@ -260,7 +261,7 @@ func augmentMissingMetadata(user map[string]string, metaDataMap map[string]inter
func checkMetadataValidity(client *http.Client, APIServer string, metaDataMap map[string]interface{}, dstype string) error {
myurl := ""
switch dstype {
case "raw":
case raw:
myurl = APIServer + "/RawDatasets/isValid"
case "derived":
myurl = APIServer + "/DerivedDatasets/isValid"
@@ -278,7 +279,7 @@ func checkMetadataValidity(client *http.Client, APIServer string, metaDataMap ma
if _, exists := metaDataMap["creationTime"]; !exists {
metaDataMap["creationTime"] = DUMMY_TIME
}
if metaDataMap["type"] == "raw" {
if metaDataMap["type"] == raw {
if _, exists := metaDataMap["endTime"]; !exists {
metaDataMap["endTime"] = DUMMY_TIME
}
@@ -362,4 +363,4 @@ func getSourceFolder(metaDataMap map[string]interface{}) (string, error) {
}

return sourceFolder, nil
}
}
1 change: 0 additions & 1 deletion datasetIngestor/getFileOwner_unix.go
@@ -11,7 +11,6 @@ import (
)

func GetFileOwner(f os.FileInfo) (uidName string, gidName string) {

uid := strconv.Itoa(int(f.Sys().(*syscall.Stat_t).Uid))
u, err2 := user.LookupId(uid)
if err2 != nil {
2 changes: 0 additions & 2 deletions datasetIngestor/scp.go
@@ -33,7 +33,6 @@ func keyString(k ssh.PublicKey) string {
}

func trustedHostKeyCallback(trustedKey string) ssh.HostKeyCallback {

if trustedKey == "" {
return func(_ string, _ net.Addr, k ssh.PublicKey) error {
log.Printf("WARNING: SSH-key verification is *NOT* in effect: to fix, add this trustedKey: %q", keyString(k))
@@ -151,7 +150,6 @@ func (c *Client) walkAndSend(w io.Writer, src string) error {
return err
}
} else {

// It is a directory need to walk and copy
dirStack := strings.Split(cleanedPath, fmt.Sprintf("%c", os.PathSeparator))
startStackLen := len(dirStack)