Skip to content

Commit

Permalink
lint fix
Browse files Browse the repository at this point in the history
  • Loading branch information
bmflynn committed Jan 2, 2025
1 parent 208d5f8 commit 98fe38b
Show file tree
Hide file tree
Showing 15 changed files with 80 additions and 83 deletions.
2 changes: 1 addition & 1 deletion cmd/collections/writers.go
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ func writeCollection(zult internal.CollectionResult, w io.Writer, long bool) err
t.SetCaption(col["abstract"])
}
t.Render()
w.Write([]byte{'\n'})
_, _ = w.Write([]byte{'\n'})
}
return zult.Err()
}
Expand Down
9 changes: 5 additions & 4 deletions cmd/granules/cmd.go
Original file line number Diff line number Diff line change
Expand Up @@ -125,7 +125,7 @@ NASA Earthdata Authentication
if destdir != "" {
err = doDownload(context.TODO(), api, params, destdir, token, netrc, clobber, yes, concurrency)
} else {
err = do(api, params, output, fields, yes)
err = do(api, params, output, fields)
}
if err != nil {
log.Fatalf("failed! %s", err)
Expand Down Expand Up @@ -181,11 +181,12 @@ func init() {
"results and must load all results in memory before rendering. Make sure to provide enough "+
"filters to limit the result set to a reasonable size or use json or csv output.")

flags.MarkHidden("shortname")
flags.MarkDeprecated("shortname", "Provide the collection concept id instead")
cobra.CheckErr(flags.MarkHidden("shortname"))
cobra.CheckErr(flags.MarkDeprecated("yes", "Not used and will be ignored"))
cobra.CheckErr(flags.MarkDeprecated("shortname", "Provide the collection concept id instead"))
}

func do(api *internal.CMRSearchAPI, params *internal.SearchGranuleParams, writerName string, fields []string, yes bool) error {
func do(api *internal.CMRSearchAPI, params *internal.SearchGranuleParams, writerName string, fields []string) error {
var writer outputWriter
switch writerName {
case "short":
Expand Down
4 changes: 3 additions & 1 deletion cmd/granules/download.go
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,9 @@ func doDownload(

if !yes && zult.Hits() > maxResultsWithoutPrompt {
fmt.Printf("There are more than %v, CTRL-C to cancel or ENTER to continue\n", maxResultsWithoutPrompt)
bufio.NewReader(os.Stdin).ReadBytes('\n')
if _, err := bufio.NewReader(os.Stdin).ReadBytes('\n'); err != nil {
return err
}
}

if internal.Exists(destdir) {
Expand Down
59 changes: 29 additions & 30 deletions cmd/granules/order.go
Original file line number Diff line number Diff line change
@@ -1,32 +1,31 @@
package granules

import "strings"

// sortByFields lists the granule attributes accepted as sort keys.
// A user-supplied sort value may carry a single leading "-" to
// request descending order; the "-" is not part of the field name.
var sortByFields = []string{
	"entry_title",
	"data_size",
	"granule_ur",
	"producer_granule_id",
	"project",
	"provider",
	"short_name",
	"start_date",
	"end_date",
	"version",
	"platform",
	"instrument",
	"sensor",
	"day_night_flag",
	"cloud_cover",
	"revision_date",
}

// validSortField reports whether val names a supported sort field,
// optionally prefixed with a single "-" for descending order.
//
// Fix: the previous implementation used strings.ReplaceAll, which
// removed '-' characters anywhere in the value, so malformed input
// such as "start-date" or "--start_date" was wrongly accepted. Only
// one leading '-' is meaningful, so strip exactly that prefix.
func validSortField(val string) bool {
	val = strings.TrimPrefix(val, "-")
	for _, s := range sortByFields {
		if val == s {
			return true
		}
	}
	return false
}
// TODO: impl sortBy
// var sortByFields = []string{
// "entry_title",
// "data_size",
// "granule_ur",
// "producer_granule_id",
// "project",
// "provider",
// "short_name",
// "start_date",
// "end_date",
// "version",
// "platform",
// "instrument",
// "sensor",
// "day_night_flag",
// "cloud_cover",
// "revision_date",
// }
//
// func validSortField(val string) bool {
// val = strings.ReplaceAll(val, "-", "")
// for _, s := range sortByFields {
// if val == s {
// return true
// }
// }
// return false
// }
14 changes: 8 additions & 6 deletions cmd/granules/writers.go
Original file line number Diff line number Diff line change
Expand Up @@ -12,10 +12,6 @@ import (

type outputWriter func(internal.GranuleResult, io.Writer, []string) error

// longWriter is an outputWriter stub: it consumes nothing from zult,
// writes nothing to w, and always reports success.
// NOTE(review): appears to be an unimplemented "long" output format —
// confirm whether it is still needed before keeping it.
func longWriter(zult internal.GranuleResult, w io.Writer, fields []string) error {
return nil
}

func shortWriter(zult internal.GranuleResult, w io.Writer, _ []string) error {
fields := []string{"name", "size", "native_id", "concept_id", "revision_id"}
t := table.NewWriter()
Expand Down Expand Up @@ -82,15 +78,21 @@ func jsonWriter(zult internal.GranuleResult, w io.Writer, fields []string) error
}

// csvWriter renders granules streamed on zult.Ch as CSV: first a
// header row of the requested field names, then one row per granule
// with values looked up via granuleToMap.
// NOTE(review): rows are built with strings.Join and no quoting or
// escaping, so any field value containing a comma or newline will
// produce a malformed row — consider encoding/csv. TODO confirm
// whether field values are guaranteed comma-free.
func csvWriter(zult internal.GranuleResult, w io.Writer, fields []string) error {
// Header row of field names.
_, err := w.Write([]byte(strings.Join(fields, ",") + "\n"))
if err != nil {
return err
}

// One CSV row per granule received on the result channel.
for granule := range zult.Ch {
vals := []string{}
m := granuleToMap(granule, fields)
for _, name := range fields {
// %v formatting; presumably values are simple scalars — verify
// against granuleToMap before relying on the rendered form.
vals = append(vals, fmt.Sprintf("%v", m[name]))
}
_, err := w.Write([]byte(strings.Join(vals, ",") + "\n"))
if err != nil {
return err
}
}
// Surface any error the search produced while streaming results.
return zult.Err()
}
Expand Down
6 changes: 0 additions & 6 deletions cmd/root.go
Original file line number Diff line number Diff line change
Expand Up @@ -9,12 +9,6 @@ import (
"github.com/spf13/cobra"
)

// failOnError panics when err is non-nil; it is a convenience guard
// for errors that cannot be meaningfully handled at the call site.
func failOnError(err error) {
	if err == nil {
		return
	}
	panic(err)
}

var rootCmd = &cobra.Command{
Use: "cmrfetch",
Short: "Search for and download NASA Earthdata collections and granules",
Expand Down
11 changes: 5 additions & 6 deletions internal/cmrsearch_collections_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,9 @@ package internal
import (
"context"
"fmt"
"io/ioutil"
"log"
"net/http"
"net/http/httptest"
"os"
"testing"
"time"

Expand Down Expand Up @@ -57,7 +56,7 @@ func TestSearchCollectionParams(t *testing.T) {
}

func Test_newCollectionFromUMM(t *testing.T) {
dat, err := ioutil.ReadFile("testdata/aerdt_collection.umm_json")
dat, err := os.ReadFile("testdata/aerdt_collection.umm_json")
require.NoError(t, err)

col := newCollectionFromUMM(gjson.Parse(string(dat)).Get("items.0"))
Expand Down Expand Up @@ -85,7 +84,7 @@ func TestSearchCollections(t *testing.T) {
w.Header().Set("cmr-hits", hits)
}
w.WriteHeader(status)
w.Write([]byte(body))
_, _ = w.Write([]byte(body))
}))
url := fmt.Sprintf("http://%s", ts.Listener.Addr())
origURL := defaultCMRURL
Expand All @@ -99,7 +98,7 @@ func TestSearchCollections(t *testing.T) {
doGet := func(t *testing.T, params *SearchCollectionParams) ScrollResult[Collection] {
t.Helper()

api := NewCMRSearchAPI(log.Default())
api := NewCMRSearchAPI()
// make sure we're not waiting long
zult, err := api.SearchCollections(context.Background(), params)
require.NoError(t, err)
Expand All @@ -108,7 +107,7 @@ func TestSearchCollections(t *testing.T) {
}

t.Run("get", func(t *testing.T) {
dat, err := ioutil.ReadFile("testdata/aerdt_collection.umm_json")
dat, err := os.ReadFile("testdata/aerdt_collection.umm_json")
require.NoError(t, err)
require.True(t, gjson.Valid(string(dat)))

Expand Down
3 changes: 0 additions & 3 deletions internal/cmrsearch_granules.go
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,6 @@ import (
"github.com/tidwall/gjson"
)

const notProvided = "Not provided"

func joinFloats(vals []float64) string {
s := []string{}
for _, v := range vals {
Expand Down Expand Up @@ -192,7 +190,6 @@ func findDownloadURLs(zult *gjson.Result, directAccess bool) map[string]string {
}

func newGranulesFromUMM(zult gjson.Result) []Granule {

granules := []Granule{}

for _, gran := range findGranules(zult) {
Expand Down
11 changes: 5 additions & 6 deletions internal/cmrsearch_granules_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@ package internal
import (
"context"
"fmt"
"log"
"net/http"
"net/http/httptest"
"os"
Expand Down Expand Up @@ -144,7 +143,6 @@ func Test_newGranuleFromUMM(t *testing.T) {
"-131.310653687,66.963340759,-92.430793762,55.710681915,-37.703670502,63.907997131,-4.32655859,82.950004578,-131.310653687,66.963340759",
}, gran.BoundingBox)
})

}

func TestSearchGranules(t *testing.T) {
Expand All @@ -155,7 +153,8 @@ func TestSearchGranules(t *testing.T) {
w.Header().Set("cmr-hits", hits)
}
w.WriteHeader(status)
w.Write([]byte(body))
_, err := w.Write([]byte(body))
require.NoError(t, err)
}))
url := fmt.Sprintf("http://%s", ts.Listener.Addr())
origURL := defaultCMRURL
Expand All @@ -169,7 +168,7 @@ func TestSearchGranules(t *testing.T) {
doGet := func(t *testing.T, params *SearchGranuleParams) ScrollResult[Granule] {
t.Helper()

api := NewCMRSearchAPI(log.Default())
api := NewCMRSearchAPI()
// make sure we're not waiting long
zult, err := api.SearchGranules(context.Background(), params)
require.NoError(t, err)
Expand Down Expand Up @@ -199,7 +198,7 @@ func TestSearchGranules(t *testing.T) {
})
}

func testDecodeArchiveInfo(t *testing.T) {
func TestDecodeArchiveInfo(t *testing.T) {
ar := gjson.Parse(`
[
{
Expand Down Expand Up @@ -235,7 +234,7 @@ func testDecodeArchiveInfo(t *testing.T) {
require.Equal(t, "ffffffffffffffffffffffffffffffff", info.Checksum)

info = infos["CAL_LID_L1-Standard-V4-51.2016-08-31T23-21-32ZD.hdf.met"]
require.Equal(t, "8.0 KB", info.Size)
require.Equal(t, "8 KB", info.Size)
require.Equal(t, "MD5", info.ChecksumAlg)
require.Equal(t, "3e84cf5f8ffb0e97627ff9462cec8534", info.Checksum)
}
6 changes: 3 additions & 3 deletions internal/cmrsearch_keywords_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@ package internal
import (
"context"
"fmt"
"log"
"net/http"
"net/http/httptest"
"testing"
Expand All @@ -19,7 +18,8 @@ func TestSearchFacets(t *testing.T) {
w.Header().Set("cmr-hits", hits)
}
w.WriteHeader(status)
w.Write([]byte(body))
_, err := w.Write([]byte(body))
require.NoError(t, err)
}))
url := fmt.Sprintf("http://%s", ts.Listener.Addr())
origURL := defaultCMRURL
Expand All @@ -33,7 +33,7 @@ func TestSearchFacets(t *testing.T) {
doGet := func(t *testing.T, val string, types []string) ScrollResult[Facet] {
t.Helper()

api := NewCMRSearchAPI(log.Default())
api := NewCMRSearchAPI()
// make sure we're not waiting long
zult, err := api.SearchFacets(context.Background(), val, types)
require.NoError(t, err)
Expand Down
5 changes: 3 additions & 2 deletions internal/cmrsearch_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,8 @@ func TestCMRSearchAPI(t *testing.T) {
w.Header().Set("cmr-hits", hits)
}
w.WriteHeader(status)
w.Write([]byte(body))
_, err := w.Write([]byte(body))
require.NoError(t, err)
}))
url := fmt.Sprintf("http://%s", ts.Listener.Addr())
return ts, url
Expand All @@ -29,7 +30,7 @@ func TestCMRSearchAPI(t *testing.T) {
doGet := func(t *testing.T, url string) ScrollResult[gjson.Result] {
t.Helper()

api := NewCMRSearchAPI(nil)
api := NewCMRSearchAPI()
// make sure we're not waiting long
ctx, cancel := context.WithTimeout(context.Background(), time.Second)
defer cancel()
Expand Down
7 changes: 4 additions & 3 deletions internal/fetch_http_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@ import (
"context"
"fmt"
"io"
"io/ioutil"
"net/http"
"net/http/httptest"
"os"
Expand Down Expand Up @@ -60,7 +59,8 @@ func mockNetrc(t *testing.T) func() {

netrc, err := os.CreateTemp("", "")
require.NoError(t, err)
ioutil.WriteFile(netrc.Name(), []byte("machine testhost.com login LOGIN password PASSWORD"), 0o644)
err = os.WriteFile(netrc.Name(), []byte("machine testhost.com login LOGIN password PASSWORD"), 0o644)
require.NoError(t, err)

orig := defaultNetrcFinder
defaultNetrcFinder = func() (string, error) {
Expand Down Expand Up @@ -94,7 +94,8 @@ func TestHTTPFetcher(t *testing.T) {
t.Run("ok", func(t *testing.T) {
body := []byte("xxx")
svr := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
w.Write(body)
_, err := w.Write(body)
require.NoError(t, err)
}))
defer svr.Close()

Expand Down
4 changes: 2 additions & 2 deletions internal/fetch_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@ package internal
import (
"bytes"
"crypto/md5"
"io/ioutil"
"os"
"path/filepath"
"testing"
Expand Down Expand Up @@ -85,7 +84,8 @@ func Test_findNetrc(t *testing.T) {
defer os.RemoveAll(dir)

path := filepath.Join(dir, ".netrc")
ioutil.WriteFile(path, []byte("xxx"), 0o644)
err = os.WriteFile(path, []byte("xxx"), 0o644)
require.NoError(t, err)
t.Setenv("HOME", dir)

gotpath, err := findNetrc()
Expand Down
11 changes: 7 additions & 4 deletions internal/holdingsapi.go
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,7 @@ import (

var defaultHoldingsURL = "https://cmr.earthdata.nasa.gov/search/provider_holdings.json"

type ProviderCollection struct {
}
type ProviderCollection struct{}

/*
{
Expand Down Expand Up @@ -105,7 +104,9 @@ func writeCachedProviderHoldings(providers []Provider) error {
return err
}
dir = filepath.Join(dir, "cmrfetch")
os.MkdirAll(dir, 0o755)
if err := os.MkdirAll(dir, 0o755); err != nil {
return fmt.Errorf("making dir: %w", err)
}
f, err := os.Create(filepath.Join(dir, "provider_holdings.json"))
if err != nil {
return err
Expand All @@ -132,6 +133,8 @@ func GetProviderHoldings() ([]Provider, error) {
return nil, err
}
// propagate cache-write failures so the caller knows holdings were not cached
writeCachedProviderHoldings(providers)
if err := writeCachedProviderHoldings(providers); err != nil {
return nil, fmt.Errorf("writing to cache dir: %w", err)
}
return providers, err
}
Loading

0 comments on commit 98fe38b

Please sign in to comment.