Commit

Merge pull request #406 from wader/fix-lint-cleanup
Some lint and cleanup fixes
wader authored Nov 14, 2023
2 parents cc83c4f + 975a4cc commit a728c3b
Showing 11 changed files with 59 additions and 40 deletions.
5 changes: 3 additions & 2 deletions cmd/ydls/main.go
@@ -152,8 +152,9 @@ func download(y ydls.YDLS) {
mediaWriter = os.Stdout
}

io.Copy(mediaWriter, dr.Media)
dr.Media.Close()
_, err = io.Copy(mediaWriter, dr.Media)
fatalIfErrorf(err, "copy failed")
_ = dr.Media.Close()
dr.Wait()
fmt.Print("\n")
}
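
The main.go hunk is the usual errcheck remediation: the error from io.Copy is now captured and acted on, and the error from Close is discarded explicitly with the blank identifier. The fatalIfErrorf helper is not shown in this diff; a minimal sketch of how such a helper could look (name kept, signature and behaviour assumed, not taken from the repository):

package main

import (
    "io"
    "log"
    "os"
    "strings"
)

// fatalIfErrorf is a hypothetical stand-in for the helper called in the diff:
// if err is non-nil, log the formatted message together with the error and exit.
func fatalIfErrorf(err error, format string, args ...interface{}) {
    if err == nil {
        return
    }
    log.Fatalf(format+": %v", append(args, err)...)
}

func main() {
    // Same shape as the patched download path: check io.Copy's error,
    // explicitly ignore the error returned by Close.
    media := io.NopCloser(strings.NewReader("media bytes"))
    _, err := io.Copy(os.Stdout, media)
    fatalIfErrorf(err, "copy failed")
    _ = media.Close()
}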
7 changes: 3 additions & 4 deletions internal/ffmpeg/ffmpeg.go
@@ -200,7 +200,9 @@ func (pi *ProbeInfo) UnmarshalJSON(text []byte) error {
type probeInfo ProbeInfo
var piDummy probeInfo
err := json.Unmarshal(text, &piDummy)
json.Unmarshal(text, &piDummy.Raw)
if err := json.Unmarshal(text, &piDummy.Raw); err != nil {
return err
}
*pi = ProbeInfo(piDummy)
return err
}
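
The UnmarshalJSON hunk keeps the common custom-decoding trick: ProbeInfo is decoded through a locally defined type (probeInfo) that has the same fields but none of the methods, so the inner json.Unmarshal cannot recurse back into UnmarshalJSON; the lint fix is simply that the second Unmarshal into the raw map is now error-checked. A self-contained sketch of the same pattern — Info and info are placeholder names for ProbeInfo and its alias type, Raw mirrors the field seen in the diff:

package main

import (
    "encoding/json"
    "fmt"
)

// Info keeps both decoded fields and the raw JSON object.
type Info struct {
    Name string                 `json:"name"`
    Raw  map[string]interface{} `json:"-"`
}

// info has the same fields as Info but none of its methods, so the
// json.Unmarshal calls below do not recurse into Info.UnmarshalJSON.
type info Info

func (i *Info) UnmarshalJSON(text []byte) error {
    var dummy info
    if err := json.Unmarshal(text, &dummy); err != nil {
        return err
    }
    if err := json.Unmarshal(text, &dummy.Raw); err != nil {
        return err
    }
    *i = Info(dummy)
    return nil
}

func main() {
    var i Info
    if err := json.Unmarshal([]byte(`{"name":"x","extra":1}`), &i); err != nil {
        panic(err)
    }
    fmt.Println(i.Name, i.Raw["extra"])
}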
@@ -367,7 +369,6 @@ func (f *FFmpeg) Start(ctx context.Context) error {
type ffmpegOutput struct {
arg string // ffmpeg output argument (pipe:, url)
}
outputs := []*ffmpegOutput{}
outputsMap := map[Output]*ffmpegOutput{}

closeAfterStartFns := []func(){}
@@ -456,14 +457,12 @@ func (f *FFmpeg) Start(ctx context.Context) error {
pw.Close()
})

outputs = append(outputs, fo)
outputsMap[o] = fo
case URL:
fo := &ffmpegOutput{
arg: string(o),
}

outputs = append(outputs, fo)
outputsMap[o] = fo
default:
panic(fmt.Sprintf("unknown output type %v", o))
45 changes: 27 additions & 18 deletions internal/ffmpeg/ffmpeg_test.go
@@ -8,7 +8,6 @@ import (
"context"
"io"
"io/ioutil"
"log"
"os"
"testing"
"time"
@@ -106,14 +105,14 @@ func TestReader(t *testing.T) {

ffmpegP := &FFmpeg{
Streams: []Stream{
Stream{
{
Maps: []Map{
Map{
{
Input: Reader{Reader: dummy1},
Specifier: "a:0",
Codec: AudioCodec("libvorbis"),
},
Map{
{
Input: Reader{Reader: dummy2},
Specifier: "v:0",
Codec: VideoCodec("vp8"),
@@ -130,7 +129,9 @@ func TestReader(t *testing.T) {
if err := ffmpegP.Start(context.Background()); err != nil {
t.Fatal(err)
}
ffmpegP.Wait()
if err := ffmpegP.Wait(); err != nil {
t.Fatal(err)
}

pi, piErr := Probe(context.Background(), Reader{Reader: bytes.NewBuffer(output.Bytes())}, nil, nil)
if piErr != nil {
@@ -158,33 +159,37 @@ func TestURLInput(t *testing.T) {
dummy1 := mustDummy(t, "matroska", "mp3", "h264")
tempFile1, tempFile1Err := ioutil.TempFile("", "TestURLInput")
if tempFile1Err != nil {
log.Fatal(tempFile1)
t.Fatal(tempFile1)
}
defer os.Remove(tempFile1.Name())
io.Copy(tempFile1, dummy1)
if _, err := io.Copy(tempFile1, dummy1); err != nil {
t.Fatal(err)
}
tempFile1.Close()

dummy2 := mustDummy(t, "matroska", "mp3", "h264")
tempFile2, tempFile2Err := ioutil.TempFile("", "TestURLInput")
if tempFile2Err != nil {
log.Fatal(tempFile2Err)
t.Fatal(tempFile2Err)
}
defer os.Remove(tempFile2.Name())
io.Copy(tempFile2, dummy2)
if _, err := io.Copy(tempFile2, dummy2); err != nil {
t.Fatal(err)
}
tempFile2.Close()

output := &closeBuffer{}

ffmpegP := &FFmpeg{
Streams: []Stream{
Stream{
{
Maps: []Map{
Map{
{
Input: URL(tempFile1.Name()),
Specifier: "a:0",
Codec: AudioCodec("libvorbis"),
},
Map{
{
Input: URL(tempFile2.Name()),
Specifier: "v:0",
Codec: VideoCodec("vp8"),
@@ -201,7 +206,9 @@ func TestURLInput(t *testing.T) {
if err := ffmpegP.Start(context.Background()); err != nil {
t.Fatal(err)
}
ffmpegP.Wait()
if err := ffmpegP.Wait(); err != nil {
t.Fatal(err)
}

pi, piErr := Probe(context.Background(), Reader{Reader: bytes.NewBuffer(output.Bytes())}, nil, nil)
if piErr != nil {
@@ -232,9 +239,9 @@ func TestWriterOutput(t *testing.T) {

ffmpegP := &FFmpeg{
Streams: []Stream{
Stream{
{
Maps: []Map{
Map{
{
Input: Reader{Reader: dummy1},
Specifier: "a:0",
Codec: AudioCodec("copy"),
@@ -243,9 +250,9 @@ func TestWriterOutput(t *testing.T) {
Format: Format{Name: "matroska"},
Output: Writer{Writer: outputAudio},
},
Stream{
{
Maps: []Map{
Map{
{
Input: Reader{Reader: dummy1},
Specifier: "v:0",
Codec: VideoCodec("copy"),
@@ -262,7 +269,9 @@ func TestWriterOutput(t *testing.T) {
if err := ffmpegP.Start(context.Background()); err != nil {
t.Fatal(err)
}
ffmpegP.Wait()
if err := ffmpegP.Wait(); err != nil {
t.Fatal(err)
}

piAudio, piErr := Probe(context.Background(), Reader{Reader: bytes.NewBuffer(outputAudio.Bytes())}, nil, nil)
if piErr != nil {
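
Most of the churn in ffmpeg_test.go is the gofmt -s style simplification: inside a slice literal such as []Stream{...} or []Map{...} the element type is already known, so repeating Stream{...} / Map{...} is redundant and the elements can be written as bare {...}. The remaining changes swap log.Fatal for t.Fatal (which fails only the current test instead of exiting the whole test binary) and check the errors from io.Copy and Wait. A small illustration of the literal simplification, with an arbitrary Point type:

package main

import "fmt"

type Point struct{ X, Y int }

func main() {
    // Before simplification: the element type is repeated for every element.
    verbose := []Point{Point{1, 2}, Point{3, 4}}

    // After gofmt -s: the element type is inferred from the slice type.
    simplified := []Point{{1, 2}, {3, 4}}

    fmt.Println(verbose, simplified)
}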
4 changes: 2 additions & 2 deletions internal/id3v2/id3v2.go
@@ -38,8 +38,8 @@ func binaryWriteMany(w io.Writer, fields []interface{}) (int, error) {
return tn, nil
}

// Write write ID3v2 tag
func Write(w io.Writer, frames []Frame) (int, error) {
// Encode write ID3v2 tag
func Encode(w io.Writer, frames []Frame) (int, error) {
var err error
framesBuf := &bytes.Buffer{}

4 changes: 3 additions & 1 deletion internal/id3v2/id3v2_test.go
@@ -19,7 +19,9 @@ func TestWrite(t *testing.T) {
}

actual := &bytes.Buffer{}
Write(actual, frames)
if _, err := Encode(actual, frames); err != nil {
t.Fatal(err)
}

expected := []byte(
"ID3\x03\x00\x00\x00\x00\x00[" +
8 changes: 6 additions & 2 deletions internal/rereader/rereader_test.go
@@ -11,7 +11,9 @@ func testShort(t *testing.T, r io.Reader, w io.Writer, restart func()) {
b1 := make([]byte, 1)
b2 := make([]byte, 2)

w.Write([]byte{0, 1, 2, 3})
if _, err := w.Write([]byte{0, 1, 2, 3}); err != nil {
t.Fatal(err)
}

if n, err := io.ReadFull(r, b2); err != nil || n != 2 || !reflect.DeepEqual(b2[:n], []byte{0, 1}) {
t.Errorf("read %#v %#v %#v", err, n, b2)
@@ -31,7 +33,9 @@ func testLarger(t *testing.T, r io.Reader, w io.Writer, restart func()) {
func testLarger(t *testing.T, r io.Reader, w io.Writer, restart func()) {
b4 := make([]byte, 4)

w.Write([]byte{0, 1})
if _, err := w.Write([]byte{0, 1}); err != nil {
t.Fatal(err)
}

// read buffer larger than reread buffer
if n, err := io.ReadFull(r, b4); err == nil || n != 2 || !reflect.DeepEqual(b4[:n], []byte{0, 1}) {
4 changes: 3 additions & 1 deletion internal/stringprioset/stringprioset_test.go
@@ -70,7 +70,9 @@ func TestString(t *testing.T) {

func TestUnmarshalJSON(t *testing.T) {
s := Set{}
json.Unmarshal([]byte(`["a", "b"]`), &s)
if err := json.Unmarshal([]byte(`["a", "b"]`), &s); err != nil {
t.Fatal(err)
}

if !s.Member("a") {
t.Error("expectd a to be a member")
4 changes: 2 additions & 2 deletions internal/ydls/handler.go
@@ -91,7 +91,7 @@ func (yh *Handler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
if r.URL.Path == "/" && r.URL.RawQuery == "" {
if yh.IndexTmpl != nil {
w.Header().Set("Content-Security-Policy", "default-src 'self'; style-src 'unsafe-inline'; form-action 'self'")
yh.IndexTmpl.Execute(w, yh.YDLS.Config.Formats)
_ = yh.IndexTmpl.Execute(w, yh.YDLS.Config.Formats)
} else {
http.Error(w, "Not found", http.StatusNotFound)
}
@@ -148,7 +148,7 @@ func (yh *Handler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
)
}

io.Copy(w, dr.Media)
_, _ = io.Copy(w, dr.Media)
dr.Media.Close()
dr.Wait()
}
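
In handler.go the errors are not checked but deliberately dropped with _ = and _, _ =. This is the conventional way to tell errcheck (and readers) that ignoring the error is intentional: once a response body is being streamed there is usually nothing useful left to do if the write fails. A minimal sketch of the same idea in an HTTP handler — a hypothetical handler, not the repository's:

package main

import (
    "io"
    "net/http"
    "strings"
)

func mediaHandler(w http.ResponseWriter, r *http.Request) {
    media := strings.NewReader("media bytes")
    // A write error here usually means the client went away; there is no
    // sensible recovery, so the error is discarded explicitly.
    _, _ = io.Copy(w, media)
}

func main() {
    http.HandleFunc("/media", mediaHandler)
    // ListenAndServe only returns on failure; its error is worth checking.
    if err := http.ListenAndServe("127.0.0.1:8080", nil); err != nil {
        panic(err)
    }
}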
1 change: 0 additions & 1 deletion internal/ydls/requestoptions.go
@@ -173,7 +173,6 @@ func NewRequestOptionsFromOpts(opts []string, formats Formats) (RequestOptions,
return RequestOptions{}, fmt.Errorf("invalid items count")
}
r.Items = uint(itemsN)
strconv.ParseUint("", 10, 32)
} else if _, ok := codecNames[opt]; ok {
r.Codecs = append(r.Codecs, opt)
} else if tr, trErr := timerange.NewTimeRangeFromString(opt); trErr == nil {
Expand Down
8 changes: 4 additions & 4 deletions internal/ydls/ydls.go
@@ -482,15 +482,15 @@ func (ydls *YDLS) downloadRSS(

// this needs to use a goroutine to have same api as DownloadFormat etc
go func() {
w.Write([]byte(xml.Header))
_, _ = w.Write([]byte(xml.Header))
rssRoot := RSSFromYDLSInfo(
options,
ydlResult.Info,
linkIconRawURL,
)
feedWriter := xml.NewEncoder(w)
feedWriter.Indent("", " ")
feedWriter.Encode(rssRoot)
_ = feedWriter.Encode(rssRoot)
w.Close()
close(waitCh)
}()
@@ -882,15 +882,15 @@ func (ydls *YDLS) downloadFormat(
// TODO: ffmpeg mp3enc id3 writer does not work with streamed output
// (id3v2 header length update requires seek)
if options.RequestOptions.Format.Prepend == "id3v2" {
id3v2.Write(w, id3v2FramesFromMetadata(metadata, ydlResult.Info))
_, _ = id3v2.Encode(w, id3v2FramesFromMetadata(metadata, ydlResult.Info))
}
log.Printf("Starting to copy")
n, err := io.Copy(w, ffmpegR)

log.Printf("Copy ffmpeg done (n=%v err=%v)", n, err)

cleanupOnDoneFn()
ffmpegP.Wait()
_ = ffmpegP.Wait()
ffmpegStderrPW.Close()

log.Printf("Done")
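
The downloadRSS hunk writes the feed by emitting xml.Header first and then streaming the feed struct through an indenting xml.Encoder; the lint change is only that the previously unchecked return values are now explicitly discarded. A stripped-down sketch of that encoding pattern — Feed and Item are placeholders for the RSS structs built by RSSFromYDLSInfo:

package main

import (
    "encoding/xml"
    "os"
)

// Feed and Item stand in for the RSS structs used by the real code.
type Feed struct {
    XMLName xml.Name `xml:"rss"`
    Title   string   `xml:"channel>title"`
    Items   []Item   `xml:"channel>item"`
}

type Item struct {
    Title string `xml:"title"`
    Link  string `xml:"link"`
}

func main() {
    feed := Feed{
        Title: "example feed",
        Items: []Item{{Title: "first", Link: "https://example.com/1"}},
    }

    // Same shape as the diff: XML declaration first, then an indented encoder.
    _, _ = os.Stdout.Write([]byte(xml.Header))
    enc := xml.NewEncoder(os.Stdout)
    enc.Indent("", "  ")
    if err := enc.Encode(&feed); err != nil {
        panic(err)
    }
}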
9 changes: 6 additions & 3 deletions internal/ydls/ydls_test.go
@@ -6,7 +6,6 @@ import (
"context"
"encoding/xml"
"io"
"io/ioutil"
"net/url"
"strings"
"sync"
@@ -279,7 +278,9 @@ func TestContextCloseDownload(t *testing.T) {
cancelFn()
wg.Done()
}()
io.Copy(ioutil.Discard, dr.Media)
if _, err := io.Copy(io.Discard, dr.Media); err != nil {
t.Fatal(err)
}
cancelFn()
wg.Wait()
}
@@ -477,6 +478,8 @@ func TestDownloadFormatFallback(t *testing.T) {
if err != nil {
t.Error("expected no error while download")
}
io.Copy(ioutil.Discard, dr.Media)
if _, err := io.Copy(io.Discard, dr.Media); err != nil {
t.Fatal(err)
}
cancelFn()
}
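
ydls_test.go also drops the io/ioutil import: since Go 1.16, ioutil.Discard is just a forwarder for io.Discard, so draining a reader no longer needs the deprecated package. The pattern used by the updated tests, with the copy error checked:

package main

import (
    "io"
    "strings"
)

func main() {
    media := strings.NewReader("media bytes")
    // Drain the reader, discarding the data but not the error.
    if _, err := io.Copy(io.Discard, media); err != nil {
        panic(err)
    }
}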
