Compare "|" and "->" separators #7

Open · wants to merge 1 commit into base: funclight-apply
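
For readers scanning the diff below: this PR rewrites every lightweight function literal in the tree, changing the parameter separator from "|" to "->" so the two spellings can be compared side by side. A minimal sketch of the difference, as a standalone program: the apply helper and the slice values are hypothetical, the standard Go closure is shown for reference, and the func { ... } forms are the experimental syntax under comparison (not valid in standard Go).

package main

import "fmt"

// apply is a hypothetical helper, present only to give the literals something to bind to.
func apply(xs []int, f func(int) int) []int {
	out := make([]int, 0, len(xs))
	for _, x := range xs {
		out = append(out, f(x))
	}
	return out
}

func main() {
	// Standard Go today: the closure spells out its parameter list and types.
	fmt.Println(apply([]int{1, 2, 3}, func(x int) int { return x * x }))

	// The two experimental lightweight spellings compared by this PR
	// (parameter types are inferred from apply's signature):
	//   apply([]int{1, 2, 3}, func { x | return x * x })   // "|" separator (base branch)
	//   apply([]int{1, 2, 3}, func { x -> return x * x })  // "->" separator (this change)
}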
2 changes: 1 addition & 1 deletion src/archive/tar/fuzz_test.go
@@ -31,7 +31,7 @@ func FuzzReader(f *testing.F) {
}
f.Add(b.Bytes())

f.Fuzz(func { t, b |
f.Fuzz(func { t, b ->
r := NewReader(bytes.NewReader(b))
type file struct {
header *Header
4 changes: 2 additions & 2 deletions src/archive/tar/reader_test.go
@@ -622,7 +622,7 @@ func TestReader(t *testing.T) {
}}

for _, v := range vectors {
t.Run(path.Base(v.file), func { t |
t.Run(path.Base(v.file), func { t ->
f, err := os.Open(v.file)
if err != nil {
t.Fatalf("unexpected error: %v", err)
@@ -718,7 +718,7 @@ func TestPartialRead(t *testing.T) {
}}

for _, v := range vectors {
t.Run(path.Base(v.file), func { t |
t.Run(path.Base(v.file), func { t ->
f, err := os.Open(v.file)
if err != nil {
t.Fatalf("Open() error: %v", err)
8 changes: 4 additions & 4 deletions src/archive/tar/tar_test.go
@@ -794,9 +794,9 @@ func Benchmark(b *testing.B) {
}},
}}

b.Run("Writer", func { b |
b.Run("Writer", func { b ->
for _, v := range vectors {
b.Run(v.label, func { b |
b.Run(v.label, func { b ->
b.ReportAllocs()
for i := 0; i < b.N; i++ {
// Writing to io.Discard because we want to
@@ -818,7 +818,7 @@ func Benchmark(b *testing.B) {
}
})

b.Run("Reader", func { b |
b.Run("Reader", func { b ->
for _, v := range vectors {
var buf bytes.Buffer
var r bytes.Reader
@@ -830,7 +830,7 @@ func Benchmark(b *testing.B) {
tw.Write(file.body)
}
tw.Close()
b.Run(v.label, func { b |
b.Run(v.label, func { b ->
b.ReportAllocs()
// Read from the byte buffer.
for i := 0; i < b.N; i++ {
2 changes: 1 addition & 1 deletion src/archive/tar/writer.go
@@ -409,7 +409,7 @@ func (tw *Writer) writeRawHeader(blk *block, size int64, flag byte) error {
// It walks the directory tree starting at the root of the filesystem
// adding each file to the tar archive while maintaining the directory structure.
func (tw *Writer) AddFS(fsys fs.FS) error {
return fs.WalkDir(fsys, ".", func { name, d, err |
return fs.WalkDir(fsys, ".", func { name, d, err ->
if err != nil {
return err
}
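
As an aside on the hunk above: the AddFS doc comment describes the walk-and-archive behavior, and a minimal usage sketch in standard Go looks like the following (the output file and source directory names are hypothetical):

package main

import (
	"archive/tar"
	"log"
	"os"
)

func main() {
	// Hypothetical output path and source directory, for illustration only.
	out, err := os.Create("archive.tar")
	if err != nil {
		log.Fatal(err)
	}
	defer out.Close()

	tw := tar.NewWriter(out)
	// AddFS walks the fs.FS rooted at "." and adds each file to the tar
	// archive while maintaining the directory structure.
	if err := tw.AddFS(os.DirFS("testdata")); err != nil {
		log.Fatal(err)
	}
	if err := tw.Close(); err != nil {
		log.Fatal(err)
	}
}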
16 changes: 8 additions & 8 deletions src/archive/tar/writer_test.go
@@ -482,7 +482,7 @@ func TestWriter(t *testing.T) {
return x == y
}
for _, v := range vectors {
t.Run(path.Base(v.file), func { t |
t.Run(path.Base(v.file), func { t ->
const maxSize = 10 << 10 // 10KiB
buf := new(bytes.Buffer)
tw := NewWriter(iotest.TruncateWriter(buf, maxSize))
@@ -837,7 +837,7 @@ func (w *failOnceWriter) Write(b []byte) (int, error) {
}

func TestWriterErrors(t *testing.T) {
t.Run("HeaderOnly", func { t |
t.Run("HeaderOnly", func { t ->
tw := NewWriter(new(bytes.Buffer))
hdr := &Header{Name: "dir/", Typeflag: TypeDir}
if err := tw.WriteHeader(hdr); err != nil {
@@ -848,22 +848,22 @@ func TestWriterErrors(t *testing.T) {
}
})

t.Run("NegativeSize", func { t |
t.Run("NegativeSize", func { t ->
tw := NewWriter(new(bytes.Buffer))
hdr := &Header{Name: "small.txt", Size: -1}
if err := tw.WriteHeader(hdr); err == nil {
t.Fatalf("WriteHeader() = nil, want non-nil error")
}
})

t.Run("BeforeHeader", func { t |
t.Run("BeforeHeader", func { t ->
tw := NewWriter(new(bytes.Buffer))
if _, err := tw.Write([]byte("Kilts")); err != ErrWriteTooLong {
t.Fatalf("Write() = %v, want %v", err, ErrWriteTooLong)
}
})

t.Run("AfterClose", func { t |
t.Run("AfterClose", func { t ->
tw := NewWriter(new(bytes.Buffer))
hdr := &Header{Name: "small.txt"}
if err := tw.WriteHeader(hdr); err != nil {
@@ -883,7 +883,7 @@ func TestWriterErrors(t *testing.T) {
}
})

t.Run("PrematureFlush", func { t |
t.Run("PrematureFlush", func { t ->
tw := NewWriter(new(bytes.Buffer))
hdr := &Header{Name: "small.txt", Size: 5}
if err := tw.WriteHeader(hdr); err != nil {
@@ -894,7 +894,7 @@ func TestWriterErrors(t *testing.T) {
}
})

t.Run("PrematureClose", func { t |
t.Run("PrematureClose", func { t ->
tw := NewWriter(new(bytes.Buffer))
hdr := &Header{Name: "small.txt", Size: 5}
if err := tw.WriteHeader(hdr); err != nil {
@@ -905,7 +905,7 @@ func TestWriterErrors(t *testing.T) {
}
})

t.Run("Persistence", func { t |
t.Run("Persistence", func { t ->
tw := NewWriter(new(failOnceWriter))
if err := tw.WriteHeader(&Header{}); err != io.ErrShortWrite {
t.Fatalf("WriteHeader() = %v, want %v", err, io.ErrShortWrite)
2 changes: 1 addition & 1 deletion src/archive/zip/example_test.go
@@ -85,7 +85,7 @@ func ExampleWriter_RegisterCompressor() {
w := zip.NewWriter(buf)

// Register a custom Deflate compressor.
w.RegisterCompressor(zip.Deflate, func { out | return flate.NewWriter(out, flate.BestCompression) })
w.RegisterCompressor(zip.Deflate, func { out -> return flate.NewWriter(out, flate.BestCompression) })

// Proceed to add files to w.
}
2 changes: 1 addition & 1 deletion src/archive/zip/fuzz_test.go
@@ -28,7 +28,7 @@ func FuzzReader(f *testing.F) {
f.Add(b)
}

f.Fuzz(func { t, b |
f.Fuzz(func { t, b ->
r, err := NewReader(bytes.NewReader(b), int64(len(b)))
if err != nil {
return
6 changes: 3 additions & 3 deletions src/archive/zip/reader.go
@@ -862,7 +862,7 @@ func (r *Reader) initFileList() {
}
}

slices.SortFunc(r.fileList, func { a, b | return fileEntryCompare(a.name, b.name) })
slices.SortFunc(r.fileList, func { a, b -> return fileEntryCompare(a.name, b.name) })
})
}

@@ -941,15 +941,15 @@ func (r *Reader) openLookup(name string) *fileListEntry {

func (r *Reader) openReadDir(dir string) []fileListEntry {
files := r.fileList
i, _ := slices.BinarySearchFunc(files, dir, func { a, dir |
i, _ := slices.BinarySearchFunc(files, dir, func { a, dir ->
idir, _, _ := split(a.name)
if dir != idir {
return strings.Compare(idir, dir)
}
// find the first entry with dir
return +1
})
j, _ := slices.BinarySearchFunc(files, dir, func { a, dir |
j, _ := slices.BinarySearchFunc(files, dir, func { a, dir ->
jdir, _, _ := split(a.name)
if dir != jdir {
return strings.Compare(jdir, dir)
16 changes: 8 additions & 8 deletions src/archive/zip/reader_test.go
@@ -582,7 +582,7 @@ var tests = []ZipTest{

func TestReader(t *testing.T) {
for _, zt := range tests {
t.Run(zt.Name, func { t | readTestZip(t, zt) })
t.Run(zt.Name, func { t -> readTestZip(t, zt) })
}
}

@@ -820,13 +820,13 @@ func messWith(fileName string, corrupter func(b []byte)) (r io.ReaderAt, size in
}

func returnCorruptCRC32Zip() (r io.ReaderAt, size int64) {
return messWith("go-with-datadesc-sig.zip", func { b |
return messWith("go-with-datadesc-sig.zip", func { b ->
// Corrupt one of the CRC32s in the data descriptor:
b[0x2d]++ })
}

func returnCorruptNotStreamedZip() (r io.ReaderAt, size int64) {
return messWith("crc32-not-streamed.zip", func { b |
return messWith("crc32-not-streamed.zip", func { b ->
// Corrupt foo.txt's final crc32 byte, in both
// the file header and TOC. (0x7e -> 0x7f)
b[0x11]++
@@ -1210,7 +1210,7 @@ func TestFS(t *testing.T) {
},
} {
test := test
t.Run(test.file, func { t |
t.Run(test.file, func { t ->
t.Parallel()
z, err := OpenReader(test.file)
if err != nil {
@@ -1244,15 +1244,15 @@ func TestFSWalk(t *testing.T) {
},
} {
test := test
t.Run(test.file, func { t |
t.Run(test.file, func { t ->
t.Parallel()
z, err := OpenReader(test.file)
if err != nil {
t.Fatal(err)
}
var files []string
sawErr := false
err = fs.WalkDir(z, ".", func { path, d, err |
err = fs.WalkDir(z, ".", func { path, d, err ->
if err != nil {
if !test.wantErr {
t.Errorf("%s: %v", path, err)
@@ -1619,7 +1619,7 @@ func TestUnderSize(t *testing.T) {
}

for _, f := range z.File {
t.Run(f.Name, func { t |
t.Run(f.Name, func { t ->
rd, err := f.Open()
if err != nil {
t.Fatal(err)
@@ -1646,7 +1646,7 @@ func TestIssue54801(t *testing.T) {
// Make file a directory
f.Name += "/"

t.Run(f.Name, func { t |
t.Run(f.Name, func { t ->
t.Logf("CompressedSize64: %d, Flags: %#x", f.CompressedSize64, f.Flags)

rd, err := f.Open()
4 changes: 2 additions & 2 deletions src/archive/zip/register.go
@@ -107,8 +107,8 @@ var (
)

func init() {
compressors.Store(Store, Compressor(func { w | return &nopCloser{w}, nil }))
compressors.Store(Deflate, Compressor(func { w | return newFlateWriter(w), nil }))
compressors.Store(Store, Compressor(func { w -> return &nopCloser{w}, nil }))
compressors.Store(Deflate, Compressor(func { w -> return newFlateWriter(w), nil }))

decompressors.Store(Store, Decompressor(io.NopCloser))
decompressors.Store(Deflate, Decompressor(newFlateReader))
2 changes: 1 addition & 1 deletion src/archive/zip/writer.go
@@ -501,7 +501,7 @@ func (w *Writer) RegisterCompressor(method uint16, comp Compressor) {
// It walks the directory tree starting at the root of the filesystem
// adding each file to the zip using deflate while maintaining the directory structure.
func (w *Writer) AddFS(fsys fs.FS) error {
return fs.WalkDir(fsys, ".", func { name, d, err |
return fs.WalkDir(fsys, ".", func { name, d, err ->
if err != nil {
return err
}
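
Similarly for the zip side: the AddFS method in the hunk above adds each file using deflate, which pairs with the RegisterCompressor example earlier in this diff. A minimal sketch in standard Go, again with hypothetical file and directory names:

package main

import (
	"archive/zip"
	"compress/flate"
	"io"
	"log"
	"os"
)

func main() {
	out, err := os.Create("archive.zip") // hypothetical output path
	if err != nil {
		log.Fatal(err)
	}
	defer out.Close()

	w := zip.NewWriter(out)
	// Use best-compression Deflate, as in the ExampleWriter_RegisterCompressor hunk.
	w.RegisterCompressor(zip.Deflate, func(o io.Writer) (io.WriteCloser, error) {
		return flate.NewWriter(o, flate.BestCompression)
	})
	// AddFS walks the hypothetical "testdata" directory and deflates each
	// file into the zip while maintaining the directory structure.
	if err := w.AddFS(os.DirFS("testdata")); err != nil {
		log.Fatal(err)
	}
	if err := w.Close(); err != nil {
		log.Fatal(err)
	}
}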
2 changes: 1 addition & 1 deletion src/archive/zip/writer_test.go
@@ -596,7 +596,7 @@ func BenchmarkCompressedZipGarbage(b *testing.B) {
runOnce(&bytes.Buffer{})
b.ResetTimer()

b.RunParallel(func { pb |
b.RunParallel(func { pb ->
var buf bytes.Buffer
for pb.Next() {
runOnce(&buf)
20 changes: 10 additions & 10 deletions src/archive/zip/zip_test.go
@@ -215,7 +215,7 @@ func (r *rleBuffer) ReadAt(p []byte, off int64) (n int, err error) {
if len(p) == 0 {
return
}
skipParts, _ := slices.BinarySearchFunc(r.buf, off, func { rb, off | return cmp.Compare(rb.off+rb.n, off) })
skipParts, _ := slices.BinarySearchFunc(r.buf, off, func { rb, off -> return cmp.Compare(rb.off+rb.n, off) })
parts := r.buf[skipParts:]
if len(parts) > 0 {
skipBytes := off - parts[0].off
@@ -305,7 +305,7 @@ func TestZip64DirectoryOffset(t *testing.T) {
const filename = "huge.txt"
gen := func(wantOff uint64) func(*Writer) {
return func(w *Writer) {
w.testHookCloseSizeOffset = func { size, off |
w.testHookCloseSizeOffset = func { size, off ->
if off != wantOff {
t.Errorf("central directory offset = %d (%x); want %d", off, off, wantOff)
}
@@ -327,13 +327,13 @@
}
}
}
t.Run("uint32max-2_NoZip64", func { t |
t.Run("uint32max-2_NoZip64", func { t ->
t.Parallel()
if generatesZip64(t, gen(0xfffffffe)) {
t.Error("unexpected zip64")
}
})
t.Run("uint32max-1_Zip64", func { t |
t.Run("uint32max-1_Zip64", func { t ->
t.Parallel()
if !generatesZip64(t, gen(0xffffffff)) {
t.Error("expected zip64")
@@ -364,14 +364,14 @@ func TestZip64ManyRecords(t *testing.T) {
}
}
// 16k-1 records shouldn't make a zip64:
t.Run("uint16max-1_NoZip64", func { t |
t.Run("uint16max-1_NoZip64", func { t ->
t.Parallel()
if generatesZip64(t, gen(0xfffe)) {
t.Error("unexpected zip64")
}
})
// 16k records should make a zip64:
t.Run("uint16max_Zip64", func { t |
t.Run("uint16max_Zip64", func { t ->
t.Parallel()
if !generatesZip64(t, gen(0xffff)) {
t.Error("expected zip64")
@@ -503,7 +503,7 @@ func TestZip64LargeDirectory(t *testing.T) {
// of central directory.
gen := func(wantLen int64) func(*Writer) {
return func(w *Writer) {
w.testHookCloseSizeOffset = func { size, off |
w.testHookCloseSizeOffset = func { size, off ->
if size != uint64(wantLen) {
t.Errorf("Close central directory size = %d; want %d", size, wantLen)
}
@@ -534,13 +534,13 @@
}
}
}
t.Run("uint32max-1_NoZip64", func { t |
t.Run("uint32max-1_NoZip64", func { t ->
t.Parallel()
if generatesZip64(t, gen(uint32max-1)) {
t.Error("unexpected zip64")
}
})
t.Run("uint32max_HasZip64", func { t |
t.Run("uint32max_HasZip64", func { t ->
t.Parallel()
if !generatesZip64(t, gen(uint32max)) {
t.Error("expected zip64")
@@ -764,7 +764,7 @@ func BenchmarkZip64Test(b *testing.B) {

func BenchmarkZip64TestSizes(b *testing.B) {
for _, size := range []int64{1 << 12, 1 << 20, 1 << 26} {
b.Run(fmt.Sprint(size), func { b | b.RunParallel(func { pb | for pb.Next() {
b.Run(fmt.Sprint(size), func { b -> b.RunParallel(func { pb -> for pb.Next() {
testZip64(b, size)
} }) })
}
2 changes: 1 addition & 1 deletion src/bufio/bufio_test.go
@@ -1462,7 +1462,7 @@ func TestWriterReadFromWithBufferedData(t *testing.T) {

func TestReadZero(t *testing.T) {
for _, size := range []int{100, 2} {
t.Run(fmt.Sprintf("bufsize=%d", size), func { t |
t.Run(fmt.Sprintf("bufsize=%d", size), func { t ->
r := io.MultiReader(strings.NewReader("abc"), &emptyThenNonEmptyReader{r: strings.NewReader("def"), n: 1})
br := NewReaderSize(r, size)
want := func(s string, wantErr error) {
2 changes: 1 addition & 1 deletion src/bytes/buffer_test.go
@@ -721,7 +721,7 @@ func BenchmarkBufferFullSmallReads(b *testing.B) {
func BenchmarkBufferWriteBlock(b *testing.B) {
block := make([]byte, 1024)
for _, n := range []int{1 << 12, 1 << 16, 1 << 20} {
b.Run(fmt.Sprintf("N%d", n), func { b |
b.Run(fmt.Sprintf("N%d", n), func { b ->
b.ReportAllocs()
for i := 0; i < b.N; i++ {
var bb Buffer
2 changes: 1 addition & 1 deletion src/bytes/bytes.go
@@ -780,7 +780,7 @@ func Title(s []byte) []byte {
// the closure once per rune.
prev := ' '
return Map(
func { r |
func { r ->
if isSeparator(prev) {
prev = r
return unicode.ToTitle(r)