|
| 1 | +// Copyright 2021 the u-root Authors. All rights reserved |
| 2 | +// Use of this source code is governed by a BSD-style |
| 3 | +// license that can be found in the LICENSE file. |
| 4 | + |
| 5 | +package uio |
| 6 | + |
import (
	"bytes"
	"errors"
	"io"
	"math/rand"
	"strings"
	"testing"
	"time"

	"github.com/pierrec/lz4/v4"
)
| 17 | + |
| 18 | +const choices = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" |
| 19 | + |
| 20 | +func TestArchiveReaderRegular(t *testing.T) { |
| 21 | + dataStr := strings.Repeat("This is an important data!@#$%^^&&*&**(()())", 1000) |
| 22 | + |
| 23 | + ar, err := NewArchiveReader(bytes.NewReader([]byte(dataStr))) |
| 24 | + if err != nil { |
| 25 | + t.Fatalf("newArchiveReader(bytes.NewReader(%v)) returned error: %v", []byte(dataStr), err) |
| 26 | + } |
| 27 | + |
| 28 | + buf := new(strings.Builder) |
| 29 | + if _, err := io.Copy(buf, ar); err != nil { |
| 30 | + t.Errorf("io.Copy(%v, %v) returned error: %v, want nil.", buf, ar, err) |
| 31 | + } |
| 32 | + if buf.String() != dataStr { |
| 33 | + t.Errorf("got %s, want %s", buf.String(), dataStr) |
| 34 | + } |
| 35 | +} |
| 36 | + |
| 37 | +func TestArchiveReaderPreReadShort(t *testing.T) { |
| 38 | + dataStr := "short data" |
| 39 | + ar, err := NewArchiveReader(bytes.NewReader([]byte(dataStr))) |
| 40 | + if err != nil { |
| 41 | + t.Errorf("newArchiveReader(bytes.NewReader([]byte(%s))) returned err: %v, want nil", dataStr, err) |
| 42 | + } |
| 43 | + got, err := io.ReadAll(ar) |
| 44 | + if err != nil { |
| 45 | + t.Errorf("got error reading archive reader: %v, want nil", err) |
| 46 | + } |
| 47 | + if string(got) != dataStr { |
| 48 | + t.Errorf("got %s, want %s", string(got), dataStr) |
| 49 | + } |
| 50 | + // Pre-read nothing. |
| 51 | + dataStr = "" |
| 52 | + ar, err = NewArchiveReader(bytes.NewReader([]byte(dataStr))) |
| 53 | + if err != ErrPreReadError { |
| 54 | + t.Errorf("newArchiveReader(bytes.NewReader([]byte(%s))) returned err: %v, want %v", dataStr, err, ErrPreReadError) |
| 55 | + } |
| 56 | + got, err = io.ReadAll(ar) |
| 57 | + if err != nil { |
| 58 | + t.Errorf("got error reading archive reader: %v, want nil", err) |
| 59 | + } |
| 60 | + if string(got) != dataStr { |
| 61 | + t.Errorf("got %s, want %s", string(got), dataStr) |
| 62 | + } |
| 63 | +} |
| 64 | + |
| 65 | +// randomString generates random string of fixed length in a fast and simple way. |
| 66 | +func randomString(l int) string { |
| 67 | + rand.Seed(time.Now().UnixNano()) |
| 68 | + r := make([]byte, l) |
| 69 | + for i := 0; i < l; i++ { |
| 70 | + r[i] = byte(choices[rand.Intn(len(choices))]) |
| 71 | + } |
| 72 | + return string(r) |
| 73 | +} |
| 74 | + |
| 75 | +func checkArchiveReaderLZ4(t *testing.T, tt archiveReaderLZ4Case) { |
| 76 | + t.Helper() |
| 77 | + |
| 78 | + srcR := bytes.NewReader([]byte(tt.dataStr)) |
| 79 | + |
| 80 | + srcBuf := new(bytes.Buffer) |
| 81 | + lz4w := tt.setup(srcBuf) |
| 82 | + |
| 83 | + n, err := io.Copy(lz4w, srcR) |
| 84 | + if err != nil { |
| 85 | + t.Fatalf("io.Copy(%v, %v) returned error: %v, want nil", lz4w, srcR, err) |
| 86 | + } |
| 87 | + if n != int64(len([]byte(tt.dataStr))) { |
| 88 | + t.Fatalf("got %d bytes compressed, want %d", n, len([]byte(tt.dataStr))) |
| 89 | + } |
| 90 | + if err = lz4w.Close(); err != nil { |
| 91 | + t.Fatalf("Failed to close lz4 writer: %v", err) |
| 92 | + } |
| 93 | + |
| 94 | + // Test ArchiveReader reading it. |
| 95 | + ar, err := NewArchiveReader(bytes.NewReader(srcBuf.Bytes())) |
| 96 | + if err != nil { |
| 97 | + t.Fatalf("newArchiveReader(bytes.NewReader(%v)) returned error: %v", srcBuf.Bytes(), err) |
| 98 | + } |
| 99 | + buf := new(strings.Builder) |
| 100 | + if _, err := io.Copy(buf, ar); err != nil { |
| 101 | + t.Errorf("io.Copy(%v, %v) returned error: %v, want nil.", buf, ar, err) |
| 102 | + } |
| 103 | + if buf.String() != tt.dataStr { |
| 104 | + t.Errorf("got %s, want %s", buf.String(), tt.dataStr) |
| 105 | + } |
| 106 | +} |
| 107 | + |
// archiveReaderLZ4Case describes one lz4 round-trip test case.
type archiveReaderLZ4Case struct {
	// name is the subtest name passed to t.Run.
	name string
	// setup wraps w in an lz4.Writer configured for the case
	// (frame vs. legacy format, compression level).
	setup func(w io.Writer) *lz4.Writer
	// dataStr is the payload compressed and then read back.
	dataStr string
}
| 113 | + |
| 114 | +func TestArchiveReaderLZ4(t *testing.T) { |
| 115 | + for _, tt := range []archiveReaderLZ4Case{ |
| 116 | + { |
| 117 | + name: "non-legacy regular", |
| 118 | + setup: func(w io.Writer) *lz4.Writer { |
| 119 | + return lz4.NewWriter(w) |
| 120 | + }, |
| 121 | + dataStr: randomString(1024), |
| 122 | + }, |
| 123 | + { |
| 124 | + name: "non-legacy larger data", |
| 125 | + setup: func(w io.Writer) *lz4.Writer { |
| 126 | + return lz4.NewWriter(w) |
| 127 | + }, |
| 128 | + dataStr: randomString(5 * 1024), |
| 129 | + }, |
| 130 | + { |
| 131 | + name: "non-legacy short data", // Likley not realistic for most cases in the real world. |
| 132 | + setup: func(w io.Writer) *lz4.Writer { |
| 133 | + return lz4.NewWriter(w) |
| 134 | + }, |
| 135 | + dataStr: randomString(100), // Smaller than pre-read size, 1024 bytes. |
| 136 | + }, |
| 137 | + { |
| 138 | + name: "legacy regular", |
| 139 | + setup: func(w io.Writer) *lz4.Writer { |
| 140 | + lz4w := lz4.NewWriter(w) |
| 141 | + lz4w.Apply(lz4.LegacyOption(true)) |
| 142 | + return lz4w |
| 143 | + }, |
| 144 | + dataStr: randomString(1024), |
| 145 | + }, |
| 146 | + { |
| 147 | + name: "legacy larger data", |
| 148 | + setup: func(w io.Writer) *lz4.Writer { |
| 149 | + lz4w := lz4.NewWriter(w) |
| 150 | + lz4w.Apply(lz4.LegacyOption(true)) |
| 151 | + return lz4w |
| 152 | + }, |
| 153 | + dataStr: randomString(5 * 1024), |
| 154 | + }, |
| 155 | + { |
| 156 | + name: "legacy small data", |
| 157 | + setup: func(w io.Writer) *lz4.Writer { |
| 158 | + lz4w := lz4.NewWriter(w) |
| 159 | + lz4w.Apply(lz4.LegacyOption(true)) |
| 160 | + return lz4w |
| 161 | + }, |
| 162 | + dataStr: randomString(100), // Smaller than pre-read size, 1024 bytes.. |
| 163 | + }, |
| 164 | + { |
| 165 | + name: "legacy small data", |
| 166 | + setup: func(w io.Writer) *lz4.Writer { |
| 167 | + lz4w := lz4.NewWriter(w) |
| 168 | + lz4w.Apply(lz4.LegacyOption(true)) |
| 169 | + return lz4w |
| 170 | + }, |
| 171 | + dataStr: randomString(100), // Smaller than pre-read size, 1024 bytes.. |
| 172 | + }, |
| 173 | + { |
| 174 | + name: "regular larger data with fast compression", |
| 175 | + setup: func(w io.Writer) *lz4.Writer { |
| 176 | + lz4w := lz4.NewWriter(w) |
| 177 | + lz4w.Apply(lz4.CompressionLevelOption(lz4.Fast)) |
| 178 | + return lz4w |
| 179 | + }, |
| 180 | + dataStr: randomString(5 * 1024), |
| 181 | + }, |
| 182 | + { |
| 183 | + name: "legacy larger data with fast compression", |
| 184 | + setup: func(w io.Writer) *lz4.Writer { |
| 185 | + lz4w := lz4.NewWriter(w) |
| 186 | + lz4w.Apply(lz4.LegacyOption(true)) |
| 187 | + lz4w.Apply(lz4.CompressionLevelOption(lz4.Fast)) |
| 188 | + return lz4w |
| 189 | + }, |
| 190 | + dataStr: randomString(5 * 1024), |
| 191 | + }, |
| 192 | + } { |
| 193 | + t.Run(tt.name, func(t *testing.T) { |
| 194 | + checkArchiveReaderLZ4(t, tt) |
| 195 | + }) |
| 196 | + } |
| 197 | +} |
| 198 | + |
| 199 | +func TestArchiveReaderLZ4SlowCompressed(t *testing.T) { |
| 200 | + for _, tt := range []archiveReaderLZ4Case{ |
| 201 | + { |
| 202 | + name: "regular larger data with medium compression", |
| 203 | + setup: func(w io.Writer) *lz4.Writer { |
| 204 | + lz4w := lz4.NewWriter(w) |
| 205 | + lz4w.Apply(lz4.CompressionLevelOption(lz4.Level5)) |
| 206 | + return lz4w |
| 207 | + }, |
| 208 | + dataStr: randomString(5 * 1024), |
| 209 | + }, |
| 210 | + { |
| 211 | + name: "regular larger data with slow compression", |
| 212 | + setup: func(w io.Writer) *lz4.Writer { |
| 213 | + lz4w := lz4.NewWriter(w) |
| 214 | + lz4w.Apply(lz4.CompressionLevelOption(lz4.Level9)) |
| 215 | + return lz4w |
| 216 | + }, |
| 217 | + dataStr: randomString(5 * 1024), |
| 218 | + }, |
| 219 | + { |
| 220 | + name: "legacy larger data with medium compression", |
| 221 | + setup: func(w io.Writer) *lz4.Writer { |
| 222 | + lz4w := lz4.NewWriter(w) |
| 223 | + lz4w.Apply(lz4.LegacyOption(true)) |
| 224 | + lz4w.Apply(lz4.CompressionLevelOption(lz4.Level5)) |
| 225 | + return lz4w |
| 226 | + }, |
| 227 | + dataStr: randomString(5 * 1024), |
| 228 | + }, |
| 229 | + { |
| 230 | + name: "legacy larger data with slow compression", |
| 231 | + setup: func(w io.Writer) *lz4.Writer { |
| 232 | + lz4w := lz4.NewWriter(w) |
| 233 | + lz4w.Apply(lz4.LegacyOption(true)) |
| 234 | + lz4w.Apply(lz4.CompressionLevelOption(lz4.Level9)) |
| 235 | + return lz4w |
| 236 | + }, |
| 237 | + dataStr: randomString(5 * 1024), |
| 238 | + }, |
| 239 | + } { |
| 240 | + t.Run(tt.name, func(t *testing.T) { |
| 241 | + checkArchiveReaderLZ4(t, tt) |
| 242 | + }) |
| 243 | + } |
| 244 | +} |