Aggregate.scala
// SPDX-License-Identifier: Apache-2.0
package chisel3
import chisel3.experimental.VecLiterals.AddVecLiteralConstructor
import chisel3.experimental.dataview.{isView, reify, reifyIdentityView, InvalidViewException}
import scala.collection.immutable.{SeqMap, VectorMap}
import scala.collection.mutable.{HashSet, LinkedHashMap}
import scala.language.experimental.macros
import chisel3.experimental.{BaseModule, BundleLiteralException, HasTypeAlias, OpaqueType, VecLiteralException}
import chisel3.experimental.{requireIsChiselType, requireIsHardware, SourceInfo, UnlocatableSourceInfo}
import chisel3.internal._
import chisel3.internal.binding._
import chisel3.internal.Builder.pushCommand
import chisel3.internal.firrtl.ir._
import chisel3.internal.sourceinfo.{SourceInfoTransform, VecTransform}
import chisel3.reflect.DataMirror
import _root_.firrtl.{ir => fir}
import java.lang.Math.{floor, log10, pow}
import scala.collection.mutable
class AliasedAggregateFieldException(message: String) extends chisel3.ChiselException(message)
/** An abstract class for data types that solely consist of (are an aggregate
* of) other Data objects.
*/
sealed abstract class Aggregate extends Data {
private def checkingLitOption(checkForDontCares: Boolean): Option[BigInt] = {
// Shift the accumulated value by our width and add in our component, masked by our width.
def shiftAdd(elt: Data, accumulator: Option[BigInt]): Option[BigInt] = {
(accumulator, elt.litOption) match {
case (Some(accumulator), Some(eltLit)) =>
val width = elt.width.get
val masked = ((BigInt(1) << width) - 1) & eltLit // also handles the negative case with two's complement
Some((accumulator << width) + masked)
case (Some(accumulator), None) if checkForDontCares =>
Builder.error(s"Called litValue on aggregate $this which contains DontCare")(UnlocatableSourceInfo)
None
case (None, _) => None
case (_, None) => None
}
}
topBindingOpt match {
// Don't accidentally invent a literal value for a view that is empty
case Some(_: AggregateViewBinding) if this.getElements.isEmpty =>
reifyIdentityView(this) match {
case Some((target: Aggregate, _)) => target.checkingLitOption(checkForDontCares)
// This can occur with empty Vecs or Bundles
case _ => None
}
case Some(_: BundleLitBinding | _: VecLitBinding | _: AggregateViewBinding) =>
// Records store elements in reverse order and higher indices are more significant in Vecs
this.getElements.foldRight(Option(BigInt(0)))(shiftAdd)
case _ => None
}
}
/** Return an Aggregate's literal value if it is a literal, None otherwise.
* If any element of the aggregate is not a literal (or DontCare), the result isn't a literal.
*
* @note [[DontCare]] is allowed and will be replaced with 0. Use [[litValue]] to disallow DontCare.
* @return an Aggregate's literal value if it is a literal, None otherwise.
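*
* @example A minimal sketch (assumes `import chisel3.experimental.VecLiterals._` is in scope for the Vec literal syntax):
* {{{
* val vlit = Vec(2, UInt(4.W)).Lit(0 -> 1.U, 1 -> 2.U)
* vlit.litOption // Some(33): element 1 occupies the high nibble, element 0 the low nibble
* }}}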
*/
override def litOption: Option[BigInt] = {
checkingLitOption(checkForDontCares = false)
}
/** Return an Aggregate's literal value if it is a literal, otherwise an exception is thrown.
* If any element of the aggregate is not a literal with a defined width, the result isn't a literal.
*
* @return an Aggregate's literal value if it is a literal, exception otherwise.
*/
override def litValue: BigInt = {
checkingLitOption(checkForDontCares = true).getOrElse(
throw new ChiselException(s"Cannot ask for litValue of $this as it is not a literal.")
)
}
/** Returns a Seq of the immediate contents of this Aggregate, in order.
*/
def getElements: Seq[Data]
/** Similar to [[getElements]] but allows for more optimized use */
private[chisel3] def elementsIterator: Iterator[Data]
private[chisel3] def width: Width = elementsIterator.map(_.width).foldLeft(0.W)(_ + _)
// Emits the FIRRTL `this <= that`, or `this is invalid` if that == DontCare
private[chisel3] def firrtlConnect(that: Data)(implicit sourceInfo: SourceInfo): Unit = {
// If the source is a DontCare, generate a DefInvalid for the sink, otherwise, issue a Connect.
if (that == DontCare) {
pushCommand(DefInvalid(sourceInfo, lref))
} else {
pushCommand(Connect(sourceInfo, lref, Node(that)))
}
}
// Due to prior lack of zero-width wire support, .asUInt for an empty Aggregate has returned 0.U (equivalent to 0.U(1.W))
// In the case where an empty Aggregate is a child of an outer Aggregate, however, it would flatten out the empty inner Aggregate
// This means we need the `first` argument so that we can preserve this behavior of Aggregates while still allowing subclasses
// to override .asUInt behavior
override private[chisel3] def _asUIntImpl(first: Boolean)(implicit sourceInfo: SourceInfo): UInt = {
checkingLitOption(checkForDontCares = false) match {
case Some(value) =>
// Using UInt.Lit instead of .U so we can use Width argument which may be Unknown
UInt.Lit(value, this.width)
case None =>
val elts = this.getElements.map(_._asUIntImpl(false))
if (elts.isEmpty && !first) 0.U(0.W) else SeqUtils.do_asUInt(elts)
}
}
private[chisel3] override def connectFromBits(
that: Bits
)(
implicit sourceInfo: SourceInfo
): Unit = {
var i = 0
val bits = if (that.isLit) that else WireDefault(UInt(this.width), that) // handles width padding
for (x <- flatten) {
val fieldWidth = x.getWidth
if (fieldWidth > 0) {
x.connectFromBits(bits(i + fieldWidth - 1, i))
i += fieldWidth
} else {
// There's a zero-width field in this bundle.
// Zero-width fields can't really be assigned to, but the frontend complains if there are uninitialized fields,
// so we assign it to DontCare. We can't use connectFromBits() on DontCare, so use := instead.
x := DontCare
}
}
}
}
trait VecFactory extends SourceInfoDoc {
/** Creates a new [[Vec]] with `n` entries of the specified data type.
*
* @note elements are NOT assigned by default and have no value
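*
* @example A minimal sketch (the wire and element type are illustrative):
* {{{
* val myVec = Wire(Vec(4, UInt(8.W))) // a 4-entry vector of 8-bit UInts, elements unassigned
* }}}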
*/
def apply[T <: Data](n: Int, gen: T)(implicit sourceInfo: SourceInfo): Vec[T] = {
requireIsChiselType(gen, "vec type")
new Vec(gen.cloneTypeFull, n)
}
/** Truncate an index to implement modulo-power-of-2 addressing. */
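// A behavioral sketch (not additional API): for n = 6, w = (6 - 1).bitLength = 3, so a known
// 5-bit index is truncated to idx(2, 0); for n <= 1 the result is simply 0.U; an index of
// unknown width is first padded via `idx | 0.U(w.W)` and then truncated to w bits.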
private[chisel3] def truncateIndex(
idx: UInt,
n: BigInt
)(
implicit sourceInfo: SourceInfo
): UInt = {
val w = (n - 1).bitLength
if (n <= 1) 0.U
else if (idx.width.known && idx.width.get <= w) idx
else if (idx.width.known) idx(w - 1, 0)
else (idx | 0.U(w.W))(w - 1, 0)
}
}
/** A vector (array) of [[Data]] elements. Provides hardware versions of various
* collection transformation functions found in software array implementations.
*
* Careful consideration should be given over the use of [[Vec]] vs
* [[scala.collection.immutable.Seq Seq]] or some other Scala collection. In general [[Vec]] only
* needs to be used when there is a need to express the hardware collection in a [[Reg]] or IO
* [[Bundle]] or when access to elements of the array is indexed via a hardware signal.
*
* Example of indexing into a [[Vec]] using a hardware address and where the [[Vec]] is defined in
* an IO [[Bundle]]
*
* {{{
* val io = IO(new Bundle {
* val in = Input(Vec(20, UInt(16.W)))
* val addr = Input(UInt(5.W))
* val out = Output(UInt(16.W))
* })
* io.out := io.in(io.addr)
* }}}
*
* @tparam T type of elements
*
* @note
* - when multiple conflicting assignments are performed on a Vec element, the last one takes effect (unlike Mem, where the result is undefined)
* - Vecs, unlike classes in Scala's collection library, are propagated intact to FIRRTL as a vector type, which may make debugging easier
*/
sealed class Vec[T <: Data] private[chisel3] (gen: => T, val length: Int) extends Aggregate with VecLike[T] {
override def toString: String = {
topBindingOpt match {
case Some(VecLitBinding(vecLitBinding)) =>
val contents = vecLitBinding.zipWithIndex.map {
case ((data, lit), index) =>
s"$index=$lit"
}.mkString(", ")
s"${sample_element.cloneType}[$length]($contents)"
case _ => stringAccessor(s"${sample_element.cloneType}[$length]")
}
}
/** Give this Vec a default, stable desired name using the supplied `Data`
* generator's `typeName`
*/
override def typeName = s"Vec${length}_${gen.typeName}"
override def containsAFlipped = sample_element.containsAFlipped
private[chisel3] override def bind(target: Binding, parentDirection: SpecifiedDirection): Unit = {
this.maybeAddToParentIds(target)
binding = target
val resolvedDirection = SpecifiedDirection.fromParent(parentDirection, specifiedDirection)
sample_element.bind(SampleElementBinding(this), resolvedDirection)
for (child <- elementsIterator) { // assume that all children are the same
child.bind(ChildBinding(this), resolvedDirection)
}
// Since all children are the same, we can just use the sample_element rather than all children
direction =
ActualDirection.fromChildren(Set(sample_element.direction), resolvedDirection).getOrElse(ActualDirection.Empty)
}
// Note: the constructor takes a gen() function instead of a Seq to enforce
// that all elements must be the same and because it makes FIRRTL generation
// simpler.
private lazy val self: Seq[T] = {
val _self = Vector.fill(length)(gen)
for ((elt, i) <- _self.zipWithIndex)
elt.setRef(this, i)
_self
}
/**
* sample_element 'tracks' all changes to the elements.
* For consistency, sample_element is always used for creating dynamically
* indexed ports and outputting the FIRRTL type.
*
* Needed specifically for the case when the Vec is length 0.
*/
private[chisel3] val sample_element: T = gen
// allElements currently includes sample_element
// This is somewhat weird although I think the best course of action here is
// to deprecate allElements in favor of dispatched functions to Data or
// a pattern matched recursive descent
private[chisel3] final override def allElements: Seq[Element] =
(sample_element +: self).flatMap(_.allElements)
/** The "bulk connect operator", assigning elements in this Vec from elements in a Seq.
*
* For chisel3._, uses the `chisel3.internal.BiConnect` algorithm; sub-elements of `that` may end up driving sub-elements of `this`
* - Complicated semantics, will likely be deprecated in the future
*
* For Chisel._, emits the FIRRTL.<- operator
* - Equivalent to `this :<>= that` but bundle field names and vector sizes do not have to match
*
* @note the length of this Vec and that Seq must match
* @param that the Seq to connect from
* @group connection
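*
* @example A minimal sketch, assuming `io.outs` is a `Vec(2, Decoupled(UInt(8.W)))` port and `chisel3.util._` is imported:
* {{{
* val queues = Seq.fill(2)(Module(new Queue(UInt(8.W), 4)))
* io.outs <> queues.map(_.io.deq)
* }}}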
*/
def <>(that: Seq[T])(implicit sourceInfo: SourceInfo): Unit = {
if (this.length != that.length)
Builder.error(
s"Vec (size ${this.length}) and Seq (size ${that.length}) being bulk connected have different lengths!"
)
for ((a, b) <- this.zip(that)) {
a <> b
}
}
/** The "bulk connect operator", assigning elements in this Vec from elements in a Vec.
*
* For chisel3._, uses the `chisel3.internal.BiConnect` algorithm; sub-elements of `that` may end up driving sub-elements of `this`
* - See docs/src/explanations/connection-operators.md for details
*
* For Chisel._, emits the FIRRTL.<- operator
* - Equivalent to `this :<>= that` without the restrictions that bundle field names and vector sizes must match
*
* @note This is necessary in [[Aggregate]], rather than relying on [[Data.<>]], due to supporting the Seq
* @note the length of this Vec and that Vec must match
* @param that the Vec to connect from
* @group connection
*/
def <>(that: Vec[T])(implicit sourceInfo: SourceInfo): Unit =
this.bulkConnect(that.asInstanceOf[Data])
/** "The strong connect operator", assigning elements in this Vec from elements in a Seq.
*
* For chisel3._, this operator is mono-directional; all sub-elements of `this` will be driven by sub-elements of `that`.
* - Equivalent to `this :#= that`
*
* For Chisel._, this operator connects bi-directionally by emitting the FIRRTL.<=
* - Equivalent to `this :<>= that`
*
* @note the length of this Vec must match the length of the input Seq
* @group connection
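*
* @example A minimal sketch (the wire is illustrative):
* {{{
* val data = Wire(Vec(3, UInt(8.W)))
* data := Seq(1.U, 2.U, 3.U) // element-wise strong connect
* }}}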
*/
def :=(that: Seq[T])(implicit sourceInfo: SourceInfo): Unit = {
require(
this.length == that.length,
s"Cannot assign to a Vec of length ${this.length} from a Seq of different length ${that.length}"
)
for ((a, b) <- this.zip(that))
a := b
}
/** "The strong connect operator", assigning elements in this Vec from elements in a Vec.
*
* For chisel3._, this operator is mono-directional; all sub-elements of `this` will be driven by sub-elements of `that`.
* - Equivalent to `this :#= that`
*
* For Chisel._, this operator connects bi-directionally by emitting the FIRRTL.<=
* - Equivalent to `this :<>= that`, with the additional restriction that the relative bundle field flips must match
*
* @note This is necessary in [[Aggregate]], rather than relying on [[Data.:=]], due to supporting the Seq
* @note the length of this Vec must match the length of the input Vec
* @group connection
*/
def :=(that: Vec[T])(implicit sourceInfo: SourceInfo): Unit = this.connect(that)
override def do_apply(p: UInt)(implicit sourceInfo: SourceInfo): T = {
requireIsHardware(this, "vec")
requireIsHardware(p, "vec index")
// Don't bother with complex dynamic indexing logic when the index is a literal and therefore static
// We also don't want to warn on literals that are "too small"
p.litOption match {
case Some(idx) if idx < length => return this.apply(idx.intValue)
case _ => // Fall through to control flow below
}
if (length == 0) {
Builder.warning(Warning(WarningID.ExtractFromVecSizeZero, s"Cannot extract from Vec of size 0."))
} else {
p.widthOption.foreach { pWidth =>
val correctWidth = BigInt(length - 1).bitLength
def mkMsg(msg: String): String =
s"Dynamic index with width $pWidth is too $msg for Vec of size $length (expected index width $correctWidth)."
if (pWidth > correctWidth) {
Builder.warning(Warning(WarningID.DynamicIndexTooWide, mkMsg("wide")))
} else if (pWidth < correctWidth) {
Builder.warning(Warning(WarningID.DynamicIndexTooNarrow, mkMsg("narrow")))
}
}
}
// Special handling for views
if (isView(this)) {
reifyIdentityView(this) match {
// Views complicate things a bit, but views that correspond exactly to an identical Vec can just forward the
// dynamic indexing to the target Vec
// In theory, we could still do this forwarding if the sample element were different by deriving a DataView
case Some((target: Vec[T @unchecked], _))
if this.length == target.length &&
this.sample_element.typeEquivalent(target.sample_element) =>
return target.apply(p)
case _ => throw InvalidViewException("Dynamic indexing of Views is not yet supported")
}
}
val port = gen
// Reconstruct the resolvedDirection (in Aggregate.bind), since it's not stored.
// It may not be exactly equal to that value, but the results are the same.
val reconstructedResolvedDirection = direction match {
case ActualDirection.Input => SpecifiedDirection.Input
case ActualDirection.Output => SpecifiedDirection.Output
case ActualDirection.Bidirectional(ActualDirection.Default) | ActualDirection.Unspecified =>
SpecifiedDirection.Unspecified
case ActualDirection.Bidirectional(ActualDirection.Flipped) => SpecifiedDirection.Flip
case ActualDirection.Empty => SpecifiedDirection.Unspecified
}
// TODO port technically isn't directly a child of this data structure, but the result of some
// muxes / demuxes. However, this does make access consistent with the top-level bindings.
// Perhaps there's a cleaner way of accomplishing this...
port.bind(ChildBinding(this), reconstructedResolvedDirection)
val i = Vec.truncateIndex(p, length)(UnlocatableSourceInfo)
port.setRef(this, i)
port
}
/** Creates a statically indexed read or write accessor into the array.
*/
def apply(idx: Int): T = self(idx)
override def cloneType: this.type = {
new Vec(gen.cloneTypeFull, length).asInstanceOf[this.type]
}
override def getElements: Seq[Data] = self
final override private[chisel3] def elementsIterator: Iterator[Data] = self.iterator
/** Default "pretty-print" implementation
* Analogous to printing a Seq
* Results in "Vec(elt0, elt1, ...)"
*/
def toPrintable: Printable = {
val elts =
if (length == 0) List.empty[Printable]
else self.flatMap(e => List(e.toPrintable, PString(", "))).dropRight(1)
PString("Vec(") + Printables(elts) + PString(")")
}
/** A reduce operation in a tree like structure instead of sequentially
* @example An adder tree
* {{{
* val sumOut = inputNums.reduceTree((a: T, b: T) => (a + b))
* }}}
*/
def reduceTree(redOp: (T, T) => T): T = macro VecTransform.reduceTreeDefault
/** A reduce operation in a tree like structure instead of sequentially
* @example A pipelined adder tree
* {{{
* val sumOut = inputNums.reduceTree(
* (a: T, b: T) => RegNext(a + b),
* (a: T) => RegNext(a)
* )
* }}}
*/
def reduceTree(redOp: (T, T) => T, layerOp: (T) => T): T = macro VecTransform.reduceTree
def do_reduceTree(
redOp: (T, T) => T,
layerOp: (T) => T = (x: T) => x
)(
implicit sourceInfo: SourceInfo
): T = {
require(!isEmpty, "Cannot apply reduction on a vec of size 0")
def recReduce[T](s: Seq[T], op: (T, T) => T, lop: (T) => T): T = {
val n = s.length
n match {
case 1 => lop(s(0))
case 2 => op(s(0), s(1))
case _ =>
val m = pow(2, floor(log10(n - 1) / log10(2))).toInt // number of nodes in next level, will be a power of 2
val p = 2 * m - n // number of nodes promoted
val l = s.take(p).map(lop)
val r = s
.drop(p)
.grouped(2)
.map {
case Seq(a, b) => op(a, b)
}
.toVector
recReduce(l ++ r, op, lop)
}
}
recReduce(this, redOp, layerOp)
}
/** Creates a Vec literal of this type with specified values. this must be a chisel type.
*
* @param elementInitializers literal values, specified as a pair of the Vec field to the literal value.
* The Vec field is specified as a function from an object of this type to the field.
* Fields that aren't initialized default to DontCare; assigning this literal to a wire will
* overwrite any existing value of those fields with DontCare.
* @return a Vec literal of this type with subelement values specified
*
* @example {{{
* Vec(2, UInt(8.W)).Lit(
*   0 -> 0x0A.U,
*   1 -> 0x0B.U
* )
* }}}
*/
private[chisel3] def _makeLit(
elementInitializers: (Int, T)*
)(
implicit sourceInfo: SourceInfo
): this.type = {
def checkLiteralConstruction(): Unit = {
val dupKeys = elementInitializers.map { x => x._1 }.groupBy(x => x).flatMap {
case (k, v) =>
if (v.length > 1) {
Some(k, v.length)
} else {
None
}
}
if (dupKeys.nonEmpty) {
throw new VecLiteralException(
s"VecLiteral: has duplicated indices ${dupKeys.map { case (k, n) => s"$k($n times)" }.mkString(",")}"
)
}
val outOfRangeIndices = elementInitializers.map(_._1).filter { case index => index < 0 || index >= length }
if (outOfRangeIndices.nonEmpty) {
throw new VecLiteralException(
s"VecLiteral: The following indices (${outOfRangeIndices.mkString(",")}) " +
s"are less than zero or greater than or equal to the Vec length"
)
}
// look for literals of this vec that are wider than the vec's type
val badLits = elementInitializers.flatMap {
case (index, lit) =>
(sample_element.width, lit.width) match {
case (KnownWidth(m), KnownWidth(n)) =>
if (m < n) Some(index -> lit) else None
case (KnownWidth(_), _) =>
None
case (UnknownWidth(), _) =>
None
case _ =>
Some(index -> lit)
}
case _ => None
}
if (badLits.nonEmpty) {
throw new VecLiteralException(
s"VecLiteral: Vec[$gen] has the following incorrectly typed or sized initializers: " +
badLits.map { case (a, b) => s"$a -> $b" }.mkString(",")
)
}
}
requireIsChiselType(this, "vec literal constructor model")
checkLiteralConstruction()
val clone = cloneType
val cloneFields = getRecursiveFields(clone, "(vec root)").toMap
// Create the Vec literal binding from litArgs of arguments
val vecLitLinkedMap = new mutable.LinkedHashMap[Data, LitArg]()
elementInitializers.sortBy { case (a, _) => a }.foreach {
case (fieldIndex, value) =>
val field = clone.apply(fieldIndex)
val fieldName = cloneFields.getOrElse(
field,
throw new VecLiteralException(
s"field $field (with value $value) is not a field," +
s" ensure the field is specified as a function returning a field on an object of class ${this.getClass}," +
s" eg '_.a' to select hypothetical bundle field 'a'"
)
)
val valueBinding = value.topBindingOpt match {
case Some(litBinding: LitBinding) => litBinding
case _ => throw new VecLiteralException(s"field $fieldIndex specified with non-literal value $value")
}
field match { // Get the litArg(s) for this field
case bitField: Bits =>
if (!field.typeEquivalent(value)) {
throw new VecLiteralException(
s"VecLit: Literal specified at index $fieldIndex ($value) does not match Vec type $sample_element"
)
}
if (value.getWidth > field.getWidth) {
throw new VecLiteralException(
s"VecLit: Literal specified at index $fieldIndex ($value) is too wide for Vec type $sample_element"
)
}
val litArg = valueBinding match {
case ElementLitBinding(litArg) => litArg
case BundleLitBinding(litMap) =>
litMap.getOrElse(
value,
throw new BundleLiteralException(s"Field $fieldName specified with unspecified value")
)
case VecLitBinding(litMap) =>
litMap.getOrElse(
value,
throw new VecLiteralException(s"Field $fieldIndex specified with unspecified value")
)
}
val adjustedLitArg = litArg.cloneWithWidth(sample_element.width)
vecLitLinkedMap(bitField) = adjustedLitArg
case recordField: Record =>
if (!(recordField.typeEquivalent(value))) {
throw new VecLiteralException(
s"field $fieldIndex $recordField specified with non-type-equivalent value $value"
)
}
// Copy the source BundleLitBinding with fields (keys) remapped to the clone
val remap = getMatchedFields(value, recordField).toMap
valueBinding.asInstanceOf[BundleLitBinding].litMap.map {
case (valueField, valueValue) =>
vecLitLinkedMap(remap(valueField)) = valueValue
}
case vecField: Vec[_] =>
if (!(vecField.typeEquivalent(value))) {
throw new VecLiteralException(
s"field $fieldIndex $vecField specified with non-type-equivalent value $value"
)
}
// Copy the source VecLitBinding with vecFields (keys) remapped to the clone
val remap = getMatchedFields(value, vecField).toMap
value.topBinding.asInstanceOf[VecLitBinding].litMap.map {
case (valueField, valueValue) =>
vecLitLinkedMap(remap(valueField)) = valueValue
}
case enumField: EnumType => {
if (!(enumField.typeEquivalent(value))) {
throw new VecLiteralException(
s"field $fieldIndex $enumField specified with non-type-equivalent enum value $value"
)
}
val litArg = valueBinding match {
case ElementLitBinding(litArg) => litArg
case _ =>
throw new VecLiteralException(s"field $fieldIndex $enumField could not be matched with $valueBinding")
}
vecLitLinkedMap(field) = litArg
}
case _ => throw new VecLiteralException(s"unsupported field $fieldIndex of type $field")
}
}
clone.bind(VecLitBinding(VectorMap(vecLitLinkedMap.toSeq: _*)))
clone
}
}
object VecInit extends SourceInfoDoc {
/** Gets the correct connect operation (directed hardware assign or bulk connect) for element in Vec.
*/
private def getConnectOpFromDirectionality[T <: Data](
proto: T
)(
implicit sourceInfo: SourceInfo
): (T, T) => Unit = proto.direction match {
case ActualDirection.Input | ActualDirection.Output | ActualDirection.Unspecified =>
// When internal wires are involved, driver / sink must be specified explicitly, otherwise
// the system is unable to infer which is driver / sink
(x, y) => x := y
case ActualDirection.Bidirectional(_) =>
// For bidirectional, must issue a bulk connect so subelements are resolved correctly.
// Bulk connecting two wires may not succeed because Chisel frontend does not infer
// directions.
(x, y) => x <> y
case ActualDirection.Empty =>
(x, y) => x <> y
}
/** Creates a new [[Vec]] composed of elements of the input Seq of [[Data]]
* nodes.
*
* @note input elements should be of the same type (this is checked at the
* FIRRTL level, but not at the Scala / Chisel level)
* @note the width of all output elements is the width of the largest input
* element
* @note output elements are connected from the input elements
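*
* @example A minimal sketch:
* {{{
* val v = VecInit(Seq(1.U, 2.U, 3.U)) // a Vec(3, UInt(2.W)) wire driven by the literals
* }}}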
*/
def apply[T <: Data](elts: Seq[T]): Vec[T] = macro VecTransform.apply_elts
/** @group SourceInfoTransformMacro */
def do_apply[T <: Data](elts: Seq[T])(implicit sourceInfo: SourceInfo): Vec[T] = {
// REVIEW TODO: this should be removed in favor of the apply(elts: T*)
// varargs constructor, which is more in line with the style of the Scala
// collection API. However, a deprecation phase isn't possible, since
// changing apply(elt0, elts*) to apply(elts*) causes a function collision
// with apply(Seq) after type erasure. Workarounds by either introducing a
// DummyImplicit or additional type parameter will break some code.
// Check that types are homogeneous. Width mismatch for Elements is safe.
require(elts.nonEmpty, "Vec hardware values are not allowed to be empty")
elts.foreach(requireIsHardware(_, "vec element"))
val vec = Wire(Vec(elts.length, cloneSupertype(elts, "Vec")))
val op = getConnectOpFromDirectionality(vec.head)
(vec.zip(elts)).foreach { x =>
op(x._1, x._2)
}
vec
}
/** Creates a new [[Vec]] composed of the input [[Data]] nodes.
*
* @note input elements should be of the same type (this is checked at the
* FIRRTL level, but not at the Scala / Chisel level)
* @note the width of all output elements is the width of the largest input
* element
* @note output elements are connected from the input elements
*/
def apply[T <: Data](elt0: T, elts: T*): Vec[T] = macro VecTransform.apply_elt0
/** @group SourceInfoTransformMacro */
def do_apply[T <: Data](elt0: T, elts: T*)(implicit sourceInfo: SourceInfo): Vec[T] =
apply(elt0 +: elts.toSeq)
/** Creates a new [[Vec]] of length `n` composed of the results of the given
* function applied over a range of integer values starting from 0.
*
* @param n number of elements in the vector (the function is applied from
* 0 to `n-1`)
* @param gen function that takes in an Int (the index) and returns a
* [[Data]] that becomes the output element
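*
* @example A minimal sketch:
* {{{
* val squares = VecInit.tabulate(4)(i => (i * i).U) // elements 0.U, 1.U, 4.U, 9.U
* }}}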
*/
def tabulate[T <: Data](n: Int)(gen: (Int) => T): Vec[T] = macro VecTransform.tabulate
/** @group SourceInfoTransformMacro */
def do_tabulate[T <: Data](
n: Int
)(gen: (Int) => T
)(
implicit sourceInfo: SourceInfo
): Vec[T] =
apply((0 until n).map(i => gen(i)))
/** Creates a new 2D [[Vec]] of length `n by m` composed of the results of the given
* function applied over a range of integer values starting from 0.
*
* @param n number of 1D vectors inside outer vector
* @param m number of elements in each 1D vector
* @param gen function that takes in an Int (the index) and returns a
* [[Data]] that becomes the output element
*/
def tabulate[T <: Data](n: Int, m: Int)(gen: (Int, Int) => T): Vec[Vec[T]] = macro VecTransform.tabulate2D
/** @group SourceInfoTransformMacro */
def do_tabulate[T <: Data](
n: Int,
m: Int
)(gen: (Int, Int) => T
)(
implicit sourceInfo: SourceInfo
): Vec[Vec[T]] = {
// TODO make this lazy (requires LazyList and cross compilation, beyond the scope of this PR)
val elts = Seq.tabulate(n, m)(gen)
val flatElts = elts.flatten
require(flatElts.nonEmpty, "Vec hardware values are not allowed to be empty")
flatElts.foreach(requireIsHardware(_, "vec element"))
val tpe = cloneSupertype(flatElts, "Vec.tabulate")
val myVec = Wire(Vec(n, Vec(m, tpe)))
val op = getConnectOpFromDirectionality(myVec.head.head)
for {
(xs1D, ys1D) <- myVec.zip(elts)
(x, y) <- xs1D.zip(ys1D)
} {
op(x, y)
}
myVec
}
/** Creates a new 3D [[Vec]] of length `n by m by p` composed of the results of the given
* function applied over a range of integer values starting from 0.
*
* @param n number of 2D vectors inside outer vector
* @param m number of 1D vectors in each 2D vector
* @param p number of elements in each 1D vector
* @param gen function that takes in an Int (the index) and returns a
* [[Data]] that becomes the output element
*/
def tabulate[T <: Data](n: Int, m: Int, p: Int)(gen: (Int, Int, Int) => T): Vec[Vec[Vec[T]]] =
macro VecTransform.tabulate3D
/** @group SourceInfoTransformMacro */
def do_tabulate[T <: Data](
n: Int,
m: Int,
p: Int
)(gen: (Int, Int, Int) => T
)(
implicit sourceInfo: SourceInfo
): Vec[Vec[Vec[T]]] = {
// TODO make this lazy (requires LazyList and cross compilation, beyond the scope of this PR)
val elts = Seq.tabulate(n, m, p)(gen)
val flatElts = elts.flatten.flatten
require(flatElts.nonEmpty, "Vec hardware values are not allowed to be empty")
flatElts.foreach(requireIsHardware(_, "vec element"))
val tpe = cloneSupertype(flatElts, "Vec.tabulate")
val myVec = Wire(Vec(n, Vec(m, Vec(p, tpe))))
val op = getConnectOpFromDirectionality(myVec.head.head.head)
for {
(xs2D, ys2D) <- myVec.zip(elts)
(xs1D, ys1D) <- xs2D.zip(ys2D)
(x, y) <- xs1D.zip(ys1D)
} {
op(x, y)
}
myVec
}
/** Creates a new [[Vec]] of length `n` composed of the result of the given
* function applied to an element of data type T.
*
* @param n number of elements in the vector
* @param gen function that takes in an element T and returns an output
* element of the same type
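*
* @example A minimal sketch:
* {{{
* val zeros = VecInit.fill(4)(0.U(8.W)) // four copies of an 8-bit zero
* }}}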
*/
def fill[T <: Data](n: Int)(gen: => T): Vec[T] = macro VecTransform.fill
/** @group SourceInfoTransformMacro */
def do_fill[T <: Data](n: Int)(gen: => T)(implicit sourceInfo: SourceInfo): Vec[T] =
if (n == 0) { Wire(Vec(0, gen.cloneTypeFull)) }
else { apply(Seq.fill(n)(gen)) }
/** Creates a new 2D [[Vec]] of length `n by m` composed of the result of the given
* function applied to an element of data type T.
*
* @param n number of inner vectors (rows) in the outer vector
* @param m number of elements in each inner vector (column)
* @param gen function that takes in an element T and returns an output
* element of the same type
*/
def fill[T <: Data](n: Int, m: Int)(gen: => T): Vec[Vec[T]] = macro VecTransform.fill2D
/** @group SourceInfoTransformMacro */
def do_fill[T <: Data](
n: Int,
m: Int
)(gen: => T
)(
implicit sourceInfo: SourceInfo
): Vec[Vec[T]] = {
do_tabulate(n, m)((_, _) => gen)
}
/** Creates a new 3D [[Vec]] of length `n by m by p` composed of the result of the given
* function applied to an element of data type T.
*
* @param n number of 2D vectors inside outer vector
* @param m number of 1D vectors in each 2D vector
* @param p number of elements in each 1D vector
* @param gen function that takes in an element T and returns an output
* element of the same type
*/
def fill[T <: Data](n: Int, m: Int, p: Int)(gen: => T): Vec[Vec[Vec[T]]] = macro VecTransform.fill3D
/** @group SourceInfoTransformMacro */
def do_fill[T <: Data](
n: Int,
m: Int,
p: Int
)(gen: => T
)(
implicit sourceInfo: SourceInfo
): Vec[Vec[Vec[T]]] = {
do_tabulate(n, m, p)((_, _, _) => gen)
}
/** Creates a new [[Vec]] of length `len`, where the first element is `start` and each
* subsequent element is the result of applying `f` to the previous element.
*
* @param start First element in the Vec
* @param len Length of the Vec (number of elements)
* @param f Function that takes the element at the previous index and returns the
* element at the next index
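*
* @example A minimal sketch:
* {{{
* val powersOfTwo = VecInit.iterate(1.U(8.W), 4)(_ << 1) // elements 1, 2, 4, 8
* }}}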
*/
def iterate[T <: Data](start: T, len: Int)(f: (T) => T): Vec[T] = macro VecTransform.iterate
/** @group SourceInfoTransformMacro */
def do_iterate[T <: Data](
start: T,
len: Int
)(f: (T) => T
)(
implicit sourceInfo: SourceInfo
): Vec[T] =
apply(Seq.iterate(start, len)(f))
}
/** A trait for [[Vec]]s containing common hardware generators for collection
* operations.
*/
trait VecLike[T <: Data] extends IndexedSeq[T] with HasId with SourceInfoDoc {
/** Creates a dynamically indexed read or write accessor into the array.
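*
* @example A minimal sketch, assuming `table` is a hardware `Vec` and `io.sel` is a `UInt` port:
* {{{
* io.dataOut := table(io.sel)
* }}}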
*/
def apply(p: UInt): T = macro SourceInfoTransform.pArg
/** @group SourceInfoTransformMacro */
def do_apply(p: UInt)(implicit sourceInfo: SourceInfo): T
// IndexedSeq has its own hashCode/equals that we must not use
override def hashCode: Int = super[HasId].hashCode
override def equals(that: Any): Boolean = super[HasId].equals(that)
/** Outputs true if p outputs true for every element.
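*
* @example A minimal sketch, assuming `data` is a `Vec` of `UInt`:
* {{{
* val allZero = data.forall(_ === 0.U)
* }}}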
*/
def forall(p: T => Bool): Bool = macro SourceInfoTransform.pArg
/** @group SourceInfoTransformMacro */
def do_forall(p: T => Bool)(implicit sourceInfo: SourceInfo): Bool =
(this.map(p)).fold(true.B)(_ && _)
/** Outputs true if p outputs true for at least one element.
*/
def exists(p: T => Bool): Bool = macro SourceInfoTransform.pArg
/** @group SourceInfoTransformMacro */
def do_exists(p: T => Bool)(implicit sourceInfo: SourceInfo): Bool =
(this.map(p)).fold(false.B)(_ || _)
/** Outputs true if the vector contains at least one element equal to x (using
* the === operator).
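*
* @example A minimal sketch, assuming `data` is a `Vec` of `UInt`:
* {{{
* val hit = data.contains(3.U)
* }}}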
*/
def contains(x: T)(implicit ev: T <:< UInt): Bool = macro VecTransform.contains
/** @group SourceInfoTransformMacro */
def do_contains(x: T)(implicit sourceInfo: SourceInfo, ev: T <:< UInt): Bool =
this.exists(_ === x)
/** Outputs the number of elements for which p is true.
*/
def count(p: T => Bool): UInt = macro SourceInfoTransform.pArg
/** @group SourceInfoTransformMacro */
def do_count(p: T => Bool)(implicit sourceInfo: SourceInfo): UInt =
SeqUtils.count(this.map(p))
/** Helper function that appends an index (literal value) to each element,
* useful for hardware generators which output an index.
*/
private def indexWhereHelper(p: T => Bool) = this.map(p).zip((0 until length).map(i => i.asUInt))
/** Outputs the index of the first element for which p outputs true.
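*
* @example A minimal sketch, assuming `data` is a `Vec` of `UInt`:
* {{{
* val firstNonZero = data.indexWhere(_ =/= 0.U)
* }}}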
*/
def indexWhere(p: T => Bool): UInt = macro SourceInfoTransform.pArg
/** @group SourceInfoTransformMacro */
def do_indexWhere(p: T => Bool)(implicit sourceInfo: SourceInfo): UInt =
SeqUtils.priorityMux(indexWhereHelper(p))
/** Outputs the index of the last element for which p outputs true.
*/
def lastIndexWhere(p: T => Bool): UInt = macro SourceInfoTransform.pArg
/** @group SourceInfoTransformMacro */
def do_lastIndexWhere(p: T => Bool)(implicit sourceInfo: SourceInfo): UInt =
SeqUtils.priorityMux(indexWhereHelper(p).reverse)
/** Outputs the index of the element for which p outputs true, assuming that
* there is exactly one such element.
*
* The implementation may be more efficient than a priority mux, but
* incorrect results are possible if there is not exactly one true element.
*
* @note the assumption that there is only one element for which p outputs
* true is NOT checked (useful in cases where the condition doesn't always
* hold, but the results are not used in those cases)
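*
* @example A minimal sketch, assuming `oneHot` is a `Vec` of `Bool` with exactly one element set:
* {{{
* val sel = oneHot.onlyIndexWhere(b => b)
* }}}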
*/
def onlyIndexWhere(p: T => Bool): UInt = macro SourceInfoTransform.pArg
/** @group SourceInfoTransformMacro */
def do_onlyIndexWhere(p: T => Bool)(implicit sourceInfo: SourceInfo): UInt =
SeqUtils.oneHotMux(indexWhereHelper(p))
}
/** Base class for Aggregates based on key values pairs of String and Data
*
* Record should only be extended by libraries and fairly sophisticated generators.
* RTL writers should use [[Bundle]]. See [[Record#elements]] for an example.
*/
abstract class Record extends Aggregate {
/** The list of parameter accessors used in the constructor of this [[chisel3.Record]].
*
* @note This is automatically overridden via the compiler plugin for user-defined bundles that mix-in [[chisel3.experimental.HasAutoTypename]],
* and is meant for internal Chisel use only. Cannot be manually overridden by users, or else an error will be thrown.
* @note This lives in Record rather than the [[chisel3.experimental.HasAutoTypename]] trait, due to compiler implementation details
* preventing us from overriding a definition within a trait via the compiler plugin
*/
protected def _typeNameConParams: Iterable[Any] = Vector.empty
private[chisel3] def _isOpaqueType: Boolean = this match {
case maybe: OpaqueType => maybe.opaqueType
case _ => false
}
private def checkClone(clone: Record): Unit = {
for ((name, field) <- _elements) {
if (clone._elements(name) eq field) {
throw new AutoClonetypeException(
s"The bundle plugin was unable to clone $clone that has field '$name' aliased with base $this." +
"This likely happened because you tried nesting Data arguments inside of other data structures." +
" Try wrapping the field(s) in Input(...), Output(...), or Flipped(...) if appropriate." +
" As a last resort, you can call chisel3.reflect.DataMirror.internal.chiselTypeClone on any nested Data arguments." +
" See the cookbook entry 'How do I deal with the \"unable to clone\" error?' for more details."
)
}
}