feat: Support FrIndexArray and FrMapArray #258

Merged 3 commits on Jul 25, 2024
13 changes: 10 additions & 3 deletions pkg/cmd/trace.go
@@ -192,11 +192,18 @@ func rowSummariser(col trace.Column) string {
 }

 func widthSummariser(col trace.Column) string {
-	return fmt.Sprintf("%d bits", col.Data().ByteWidth()*8)
+	return fmt.Sprintf("%d bits", col.Data().BitWidth())
 }

 func bytesSummariser(col trace.Column) string {
-	return fmt.Sprintf("%d bytes", col.Data().Len()*col.Data().ByteWidth())
+	bitwidth := col.Data().BitWidth()
+	byteWidth := bitwidth / 8
+	// Determine proper bytewidth
+	if bitwidth%8 != 0 {
+		byteWidth++
+	}
+
+	return fmt.Sprintf("%d bytes", col.Data().Len()*byteWidth)
 }

 func uniqueSummariser(col trace.Column) string {
@@ -234,7 +241,7 @@ func trWidthSummariser(lowWidth uint, highWidth uint) traceSummariser {
 		summary: func(tr trace.Trace) string {
 			count := 0
 			for i := uint(0); i < tr.Columns().Len(); i++ {
-				ithWidth := tr.Columns().Get(i).Data().ByteWidth() * 8
+				ithWidth := tr.Columns().Get(i).Data().BitWidth()
 				if ithWidth >= lowWidth && ithWidth <= highWidth {
 					count++
 				}
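As a side note, bytesSummariser above (and WriteBytes in pkg/trace/lt/writer.go at the end of this diff) round the reported bit width up to whole bytes. A minimal standalone sketch of that conversion, using a hypothetical byteWidthOf helper name since the PR itself inlines the calculation at each call site:

package main

import "fmt"

// byteWidthOf returns how many whole bytes are needed to hold a value of
// the given bit width, rounding up when the width is not a multiple of 8.
func byteWidthOf(bitWidth uint) uint {
	byteWidth := bitWidth / 8
	if bitWidth%8 != 0 {
		byteWidth++
	}
	return byteWidth
}

func main() {
	// A 1-bit column still occupies one byte per element, while a
	// 256-bit field element occupies 32 bytes.
	fmt.Println(byteWidthOf(1), byteWidthOf(8), byteWidthOf(256)) // 1 1 32
}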
8 changes: 4 additions & 4 deletions pkg/hir/lower.go
@@ -192,7 +192,7 @@ func extractCondition(e Expr, schema *mir.Schema) mir.Expr {
 	} else if p, ok := e.(*Exp); ok {
 		return extractCondition(p.Arg, schema)
 	} else if p, ok := e.(*IfZero); ok {
-		return lowerIfZeroCondition(p, schema)
+		return extractIfZeroCondition(p, schema)
 	} else if p, ok := e.(*Sub); ok {
 		return extractConditions(p.Args, schema)
 	}
@@ -209,9 +209,9 @@ func extractConditions(es []Expr, schema *mir.Schema) mir.Expr {
 	return r
 }

-// Lowering conditional expressions is slightly more complex than others, so it
-// gets a case of its own.
-func lowerIfZeroCondition(e *IfZero, schema *mir.Schema) mir.Expr {
+// Extracting from conditional expressions is slightly more complex than others,
+// so it gets a case of its own.
+func extractIfZeroCondition(e *IfZero, schema *mir.Schema) mir.Expr {
 	var bc mir.Expr
 	// Lower condition
 	cc := extractCondition(e.Condition, schema)
2 changes: 1 addition & 1 deletion pkg/schema/assignment/byte_decomposition.go
@@ -73,7 +73,7 @@ func (p *ByteDecomposition) ExpandTrace(tr trace.Trace) error {
 	// Initialise columns
 	for i := 0; i < n; i++ {
 		// Construct a byte column for ith byte
-		cols[i] = util.NewFrArray(source.Height(), 1)
+		cols[i] = util.NewFrArray(source.Height(), 8)
 	}
 	// Decompose each row of each column
 	for i := uint(0); i < source.Height(); i = i + 1 {
2 changes: 1 addition & 1 deletion pkg/schema/assignment/computed_column.go
@@ -87,7 +87,7 @@ func (p *ComputedColumn[E]) ExpandTrace(tr trace.Trace) error {
 	// Determine multiplied height
 	height := tr.Modules().Get(p.target.Context().Module()).Height() * multiplier
 	// Make space for computed data
-	data := util.NewFrArray(height, 32)
+	data := util.NewFrArray(height, 256)
 	// Expand the trace
 	for i := uint(0); i < data.Len(); i++ {
 		val := p.expr.EvalAt(int(i), tr)
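Note: the second argument to util.NewFrArray now appears to be a bit width rather than a byte width, so the former 32-byte allocation for computed columns becomes 256 bits (32 × 8 = 256), presumably enough to hold a full field element. The same 32 → 256 substitution appears in the JSON reader below.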
6 changes: 3 additions & 3 deletions pkg/schema/assignment/interleave.go
@@ -72,12 +72,12 @@ func (p *Interleaving) ExpandTrace(tr tr.Trace) error {
 	columns := tr.Columns()
 	ctx := p.target.Context()
 	// Byte width records the largest width of any column.
-	byte_width := uint(0)
+	bit_width := uint(0)
 	// Ensure target column doesn't exist
 	for i := p.Columns(); i.HasNext(); {
 		ith := i.Next()
 		// Update byte width
-		byte_width = max(byte_width, ith.Type().ByteWidth())
+		bit_width = max(bit_width, ith.Type().BitWidth())
 		// Sanity check no column already exists with this name.
 		if _, ok := columns.IndexOf(ctx.Module(), ith.Name()); ok {
 			return fmt.Errorf("interleaved column already exists ({%s})", ith.Name())
@@ -92,7 +92,7 @@ func (p *Interleaving) ExpandTrace(tr tr.Trace) error {
 	// the interleaved column)
 	height := tr.Modules().Get(ctx.Module()).Height() * multiplier
 	// Construct empty array
-	data := util.NewFrArray(height*width, byte_width)
+	data := util.NewFrArray(height*width, bit_width)
 	// Offset just gives the column index
 	offset := uint(0)
 	// Copy interleaved data
6 changes: 3 additions & 3 deletions pkg/schema/assignment/lexicographic_sort.go
@@ -82,17 +82,17 @@ func (p *LexicographicSort) ExpandTrace(tr trace.Trace) error {
 	// Initialise new data columns
 	bit := make([]util.FrArray, ncols)
 	// Byte width records the largest width of any column.
-	byte_width := uint(0)
+	bit_width := uint(0)

 	for i := 0; i < ncols; i++ {
 		// TODO: following can be optimised to use a single bit per element,
 		// rather than an entire byte.
 		bit[i] = util.NewFrArray(nrows, 1)
 		ith := columns.Get(p.sources[i])
-		byte_width = max(byte_width, ith.Data().ByteWidth())
+		bit_width = max(bit_width, ith.Data().BitWidth())
 	}

-	delta := util.NewFrArray(nrows, byte_width)
+	delta := util.NewFrArray(nrows, bit_width)

 	for i := uint(0); i < nrows; i++ {
 		set := false
2 changes: 1 addition & 1 deletion pkg/trace/json/reader.go
@@ -27,7 +27,7 @@ func FromBytes(bytes []byte) (trace.Trace, error) {
 	for name, rawInts := range rawData {
 		// Translate raw bigints into raw field elements
 		// TODO: support native field widths in column name.
-		rawElements := util.FrArrayFromBigInts(32, rawInts)
+		rawElements := util.FrArrayFromBigInts(256, rawInts)
 		// Add column and sanity check for errors
 		if err := builder.Add(name, &zero, rawElements); err != nil {
 			return nil, err
2 changes: 1 addition & 1 deletion pkg/trace/lt/reader.go
@@ -97,7 +97,7 @@ func readColumnHeader(buf *bytes.Reader) (columnHeader, error) {

 func readColumnData(header columnHeader, bytes []byte) util.FrArray {
 	// Construct array
-	data := util.NewFrArray(header.length, header.width)
+	data := util.NewFrArray(header.length, header.width*8)
 	// Assign elements
 	offset := uint(0)

7 changes: 6 additions & 1 deletion pkg/trace/lt/writer.go
@@ -61,8 +61,13 @@ func WriteBytes(tr trace.Trace, buf io.Writer) error {
 		if n != int(nameLen) || err != nil {
 			log.Fatal(err)
 		}
+		// Determine number of bytes required to hold element of this column.
+		byteWidth := data.BitWidth() / 8
+		if data.BitWidth()%8 != 0 {
+			byteWidth++
+		}
 		// Write bytes per element
-		if err := binary.Write(buf, binary.BigEndian, uint8(data.ByteWidth())); err != nil {
+		if err := binary.Write(buf, binary.BigEndian, uint8(byteWidth)); err != nil {
 			log.Fatal(err)
 		}
 		// Write Data length
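Viewed alongside the readColumnData change above, the .lt column header appears to keep storing a byte width on disk while the in-memory FrArray is now constructed with a bit width: the reader multiplies the stored width by 8, and the writer here rounds BitWidth() back up to whole bytes before emitting the header.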