Merge pull request #38 from metrico/ndjson-support
JSONEachRow / NDJSON support
lmangani authored Oct 18, 2024
2 parents f62bff0 + e6ff18f commit 057a8c0
Showing 1 changed file with 56 additions and 0 deletions.
utils/helper.go: 56 additions, 0 deletions
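With this change, the format switch in ConversationOfRows also accepts "JSONEachRow" and "NDJSON" and emits one JSON object per result row, each terminated by a newline. As an illustrative example (the column names and values below are made up), a two-row result set would be rendered as:

{"id":1,"msg":"hello"}
{"id":2,"msg":"world"}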
@@ -4,6 +4,7 @@ import (
"database/sql"
"encoding/json"
"fmt"
"bytes"
"quackpipe/model"
"regexp"
"strings"
@@ -30,6 +31,12 @@ func ConversationOfRows(rows *sql.Rows, default_format string, duration time.Dur
return "", err
}
return result, nil
case "JSONEachRow", "NDJSON":
result, err := rowsToNDJSON(rows)
if err != nil {
return "", err
}
return result, nil
case "CSVWithNames":
result, err := rowsToCSV(rows, true)
if err != nil {
@@ -122,6 +129,55 @@ func rowsToJSON(rows *sql.Rows, elapsedTime time.Duration) (string, error) {
return string(jsonData), nil
}

// rowsToNDJSON converts the result rows to a newline-delimited JSON (NDJSON) string
func rowsToNDJSON(rows *sql.Rows) (string, error) {
defer rows.Close()

columns, err := rows.Columns()
if err != nil {
return "", err
}

var buffer bytes.Buffer
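// Scan each column into an interface{} slot via a pointer so values of any type can be captured.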
values := make([]interface{}, len(columns))
scanArgs := make([]interface{}, len(columns))
for i := range values {
scanArgs[i] = &values[i]
}

for rows.Next() {
err := rows.Scan(scanArgs...)
if err != nil {
return "", err
}

rowMap := make(map[string]interface{})
for i, col := range columns {
val := values[i]
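// database/sql drivers commonly return text as []byte; convert to string so json.Marshal emits a JSON string rather than base64.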
b, ok := val.([]byte)
if ok {
rowMap[col] = string(b)
} else {
rowMap[col] = val
}
}

jsonData, err := json.Marshal(rowMap)
if err != nil {
return "", err
}

buffer.Write(jsonData)
buffer.WriteByte('\n')
}

if err = rows.Err(); err != nil {
return "", err
}

return buffer.String(), nil
}
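For context, a minimal caller for the new path could look like the sketch below. It is hypothetical and not part of this commit: it assumes a *sql.DB handle opened elsewhere, that it lives in the same package as helper.go (so "database/sql" and "time" are already imported), and that ConversationOfRows dispatches on its default_format argument, as the switch hunk above suggests.

// queryAsNDJSON is a hypothetical helper: it runs a query and returns the
// result set rendered through the new NDJSON path.
func queryAsNDJSON(db *sql.DB, query string) (string, error) {
	start := time.Now()
	rows, err := db.Query(query)
	if err != nil {
		return "", err
	}
	// rows is closed inside rowsToNDJSON via its deferred rows.Close().
	return ConversationOfRows(rows, "NDJSON", time.Since(start))
}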

// rowsToTSV converts the rows to TSV string
func rowsToTSV(rows *sql.Rows, cols bool) (string, error) {
var result []string
