Reputation: 574
Essentially, I am trying to run a query against a MySQL database, convert the result into JSON, and send it back to the client. I have tried several methods, and all of the "easy" ones end up sending the JSON back as one big string. I need it sent back as a key (string) with a []float64 value, so that each key has a typed array of data associated with it. The best method I've found so far is to put all of the data into a struct, encode it, and write that to the ResponseWriter.
I have seen several questions about building JSON from a database, but I haven't found anything using the struct method. I wrote the code below as a single function to illustrate my question. It is VERY limited in that it only handles two fields, and the second one MUST be a float64.
Therefore, my question is: how do I create JSON with the correct types from a query result before sending it back to the client, and is there a way to do this dynamically (i.e. accepting a variable number of columns and unknown types)?
{ "Values": [12.54, 76.98, 34.90], "Dates": ["2017-02-03", "2017-02-04", "2017-02-05"] }
type DbDao struct {
    db *sql.DB
}

type JSONData struct {
    Values []float64
    Dates  []string
}
func (d *DbDao) SendJSON(sqlString string, w http.ResponseWriter) error {
    stmt, err := d.db.Prepare(sqlString)
    if err != nil {
        return err
    }
    defer stmt.Close()

    rows, err := stmt.Query()
    if err != nil {
        return err
    }
    defer rows.Close()

    values := make([]interface{}, 2)
    scanArgs := make([]interface{}, 2)
    for i := range values {
        scanArgs[i] = &values[i]
    }

    // Collect every row into a single struct, then encode it once at the end.
    var myjson JSONData
    for rows.Next() {
        if err := rows.Scan(scanArgs...); err != nil {
            return err
        }
        // The MySQL driver returns the date column as []byte and the value
        // column as float64, so assert the concrete types before appending.
        dateBytes, dok := values[0].([]byte)
        val, vok := values[1].(float64)
        if dok {
            myjson.Dates = append(myjson.Dates, string(dateBytes))
        }
        if vok {
            myjson.Values = append(myjson.Values, val)
        }
    }
    return json.NewEncoder(w).Encode(&myjson)
}
Upvotes: 11
Views: 31994
Reputation: 335
I ran into the same problem: go-sql-driver/mysql can't correctly scan rows into interface{} values. I wrote this tool, borrowing from Grafana's solution. You can use it to bury the ugly type matching and conversion code under a dependency:
https://github.com/naughtyGitCat/anonymous-query-scan
import (
    "database/sql"
    "encoding/json"
    "fmt"

    _ "github.com/go-sql-driver/mysql"
    mysql "github.com/naughtyGitCat/anonymous-query-scan/mysql"
)

func main() {
    // mysqlConnectionStr is your DSN, e.g. "user:password@tcp(127.0.0.1:3306)/dbname".
    db, err := sql.Open("mysql", mysqlConnectionStr)
    if err != nil {
        panic(err)
    }
    rows, err := db.Query("select * from user")
    if err != nil {
        panic(err)
    }
    mappedRows, err := mysql.ScanAnonymousMappedRows(rows)
    if err != nil {
        panic(err)
    }
    rowBytes, err := json.Marshal(mappedRows)
    if err != nil {
        panic(err)
    }
    fmt.Println(string(rowBytes))
}
Upvotes: 0
Reputation:
This is a much better way to do it (tested with Postgres). No reflection needed:
columnTypes, err := rows.ColumnTypes()
if err != nil {
    return err
}

count := len(columnTypes)
finalRows := []interface{}{}

for rows.Next() {
    scanArgs := make([]interface{}, count)
    for i, v := range columnTypes {
        switch v.DatabaseTypeName() {
        case "VARCHAR", "TEXT", "UUID", "TIMESTAMP":
            scanArgs[i] = new(sql.NullString)
        case "BOOL":
            scanArgs[i] = new(sql.NullBool)
        case "INT4":
            scanArgs[i] = new(sql.NullInt64)
        default:
            scanArgs[i] = new(sql.NullString)
        }
    }

    err := rows.Scan(scanArgs...)
    if err != nil {
        return err
    }

    masterData := map[string]interface{}{}
    for i, v := range columnTypes {
        if z, ok := (scanArgs[i]).(*sql.NullBool); ok {
            masterData[v.Name()] = z.Bool
            continue
        }
        if z, ok := (scanArgs[i]).(*sql.NullString); ok {
            masterData[v.Name()] = z.String
            continue
        }
        if z, ok := (scanArgs[i]).(*sql.NullInt64); ok {
            masterData[v.Name()] = z.Int64
            continue
        }
        if z, ok := (scanArgs[i]).(*sql.NullFloat64); ok {
            masterData[v.Name()] = z.Float64
            continue
        }
        if z, ok := (scanArgs[i]).(*sql.NullInt32); ok {
            masterData[v.Name()] = z.Int32
            continue
        }
        masterData[v.Name()] = scanArgs[i]
    }
    finalRows = append(finalRows, masterData)
}

z, err := json.Marshal(finalRows)
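To make the snippet reusable, one option is to wrap it in a function that returns the rows as a slice of maps. Below is a hedged sketch of that idea; the function name rowsToMaps is mine, and the exact DatabaseTypeName strings depend on your driver, so adjust the switch to match the snippet above for your database:
// rowsToMaps is a hypothetical wrapper around the loop above: it turns an
// already-executed *sql.Rows into a slice of column-name -> value maps that
// can be passed straight to json.Marshal or json.NewEncoder.
func rowsToMaps(rows *sql.Rows) ([]map[string]interface{}, error) {
    columnTypes, err := rows.ColumnTypes()
    if err != nil {
        return nil, err
    }
    out := []map[string]interface{}{}
    for rows.Next() {
        scanArgs := make([]interface{}, len(columnTypes))
        for i, ct := range columnTypes {
            // Same idea as the switch above: pick a nullable scan target
            // based on the column's database type.
            switch ct.DatabaseTypeName() {
            case "BOOL":
                scanArgs[i] = new(sql.NullBool)
            case "INT4", "INT8":
                scanArgs[i] = new(sql.NullInt64)
            case "FLOAT8", "NUMERIC":
                scanArgs[i] = new(sql.NullFloat64)
            default:
                scanArgs[i] = new(sql.NullString)
            }
        }
        if err := rows.Scan(scanArgs...); err != nil {
            return nil, err
        }
        row := map[string]interface{}{}
        for i, ct := range columnTypes {
            switch v := scanArgs[i].(type) {
            case *sql.NullBool:
                row[ct.Name()] = v.Bool
            case *sql.NullInt64:
                row[ct.Name()] = v.Int64
            case *sql.NullFloat64:
                row[ct.Name()] = v.Float64
            case *sql.NullString:
                row[ct.Name()] = v.String
            }
        }
        out = append(out, row)
    }
    return out, rows.Err()
}
A handler can then call rowsToMaps(rows) and pass the result to json.NewEncoder(w).Encode to send it to the client.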
Upvotes: 14
Reputation: 96547
Here is a better solution, using reflection. It handles types correctly (e.g. a string value of "true" won't erroneously be turned into a bool, and so on).
It also handles possibly-null types (only tested with MySQL - you will probably need to modify it for other drivers).
package main

import (
    "database/sql"
    "encoding/json"
    "fmt"
    "reflect"

    "github.com/go-sql-driver/mysql"
)

// Additional scan types returned by the MySQL driver. I haven't looked at
// what PostgreSQL does.

type jsonNullInt64 struct {
    sql.NullInt64
}

func (v jsonNullInt64) MarshalJSON() ([]byte, error) {
    if !v.Valid {
        return json.Marshal(nil)
    }
    return json.Marshal(v.Int64)
}

type jsonNullFloat64 struct {
    sql.NullFloat64
}

func (v jsonNullFloat64) MarshalJSON() ([]byte, error) {
    if !v.Valid {
        return json.Marshal(nil)
    }
    return json.Marshal(v.Float64)
}

type jsonNullTime struct {
    mysql.NullTime
}

func (v jsonNullTime) MarshalJSON() ([]byte, error) {
    if !v.Valid {
        return json.Marshal(nil)
    }
    return json.Marshal(v.Time)
}

var jsonNullInt64Type = reflect.TypeOf(jsonNullInt64{})
var jsonNullFloat64Type = reflect.TypeOf(jsonNullFloat64{})
var jsonNullTimeType = reflect.TypeOf(jsonNullTime{})
var nullInt64Type = reflect.TypeOf(sql.NullInt64{})
var nullFloat64Type = reflect.TypeOf(sql.NullFloat64{})
var nullTimeType = reflect.TypeOf(mysql.NullTime{})

// SQLToJSON takes an SQL result and converts it to a nice JSON form. It also
// handles possibly-null values nicely. See https://stackoverflow.com/a/52572145/265521
func SQLToJSON(rows *sql.Rows) ([]byte, error) {
    columns, err := rows.Columns()
    if err != nil {
        return nil, fmt.Errorf("column error: %v", err)
    }
    tt, err := rows.ColumnTypes()
    if err != nil {
        return nil, fmt.Errorf("column type error: %v", err)
    }

    // Swap the driver's nullable scan types for wrappers that know how to
    // marshal themselves to JSON (emitting null when the value is not valid).
    types := make([]reflect.Type, len(tt))
    for i, tp := range tt {
        st := tp.ScanType()
        if st == nil {
            return nil, fmt.Errorf("scantype is null for column %q", columns[i])
        }
        switch st {
        case nullInt64Type:
            types[i] = jsonNullInt64Type
        case nullFloat64Type:
            types[i] = jsonNullFloat64Type
        case nullTimeType:
            types[i] = jsonNullTimeType
        default:
            types[i] = st
        }
    }

    values := make([]interface{}, len(tt))
    data := make(map[string][]interface{})
    for rows.Next() {
        // Allocate a fresh value of the right type for each column, scan
        // into it, then append it to that column's slice.
        for i := range values {
            values[i] = reflect.New(types[i]).Interface()
        }
        err = rows.Scan(values...)
        if err != nil {
            return nil, fmt.Errorf("failed to scan values: %v", err)
        }
        for i, v := range values {
            data[columns[i]] = append(data[columns[i]], v)
        }
    }
    return json.Marshal(data)
}
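A possible way to wire SQLToJSON into an HTTP handler (this wiring is my own sketch, not part of the answer; the handler name, query, and route are placeholders, and it assumes net/http is imported alongside the packages above):
// Hypothetical handler: run a query, convert the rows with SQLToJSON, and
// write the resulting bytes to the client.
func handleData(db *sql.DB) http.HandlerFunc {
    return func(w http.ResponseWriter, r *http.Request) {
        rows, err := db.Query("SELECT date, value FROM measurements") // placeholder query
        if err != nil {
            http.Error(w, err.Error(), http.StatusInternalServerError)
            return
        }
        defer rows.Close()

        body, err := SQLToJSON(rows)
        if err != nil {
            http.Error(w, err.Error(), http.StatusInternalServerError)
            return
        }
        w.Header().Set("Content-Type", "application/json")
        w.Write(body)
    }
}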
Upvotes: 10
Reputation: 574
This is the best implementation I was able to come up with that makes it dynamic. It is also significantly shorter than my original. Since I've seen this type of question quite a bit, I hope this helps others. I am open to other answers with a better implementation:
func (d *DbDao) makeStructJSON(queryText string, w http.ResponseWriter) error {
    rows, err := d.db.Query(queryText)
    if err != nil {
        return err
    }
    defer rows.Close()

    columns, err := rows.Columns()
    if err != nil {
        return err
    }

    count := len(columns)
    values := make([]interface{}, count)
    scanArgs := make([]interface{}, count)
    for i := range values {
        scanArgs[i] = &values[i]
    }

    masterData := make(map[string][]interface{})

    for rows.Next() {
        err := rows.Scan(scanArgs...)
        if err != nil {
            return err
        }
        for i, v := range values {
            // The MySQL driver returns each column here as []byte.
            x := v.([]byte)

            // NOTE: FROM THE GO BLOG: JSON and Go - 25 Jan 2011:
            // The json package uses map[string]interface{} and []interface{}
            // values to store arbitrary JSON objects and arrays; it will
            // happily unmarshal any valid JSON blob into a plain interface{}
            // value. The default concrete Go types are:
            //
            //   bool for JSON booleans,
            //   float64 for JSON numbers,
            //   string for JSON strings, and
            //   nil for JSON null.
            if nx, err := strconv.ParseFloat(string(x), 64); err == nil {
                masterData[columns[i]] = append(masterData[columns[i]], nx)
            } else if b, err := strconv.ParseBool(string(x)); err == nil {
                masterData[columns[i]] = append(masterData[columns[i]], b)
            } else if "string" == fmt.Sprintf("%T", string(x)) {
                masterData[columns[i]] = append(masterData[columns[i]], string(x))
            } else {
                fmt.Printf("Failed on if for type %T of %v\n", x, x)
            }
        }
    }

    w.Header().Set("Content-Type", "application/json")
    return json.NewEncoder(w).Encode(masterData)
}
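For completeness, here is a rough sketch of how makeStructJSON might be wired into a server. The DSN, route, and query are placeholders I've added, and it assumes DbDao from the question lives in the same package, with database/sql, log, net/http, and the MySQL driver imported:
// Hypothetical wiring of makeStructJSON into an HTTP server.
func main() {
    db, err := sql.Open("mysql", "user:password@tcp(127.0.0.1:3306)/mydb") // placeholder DSN
    if err != nil {
        log.Fatal(err)
    }
    dao := &DbDao{db: db}

    http.HandleFunc("/data", func(w http.ResponseWriter, r *http.Request) {
        if err := dao.makeStructJSON("SELECT date, value FROM measurements", w); err != nil {
            http.Error(w, err.Error(), http.StatusInternalServerError)
        }
    })
    log.Fatal(http.ListenAndServe(":8080", nil))
}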
Upvotes: 15
Reputation: 1500
I think the best option you have is to use the encoding/json package from Go's standard library:
import "encoding/json"
type JSONData struct {
Values []float64 `json:"Values"`
Dates []string `json:"Dates"`
}
I don't think there is a good way to do this dynamically, since Go has no way of matching up the database column names with the output JSON. Also, as a side note, I usually write the DB querying code by scanning into typed variables directly:
var tempDate string
var tempValue float64
err := rows.Scan(&tempDate, &tempValue)
if err != nil {
    return err
}
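Putting those two pieces together, a minimal sketch of the typed-scan approach might look like this (the function name queryToJSON and the query are mine, purely for illustration):
// queryToJSON is a hypothetical helper: scan each row into typed variables,
// append them to the struct, then encode the whole thing once at the end.
func queryToJSON(db *sql.DB, w http.ResponseWriter) error {
    rows, err := db.Query("SELECT date, value FROM measurements") // placeholder query
    if err != nil {
        return err
    }
    defer rows.Close()

    var data JSONData
    for rows.Next() {
        var tempDate string
        var tempValue float64
        if err := rows.Scan(&tempDate, &tempValue); err != nil {
            return err
        }
        data.Dates = append(data.Dates, tempDate)
        data.Values = append(data.Values, tempValue)
    }
    if err := rows.Err(); err != nil {
        return err
    }

    w.Header().Set("Content-Type", "application/json")
    return json.NewEncoder(w).Encode(data)
}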
If you really want to do this automatically you can look into golang code generation.
Upvotes: 0