Skip to content

Commit

Permalink
Update query JSON output to be consistent across pipelines - also han…
Browse files Browse the repository at this point in the history
…dle duplicate column names. Closes #4317
  • Loading branch information
pskrbasu committed Sep 2, 2024
1 parent 8a96a91 commit c6b8c83
Show file tree
Hide file tree
Showing 25 changed files with 1,223 additions and 710 deletions.
5 changes: 4 additions & 1 deletion pkg/db/db_client/db_client_execute.go
Original file line number Diff line number Diff line change
Expand Up @@ -134,7 +134,10 @@ func (c *DbClient) ExecuteInSession(ctx context.Context, session *db_common.Data
return
}

colDefs := fieldDescriptionsToColumns(rows.FieldDescriptions(), session.Connection.Conn())
colDefs, err := fieldDescriptionsToColumns(rows.FieldDescriptions(), session.Connection.Conn())
if err != nil {
return nil, err
}

result := queryresult.NewResult(colDefs)

Expand Down
31 changes: 29 additions & 2 deletions pkg/db/db_client/pgx_types.go
Original file line number Diff line number Diff line change
@@ -1,11 +1,13 @@
package db_client

import (
"fmt"
"strconv"
"strings"

"github.com/jackc/pgx/v5"
"github.com/jackc/pgx/v5/pgconn"
"github.com/turbot/pipe-fittings/utils"
"github.com/turbot/steampipe/pkg/query/queryresult"
)

Expand All @@ -18,7 +20,7 @@ func columnTypeDatabaseTypeName(field pgconn.FieldDescription, connection *pgx.C
return strconv.FormatInt(int64(field.DataTypeOID), 10)
}

func fieldDescriptionsToColumns(fieldDescriptions []pgconn.FieldDescription, connection *pgx.Conn) []*queryresult.ColumnDef {
func fieldDescriptionsToColumns(fieldDescriptions []pgconn.FieldDescription, connection *pgx.Conn) ([]*queryresult.ColumnDef, error) {
cols := make([]*queryresult.ColumnDef, len(fieldDescriptions))

for i, f := range fieldDescriptions {
Expand All @@ -29,5 +31,30 @@ func fieldDescriptionsToColumns(fieldDescriptions []pgconn.FieldDescription, con
DataType: typeName,
}
}
return cols

// Ensure column names are unique
if err := ensureUniqueColumnName(cols); err != nil {
return nil, err
}

return cols, nil
}

// ensureUniqueColumnName walks cols and renames any duplicate column name to a
// generated unique name, recording the pre-rename name in OriginalName so
// display code can still show what the query actually returned.
// Mutates the column defs in place; returns an error if name generation fails.
func ensureUniqueColumnName(cols []*queryresult.ColumnDef) error {
	// generator tracks names seen so far and produces deduplicated variants
	nameGenerator := utils.NewUniqueNameGenerator()

	for idx, columnDef := range cols {
		uniqueName, err := nameGenerator.GetUniqueName(columnDef.Name, idx)
		if err != nil {
			return fmt.Errorf("error generating unique column name: %w", err)
		}
		// unchanged name means no duplicate - nothing to record
		if uniqueName == columnDef.Name {
			continue
		}
		// keep the original name first, then adopt the unique name
		columnDef.OriginalName = columnDef.Name
		columnDef.Name = uniqueName
	}
	return nil
}
11 changes: 8 additions & 3 deletions pkg/display/column.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,11 +10,16 @@ import (
"github.com/turbot/steampipe/pkg/query/queryresult"
)

// ColumnNames :: extract names from columns
func ColumnNames(columns []*queryresult.ColumnDef) []string {
// columnNames builds a list of name from a slice of column defs - respecting the original name if present
func columnNames(columns []*queryresult.ColumnDef) []string {
var colNames = make([]string, len(columns))
for i, c := range columns {
colNames[i] = c.Name
// respect original name
if c.OriginalName != "" {
colNames[i] = c.OriginalName
} else {
colNames[i] = c.Name
}
}

return colNames
Expand Down
38 changes: 30 additions & 8 deletions pkg/display/display.go
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ import (
"github.com/karrick/gows"
"github.com/spf13/viper"
"github.com/turbot/go-kit/helpers"
pqueryresult "github.com/turbot/pipe-fittings/queryresult"
"github.com/turbot/steampipe/pkg/cmdconfig"
"github.com/turbot/steampipe/pkg/constants"
"github.com/turbot/steampipe/pkg/error_helpers"
Expand Down Expand Up @@ -184,6 +185,7 @@ func getTerminalColumnsRequiredForString(str string) int {
}

type jsonOutput struct {
Columns []pqueryresult.ColumnDef `json:"columns"`
Rows []map[string]interface{} `json:"rows"`
Metadata *queryresult.TimingResult `json:"metadata,omitempty"`
}
Expand All @@ -199,12 +201,27 @@ func displayJSON(ctx context.Context, result *queryresult.Result) (int, *queryre
rowErrors := 0
jsonOutput := newJSONOutput()

// add column defs to the JSON output
for _, col := range result.Cols {
// create a new column def, converting the data type to lowercase
c := pqueryresult.ColumnDef{
Name: col.Name,
OriginalName: col.OriginalName,
DataType: strings.ToLower(col.DataType),
}
// add to the column def array
jsonOutput.Columns = append(jsonOutput.Columns, c)
}

// define function to add each row to the JSON output
rowFunc := func(row []interface{}, result *queryresult.Result) {
record := map[string]interface{}{}
for idx, col := range result.Cols {
value, _ := ParseJSONOutputColumnValue(row[idx], col)
record[col.Name] = value
// get the column def
c := jsonOutput.Columns[idx]
// add the value under the unique column name
record[c.Name] = value
}
jsonOutput.Rows = append(jsonOutput.Rows, record)
}
Expand Down Expand Up @@ -237,7 +254,7 @@ func displayCSV(ctx context.Context, result *queryresult.Result) (int, *queryres
csvWriter.Comma = []rune(cmdconfig.Viper().GetString(constants.ArgSeparator))[0]

if cmdconfig.Viper().GetBool(constants.ArgHeader) {
_ = csvWriter.Write(ColumnNames(result.Cols))
_ = csvWriter.Write(columnNames(result.Cols))
}

// print the data as it comes
Expand Down Expand Up @@ -291,16 +308,19 @@ func displayLine(ctx context.Context, result *queryresult.Result) (int, *queryre
lineFormat := fmt.Sprintf("%%-%ds | %%s\n", maxColNameLength)
multiLineFormat := fmt.Sprintf("%%-%ds | %%-%ds", maxColNameLength, requiredTerminalColumnsForValuesOfRecord)

fmt.Printf("-[ RECORD %-2d ]%s\n", (itemIdx + 1), strings.Repeat("-", 75))
fmt.Printf("-[ RECORD %-2d ]%s\n", itemIdx+1, strings.Repeat("-", 75)) //nolint:forbidigo // intentional use of fmt

// get the column names (this takes into account the original name)
columnNames := columnNames(result.Cols)
for idx, column := range recordAsString {
lines := strings.Split(column, "\n")
if len(lines) == 1 {
fmt.Printf(lineFormat, result.Cols[idx].Name, lines[0])
fmt.Printf(lineFormat, columnNames[idx], lines[0])
} else {
for lineIdx, line := range lines {
if lineIdx == 0 {
// the first line
fmt.Printf(multiLineFormat, result.Cols[idx].Name, line)
fmt.Printf(multiLineFormat, columnNames[idx], line)
} else {
// next lines
fmt.Printf(multiLineFormat, "", line)
Expand Down Expand Up @@ -347,10 +367,12 @@ func displayTable(ctx context.Context, result *queryresult.Result) (int, *queryr
var colConfigs []table.ColumnConfig
headers := make(table.Row, len(result.Cols))

for idx, column := range result.Cols {
headers[idx] = column.Name
// get the column names (this takes into account the original name)
columnNames := columnNames(result.Cols)
for idx, columnName := range columnNames {
headers[idx] = columnName
colConfigs = append(colConfigs, table.ColumnConfig{
Name: column.Name,
Name: columnName,
Number: idx + 1,
WidthMax: constants.MaxColumnWidth,
})
Expand Down
7 changes: 4 additions & 3 deletions pkg/query/queryresult/column_def.go
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,10 @@ import "reflect"

// ColumnDef is a struct used to store column information from query results
type ColumnDef struct {
Name string `json:"name"`
DataType string `json:"data_type"`
isScalar *bool
Name string `json:"name"`
DataType string `json:"data_type"`
isScalar *bool
OriginalName string `json:"original_name"`
}

// IsScalar checks if the given value is a scalar value
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,124 @@
{
"columns": [
{
"name": "resource_name",
"data_type": "text"
},
{
"name": "mod_name",
"data_type": "text"
},
{
"name": "file_name",
"data_type": "text"
},
{
"name": "start_line_number",
"data_type": "int4"
},
{
"name": "end_line_number",
"data_type": "int4"
},
{
"name": "auto_generated",
"data_type": "bool"
},
{
"name": "source_definition",
"data_type": "text"
},
{
"name": "is_anonymous",
"data_type": "bool"
},
{
"name": "severity",
"data_type": "text"
},
{
"name": "width",
"data_type": "text"
},
{
"name": "type",
"data_type": "text"
},
{
"name": "sql",
"data_type": "text"
},
{
"name": "args",
"data_type": "jsonb"
},
{
"name": "params",
"data_type": "jsonb"
},
{
"name": "query",
"data_type": "text"
},
{
"name": "path",
"data_type": "jsonb"
},
{
"name": "qualified_name",
"data_type": "text"
},
{
"name": "title",
"data_type": "text"
},
{
"name": "description",
"data_type": "text"
},
{
"name": "documentation",
"data_type": "text"
},
{
"name": "tags",
"data_type": "jsonb"
}
],
"rows": [
{
"args": {
"args_list": null,
"refs": null
},
"auto_generated": false,
"description": "Sample control to test introspection functionality",
"documentation": null,
"end_line_number": 33,
"file_name": "/Users/pskrbasu/work/src/steampipe/tests/acceptance/test_data/mods/introspection_table_mod/resources.sp",
"is_anonymous": false,
"mod_name": "introspection_table_mod",
"params": null,
"path": [
[
"mod.introspection_table_mod",
"introspection_table_mod.benchmark.sample_benchmark_1",
"introspection_table_mod.control.sample_control_1"
]
],
"qualified_name": "introspection_table_mod.control.sample_control_1",
"query": "introspection_table_mod.query.sample_query_1",
"resource_name": "sample_control_1",
"severity": "high",
"source_definition": "control \"sample_control_1\" {\n title = \"Sample control 1\"\n description = \"Sample control to test introspection functionality\"\n query = query.sample_query_1\n severity = \"high\"\n tags = {\n \"foo\": \"bar\"\n }\n}",
"sql": null,
"start_line_number": 25,
"tags": {
"foo": "bar"
},
"title": "Sample control 1",
"type": null,
"width": null
}
]
}
Loading

0 comments on commit c6b8c83

Please sign in to comment.