- JSON output format has changed: rows now live under a `rows` property, with timing information under a `metadata` property
- Update the timing display to show rows returned as well as rows fetched, and add a verbose mode which lists all scans
- Use enums for output mode and timing mode - timing is now either `on`, `off` or `verbose`
- Bugfix: ensure error is returned from ExecuteSystemClientCall. Closes #4246
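For example, with the new format a `--output json` result looks roughly like this (the exact metadata field names come from the JSON tags on `queryresult.TimingResult`, which are not shown in this diff; the values are invented):

```json
{
  "rows": [
    { "instance_id": "i-0123456789abcdef0", "subnet_id": "subnet-0a2c499fc37a6c1fe" }
  ],
  "metadata": {
    "duration_ms": 439,
    "rows_returned": 1,
    "uncached_rows_fetched": 1,
    "cached_rows_fetched": 0,
    "hydrate_calls": 0
  }
}
```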
.github/workflows/release_cli_and_assets.yml (vendored) | 2

@@ -346,6 +346,8 @@ jobs:
          test_block: migration
        - platform: macos-latest
          test_block: force_stop
        - platform: ubuntu-latest
          test_block: chaos_and_query
    runs-on: ${{ matrix.platform }}
    steps:
      - name: Trim asset version prefix and Validate
.github/workflows/test.yml (vendored) | 2

@@ -128,8 +128,6 @@ jobs:
          test_block: migration
        - platform: macos-latest
          test_block: force_stop
        - platform: ubuntu-latest
          test_block: chaos_and_query
    runs-on: ${{ matrix.platform }}
    steps:
      - name: Checkout
cmd/check.go | 20

@@ -9,6 +9,7 @@ import (
    "github.com/spf13/cobra"
    "github.com/spf13/viper"
    "github.com/thediveo/enumflag/v2"
    "github.com/turbot/go-kit/helpers"
    "github.com/turbot/steampipe-plugin-sdk/v5/sperr"
    "github.com/turbot/steampipe/pkg/cmdconfig"
@@ -26,6 +27,12 @@ import (
    "github.com/turbot/steampipe/pkg/workspace"
)

// variable used to assign the timing mode flag
var checkTimingMode = constants.CheckTimingModeOff

// variable used to assign the output mode flag
var checkOutputMode = constants.CheckOutputModeText

func checkCmd() *cobra.Command {
    cmd := &cobra.Command{
        Use: "check [flags] [mod/benchmark/control/\"all\"]",
@@ -63,8 +70,13 @@ You may specify one or more benchmarks or controls to run (separated by a space)
        AddBoolFlag(constants.ArgHeader, true, "Include column headers for csv and table output").
        AddBoolFlag(constants.ArgHelp, false, "Help for check", cmdconfig.FlagOptions.WithShortHand("h")).
        AddStringFlag(constants.ArgSeparator, ",", "Separator string for csv output").
        AddStringFlag(constants.ArgOutput, constants.OutputFormatText, "Output format: brief, csv, html, json, md, text, snapshot or none").
        AddBoolFlag(constants.ArgTiming, false, "Turn on the timer which reports check time").
        AddVarFlag(enumflag.New(&checkOutputMode, constants.ArgOutput, constants.CheckOutputModeIds, enumflag.EnumCaseInsensitive),
            constants.ArgOutput,
            fmt.Sprintf("Output format; one of: %s", strings.Join(constants.FlagValues(constants.CheckOutputModeIds), ", "))).
        AddVarFlag(enumflag.New(&checkTimingMode, constants.ArgTiming, constants.CheckTimingModeIds, enumflag.EnumCaseInsensitive),
            constants.ArgTiming,
            fmt.Sprintf("Display timing information; one of: %s", strings.Join(constants.FlagValues(constants.CheckTimingModeIds), ", ")),
            cmdconfig.FlagOptions.NoOptDefVal(constants.CheckTimingModeIds[checkTimingMode][0])).
        AddStringSliceFlag(constants.ArgSearchPath, nil, "Set a custom search_path for the steampipe user for a check session (comma-separated)").
        AddStringSliceFlag(constants.ArgSearchPathPrefix, nil, "Set a prefix to the current search path for a check session (comma-separated)").
        AddStringFlag(constants.ArgTheme, "dark", "Set the output theme for 'text' output: light, dark or plain").
@@ -378,8 +390,8 @@ func printTiming(tree *controlexecute.ExecutionTree) {

func shouldPrintTiming() bool {
    outputFormat := viper.GetString(constants.ArgOutput)

    return (viper.GetBool(constants.ArgTiming) && !viper.GetBool(constants.ArgDryRun)) &&
    timingMode := viper.GetString(constants.ArgTiming)
    return (timingMode != constants.ArgOff && !viper.GetBool(constants.ArgDryRun)) &&
        (outputFormat == constants.OutputFormatText || outputFormat == constants.OutputFormatBrief)
}
cmd/query.go | 16

@@ -12,6 +12,7 @@ import (
    "github.com/hashicorp/hcl/v2"
    "github.com/spf13/cobra"
    "github.com/spf13/viper"
    "github.com/thediveo/enumflag/v2"
    "github.com/turbot/go-kit/helpers"
    "github.com/turbot/steampipe-plugin-sdk/v5/sperr"
    "github.com/turbot/steampipe/pkg/cmdconfig"
@@ -31,6 +32,12 @@ import (
    "github.com/turbot/steampipe/pkg/workspace"
)

// variable used to assign the timing mode flag
var queryTimingMode = constants.QueryTimingModeOff

// variable used to assign the output mode flag
var queryOutputMode = constants.QueryOutputModeTable

func queryCmd() *cobra.Command {
    cmd := &cobra.Command{
        Use: "query",
@@ -78,8 +85,13 @@ Examples:
        AddBoolFlag(constants.ArgHelp, false, "Help for query", cmdconfig.FlagOptions.WithShortHand("h")).
        AddBoolFlag(constants.ArgHeader, true, "Include column headers csv and table output").
        AddStringFlag(constants.ArgSeparator, ",", "Separator string for csv output").
        AddStringFlag(constants.ArgOutput, "table", "Output format: line, csv, json, table or snapshot").
        AddBoolFlag(constants.ArgTiming, false, "Turn on the timer which reports query time").
        AddVarFlag(enumflag.New(&queryOutputMode, constants.ArgOutput, constants.QueryOutputModeIds, enumflag.EnumCaseInsensitive),
            constants.ArgOutput,
            fmt.Sprintf("Output format; one of: %s", strings.Join(constants.FlagValues(constants.QueryOutputModeIds), ", "))).
        AddVarFlag(enumflag.New(&queryTimingMode, constants.ArgTiming, constants.QueryTimingModeIds, enumflag.EnumCaseInsensitive),
            constants.ArgTiming,
            fmt.Sprintf("Display query timing; one of: %s", strings.Join(constants.FlagValues(constants.QueryTimingModeIds), ", ")),
            cmdconfig.FlagOptions.NoOptDefVal(constants.ArgOn)).
        AddBoolFlag(constants.ArgWatch, true, "Watch SQL files in the current workspace (works only in interactive mode)").
        AddStringSliceFlag(constants.ArgSearchPath, nil, "Set a custom search_path for the steampipe user for a query session (comma-separated)").
        AddStringSliceFlag(constants.ArgSearchPathPrefix, nil, "Set a prefix to the current search path for a query session (comma-separated)").
design/timing_output.md (new file) | 90

@@ -0,0 +1,90 @@
# Steampipe CLI .timing output

## CLI Implementation

When the `--timing` flag is enabled, the Steampipe CLI outputs the row count, the number of hydrate calls and the time taken to execute the query.
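For illustration, the timing line printed after a query looks like this with `--timing=on` (the format comes from `buildTimingString` in the CLI display code; the values here are invented):

```
Time: 439ms. Rows returned: 1. Rows fetched: 100 (25 cached). Hydrate calls: 6.
```

With `--timing=verbose` each scan is also listed on its own line, e.g. `1) Table: aws_ec2_instance. Connection: aws. Time: 439ms. Rows fetched: 1. Hydrate calls: 0.`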

The timing data is stored by the FDW in the foreign table `steampipe_internal.steampipe_scan_metadata`.

```
> select * from steampipe_internal.steampipe_scan_metadata
+-----+------------------+-----------+--------------+---------------+---------------------------+----------+--------------------------------------+-------+-----------------------------------------+
| id  | table            | cache_hit | rows_fetched | hydrate_calls | start_time                | duration | columns                              | limit | quals                                   |
+-----+------------------+-----------+--------------+---------------+---------------------------+----------+--------------------------------------+-------+-----------------------------------------+
| 191 | aws_ec2_instance | false     | 1            | 0             | 2024-04-04T09:29:52+01:00 | 439      | ["instance_id","vpc_id","subnet_id"] | 0     | [                                       |
|     |                  |           |              |               |                           |          |                                      |       |   {                                     |
|     |                  |           |              |               |                           |          |                                      |       |     "column": "subnet_id",              |
|     |                  |           |              |               |                           |          |                                      |       |     "operator": "=",                    |
|     |                  |           |              |               |                           |          |                                      |       |     "value": "subnet-0a2c499fc37a6c1fe" |
|     |                  |           |              |               |                           |          |                                      |       |   }                                     |
|     |                  |           |              |               |                           |          |                                      |       | ]                                       |
| 192 | aws_ec2_instance | false     | 0            | 0             | 2024-04-04T09:29:53+01:00 | 433      | ["instance_id","vpc_id","subnet_id"] | 0     | [                                       |
|     |                  |           |              |               |                           |          |                                      |       |   {                                     |
|     |                  |           |              |               |                           |          |                                      |       |     "column": "subnet_id",              |
|     |                  |           |              |               |                           |          |                                      |       |     "operator": "=",                    |
|     |                  |           |              |               |                           |          |                                      |       |     "value": "subnet-0b8060c3ee31f4ba7" |
|     |                  |           |              |               |                           |          |                                      |       |   }                                     |
|     |                  |           |              |               |                           |          |                                      |       | ]                                       |
etc.
```
Every scan which executes results in a row written to this table, with an incrementing id.

The CLI DB client keeps track of the `id` of the last scan metadata which was read from `steampipe_internal.steampipe_scan_metadata`.
Every time the client executes a query, it fetches data from the table with an `id` greater than the last `id` read.
A single query may consist of multiple scans, so there may be multiple rows written to this table for a single query.
The DB client reads all these rows and combines them to display the timing data for the query.
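
The sketch below illustrates that incremental read. The table and column names are those shown above, but the helper name and the `lastId` bookkeeping are illustrative, not the actual CLI code:

```go
package main

import (
	"context"

	"github.com/jackc/pgx/v5"
)

// scanMetadataRow mirrors the subset of columns the CLI needs from
// steampipe_internal.steampipe_scan_metadata.
type scanMetadataRow struct {
	Id           int64
	Table        string
	CacheHit     bool
	RowsFetched  int64
	HydrateCalls int64
}

// fetchNewScanMetadata reads only the rows written since the previous read,
// and returns the new high-water-mark id to cache for the next query.
func fetchNewScanMetadata(ctx context.Context, conn *pgx.Conn, lastId int64) ([]scanMetadataRow, int64, error) {
	rows, err := conn.Query(ctx,
		`select id, "table", cache_hit, rows_fetched, hydrate_calls
		 from steampipe_internal.steampipe_scan_metadata
		 where id > $1
		 order by id`, lastId)
	if err != nil {
		return nil, lastId, err
	}
	scans, err := pgx.CollectRows(rows, pgx.RowToStructByPos[scanMetadataRow])
	if err != nil {
		return nil, lastId, err
	}
	// remember the highest id seen so the next fetch skips these rows
	maxId := lastId
	for _, s := range scans {
		if s.Id > maxId {
			maxId = s.Id
		}
	}
	return scans, maxId, nil
}
```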

## Populating the steampipe_internal.steampipe_scan_metadata table

For every scan which the FDW executes, it stores `ScanMetadata` in the `Hub` struct.

```go
type ScanMetadata struct {
	Id           int
	Table        string
	CacheHit     bool
	RowsFetched  int64
	HydrateCalls int64
	Columns      []string
	Quals        map[string]*proto.Quals
	Limit        int64
	StartTime    time.Time
	Duration     time.Duration
}
```

This is then used to populate the `steampipe_internal.steampipe_scan_metadata` foreign table:

```go
// AsResultRow returns the ScanMetadata as a map[string]interface which can be returned as a query result
func (m ScanMetadata) AsResultRow() map[string]interface{} {
	res := map[string]interface{}{
		"id":            m.Id,
		"table":         m.Table,
		"cache_hit":     m.CacheHit,
		"rows_fetched":  m.RowsFetched,
		"hydrate_calls": m.HydrateCalls,
		"start_time":    m.StartTime,
		"duration":      m.Duration.Milliseconds(),
		"columns":       m.Columns,
	}
	if m.Limit != -1 {
		res["limit"] = m.Limit
	}
	if len(m.Quals) > 0 {
		// ignore error
		res["quals"], _ = grpc.QualMapToJSONString(m.Quals)
	}
	return res
}
```
## Receiving the `ScanMetadata` from the plugin

The `Hub` ScanMetadata is populated by the scan iterator which executed the scan.
NOTE: if the query is for an aggregator connection, the scan iterator will have multiple ScanMetadata entries,
one per connection. *These are summed* when populating the scan metadata on the Hub.

Every result row which the plugin streams to the FDW also contains `QueryMetadata` (the protobuf representation of `ScanMetadata`).
The iterator has a map of scan metadata, keyed by connection (to support aggregators).
When a row is received from the result stream, the metadata for that connection is *replaced*, as sketched below.
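
A minimal sketch of that bookkeeping (the type and method names are illustrative, not the actual FDW code):

```go
// scanIterator holds the latest ScanMetadata per connection; because the
// plugin streams cumulative metadata with every row, each update replaces
// the previous entry rather than adding to it.
type scanIterator struct {
	scanMetadata map[string]ScanMetadata
}

func (i *scanIterator) onRowMetadata(connection string, md ScanMetadata) {
	i.scanMetadata[connection] = md
}

// when the scan completes, the per-connection entries are summed to give
// the metadata which is set on the Hub
func (i *scanIterator) hubScanMetadata() ScanMetadata {
	var total ScanMetadata
	for _, md := range i.scanMetadata {
		total.RowsFetched += md.RowsFetched
		total.HydrateCalls += md.HydrateCalls
	}
	return total
}
```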
go.mod | 2

@@ -8,6 +8,7 @@ replace (
    github.com/c-bata/go-prompt => github.com/turbot/go-prompt v0.2.6-steampipe.0.0.20221028122246-eb118ec58d50
    github.com/docker/distribution => github.com/distribution/distribution v2.7.1+incompatible
    github.com/docker/docker => github.com/moby/moby v20.10.17+incompatible
)

require (
@@ -49,6 +50,7 @@ require (
    github.com/spf13/pflag v1.0.5
    github.com/spf13/viper v1.18.2
    github.com/stevenle/topsort v0.2.0
    github.com/thediveo/enumflag/v2 v2.0.5
    github.com/turbot/go-kit v0.10.0-rc.0
    github.com/turbot/pipe-fittings v1.1.1
    github.com/turbot/steampipe-cloud-sdk-go v0.6.0
go.sum | 14

@@ -396,6 +396,8 @@ github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91
github.com/go-playground/validator/v10 v10.19.0 h1:ol+5Fu+cSq9JD7SoSqe04GMI92cbn0+wvQ3bZ8b/AU4=
github.com/go-playground/validator/v10 v10.19.0/go.mod h1:dbuPbCMFw/DrkbEynArYaCwl3amGuJotoKCe95atGMM=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 h1:tfuBGBXKqDEevZMzYi5KSi8KkcZtzBcTgAUUtapy0OI=
github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572/go.mod h1:9Pwr4B2jHnOSGXyyzV8ROjYa2ojvAY6HCGYYfMoC3Ls=
github.com/go-test/deep v1.1.0 h1:WOcxcdHcvdgThNXjw0t76K42FXTU7HpNQWHpA2HHNlg=
github.com/go-test/deep v1.1.0/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE=
github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
@@ -482,6 +484,8 @@ github.com/google/pprof v0.0.0-20210226084205-cbba55b83ad5/go.mod h1:kpwsk12EmLe
github.com/google/pprof v0.0.0-20210601050228-01bbb1931b22/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20210609004039-a478d1d731e9/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20210720184732-4bb14d4b1be1/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/pprof v0.0.0-20211214055906-6f57359322fd h1:1FjCyPC+syAzJ5/2S8fqdZK1R22vvA0J7JZKcuOIQ7Y=
github.com/google/pprof v0.0.0-20211214055906-6f57359322fd/go.mod h1:KgnwoLYCZ8IQu3XUZ8Nc/bM9CCZFOyjUNOSygVozoDg=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o=
github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw=
@@ -682,8 +686,10 @@ github.com/oklog/run v1.0.0 h1:Ru7dDtJNOyC66gQ5dQmaCa0qIsAUFY3sFpK1Xk8igrw=
github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA=
github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec=
github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY=
github.com/onsi/gomega v1.27.10 h1:naR28SdDFlqrG6kScpT8VWpu1xWY5nJRCF3XaYyBjhI=
github.com/onsi/gomega v1.27.10/go.mod h1:RsS8tutOdbdgzbPtzzATp12yT7kM5I5aElG3evPbQ0M=
github.com/onsi/ginkgo/v2 v2.13.0 h1:0jY9lJquiL8fcf3M4LAXN5aMlS/b2BV86HFFPCPMgE4=
github.com/onsi/ginkgo/v2 v2.13.0/go.mod h1:TE309ZR8s5FsKKpuB1YAQYBzCaAfUgatB/xlT/ETL/o=
github.com/onsi/gomega v1.28.1 h1:MijcGUbfYuznzK/5R4CPNoUP/9Xvuo20sXfEm6XxoTA=
github.com/onsi/gomega v1.28.1/go.mod h1:9sxs+SwGrKI0+PWe4Fxa9tFQQBG5xSsSbMXOI8PPpoQ=
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug=
@@ -798,6 +804,10 @@ github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsT
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
github.com/thediveo/enumflag/v2 v2.0.5 h1:VJjvlAqUb6m6mxOrB/0tfBJI0Kvi9wJ8ulh38xK87i8=
github.com/thediveo/enumflag/v2 v2.0.5/go.mod h1:0NcG67nYgwwFsAvoQCmezG0J0KaIxZ0f7skg9eLq1DA=
github.com/thediveo/success v1.0.1 h1:NVwUOwKUwaN8szjkJ+vsiM2L3sNBFscldoDJ2g2tAPg=
github.com/thediveo/success v1.0.1/go.mod h1:AZ8oUArgbIsCuDEWrzWNQHdKnPbDOLQsWOFj9ynwLt0=
github.com/tklauser/go-sysconf v0.3.9 h1:JeUVdAOWhhxVcU6Eqr/ATFHgXk/mmiItdKeJPev3vTo=
github.com/tklauser/go-sysconf v0.3.9/go.mod h1:11DU/5sG7UexIrp/O6g35hrWzu0JxlwQ3LSFUzyeuhs=
github.com/tklauser/numcpus v0.3.0 h1:ILuRUQBtssgnxw0XXIjKUC56fgnOrFoQQ/4+DeU2biQ=
@@ -78,7 +78,7 @@ func OnCmd(cmd *cobra.Command) *CmdBuilder {
}

// AddStringFlag is a helper function to add a string flag to a command
func (c *CmdBuilder) AddStringFlag(name string, defaultValue string, desc string, opts ...flagOpt) *CmdBuilder {
func (c *CmdBuilder) AddStringFlag(name string, defaultValue string, desc string, opts ...FlagOption) *CmdBuilder {
    c.cmd.Flags().String(name, defaultValue, desc)
    c.bindings[name] = c.cmd.Flags().Lookup(name)
    for _, o := range opts {
@@ -89,7 +89,7 @@ func (c *CmdBuilder) AddStringFlag(name string, defaultValue string, desc string
}

// AddIntFlag is a helper function to add an integer flag to a command
func (c *CmdBuilder) AddIntFlag(name string, defaultValue int, desc string, opts ...flagOpt) *CmdBuilder {
func (c *CmdBuilder) AddIntFlag(name string, defaultValue int, desc string, opts ...FlagOption) *CmdBuilder {
    c.cmd.Flags().Int(name, defaultValue, desc)
    c.bindings[name] = c.cmd.Flags().Lookup(name)
    for _, o := range opts {
@@ -99,7 +99,7 @@ func (c *CmdBuilder) AddIntFlag(name string, defaultValue int, desc string, opts
}

// AddBoolFlag ia s helper function to add a boolean flag to a command
func (c *CmdBuilder) AddBoolFlag(name string, defaultValue bool, desc string, opts ...flagOpt) *CmdBuilder {
func (c *CmdBuilder) AddBoolFlag(name string, defaultValue bool, desc string, opts ...FlagOption) *CmdBuilder {
    c.cmd.Flags().Bool(name, defaultValue, desc)
    c.bindings[name] = c.cmd.Flags().Lookup(name)
    for _, o := range opts {
@@ -132,7 +132,7 @@ func (c *CmdBuilder) AddModLocationFlag() *CmdBuilder {
}

// AddStringSliceFlag is a helper function to add a flag that accepts an array of strings
func (c *CmdBuilder) AddStringSliceFlag(name string, defaultValue []string, desc string, opts ...flagOpt) *CmdBuilder {
func (c *CmdBuilder) AddStringSliceFlag(name string, defaultValue []string, desc string, opts ...FlagOption) *CmdBuilder {
    c.cmd.Flags().StringSlice(name, defaultValue, desc)
    c.bindings[name] = c.cmd.Flags().Lookup(name)
    for _, o := range opts {
@@ -142,7 +142,7 @@ func (c *CmdBuilder) AddStringSliceFlag(name string, defaultValue []string, desc
}

// AddStringArrayFlag is a helper function to add a flag that accepts an array of strings
func (c *CmdBuilder) AddStringArrayFlag(name string, defaultValue []string, desc string, opts ...flagOpt) *CmdBuilder {
func (c *CmdBuilder) AddStringArrayFlag(name string, defaultValue []string, desc string, opts ...FlagOption) *CmdBuilder {
    c.cmd.Flags().StringArray(name, defaultValue, desc)
    c.bindings[name] = c.cmd.Flags().Lookup(name)
    for _, o := range opts {
@@ -152,7 +152,7 @@ func (c *CmdBuilder) AddStringArrayFlag(name string, defaultValue []string, desc
}

// AddStringMapStringFlag is a helper function to add a flag that accepts a map of strings
func (c *CmdBuilder) AddStringMapStringFlag(name string, defaultValue map[string]string, desc string, opts ...flagOpt) *CmdBuilder {
func (c *CmdBuilder) AddStringMapStringFlag(name string, defaultValue map[string]string, desc string, opts ...FlagOption) *CmdBuilder {
    c.cmd.Flags().StringToString(name, defaultValue, desc)
    c.bindings[name] = c.cmd.Flags().Lookup(name)
    for _, o := range opts {
@@ -160,3 +160,15 @@ func (c *CmdBuilder) AddStringMapStringFlag(name string, defaultValue map[string
    }
    return c
}

func (c *CmdBuilder) AddVarFlag(value pflag.Value, name string, usage string, opts ...FlagOption) *CmdBuilder {
    c.cmd.Flags().Var(value, name, usage)

    c.bindings[name] = c.cmd.Flags().Lookup(name)
    for _, o := range opts {
        o(c.cmd, name, name)
    }

    //
    return c
}
@@ -11,15 +11,15 @@ import (

var requiredColor = color.New(color.Bold).SprintfFunc()

type flagOpt func(c *cobra.Command, name string, key string)
type FlagOption func(c *cobra.Command, name string, key string)

// FlagOptions - shortcut for common flag options
var FlagOptions = struct {
    Required      func() flagOpt
    Hidden        func() flagOpt
    Deprecated    func(string) flagOpt
    NoOptDefVal   func(string) flagOpt
    WithShortHand func(string) flagOpt
    Required      func() FlagOption
    Hidden        func() FlagOption
    Deprecated    func(string) FlagOption
    NoOptDefVal   func(string) FlagOption
    WithShortHand func(string) FlagOption
}{
    Required: requiredOpt,
    Hidden:   hiddenOpt,
@@ -29,7 +29,7 @@ var FlagOptions = struct {
}

// Helper function to mark a flag as required
func requiredOpt() flagOpt {
func requiredOpt() FlagOption {
    return func(c *cobra.Command, name, key string) {
        err := c.MarkFlagRequired(key)
        error_helpers.FailOnErrorWithMessage(err, "could not mark flag as required")
@@ -40,25 +40,25 @@ func requiredOpt() flagOpt {
    }
}

func hiddenOpt() flagOpt {
func hiddenOpt() FlagOption {
    return func(c *cobra.Command, name, _ string) {
        c.Flag(name).Hidden = true
    }
}

func deprecatedOpt(replacement string) flagOpt {
func deprecatedOpt(replacement string) FlagOption {
    return func(c *cobra.Command, name, _ string) {
        c.Flag(name).Deprecated = fmt.Sprintf("please use %s", replacement)
    }
}

func noOptDefValOpt(noOptDefVal string) flagOpt {
func noOptDefValOpt(noOptDefVal string) FlagOption {
    return func(c *cobra.Command, name, _ string) {
        c.Flag(name).NoOptDefVal = noOptDefVal
    }
}

func withShortHand(shorthand string) flagOpt {
func withShortHand(shorthand string) FlagOption {
    return func(c *cobra.Command, name, _ string) {
        c.Flag(name).Shorthand = shorthand
    }
@@ -9,6 +9,7 @@ const (
    ArgTiming                  = "timing"
    ArgOn                      = "on"
    ArgOff                     = "off"
    ArgVerbose                 = "verbose"
    ArgClear                   = "clear"
    ArgDatabaseListenAddresses = "database-listen"
    ArgDatabasePort            = "database-port"
@@ -28,7 +28,7 @@ const (
// constants for installing db and fdw images
const (
    DatabaseVersion = "14.2.0"
    FdwVersion      = "1.10.0"
    FdwVersion      = "1.11.0-rc.0"

    // PostgresImageRef is the OCI Image ref for the database binaries
    PostgresImageRef = "us-docker.pkg.dev/steampipe/steampipe/db:14.2.0"
@@ -71,6 +71,7 @@ const (
    ConnectionStateError = "error"

    // foreign tables in internal schema
    ForeignTableScanMetadataSummary = "steampipe_scan_metadata_summary"
    ForeignTableScanMetadata        = "steampipe_scan_metadata"
    ForeignTableSettings            = "steampipe_settings"
    ForeignTableSettingsKeyColumn   = "name"
pkg/constants/flags.go (new file) | 98

@@ -0,0 +1,98 @@
package constants

import (
    "github.com/thediveo/enumflag/v2"
    "github.com/turbot/pipe-fittings/constants"
)

type QueryOutputMode enumflag.Flag

const (
    QueryOutputModeCsv QueryOutputMode = iota
    QueryOutputModeJson
    QueryOutputModeLine
    QueryOutputModeSnapshot
    QueryOutputModeSnapshotShort
    QueryOutputModeTable
)

var QueryOutputModeIds = map[QueryOutputMode][]string{
    QueryOutputModeCsv:           {constants.OutputFormatCSV},
    QueryOutputModeJson:          {constants.OutputFormatJSON},
    QueryOutputModeLine:          {constants.OutputFormatLine},
    QueryOutputModeSnapshot:      {constants.OutputFormatSnapshot},
    QueryOutputModeSnapshotShort: {constants.OutputFormatSnapshotShort},
    QueryOutputModeTable:         {constants.OutputFormatTable},
}

type QueryTimingMode enumflag.Flag

const (
    QueryTimingModeOff QueryTimingMode = iota
    QueryTimingModeOn
    QueryTimingModeVerbose
)

var QueryTimingModeIds = map[QueryTimingMode][]string{
    QueryTimingModeOff:     {constants.ArgOff},
    QueryTimingModeOn:      {constants.ArgOn},
    QueryTimingModeVerbose: {constants.ArgVerbose},
}

var QueryTimingValueLookup = map[string]struct{}{
    constants.ArgOff:     {},
    constants.ArgOn:      {},
    constants.ArgVerbose: {},
}

type CheckTimingMode enumflag.Flag

const (
    CheckTimingModeOff CheckTimingMode = iota
    CheckTimingModeOn
)

var CheckTimingModeIds = map[CheckTimingMode][]string{
    CheckTimingModeOff: {constants.ArgOff},
    CheckTimingModeOn:  {constants.ArgOn},
}

var CheckTimingValueLookup = map[string]struct{}{
    constants.ArgOff: {},
    constants.ArgOn:  {},
}

type CheckOutputMode enumflag.Flag

const (
    CheckOutputModeText CheckOutputMode = iota
    CheckOutputModeBrief CheckOutputMode = iota
    CheckOutputModeCsv
    CheckOutputModeHTML
    CheckOutputModeJSON
    CheckOutputModeMd
    CheckOutputModeSnapshot
    CheckOutputModeSnapshotShort
    CheckOutputModeNone
)

var CheckOutputModeIds = map[CheckOutputMode][]string{
    CheckOutputModeText:          {constants.OutputFormatText},
    CheckOutputModeBrief:         {constants.OutputFormatBrief},
    CheckOutputModeCsv:           {constants.OutputFormatCSV},
    CheckOutputModeHTML:          {constants.OutputFormatHTML},
    CheckOutputModeJSON:          {constants.OutputFormatJSON},
    CheckOutputModeMd:            {constants.OutputFormatMD},
    CheckOutputModeSnapshot:      {constants.OutputFormatSnapshot},
    CheckOutputModeSnapshotShort: {constants.OutputFormatSnapshotShort},
    CheckOutputModeNone:          {constants.OutputFormatNone},
}

func FlagValues[T comparable](mappings map[T][]string) []string {
    var res = make([]string, 0, len(mappings))
    for _, v := range mappings {
        res = append(res, v[0])
    }
    return res

}
@@ -3,12 +3,12 @@ package db_client
import (
    "context"
    "fmt"
    "github.com/jackc/pgx/v5/pgconn"
    "log"
    "strings"
    "sync"

    "github.com/jackc/pgx/v5"
    "github.com/jackc/pgx/v5/pgconn"
    "github.com/jackc/pgx/v5/pgxpool"
    "github.com/spf13/viper"
    "github.com/turbot/steampipe/pkg/constants"
@@ -54,9 +54,6 @@ type DbClient struct {
    searchPathPrefix []string
    // the default user search path
    userSearchPath []string
    // a cached copy of (viper.GetBool(constants.ArgTiming) && viper.GetString(constants.ArgOutput) == constants.OutputFormatTable)
    // (cached to avoid concurrent access error on viper)
    showTimingFlag bool
    // disable timing - set whilst in process of querying the timing
    disableTiming bool
    onConnectionCallback DbConnectionCallback
@@ -148,20 +145,19 @@ func (c *DbClient) loadServerSettings(ctx context.Context) error {
    return nil
}

func (c *DbClient) setShouldShowTiming(ctx context.Context, session *db_common.DatabaseSession) {
    currentShowTimingFlag := viper.GetBool(constants.ArgTiming)

    // if we are turning timing ON, fetch the ScanMetadataMaxId
    // to ensure we only select the relevant scan metadata table entries
    if currentShowTimingFlag && !c.showTimingFlag {
        c.updateScanMetadataMaxId(ctx, session)
func (c *DbClient) shouldFetchTiming() bool {
    // check for override flag (this is to prevent timing being fetched when we read the timing metadata table)
    if c.disableTiming {
        return false
    }
    // only fetch timing if timing flag is set, or output is JSON
    return (viper.GetString(constants.ArgTiming) != constants.ArgOff) ||
        (viper.GetString(constants.ArgOutput) == constants.OutputFormatJSON)

    c.showTimingFlag = currentShowTimingFlag
}

func (c *DbClient) shouldShowTiming() bool {
    return c.showTimingFlag && !c.disableTiming
func (c *DbClient) shouldFetchVerboseTiming() bool {
    return (viper.GetString(constants.ArgTiming) == constants.ArgVerbose) ||
        (viper.GetString(constants.ArgOutput) == constants.OutputFormatJSON)
}

// ServerSettings returns the settings of the steampipe service that this DbClient is connected to
@@ -3,8 +3,8 @@ package db_client
import (
    "context"
    "database/sql"
    "errors"
    "fmt"
    "log"
    "net/netip"
    "strings"
    "time"
@@ -33,10 +33,6 @@ func (c *DbClient) ExecuteSync(ctx context.Context, query string, args ...any) (
    return nil, sessionResult.Error
}

// set setShouldShowTiming flag
// (this will refetch ScanMetadataMaxId if timing has just been enabled)
c.setShouldShowTiming(ctx, sessionResult.Session)

defer func() {
    // we need to do this in a closure, otherwise the ctx will be evaluated immediately
    // and not in call-time
@@ -69,7 +65,7 @@ func (c *DbClient) ExecuteSyncInSession(ctx context.Context, session *db_common.
        syncResult.Rows = append(syncResult.Rows, row)
    }
}
if c.shouldShowTiming() {
if c.shouldFetchTiming() {
    syncResult.TimingResult = <-result.TimingResult
}
@@ -85,13 +81,6 @@ func (c *DbClient) Execute(ctx context.Context, query string, args ...any) (*que
if sessionResult.Error != nil {
    return nil, sessionResult.Error
}
// disable statushooks when timing is enabled, because setShouldShowTiming internally calls the readRows funcs which
// calls the statushooks.Done, which hides the `Executing query…` spinner, when timing is enabled.
timingCtx := statushooks.DisableStatusHooks(ctx)

// re-read ArgTiming from viper (in case the .timing command has been run)
// (this will refetch ScanMetadataMaxId if timing has just been enabled)
c.setShouldShowTiming(timingCtx, sessionResult.Session)

// define callback to close session when the async execution is complete
closeSessionCallback := func() { sessionResult.Session.Close(error_helpers.IsContextCanceled(ctx)) }
@@ -184,12 +173,13 @@ func (c *DbClient) getExecuteContext(ctx context.Context) context.Context {
}

func (c *DbClient) getQueryTiming(ctx context.Context, startTime time.Time, session *db_common.DatabaseSession, resultChannel chan *queryresult.TimingResult) {
    if !c.shouldShowTiming() {
    // do not fetch if timing is disabled, unless output not JSON
    if !c.shouldFetchTiming() {
        return
    }

    var timingResult = &queryresult.TimingResult{
        Duration:   time.Since(startTime),
        DurationMs: time.Since(startTime).Milliseconds(),
    }
    // disable fetching timing information to avoid recursion
    c.disableTiming = true
@@ -200,44 +190,76 @@ func (c *DbClient) getQueryTiming(ctx context.Context, startTime time.Time, sess
        resultChannel <- timingResult
    }()

    var scanRows *ScanMetadataRow
    // load the timing summary
    summary, err := c.loadTimingSummary(ctx, session)
    if err != nil {
        log.Printf("[WARN] getQueryTiming: failed to read scan metadata, err: %s", err)
        return
    }

    // only load the individual scan metadata if output is JSON or timing is verbose
    var scans []*queryresult.ScanMetadataRow
    if c.shouldFetchVerboseTiming() {
        scans, err = c.loadTimingMetadata(ctx, session)
        if err != nil {
            log.Printf("[WARN] getQueryTiming: failed to read scan metadata, err: %s", err)
            return
        }
    }

    // populate hydrate calls and rows fetched
    timingResult.Initialise(summary, scans)
}

func (c *DbClient) loadTimingSummary(ctx context.Context, session *db_common.DatabaseSession) (*queryresult.QueryRowSummary, error) {
    var summary = &queryresult.QueryRowSummary{}
    err := db_common.ExecuteSystemClientCall(ctx, session.Connection.Conn(), func(ctx context.Context, tx pgx.Tx) error {
        query := fmt.Sprintf("select id, rows_fetched, cache_hit, hydrate_calls from %s.%s where id > %d", constants.InternalSchema, constants.ForeignTableScanMetadata, session.ScanMetadataMaxId)
        query := fmt.Sprintf(`select uncached_rows_fetched,
            cached_rows_fetched,
            hydrate_calls,
            scan_count,
            connection_count from %s.%s `, constants.InternalSchema, constants.ForeignTableScanMetadataSummary)
        //query := fmt.Sprintf("select id, 'table' as table, cache_hit, rows_fetched, hydrate_calls, start_time, duration, columns, 'limit' as limit, quals from %s.%s where id > %d", constants.InternalSchema, constants.ForeignTableScanMetadata, session.ScanMetadataMaxId)
        rows, err := tx.Query(ctx, query)
        if err != nil {
            return err
        }
        scanRows, err = pgx.CollectOneRow(rows, pgx.RowToAddrOfStructByName[ScanMetadataRow])
        return err

        // scan into summary
        summary, err = pgx.CollectOneRow(rows, pgx.RowToAddrOfStructByName[queryresult.QueryRowSummary])
        // no rows counts as an error
        if err != nil {
            return err
        }
        return nil
    })

    // if we failed to read scan metadata (either because the query failed or the plugin does not support it) just return
    // we don't return the error, since we don't want to error out in this case
    if err != nil || scanRows == nil {
        return
    }

    // so we have scan metadata - create the metadata struct
    timingResult.Metadata = &queryresult.TimingMetadata{}
    timingResult.Metadata.HydrateCalls += scanRows.HydrateCalls
    if scanRows.CacheHit {
        timingResult.Metadata.CachedRowsFetched += scanRows.RowsFetched
    } else {
        timingResult.Metadata.RowsFetched += scanRows.RowsFetched
    }
    // update the max id for this session
    session.ScanMetadataMaxId = scanRows.Id
    return summary, err
}

func (c *DbClient) updateScanMetadataMaxId(ctx context.Context, session *db_common.DatabaseSession) error {
    return db_common.ExecuteSystemClientCall(ctx, session.Connection.Conn(), func(ctx context.Context, tx pgx.Tx) error {
        row := tx.QueryRow(ctx, fmt.Sprintf("select max(id) from %s.%s", constants.InternalSchema, constants.ForeignTableScanMetadata))
        err := row.Scan(&session.ScanMetadataMaxId)
        if errors.Is(err, pgx.ErrNoRows) {
            return nil
func (c *DbClient) loadTimingMetadata(ctx context.Context, session *db_common.DatabaseSession) ([]*queryresult.ScanMetadataRow, error) {
    var scans []*queryresult.ScanMetadataRow

    err := db_common.ExecuteSystemClientCall(ctx, session.Connection.Conn(), func(ctx context.Context, tx pgx.Tx) error {
        query := fmt.Sprintf(`
            select connection,
                "table",
                cache_hit,
                rows_fetched,
                hydrate_calls,
                start_time,
                duration_ms,
                columns,
                "limit",
                quals from %s.%s order by duration_ms desc`, constants.InternalSchema, constants.ForeignTableScanMetadata)
        rows, err := tx.Query(ctx, query)
        if err != nil {
            return err
        }

        scans, err = pgx.CollectRows(rows, pgx.RowToAddrOfStructByName[queryresult.ScanMetadataRow])
        return err
    })
    return scans, err
}

// run query in a goroutine, so we can check for cancellation
@@ -1,9 +0,0 @@
package db_client

type ScanMetadataRow struct {
    // the fields of this struct need to be public since these are populated by pgx using RowsToStruct
    Id           int64 `db:"id"`
    RowsFetched  int64 `db:"rows_fetched"`
    CacheHit     bool  `db:"cache_hit"`
    HydrateCalls int64 `db:"hydrate_calls"`
}
@@ -17,9 +17,6 @@ type DatabaseSession struct {

    // this gets rewritten, since the database/sql gives back a new instance everytime
    Connection *pgxpool.Conn `json:"-"`

    // the id of the last scan metadata retrieved
    ScanMetadataMaxId int64 `json:"-"`
}

func NewDBSession(backendPid uint32) *DatabaseSession {
@@ -33,14 +33,18 @@ func ExecuteSystemClientCall(ctx context.Context, conn *pgx.Conn, executor Syste
}
defer func() {
    // set back the original application name
    _, e = tx.Exec(ctx, fmt.Sprintf("SET application_name TO '%s'", conn.Config().RuntimeParams[constants.RuntimeParamsKeyApplicationName]))
    if e != nil {
    _, err = tx.Exec(ctx, fmt.Sprintf("SET application_name TO '%s'", conn.Config().RuntimeParams[constants.RuntimeParamsKeyApplicationName]))
    if err != nil {
        log.Println("[TRACE] could not reset application_name", e)
    }
    // if there is not already an error, set the error
    if e == nil {
        e = err
    }
}()

if err := executor(ctx, tx); err != nil {
    return sperr.WrapWithMessage(err, "scoped execution failed with management client")
    return sperr.WrapWithMessage(err, "system client query execution failed")
}
return nil
})
@@ -78,7 +78,7 @@ type CreateDbOptions struct {
// the provided username
// if the database is not provided (empty), it connects to the default database in the service
// that was created during installation.
// NOTE: no session data callback is used - no session data will be present
// NOTE: this connection will use the ServiceConnectionAppName
func CreateLocalDbConnection(ctx context.Context, opts *CreateDbOptions) (*pgx.Conn, error) {
    utils.LogTime("db.CreateLocalDbConnection start")
    defer utils.LogTime("db.CreateLocalDbConnection end")
@@ -95,6 +95,7 @@ func CreateLocalDbConnection(ctx context.Context, opts *CreateDbOptions) (*pgx.C

    // set an app name so that we can track database connections from this Steampipe execution
    // this is used to determine whether the database can safely be closed
    // and also in pipes to allow accurate usage tracking (it excludes system calls)
    connConfig.Config.RuntimeParams = map[string]string{
        constants.RuntimeParamsKeyApplicationName: runtime.ServiceConnectionAppName,
    }
@@ -114,7 +115,8 @@ func CreateLocalDbConnection(ctx context.Context, opts *CreateDbOptions) (*pgx.C
    return conn, nil
}

// CreateConnectionPool
// CreateConnectionPool creates a connection pool using the provided options
// NOTE: this connection pool will use the ServiceConnectionAppName
func CreateConnectionPool(ctx context.Context, opts *CreateDbOptions, maxConnections int) (*pgxpool.Pool, error) {
    utils.LogTime("db_client.establishConnectionPool start")
    defer utils.LogTime("db_client.establishConnectionPool end")
@@ -137,10 +137,15 @@ func setupInternal(ctx context.Context, conn *pgx.Conn) error {

queries := []string{
    "lock table pg_namespace;",
    // drop internal schema tables to force recreation (in case of schema change)
    fmt.Sprintf(`DROP FOREIGN TABLE IF EXISTS %s.%s;`, constants.InternalSchema, constants.ForeignTableScanMetadataSummary),
    fmt.Sprintf(`DROP FOREIGN TABLE IF EXISTS %s.%s;`, constants.InternalSchema, constants.ForeignTableScanMetadata),
    fmt.Sprintf(`DROP FOREIGN TABLE IF EXISTS %s.%s;`, constants.InternalSchema, constants.ForeignTableSettings),
    fmt.Sprintf(`CREATE SCHEMA IF NOT EXISTS %s;`, constants.InternalSchema),
    fmt.Sprintf(`GRANT USAGE ON SCHEMA %s TO %s;`, constants.InternalSchema, constants.DatabaseUsersRole),
    fmt.Sprintf("IMPORT FOREIGN SCHEMA \"%s\" FROM SERVER steampipe INTO %s;", constants.InternalSchema, constants.InternalSchema),
    fmt.Sprintf("GRANT INSERT ON %s.%s TO %s;", constants.InternalSchema, constants.ForeignTableSettings, constants.DatabaseUsersRole),
    fmt.Sprintf("GRANT SELECT ON %s.%s TO %s;", constants.InternalSchema, constants.ForeignTableScanMetadataSummary, constants.DatabaseUsersRole),
    fmt.Sprintf("GRANT SELECT ON %s.%s TO %s;", constants.InternalSchema, constants.ForeignTableScanMetadata, constants.DatabaseUsersRole),
    // legacy command schema support
    fmt.Sprintf(`CREATE SCHEMA IF NOT EXISTS %s;`, constants.LegacyCommandSchema),
@@ -7,21 +7,22 @@ import (
    "encoding/csv"
    "encoding/json"
    "fmt"
    "io"
    "log"
    "os"
    "strings"
    "time"
    "unicode"
    "unicode/utf8"

    "github.com/karrick/gows"
    "github.com/turbot/go-kit/helpers"
    "github.com/turbot/steampipe/pkg/error_helpers"

    "github.com/jedib0t/go-pretty/v6/table"
    "github.com/jedib0t/go-pretty/v6/text"
    "github.com/karrick/gows"
    "github.com/spf13/viper"
    "github.com/turbot/go-kit/helpers"
    "github.com/turbot/steampipe/pkg/cmdconfig"
    "github.com/turbot/steampipe/pkg/constants"
    "github.com/turbot/steampipe/pkg/error_helpers"
    "github.com/turbot/steampipe/pkg/query/queryresult"
    "golang.org/x/text/language"
    "golang.org/x/text/message"
@@ -30,24 +31,33 @@ import (
// ShowOutput displays the output using the proper formatter as applicable
func ShowOutput(ctx context.Context, result *queryresult.Result, opts ...DisplayOption) int {
    rowErrors := 0
    config := NewDisplayConfiguration()
    config := newDisplayConfiguration()
    for _, o := range opts {
        o(config)
    }

    switch cmdconfig.Viper().GetString(constants.ArgOutput) {
    var timingResult *queryresult.TimingResult

    outputFormat := cmdconfig.Viper().GetString(constants.ArgOutput)
    switch outputFormat {
    case constants.OutputFormatJSON:
        rowErrors = displayJSON(ctx, result)
        rowErrors, timingResult = displayJSON(ctx, result)
    case constants.OutputFormatCSV:
        rowErrors = displayCSV(ctx, result)
        rowErrors, timingResult = displayCSV(ctx, result)
    case constants.OutputFormatLine:
        rowErrors = displayLine(ctx, result)
        rowErrors, timingResult = displayLine(ctx, result)
    case constants.OutputFormatTable:
        rowErrors = displayTable(ctx, result)
        rowErrors, timingResult = displayTable(ctx, result)
    }

    if config.timing {
        fmt.Println(buildTimingString(result))
    // show timing
    if config.timing != constants.ArgOff && timingResult != nil {
        str := buildTimingString(timingResult)
        if viper.GetBool(constants.ConfigKeyInteractive) {
            fmt.Println(str)
        } else {
            fmt.Fprintln(os.Stderr, str)
        }
    }
    // return the number of rows that returned errors
    return rowErrors
@@ -57,6 +67,7 @@ type ShowWrappedTableOptions struct {
    AutoMerge        bool
    HideEmptyColumns bool
    Truncate         bool
    OutputMirror     io.Writer
}

func ShowWrappedTable(headers []string, rows [][]string, opts *ShowWrappedTableOptions) {
@@ -67,7 +78,11 @@ func ShowWrappedTable(headers []string, rows [][]string, opts *ShowWrappedTableO

    t.SetStyle(table.StyleDefault)
    t.Style().Format.Header = text.FormatDefault
    t.SetOutputMirror(os.Stdout)
    if opts.OutputMirror == nil {
        t.SetOutputMirror(os.Stdout)
    } else {
        t.SetOutputMirror(opts.OutputMirror)
    }

    rowConfig := table.RowConfig{AutoMerge: opts.AutoMerge}
    colConfigs, headerRow := getColumnSettings(headers, rows, opts)
@@ -153,7 +168,104 @@ func getColumnSettings(headers []string, rows [][]string, opts *ShowWrappedTable
    return colConfigs, headerRow
}

func displayLine(ctx context.Context, result *queryresult.Result) int {
// getTerminalColumnsRequiredForString returns the length of the longest line in the string
func getTerminalColumnsRequiredForString(str string) int {
    colsRequired := 0
    scanner := bufio.NewScanner(bytes.NewBufferString(str))
    for scanner.Scan() {
        line := scanner.Text()
        runeCount := utf8.RuneCountInString(line)
        if runeCount > colsRequired {
            colsRequired = runeCount
        }
    }
    return colsRequired
}

type jsonOutput struct {
    Rows     []map[string]interface{}  `json:"rows"`
    Metadata *queryresult.TimingResult `json:"metadata"`
}

func newJSONOutput() *jsonOutput {
    return &jsonOutput{
        Rows: make([]map[string]interface{}, 0),
    }
}

func displayJSON(ctx context.Context, result *queryresult.Result) (int, *queryresult.TimingResult) {
    rowErrors := 0
    jsonOutput := newJSONOutput()

    // define function to add each row to the JSON output
    rowFunc := func(row []interface{}, result *queryresult.Result) {
        record := map[string]interface{}{}
        for idx, col := range result.Cols {
            value, _ := ParseJSONOutputColumnValue(row[idx], col)
            record[col.Name] = value
        }
        jsonOutput.Rows = append(jsonOutput.Rows, record)
    }

    // call this function for each row
    count, err := iterateResults(result, rowFunc)
    if err != nil {
        error_helpers.ShowError(ctx, err)
        rowErrors++
        return rowErrors, nil
    }

    // now we have iterated the rows, get the timing
    jsonOutput.Metadata = getTiming(result, count)

    // display the JSON
    encoder := json.NewEncoder(os.Stdout)
    encoder.SetIndent("", " ")
    encoder.SetEscapeHTML(false)
    if err := encoder.Encode(jsonOutput); err != nil {
        fmt.Print("Error displaying result as JSON", err)
        return 0, nil
    }
    return rowErrors, jsonOutput.Metadata
}

func displayCSV(ctx context.Context, result *queryresult.Result) (int, *queryresult.TimingResult) {
    rowErrors := 0
    csvWriter := csv.NewWriter(os.Stdout)
    csvWriter.Comma = []rune(cmdconfig.Viper().GetString(constants.ArgSeparator))[0]

    if cmdconfig.Viper().GetBool(constants.ArgHeader) {
        _ = csvWriter.Write(ColumnNames(result.Cols))
    }

    // print the data as it comes
    // define function display each csv row
    rowFunc := func(row []interface{}, result *queryresult.Result) {
        rowAsString, _ := ColumnValuesAsString(row, result.Cols, WithNullString(""))
        _ = csvWriter.Write(rowAsString)
    }

    // call this function for each row
    count, err := iterateResults(result, rowFunc)
    if err != nil {
        error_helpers.ShowError(ctx, err)
        rowErrors++
        return rowErrors, nil
    }

    csvWriter.Flush()
    if csvWriter.Error() != nil {
        error_helpers.ShowErrorWithMessage(ctx, csvWriter.Error(), "unable to print csv")
    }

    // now we have iterated the rows, get the timing
    timingResult := getTiming(result, count)

    return rowErrors, timingResult
}

func displayLine(ctx context.Context, result *queryresult.Result) (int, *queryresult.TimingResult) {

    maxColNameLength, rowErrors := 0, 0
    for _, col := range result.Cols {
@@ -208,90 +320,19 @@ func displayLine(ctx context.Context, result *queryresult.Result) int {
    }

    // call this function for each row
    if err := iterateResults(result, rowFunc); err != nil {
    count, err := iterateResults(result, rowFunc)
    if err != nil {
        error_helpers.ShowError(ctx, err)
        rowErrors++
        return rowErrors
        return rowErrors, nil
    }
    return rowErrors

    // now we have iterated the rows, get the timing
    timingResult := getTiming(result, count)
    return rowErrors, timingResult
}

// getTerminalColumnsRequiredForString returns the length of the longest line in the string
func getTerminalColumnsRequiredForString(str string) int {
    colsRequired := 0
    scanner := bufio.NewScanner(bytes.NewBufferString(str))
    for scanner.Scan() {
        line := scanner.Text()
        runeCount := utf8.RuneCountInString(line)
        if runeCount > colsRequired {
            colsRequired = runeCount
        }
    }
    return colsRequired
}

func displayJSON(ctx context.Context, result *queryresult.Result) int {
    rowErrors := 0
    jsonOutput := make([]map[string]interface{}, 0)

    // define function to add each row to the JSON output
    rowFunc := func(row []interface{}, result *queryresult.Result) {
        record := map[string]interface{}{}
        for idx, col := range result.Cols {
            value, _ := ParseJSONOutputColumnValue(row[idx], col)
            record[col.Name] = value
        }
        jsonOutput = append(jsonOutput, record)
    }

    // call this function for each row
    if err := iterateResults(result, rowFunc); err != nil {
        error_helpers.ShowError(ctx, err)
        rowErrors++
        return rowErrors
    }
    // display the JSON
    encoder := json.NewEncoder(os.Stdout)
    encoder.SetIndent("", " ")
    encoder.SetEscapeHTML(false)
    if err := encoder.Encode(jsonOutput); err != nil {
        fmt.Print("Error displaying result as JSON", err)
        return 0
    }
    return rowErrors
}

func displayCSV(ctx context.Context, result *queryresult.Result) int {
    rowErrors := 0
    csvWriter := csv.NewWriter(os.Stdout)
    csvWriter.Comma = []rune(cmdconfig.Viper().GetString(constants.ArgSeparator))[0]

    if cmdconfig.Viper().GetBool(constants.ArgHeader) {
        _ = csvWriter.Write(ColumnNames(result.Cols))
    }

    // print the data as it comes
    // define function display each csv row
    rowFunc := func(row []interface{}, result *queryresult.Result) {
        rowAsString, _ := ColumnValuesAsString(row, result.Cols, WithNullString(""))
        _ = csvWriter.Write(rowAsString)
    }

    // call this function for each row
    if err := iterateResults(result, rowFunc); err != nil {
        error_helpers.ShowError(ctx, err)
        rowErrors++
        return rowErrors
    }

    csvWriter.Flush()
    if csvWriter.Error() != nil {
        error_helpers.ShowErrorWithMessage(ctx, csvWriter.Error(), "unable to print csv")
    }
    return rowErrors
}

func displayTable(ctx context.Context, result *queryresult.Result) int {
func displayTable(ctx context.Context, result *queryresult.Result) (int, *queryresult.TimingResult) {
    rowErrors := 0
    // the buffer to put the output data in
    outbuf := bytes.NewBufferString("")
@@ -302,7 +343,7 @@ func displayTable(ctx context.Context, result *queryresult.Result) int {
    t.SetStyle(table.StyleDefault)
    t.Style().Format.Header = text.FormatDefault

    colConfigs := []table.ColumnConfig{}
    var colConfigs []table.ColumnConfig
    headers := make(table.Row, len(result.Cols))

    for idx, column := range result.Cols {
@@ -339,7 +380,7 @@ func displayTable(ctx context.Context, result *queryresult.Result) int {
    }

    // iterate each row, adding each to the table
    err := iterateResults(result, rowFunc)
    count, err := iterateResults(result, rowFunc)
    if err != nil {
        // display the error
        fmt.Println()
@@ -352,64 +393,142 @@ func displayTable(ctx context.Context, result *queryresult.Result) int {
|
||||
|
||||
// page out the table
|
||||
ShowPaged(ctx, outbuf.String())
|
||||
return rowErrors
|
||||
|
||||
// now we have iterated the rows, get the timing
|
||||
timingResult := getTiming(result, count)
|
||||
|
||||
return rowErrors, timingResult
|
||||
}
|
||||
|
||||
func buildTimingString(result *queryresult.Result) string {
|
||||
func getTiming(result *queryresult.Result, count int) *queryresult.TimingResult {
|
||||
// now we have iterated the rows, get the timing
|
||||
timingResult := <-result.TimingResult
|
||||
if timingResult == nil {
|
||||
return ""
|
||||
}
|
||||
// set rows returned
|
||||
timingResult.RowsReturned = int64(count)
|
||||
return timingResult
|
||||
}
|
||||
|
||||
func buildTimingString(timingResult *queryresult.TimingResult) string {
|
||||
var sb strings.Builder
|
||||
// large numbers should be formatted with commas
|
||||
p := message.NewPrinter(language.English)
|
||||
|
||||
milliseconds := float64(timingResult.Duration.Microseconds()) / 1000
|
||||
seconds := timingResult.Duration.Seconds()
|
||||
if seconds < 0.5 {
|
||||
sb.WriteString(p.Sprintf("\nTime: %dms.", int64(milliseconds)))
|
||||
} else {
|
||||
sb.WriteString(p.Sprintf("\nTime: %.1fs.", seconds))
|
||||
sb.WriteString(fmt.Sprintf("\nTime: %s.", getDurationString(timingResult.DurationMs, p)))
|
||||
sb.WriteString(p.Sprintf(" Rows returned: %d.", timingResult.RowsReturned))
|
||||
totalRowsFetched := timingResult.UncachedRowsFetched + timingResult.CachedRowsFetched
|
||||
if totalRowsFetched == 0 {
|
||||
// maybe there was an error retrieving timing - just display the basics
|
||||
return sb.String()
|
||||
}
|
||||
|
||||
if timingMetadata := timingResult.Metadata; timingMetadata != nil {
|
||||
totalRows := timingMetadata.RowsFetched + timingMetadata.CachedRowsFetched
|
||||
sb.WriteString(" Rows fetched: ")
|
||||
if totalRows == 0 {
|
||||
sb.WriteString("0")
|
||||
} else {
|
||||
if totalRows > 0 {
|
||||
sb.WriteString(p.Sprintf("%d", timingMetadata.RowsFetched+timingMetadata.CachedRowsFetched))
|
||||
}
|
||||
if timingMetadata.CachedRowsFetched > 0 {
|
||||
if timingMetadata.RowsFetched == 0 {
|
||||
sb.WriteString(" (cached)")
|
||||
} else {
|
||||
sb.WriteString(p.Sprintf(" (%d cached)", timingMetadata.CachedRowsFetched))
|
||||
}
|
||||
}
|
||||
sb.WriteString(" Rows fetched: ")
|
||||
if totalRowsFetched == 0 {
|
||||
sb.WriteString("0")
|
||||
} else {
|
||||
|
||||
// calculate the number of cached rows fetched
|
||||
|
||||
sb.WriteString(p.Sprintf("%d", totalRowsFetched))
|
||||
|
||||
// were all cached
|
||||
if timingResult.UncachedRowsFetched == 0 {
|
||||
sb.WriteString(" (cached)")
|
||||
} else if timingResult.CachedRowsFetched > 0 {
|
||||
sb.WriteString(p.Sprintf(" (%d cached)", timingResult.CachedRowsFetched))
|
||||
}
|
||||
}
|
||||
|
||||
sb.WriteString(p.Sprintf(". Hydrate calls: %d.", timingResult.HydrateCalls))
|
||||
if timingResult.ScanCount > 1 {
|
||||
sb.WriteString(p.Sprintf(" Scans: %d.", timingResult.ScanCount))
|
||||
}
|
||||
if timingResult.ConnectionCount > 1 {
|
||||
sb.WriteString(p.Sprintf(" Connections: %d.", timingResult.ConnectionCount))
|
||||
}
|
||||
|
||||
if viper.GetString(constants.ArgTiming) == constants.ArgVerbose && len(timingResult.Scans) > 0 {
|
||||
if err := getVerboseTimingString(&sb, p, timingResult); err != nil {
|
||||
log.Printf("[WARN] Error getting verbose timing: %v", err)
|
||||
}
|
||||
sb.WriteString(p.Sprintf(". Hydrate calls: %d.", timingMetadata.HydrateCalls))
|
||||
}
|
||||
|
||||
return sb.String()
|
||||
}
|
||||
|
||||
func getDurationString(durationMs int64, p *message.Printer) string {
	if durationMs < 500 {
		return p.Sprintf("%dms", durationMs)
	}
	seconds := float64(durationMs) / 1000
	return p.Sprintf("%.1fs", seconds)
}

func getVerboseTimingString(sb *strings.Builder, p *message.Printer, timingResult *queryresult.TimingResult) error {
	scans := timingResult.Scans

	// keep track of empty scans and do not include them separately in the scan list
	emptyScanCount := 0
	scanCount := 0
	// is this all scans or just the slowest?
	if len(scans) == int(timingResult.ScanCount) {
		sb.WriteString("\n\nScans:\n")
	} else {
		sb.WriteString(fmt.Sprintf("\n\nSlowest %d scans:\n", len(scans)))
	}

	for _, scan := range scans {
		if scan.RowsFetched == 0 {
			emptyScanCount++
			continue
		}
		scanCount++

		cacheString := ""
		if scan.CacheHit {
			cacheString = " (cached)"
		}
		qualsString := ""
		if len(scan.Quals) > 0 {
			qualsJson, err := json.Marshal(scan.Quals)
			if err != nil {
				return err
			}
			qualsString = fmt.Sprintf(" Quals: %s.", string(qualsJson))
		}
		limitString := ""
		if scan.Limit != nil {
			limitString = fmt.Sprintf(" Limit: %d.", *scan.Limit)
		}

		timeString := getDurationString(scan.DurationMs, p)
		rowsFetchedString := p.Sprintf("%d", scan.RowsFetched)

		sb.WriteString(fmt.Sprintf(" %d) Table: %s. Connection: %s. Time: %s. Rows fetched: %s%s. Hydrate calls: %d.%s%s\n", scanCount, scan.Table, scan.Connection, timeString, rowsFetchedString, cacheString, scan.HydrateCalls, qualsString, limitString))
	}
	if emptyScanCount > 0 {
		sb.WriteString(fmt.Sprintf(" %d…%d) Zero rows fetched.\n", scanCount+1, scanCount+emptyScanCount))
	}
	return nil
}
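
For illustration only - a minimal standalone sketch (not part of this commit) that mirrors the duration formatting above and shows the shape of the timing line the new display produces; the sample values are assumptions:

	package main

	import (
		"fmt"

		"golang.org/x/text/language"
		"golang.org/x/text/message"
	)

	// durationString mirrors getDurationString above: sub-500ms durations
	// render as milliseconds, everything else as seconds to one decimal place
	func durationString(durationMs int64, p *message.Printer) string {
		if durationMs < 500 {
			return p.Sprintf("%dms", durationMs)
		}
		return p.Sprintf("%.1fs", float64(durationMs)/1000)
	}

	func main() {
		p := message.NewPrinter(language.English)
		// assumed sample values: 1234ms, 10 rows returned, 5,000 rows fetched, all cached
		out := fmt.Sprintf("Time: %s.", durationString(1234, p))
		out += p.Sprintf(" Rows returned: %d.", 10)
		out += p.Sprintf(" Rows fetched: %d (cached).", 5000)
		fmt.Println(out)
		// Output: Time: 1.2s. Rows returned: 10. Rows fetched: 5,000 (cached).
	}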

type displayResultsFunc func(row []interface{}, result *queryresult.Result)

// call func displayResult for each row of results
func iterateResults(result *queryresult.Result, displayResult displayResultsFunc) error {
func iterateResults(result *queryresult.Result, displayResult displayResultsFunc) (int, error) {
	count := 0
	for row := range *result.RowChan {
		if row == nil {
			return nil
			return count, nil
		}
		if row.Error != nil {
			return row.Error
			return count, row.Error
		}
		displayResult(row.Data, result)
		count++
	}
	// we will not get here
	return nil
	return count, nil
}
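
A hedged sketch of the consume-and-count pattern iterateResults now follows, so the row count can feed TimingResult.RowsReturned; drainRows is a hypothetical stand-in, not a function from this commit:

	// drainRows consumes a channel of row results, displaying each row and
	// returning how many rows were displayed plus the first error seen
	func drainRows(rows <-chan *queryresult.RowResult, display func([]interface{})) (int, error) {
		count := 0
		for row := range rows {
			if row == nil {
				return count, nil
			}
			if row.Error != nil {
				return count, row.Error
			}
			display(row.Data)
			count++
		}
		return count, nil
	}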

// DisplayErrorTiming shows the time taken for the query to fail

@@ -6,20 +6,14 @@ import (
)

type displayConfiguration struct {
	timing bool
	timing string
}

// NewDisplayConfiguration creates a default configuration with timing set to
// true if both --timing is true and --output is table
func NewDisplayConfiguration() *displayConfiguration {
	timingFlag := cmdconfig.Viper().GetBool(constants.ArgTiming)
	isInteractive := cmdconfig.Viper().GetBool(constants.ConfigKeyInteractive)
	outputTable := cmdconfig.Viper().GetString(constants.ArgOutput) == constants.OutputFormatTable

	timing := timingFlag && (outputTable || isInteractive)

// newDisplayConfiguration creates a default configuration with the timing mode
// taken from the --timing argument, which is now 'on', 'off' or 'verbose'
func newDisplayConfiguration() *displayConfiguration {
	return &displayConfiguration{
		timing: timing,
		timing: cmdconfig.Viper().GetString(constants.ArgTiming),
	}
}

@@ -28,6 +22,6 @@ type DisplayOption = func(config *displayConfiguration)

// WithTimingDisabled forcefully disables display of timing data
func WithTimingDisabled() DisplayOption {
	return func(o *displayConfiguration) {
		o.timing = false
		o.timing = constants.ArgOff
	}
}
@@ -397,7 +397,7 @@ func (c *InteractiveClient) executeQuery(ctx context.Context, queryCtx context.C
	if err != nil {
		error_helpers.ShowError(ctx, error_helpers.HandleCancelError(err))
		// if timing is enabled, show the time taken for the query to fail
		if cmdconfig.Viper().GetBool(constants.ArgTiming) {
		if cmdconfig.Viper().GetString(constants.ArgTiming) != constants.ArgOff {
			display.DisplayErrorTiming(t)
		}
	} else {
@@ -77,11 +77,12 @@ func init() {
	constants.CmdTiming: {
		title:     "timing",
		handler:   setTiming,
		validator: booleanValidator(constants.CmdTiming, validatorFromArgsOf(constants.CmdTiming)),
		validator: validatorFromArgsOf(constants.CmdTiming),
		description: "Enable or disable query execution timing",
		args: []metaQueryArg{
			{value: constants.ArgOn, description: "Display time elapsed after every query"},
			{value: constants.ArgOff, description: "Turn off query timer"},
			{value: constants.ArgOn, description: "Display time elapsed after every query"},
			{value: constants.ArgVerbose, description: "Display time elapsed and details of each scan"},
		},
		completer: completerFromArgsOf(constants.CmdTiming),
	},
@@ -3,9 +3,12 @@ package metaquery

import (
	"context"
	"fmt"
	"strings"

	typeHelpers "github.com/turbot/go-kit/types"
	"github.com/turbot/steampipe/pkg/cmdconfig"
	"github.com/turbot/steampipe/pkg/constants"
	"golang.org/x/exp/maps"
)

type handler func(ctx context.Context, input *HandlerInput) error

@@ -37,11 +40,28 @@ func setMultiLine(_ context.Context, input *HandlerInput) error {

// .timing
// set the ArgTiming viper key with the value from arg[0];
// with no argument, show the current timing mode
func setTiming(_ context.Context, input *HandlerInput) error {
	cmdconfig.Viper().Set(constants.ArgTiming, typeHelpers.StringToBool(input.args()[0]))
func setTiming(ctx context.Context, input *HandlerInput) error {
	if len(input.args()) == 0 {
		showTimingFlag()
		return nil
	}

	cmdconfig.Viper().Set(constants.ArgTiming, input.args()[0])
	return nil
}

func showTimingFlag() {
	timing := cmdconfig.Viper().GetString(constants.ArgTiming)

	fmt.Printf(`Timing is %s. Available options are: %s`,
		constants.Bold(timing),
		constants.Bold(strings.Join(maps.Keys(constants.QueryTimingValueLookup), ", ")))
	// add an empty line here so that the rendering buffer can start from the next line
	fmt.Println()
}
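
For illustration, the non-boolean validation .timing now relies on amounts to a membership check over the registered args; validatorFromArgsOf is assumed to behave roughly like this sketch:

	// hedged sketch - the allowed values mirror the metaquery args registered above
	func isValidTimingArg(arg string) bool {
		allowed := map[string]struct{}{"on": {}, "off": {}, "verbose": {}}
		_, ok := allowed[strings.ToLower(arg)]
		return ok
	}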

// .separator and .output
// set the value of `viperKey` in `viper` with the value from `args[0]`
func setViperConfigFromArg(viperKey string) handler {

@@ -83,7 +83,7 @@ func executeQueries(ctx context.Context, initData *query.InitData) int {
	failures++
	error_helpers.ShowWarning(fmt.Sprintf("executeQueries: query %d of %d failed: %v", i+1, len(initData.Queries), error_helpers.DecodePgError(err)))
	// if timing is enabled, show the time taken for the query to fail
	if cmdconfig.Viper().GetBool(constants.ArgTiming) {
	if cmdconfig.Viper().GetString(constants.ArgTiming) != constants.ArgOff {
		display.DisplayErrorTiming(t)
	}
}
@@ -1,19 +1,26 @@
package queryresult

import (
	"time"
)

type TimingMetadata struct {
	RowsFetched       int64
	CachedRowsFetched int64
	HydrateCalls      int64
}

type TimingResult struct {
	Duration time.Duration
	Metadata *TimingMetadata
	DurationMs          int64              `json:"duration_ms"`
	Scans               []*ScanMetadataRow `json:"scans"`
	ScanCount           int64              `json:"scan_count,omitempty"`
	RowsReturned        int64              `json:"rows_returned"`
	UncachedRowsFetched int64              `json:"uncached_rows_fetched"`
	CachedRowsFetched   int64              `json:"cached_rows_fetched"`
	HydrateCalls        int64              `json:"hydrate_calls"`
	ConnectionCount     int64              `json:"connection_count"`
}

func (r *TimingResult) Initialise(summary *QueryRowSummary, scans []*ScanMetadataRow) {
	r.ScanCount = summary.ScanCount
	r.ConnectionCount = summary.ConnectionCount
	r.UncachedRowsFetched = summary.UncachedRowsFetched
	r.CachedRowsFetched = summary.CachedRowsFetched
	r.HydrateCalls = summary.HydrateCalls
	// populate scans - note this may not be all scans
	r.Scans = scans
}
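
A standalone sketch (mirror struct, illustration only - not the commit's code) of the JSON these tags produce under the new metadata property; the field values are assumed samples:

	package main

	import (
		"encoding/json"
		"fmt"
	)

	// partial mirror of the TimingResult fields above, for illustration only
	type timingResult struct {
		DurationMs        int64 `json:"duration_ms"`
		RowsReturned      int64 `json:"rows_returned"`
		CachedRowsFetched int64 `json:"cached_rows_fetched"`
		HydrateCalls      int64 `json:"hydrate_calls"`
	}

	func main() {
		b, _ := json.Marshal(timingResult{DurationMs: 155, RowsReturned: 10, CachedRowsFetched: 5000})
		fmt.Println(string(b))
		// {"duration_ms":155,"rows_returned":10,"cached_rows_fetched":5000,"hydrate_calls":0}
	}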

type RowResult struct {
	Data  []interface{}
	Error error

@@ -25,7 +32,6 @@ type Result struct {
}

func NewResult(cols []*ColumnDef) *Result {
	rowChan := make(chan *RowResult)
	return &Result{
		RowChan: &rowChan,

104 pkg/query/queryresult/scan_metadata.go Normal file
@@ -0,0 +1,104 @@
package queryresult

import (
	"github.com/turbot/steampipe-plugin-sdk/v5/grpc"
	"github.com/turbot/steampipe-plugin-sdk/v5/grpc/proto"
	"time"
)

type ScanMetadataRow struct {
	// the fields of this struct need to be public since these are populated by pgx using RowsToStruct
	Connection   string                  `db:"connection,optional" json:"connection"`
	Table        string                  `db:"table" json:"table"`
	CacheHit     bool                    `db:"cache_hit" json:"cache_hit"`
	RowsFetched  int64                   `db:"rows_fetched" json:"rows_fetched"`
	HydrateCalls int64                   `db:"hydrate_calls" json:"hydrate_calls"`
	StartTime    time.Time               `db:"start_time" json:"start_time"`
	DurationMs   int64                   `db:"duration_ms" json:"duration_ms"`
	Columns      []string                `db:"columns" json:"columns"`
	Limit        *int64                  `db:"limit" json:"limit,omitempty"`
	Quals        []grpc.SerializableQual `db:"quals" json:"quals,omitempty"`
}

func NewScanMetadataRow(connection string, table string, columns []string, quals map[string]*proto.Quals, startTime time.Time, duration time.Duration, limit int64, m *proto.QueryMetadata) ScanMetadataRow {
	res := ScanMetadataRow{
		Connection: connection,
		Table:      table,
		StartTime:  startTime,
		DurationMs: duration.Milliseconds(),
		Columns:    columns,
		Quals:      grpc.QualMapToSerializableSlice(quals),
	}
	if limit == -1 {
		res.Limit = nil
	} else {
		res.Limit = &limit
	}
	if m != nil {
		res.CacheHit = m.CacheHit
		res.RowsFetched = m.RowsFetched
		res.HydrateCalls = m.HydrateCalls
	}
	return res
}

// AsResultRow returns the ScanMetadataRow as a map[string]interface{} which can be returned as a query result
func (m ScanMetadataRow) AsResultRow() map[string]any {
	res := map[string]any{
		"connection":    m.Connection,
		"table":         m.Table,
		"cache_hit":     m.CacheHit,
		"rows_fetched":  m.RowsFetched,
		"hydrate_calls": m.HydrateCalls,
		"start_time":    m.StartTime,
		"duration_ms":   m.DurationMs,
		"columns":       m.Columns,
		"quals":         m.Quals,
	}
	// explicitly set limit to nil if needed (otherwise postgres returns `1`)
	if m.Limit != nil {
		res["limit"] = *m.Limit
	} else {
		res["limit"] = nil // explicitly set nil
	}
	return res
}

type QueryRowSummary struct {
	UncachedRowsFetched int64 `db:"uncached_rows_fetched" json:"uncached_rows_fetched"`
	CachedRowsFetched   int64 `db:"cached_rows_fetched" json:"cached_rows_fetched"`
	HydrateCalls        int64 `db:"hydrate_calls" json:"hydrate_calls"`
	ScanCount           int64 `db:"scan_count" json:"scan_count"`
	ConnectionCount     int64 `db:"connection_count" json:"connection_count"`
	// track the distinct connections seen across scans
	connections map[string]struct{}
}

func NewQueryRowSummary() *QueryRowSummary {
	return &QueryRowSummary{
		connections: make(map[string]struct{}),
	}
}

func (s *QueryRowSummary) AsResultRow() map[string]any {
	res := map[string]any{
		"uncached_rows_fetched": s.UncachedRowsFetched,
		"cached_rows_fetched":   s.CachedRowsFetched,
		"hydrate_calls":         s.HydrateCalls,
		"scan_count":            s.ScanCount,
		"connection_count":      s.ConnectionCount,
	}

	return res
}

func (s *QueryRowSummary) Update(m ScanMetadataRow) {
	if m.CacheHit {
		s.CachedRowsFetched += m.RowsFetched
	} else {
		s.UncachedRowsFetched += m.RowsFetched
	}
	s.HydrateCalls += m.HydrateCalls
	s.ScanCount++
	s.connections[m.Connection] = struct{}{}
	s.ConnectionCount = int64(len(s.connections))
}
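
A hedged usage sketch of the aggregation above - two scans over the same connection, with assumed values:

	s := queryresult.NewQueryRowSummary()
	s.Update(queryresult.ScanMetadataRow{Connection: "chaos", RowsFetched: 100, HydrateCalls: 200})
	s.Update(queryresult.ScanMetadataRow{Connection: "chaos", CacheHit: true, RowsFetched: 100})
	// now: ScanCount == 2, ConnectionCount == 1,
	// UncachedRowsFetched == 100, CachedRowsFetched == 100, HydrateCalls == 200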

@@ -2,8 +2,10 @@ package options

import (
	"fmt"
	"golang.org/x/exp/maps"
	"strings"

	"github.com/hashicorp/hcl/v2"
	"github.com/turbot/go-kit/helpers"
	"github.com/turbot/steampipe/pkg/constants"
)

@@ -13,7 +15,7 @@ type Check struct {
	Output    *string `hcl:"output" cty:"check_output"`
	Separator *string `hcl:"separator" cty:"check_separator"`
	Header    *bool   `hcl:"header" cty:"check_header"`
	Timing    *bool   `hcl:"timing" cty:"check_timing"`
	Timing    *string `hcl:"timing" cty:"check_timing"`
}

func (t *Check) SetBaseProperties(otherOptions Options) {

@@ -58,11 +60,8 @@ func (t *Check) ConfigMap() map[string]interface{} {
// Merge :: merge other options over the top of this options object
// i.e. if a property is set in otherOptions, it takes precedence
func (t *Check) Merge(otherOptions Options) {
	if _, ok := otherOptions.(*Query); !ok {
		return
	}
	switch o := otherOptions.(type) {
	case *Query:
	case *Check:
		if o.Output != nil {
			t.Output = o.Output
		}

@@ -105,3 +104,19 @@ func (t *Check) String() string {
	}
	return strings.Join(str, "\n")
}

func (t *Check) SetTiming(flag string, r hcl.Range) hcl.Diagnostics {
	// check the value is valid
	if _, ok := constants.CheckTimingValueLookup[flag]; !ok {
		return hcl.Diagnostics{
			&hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  fmt.Sprintf("Invalid timing value '%s', check options support: %s", flag, strings.Join(maps.Keys(constants.CheckTimingValueLookup), ", ")),
				Subject:  &r,
			},
		}
	}
	t.Timing = &flag

	return nil
}

@@ -89,9 +89,6 @@ func (d *GlobalDashboard) ConfigMap() map[string]interface{} {
// Merge :: merge other options over the top of this options object
// i.e. if a property is set in otherOptions, it takes precedence
func (d *GlobalDashboard) Merge(otherOptions Options) {
	if _, ok := otherOptions.(*GlobalDashboard); !ok {
		return
	}
	switch o := otherOptions.(type) {
	case *GlobalDashboard:
		if o.Port != nil {
@@ -2,19 +2,20 @@ package options

import (
	"fmt"
	"github.com/hashicorp/hcl/v2"
	"golang.org/x/exp/maps"
	"strings"

	"github.com/turbot/go-kit/helpers"
	"github.com/turbot/steampipe/pkg/constants"
)

// General
type Query struct {
	Output       *string `hcl:"output" cty:"query_output"`
	Separator    *string `hcl:"separator" cty:"query_separator"`
	Header       *bool   `hcl:"header" cty:"query_header"`
	Multi        *bool   `hcl:"multi" cty:"query_multi"`
	Timing       *bool   `hcl:"timing" cty:"query_timing"`
	Timing       *string `cty:"query_timing"` // parsed manually
	AutoComplete *bool   `hcl:"autocomplete" cty:"query_autocomplete"`
}

@@ -61,7 +62,7 @@ func (t *Query) ConfigMap() map[string]interface{} {
	res[constants.ArgMultiLine] = t.Multi
}
if t.Timing != nil {
	res[constants.ArgTiming] = t.Timing
	res[constants.ArgTiming] = *t.Timing
}
if t.AutoComplete != nil {
	res[constants.ArgAutoComplete] = t.AutoComplete

@@ -135,3 +136,19 @@ func (t *Query) String() string {
	}
	return strings.Join(str, "\n")
}

func (t *Query) SetTiming(flag string, r hcl.Range) hcl.Diagnostics {
	// check the value is valid
	if _, ok := constants.QueryTimingValueLookup[flag]; !ok {
		return hcl.Diagnostics{
			&hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  fmt.Sprintf("Invalid timing value '%s', query options support: %s", flag, strings.Join(maps.Keys(constants.QueryTimingValueLookup), ", ")),
				Subject:  &r,
			},
		}
	}
	t.Timing = &flag

	return nil
}
7 pkg/steampipeconfig/options/set_timing.go Normal file
@@ -0,0 +1,7 @@
package options

import "github.com/hashicorp/hcl/v2"

type CanSetTiming interface {
	SetTiming(flag string, r hcl.Range) hcl.Diagnostics
}
@@ -2,11 +2,13 @@ package parse

import (
	"fmt"

	"github.com/hashicorp/hcl/v2"
	"github.com/hashicorp/hcl/v2/gohcl"
	"github.com/hashicorp/hcl/v2/hclsyntax"
	"github.com/turbot/pipe-fittings/hclhelpers"
	"github.com/turbot/steampipe/pkg/constants"
	"github.com/turbot/steampipe/pkg/steampipeconfig/options"
	"github.com/zclconf/go-cty/cty"
)

// DecodeOptions decodes an options block
@@ -16,8 +18,9 @@ func DecodeOptions(block *hcl.Block, overrides ...BlockMappingOverride) (options
	for _, applyOverride := range overrides {
		applyOverride(mapping)
	}
	optionsType := block.Labels[0]

	destination, ok := mapping[block.Labels[0]]
	destination, ok := mapping[optionsType]
	if !ok {
		diags = append(diags, &hcl.Diagnostic{
			Severity: hcl.DiagError,
@@ -27,6 +30,13 @@ func DecodeOptions(block *hcl.Block, overrides ...BlockMappingOverride) (options
		return nil, diags
	}

	if timingOptions, ok := destination.(options.CanSetTiming); ok {
		moreDiags := decodeTimingFlag(block, timingOptions)
		if moreDiags.HasErrors() {
			diags = append(diags, moreDiags...)
			return nil, diags
		}
	}
	diags = gohcl.DecodeBody(block.Body, nil, destination)
	if diags.HasErrors() {
		return nil, diags
@@ -35,6 +45,30 @@ func DecodeOptions(block *hcl.Block, overrides ...BlockMappingOverride) (options
	return destination, nil
}

// decode the timing attribute manually, so the bare keywords on, off and
// verbose are accepted as well as quoted strings
func decodeTimingFlag(block *hcl.Block, timingOptions options.CanSetTiming) hcl.Diagnostics {
	body := block.Body.(*hclsyntax.Body)
	timingAttribute := body.Attributes["timing"]
	if timingAttribute == nil {
		return nil
	}
	// remove the attribute so subsequent decoding does not see it
	delete(body.Attributes, "timing")

	val, diags := timingAttribute.Expr.Value(&hcl.EvalContext{
		Variables: map[string]cty.Value{
			constants.ArgOn:      cty.StringVal(constants.ArgOn),
			constants.ArgOff:     cty.StringVal(constants.ArgOff),
			constants.ArgVerbose: cty.StringVal(constants.ArgVerbose),
		},
	})
	if diags.HasErrors() {
		return diags
	}

	return timingOptions.SetTiming(val.AsString(), timingAttribute.Range())
}
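
A standalone sketch (illustration only, not the commit's code) of why the EvalContext above lets config authors write the bare keyword, e.g. timing = verbose, rather than a quoted string - the identifier resolves through the Variables map to its own name:

	package main

	import (
		"fmt"

		"github.com/hashicorp/hcl/v2"
		"github.com/hashicorp/hcl/v2/hclsyntax"
		"github.com/zclconf/go-cty/cty"
	)

	func main() {
		// parse the bare keyword exactly as it would appear in an options block
		expr, diags := hclsyntax.ParseExpression([]byte("verbose"), "options.spc", hcl.InitialPos)
		if diags.HasErrors() {
			panic(diags.Error())
		}
		val, moreDiags := expr.Value(&hcl.EvalContext{
			Variables: map[string]cty.Value{
				"verbose": cty.StringVal("verbose"),
			},
		})
		if moreDiags.HasErrors() {
			panic(moreDiags.Error())
		}
		fmt.Println(val.AsString()) // prints: verbose
	}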

type OptionsBlockMapping = map[string]options.Options

func defaultOptionsBlockMapping() OptionsBlockMapping {

@@ -23,7 +23,7 @@ var steampipeVersion = "0.23.0"
// A pre-release marker for the version. If this is "" (empty string)
// then it means that it is a final release. Otherwise, this is a pre-release
// such as "dev" (in development), "beta", "rc1", etc.
var prerelease = "alpha.0"
var prerelease = "alpha.2"

// SteampipeVersion is an instance of semver.Version. This has the secondary
// benefit of verifying during tests and init time that our version is a
@@ -3,17 +3,17 @@
# This script accepts a patch format and evaluates the diffs if any.
patch_file=$1

patch_keys=$(echo $patch_file | jq '. | keys[]')
patch_keys=$(echo $patch_file | jq -r '. | keys[]')

for i in $patch_keys; do
  op=$(echo $patch_file | jq -c ".[${i}]" | jq ".op")
  path=$(echo $patch_file | jq -c ".[${i}]" | jq ".path")
  value=$(echo $patch_file | jq -c ".[${i}]" | jq ".value")
  op=$(echo $patch_file | jq -r -c ".[${i}]" | jq -r ".op")
  path=$(echo $patch_file | jq -r -c ".[${i}]" | jq -r ".path")
  value=$(echo $patch_file | jq -r -c ".[${i}]" | jq -r ".value")

  # ignore the diff of paths 'end_time', 'start_time' and 'schema_version',
  # print the rest
  if [[ $op != '"test"' ]] && [[ $path != '"/end_time"' ]] && [[ $path != '"/start_time"' ]] && [[ $path != '"/schema_version"' ]]; then
    if [[ $op == '"remove"' ]]; then
  if [[ $op != "test" ]] && [[ $path != "/end_time" ]] && [[ $path != "/start_time" ]] && [[ $path != "/schema_version" ]] && [[ $path != "/metadata"* ]]; then
    if [[ $op == "remove" ]]; then
      echo "key: $path"
      echo "expected: $value"
    else
@@ -1,5 +1,6 @@
[
  {
{
  "rows": [
    {
      "column_0": "column_0-0",
      "column_1": "column_1-0",
      "column_2": "column_2-0",
@@ -11,8 +12,8 @@
      "column_8": "column_8-0",
      "column_9": "column_9-0",
      "id": 0
  },
  {
    },
    {
      "column_0": "column_0-1",
      "column_1": "column_1-1",
      "column_2": "column_2-1",
@@ -24,8 +25,8 @@
      "column_8": "column_8-1",
      "column_9": "column_9-1",
      "id": 1
  },
  {
    },
    {
      "column_0": "column_0-10",
      "column_1": "column_1-10",
      "column_2": "column_2-10",
@@ -37,8 +38,8 @@
      "column_8": "column_8-10",
      "column_9": "column_9-10",
      "id": 10
  },
  {
    },
    {
      "column_0": "column_0-100",
      "column_1": "column_1-100",
      "column_2": "column_2-100",
@@ -50,8 +51,8 @@
      "column_8": "column_8-100",
      "column_9": "column_9-100",
      "id": 100
  },
  {
    },
    {
      "column_0": "column_0-1000",
      "column_1": "column_1-1000",
      "column_2": "column_2-1000",
@@ -63,8 +64,8 @@
      "column_8": "column_8-1000",
      "column_9": "column_9-1000",
      "id": 1000
  },
  {
    },
    {
      "column_0": "column_0-1001",
      "column_1": "column_1-1001",
      "column_2": "column_2-1001",
@@ -76,8 +77,8 @@
      "column_8": "column_8-1001",
      "column_9": "column_9-1001",
      "id": 1001
  },
  {
    },
    {
      "column_0": "column_0-1002",
      "column_1": "column_1-1002",
      "column_2": "column_2-1002",
@@ -89,8 +90,8 @@
      "column_8": "column_8-1002",
      "column_9": "column_9-1002",
      "id": 1002
  },
  {
    },
    {
      "column_0": "column_0-1003",
      "column_1": "column_1-1003",
      "column_2": "column_2-1003",
@@ -102,8 +103,8 @@
      "column_8": "column_8-1003",
      "column_9": "column_9-1003",
      "id": 1003
  },
  {
    },
    {
      "column_0": "column_0-1004",
      "column_1": "column_1-1004",
      "column_2": "column_2-1004",
@@ -115,8 +116,8 @@
      "column_8": "column_8-1004",
      "column_9": "column_9-1004",
      "id": 1004
  },
  {
    },
    {
      "column_0": "column_0-1005",
      "column_1": "column_1-1005",
      "column_2": "column_2-1005",
@@ -128,5 +129,39 @@
      "column_8": "column_8-1005",
      "column_9": "column_9-1005",
      "id": 1005
  }
]
    }
  ],
  "metadata": {
    "Duration": 201108250,
    "scans": [
      {
        "connection": "chaos",
        "table": "chaos_high_row_count",
        "cache_hit": false,
        "rows_fetched": 5000,
        "hydrate_calls": 0,
        "start_time": "2024-04-11T11:17:24+05:30",
        "duration": 155,
        "columns": [
          "column_0",
          "column_1",
          "column_2",
          "column_3",
          "column_4",
          "column_5",
          "column_6",
          "column_7",
          "column_8",
          "column_9",
          "id"
        ],
        "limit": null,
        "quals": null
      }
    ],
    "rows_returned": 10,
    "rows_fetched": 5000,
    "hydrate_calls": 0
  }
}
@@ -1,25 +1,76 @@
{
  "rows": [
    {
      "column_1": "parallelHydrate1",
      "column_10": "parallelHydrate10",
      "column_11": "parallelHydrate11",
      "column_12": "parallelHydrate12",
      "column_13": "parallelHydrate13",
      "column_14": "parallelHydrate14",
      "column_15": "parallelHydrate15",
      "column_16": "parallelHydrate16",
      "column_17": "parallelHydrate17",
      "column_18": "parallelHydrate18",
      "column_19": "parallelHydrate19",
      "column_2": "parallelHydrate2",
      "column_20": "parallelHydrate20",
      "column_3": "parallelHydrate3",
      "column_4": "parallelHydrate4",
      "column_5": "parallelHydrate5",
      "column_6": "parallelHydrate6",
      "column_7": "parallelHydrate7",
      "column_8": "parallelHydrate8",
      "column_9": "parallelHydrate9",
      "id": 0
    }
  ],
  "metadata": {
    "Duration": 1076604791,
    "scans": [
      {
        "connection": "chaos",
        "table": "chaos_parallel_hydrate_columns",
        "cache_hit": false,
        "rows_fetched": 500,
        "hydrate_calls": 10000,
        "start_time": "2024-04-11T11:43:26+05:30",
        "duration": 1039,
        "columns": [
          "column_1",
          "column_10",
          "column_11",
          "column_12",
          "column_13",
          "column_14",
          "column_15",
          "column_16",
          "column_17",
          "column_18",
          "column_19",
          "column_2",
          "column_20",
          "column_3",
          "column_4",
          "column_5",
          "column_6",
          "column_7",
          "column_8",
          "column_9",
          "id"
        ],
        "limit": null,
        "quals": [
          {
            "column": "id",
            "operator": "=",
            "value": 0
          }
        ]
      }
    ],
    "rows_returned": 1,
    "rows_fetched": 500,
    "hydrate_calls": 10000
  }
}
||||
@@ -1,10 +1,42 @@
{
  "rows": [
    {
      "float32_data": 4.4285712242126465,
      "id": 31,
      "int64_data": 465,
      "uint16_data": 341
    }
  ],
  "metadata": {
    "Duration": 39542459,
    "scans": [
      {
        "connection": "chaos",
        "table": "chaos_all_numeric_column",
        "cache_hit": false,
        "rows_fetched": 10,
        "hydrate_calls": 30,
        "start_time": "2024-04-11T11:44:55+05:30",
        "duration": 2,
        "columns": [
          "float32_data",
          "id",
          "int64_data",
          "uint16_data"
        ],
        "limit": null,
        "quals": [
          {
            "column": "id",
            "operator": "=",
            "value": 31
          }
        ]
      }
    ],
    "rows_returned": 1,
    "rows_fetched": 10,
    "hydrate_calls": 30
  }
}
@@ -1,7 +1,37 @@
{
  "rows": [
    {
      "from_qual_column": "2"
    }
  ],
  "metadata": {
    "Duration": 40179750,
    "scans": [
      {
        "connection": "chaos",
        "table": "chaos_transforms",
        "cache_hit": false,
        "rows_fetched": 1,
        "hydrate_calls": 0,
        "start_time": "2024-04-11T11:49:42+05:30",
        "duration": 3,
        "columns": [
          "from_qual_column",
          "id"
        ],
        "limit": null,
        "quals": [
          {
            "column": "id",
            "operator": "=",
            "value": 2
          }
        ]
      }
    ],
    "rows_returned": 1,
    "rows_fetched": 1,
    "hydrate_calls": 0
  }
}
@@ -1,10 +1,34 @@
{
  "rows": [
    {
      "transform_method_column": "Transform method"
    },
    {
      "transform_method_column": "Transform method"
    }
  ],
  "metadata": {
    "Duration": 40391583,
    "scans": [
      {
        "connection": "chaos",
        "table": "chaos_transforms",
        "cache_hit": false,
        "rows_fetched": 2,
        "hydrate_calls": 0,
        "start_time": "2024-04-11T11:45:30+05:30",
        "duration": 3,
        "columns": [
          "transform_method_column",
          "id"
        ],
        "limit": null,
        "quals": null
      }
    ],
    "rows_returned": 2,
    "rows_fetched": 2,
    "hydrate_calls": 0
  }
}
@@ -1,7 +1,15 @@
{
  "rows": [
    {
      "a": 1
    }
  ],
  "metadata": {
    "Duration": 141791,
    "scans": [],
    "rows_returned": 1,
    "rows_fetched": 0,
    "hydrate_calls": 0
  }
}
@@ -1,17 +1,49 @@
{
  "rows": [
    {
      "boolean_column": true,
      "id": 0,
      "json_column": {
        "Id": 0,
        "Name": "stringValuesomething-0",
        "Statement": {
          "Action": "iam:GetContextKeysForCustomPolicy",
          "Effect": "Allow"
        }
      },
      "string_column": "stringValuesomething-0"
    }
  ],
  "metadata": {
    "Duration": 52987458,
    "scans": [
      {
        "connection": "chaos",
        "table": "chaos_all_column_types",
        "cache_hit": false,
        "rows_fetched": 100,
        "hydrate_calls": 200,
        "start_time": "2024-04-11T11:39:03+05:30",
        "duration": 14,
        "columns": [
          "id",
          "string_column",
          "json_column",
          "boolean_column"
        ],
        "limit": null,
        "quals": [
          {
            "column": "id",
            "operator": "=",
            "value": 0
          }
        ]
      }
    ],
    "rows_returned": 1,
    "rows_fetched": 100,
    "hydrate_calls": 200
  }
}
@@ -1,3 +1,11 @@
[]

{
  "rows": [],
  "metadata": {
    "Duration": 42311542,
    "scans": [],
    "rows_returned": 0,
    "rows_fetched": 0,
    "hydrate_calls": 0
  }
}
@@ -1,10 +1,46 @@
{
  "rows": [
    {
      "hydrate_column_1": "hydrate1-0",
      "hydrate_column_2": "hydrate2-0-hydrate1-0",
      "hydrate_column_3": "hydrate3-0-hydrate2-0-hydrate1-0",
      "hydrate_column_4": "hydrate4-0",
      "hydrate_column_5": "hydrate5-0-hydrate4-0-hydrate1-0",
      "id": 0
    }
  ],
  "metadata": {
    "Duration": 2085913625,
    "scans": [
      {
        "connection": "chaos",
        "table": "chaos_hydrate_columns_dependency",
        "cache_hit": false,
        "rows_fetched": 2,
        "hydrate_calls": 10,
        "start_time": "2024-04-11T12:34:06+05:30",
        "duration": 2045,
        "columns": [
          "hydrate_column_1",
          "hydrate_column_2",
          "hydrate_column_3",
          "hydrate_column_4",
          "hydrate_column_5",
          "id"
        ],
        "limit": null,
        "quals": [
          {
            "column": "id",
            "operator": "=",
            "value": 0
          }
        ]
      }
    ],
    "rows_returned": 1,
    "rows_fetched": 2,
    "hydrate_calls": 10
  }
}
@@ -1,43 +1,53 @@
{
  "rows": [
    {
      "array1": "(408)-589-5841",
      "booleancolumn": true,
      "cidrrange": "10.1.2.3/32",
      "currency": "$922,337,203,685,477.57",
      "date1": "1978-02-05 00:00:00",
      "floatcolumn": 4.681642125488754,
      "integercolumn1": 3278,
      "integercolumn2": 21445454,
      "integercolumn3": 2147483645,
      "integercolumn4": 92233720368547758,
      "integercolumn5": 922337203685477580,
      "interval1": "1 year 2 mons 3 days ",
      "ipaddress": "192.168.0.0",
      "jsondata": {
        "customer": "John Doe",
        "items": {
          "product": "Beer",
          "qty": 6
        }
      },
      "jsondata2": {
        "customer": "John Doe",
        "items": {
          "product": "Beer",
          "qty": 6
        }
      },
      "macaddress": "08:00:2b:01:02:03",
      "nullcolumn": null,
      "numericcolumn": "23.5142",
      "realcolumn": 4660.338,
      "textcolumn1": "Yes ",
      "textcolumn2": "test for varchar",
      "textcolumn3": "This is a very long text for the PostgreSQL text column",
      "time1": "08:00:00",
      "timestamp1": "2016-06-22 19:10:25",
      "timestamp2": "2016-06-23T07:40:25+05:30",
      "uuidcolumn": "6948df80-14bd-4e04-8842-7668d9c001f5",
      "xmldata": "<book><title>Manual</title><chapter>...</chapter></book>"
    }
  ],
  "metadata": {
    "Duration": 2523750,
    "scans": [],
    "rows_returned": 1,
    "rows_fetched": 0,
    "hydrate_calls": 0
  }
}
@@ -1,27 +1,37 @@
{
  "rows": [
    {
      "auto_generated": false,
      "children": [
        "introspection_table_mod.control.sample_control_1"
      ],
      "description": "Sample benchmark to test introspection functionality",
      "documentation": null,
      "end_line_number": 41,
      "is_anonymous": false,
      "mod_name": "introspection_table_mod",
      "path": [
        [
          "mod.introspection_table_mod",
          "introspection_table_mod.benchmark.sample_benchmark_1"
        ]
      ],
      "qualified_name": "introspection_table_mod.benchmark.sample_benchmark_1",
      "resource_name": "sample_benchmark_1",
      "source_definition": "benchmark \"sample_benchmark_1\" {\n\ttitle = \"Sample benchmark 1\"\n\tdescription = \"Sample benchmark to test introspection functionality\"\n\tchildren = [\n\t\tcontrol.sample_control_1\n\t]\n}",
      "start_line_number": 35,
      "tags": null,
      "title": "Sample benchmark 1",
      "type": null,
      "width": null
    }
  ],
  "metadata": {
    "Duration": 246208,
    "scans": [],
    "rows_returned": 1,
    "rows_fetched": 0,
    "hydrate_calls": 0
  }
}
@@ -1,35 +1,45 @@
{
  "rows": [
    {
      "args": {
        "args_list": null,
        "refs": null
      },
      "auto_generated": false,
      "description": "Sample control to test introspection functionality",
      "documentation": null,
      "end_line_number": 33,
      "is_anonymous": false,
      "mod_name": "introspection_table_mod",
      "params": null,
      "path": [
        [
          "mod.introspection_table_mod",
          "introspection_table_mod.benchmark.sample_benchmark_1",
          "introspection_table_mod.control.sample_control_1"
        ]
      ],
      "qualified_name": "introspection_table_mod.control.sample_control_1",
      "query": "introspection_table_mod.query.sample_query_1",
      "resource_name": "sample_control_1",
      "severity": "high",
      "source_definition": "control \"sample_control_1\" {\n  title = \"Sample control 1\"\n  description = \"Sample control to test introspection functionality\"\n  query = query.sample_query_1\n  severity = \"high\"\n  tags = {\n    \"foo\": \"bar\"\n  }\n}",
      "sql": null,
      "start_line_number": 25,
      "tags": {
        "foo": "bar"
      },
      "title": "Sample control 1",
      "type": null,
      "width": null
    }
  ],
  "metadata": {
    "Duration": 280292,
    "scans": [],
    "rows_returned": 1,
    "rows_fetched": 0,
    "hydrate_calls": 0
  }
}
@@ -1,34 +1,44 @@
{
  "rows": [
    {
      "auto_generated": false,
      "children": [
        "introspection_table_mod.container.sample_conatiner_1"
      ],
      "description": "Sample dashboard to test introspection functionality",
      "display": null,
      "documentation": null,
      "end_line_number": 129,
      "inputs": [
        {
          "name": "sample_input_1",
          "unqualified_name": "input.sample_input_1"
        }
      ],
      "is_anonymous": false,
      "mod_name": "introspection_table_mod",
      "path": [
        [
          "mod.introspection_table_mod",
          "introspection_table_mod.dashboard.sample_dashboard_1"
        ]
      ],
      "qualified_name": "introspection_table_mod.dashboard.sample_dashboard_1",
      "resource_name": "sample_dashboard_1",
      "source_definition": "dashboard \"sample_dashboard_1\" {\n  title = \"Sample dashboard 1\"\n  description = \"Sample dashboard to test introspection functionality\"\n\n  container \"sample_conatiner_1\" {\n\t\tcard \"sample_card_1\" {\n\t\t\ttitle = \"Sample card 1\"\n\t\t}\n\n\t\timage \"sample_image_1\" {\n\t\t\ttitle = \"Sample image 1\"\n\t\t\twidth = 3\n \t\tsrc = \"https://steampipe.io/images/logo.png\"\n \t\talt = \"steampipe\"\n\t\t}\n\n\t\ttext \"sample_text_1\" {\n\t\t\ttitle = \"Sample text 1\"\n\t\t}\n\n    chart \"sample_chart_1\" {\n      sql = \"select 1 as chart\"\n      width = 5\n      title = \"Sample chart 1\"\n    }\n\n    flow \"sample_flow_1\" {\n      title = \"Sample flow 1\"\n      width = 3\n\n      node \"sample_node_1\" {\n        sql = <<-EOQ\n          select 1 as node\n        EOQ\n      }\n      edge \"sample_edge_1\" {\n        sql = <<-EOQ\n          select 1 as edge\n        EOQ\n      }\n    }\n\n    graph \"sample_graph_1\" {\n      title = \"Sample graph 1\"\n      width = 5\n\n      node \"sample_node_2\" {\n        sql = <<-EOQ\n          select 1 as node\n        EOQ\n      }\n      edge \"sample_edge_2\" {\n        sql = <<-EOQ\n          select 1 as edge\n        EOQ\n      }\n    }\n\n    hierarchy \"sample_hierarchy_1\" {\n      title = \"Sample hierarchy 1\"\n      width = 5\n\n      node \"sample_node_3\" {\n        sql = <<-EOQ\n          select 1 as node\n        EOQ\n      }\n      edge \"sample_edge_3\" {\n        sql = <<-EOQ\n          select 1 as edge\n        EOQ\n      }\n    }\n\n    table \"sample_table_1\" {\n      sql = \"select 1 as table\"\n      width = 4\n      title = \"Sample table 1\"\n    }\n\n    input \"sample_input_1\" {\n      sql = \"select 1 as input\"\n      width = 2\n      title = \"Sample input 1\"\n    }\n  }\n}",
      "start_line_number": 43,
      "tags": null,
      "title": "Sample dashboard 1",
      "url_path": "/introspection_table_mod.dashboard.sample_dashboard_1",
      "width": null
    }
  ],
  "metadata": {
    "Duration": 292708,
    "scans": [],
    "rows_returned": 1,
    "rows_fetched": 0,
    "hydrate_calls": 0
  }
}
@@ -1,33 +1,43 @@
{
  "rows": [
    {
      "args": null,
      "auto_generated": false,
      "description": null,
      "documentation": null,
      "end_line_number": 50,
      "icon": null,
      "is_anonymous": false,
      "label": null,
      "mod_name": "introspection_table_mod",
      "params": null,
      "path": [
        [
          "mod.introspection_table_mod",
          "introspection_table_mod.dashboard.sample_dashboard_1",
          "introspection_table_mod.container.sample_conatiner_1",
          "introspection_table_mod.text.sample_text_1"
        ]
      ],
      "qualified_name": "introspection_table_mod.card.sample_card_1",
      "query": null,
      "resource_name": "sample_card_1",
      "source_definition": "\t\tcard \"sample_card_1\" {\n\t\t\ttitle = \"Sample card 1\"\n\t\t}",
      "sql": null,
      "start_line_number": 48,
      "tags": null,
      "title": "Sample card 1",
      "type": null,
      "value": null,
      "width": null
    }
  ],
  "metadata": {
    "Duration": 263667,
    "scans": [],
    "rows_returned": 1,
    "rows_fetched": 0,
    "hydrate_calls": 0
  }
}
@@ -1,33 +1,43 @@
{
  "rows": [
    {
      "args": null,
      "auto_generated": false,
      "axes": null,
      "description": null,
      "documentation": null,
      "end_line_number": 67,
      "is_anonymous": false,
      "legend": null,
      "mod_name": "introspection_table_mod",
      "params": null,
      "path": [
        [
          "mod.introspection_table_mod",
          "introspection_table_mod.dashboard.sample_dashboard_1",
          "introspection_table_mod.container.sample_conatiner_1",
          "introspection_table_mod.text.sample_text_1"
        ]
      ],
      "qualified_name": "introspection_table_mod.chart.sample_chart_1",
      "query": null,
      "resource_name": "sample_chart_1",
      "series": null,
      "source_definition": "    chart \"sample_chart_1\" {\n      sql = \"select 1 as chart\"\n      width = 5\n      title = \"Sample chart 1\"\n    }",
      "sql": "select 1 as chart",
      "start_line_number": 63,
      "tags": null,
      "title": "Sample chart 1",
      "type": null,
      "width": "5"
    }
  ],
  "metadata": {
    "Duration": 284709,
    "scans": [],
    "rows_returned": 1,
    "rows_fetched": 0,
    "hydrate_calls": 0
  }
}
@@ -1,33 +1,43 @@
{
  "rows": [
    {
      "args": null,
      "auto_generated": false,
      "description": null,
      "documentation": null,
      "edges": [
        {
          "name": "sample_edge_1"
        }
      ],
      "end_line_number": 83,
      "is_anonymous": false,
      "mod_name": "introspection_table_mod",
      "nodes": [
        {
          "name": "sample_node_1"
        }
      ],
      "params": null,
      "path": null,
      "qualified_name": "introspection_table_mod.flow.sample_flow_1",
      "query": null,
      "resource_name": "sample_flow_1",
      "source_definition": "    flow \"sample_flow_1\" {\n      title = \"Sample flow 1\"\n      width = 3\n\n      node \"sample_node_1\" {\n        sql = <<-EOQ\n          select 1 as node\n        EOQ\n      }\n      edge \"sample_edge_1\" {\n        sql = <<-EOQ\n          select 1 as edge\n        EOQ\n      }\n    }",
      "sql": null,
      "start_line_number": 69,
      "tags": null,
      "title": "Sample flow 1",
      "type": null,
      "width": "3"
    }
  ],
  "metadata": {
    "Duration": 278667,
    "scans": [],
    "rows_returned": 1,
    "rows_fetched": 0,
    "hydrate_calls": 0
  }
}
@@ -1,34 +1,44 @@
{
  "rows": [
    {
      "args": null,
      "auto_generated": false,
      "description": null,
      "direction": null,
      "documentation": null,
      "edges": [
        {
          "name": "sample_edge_2"
        }
      ],
      "end_line_number": 99,
      "is_anonymous": false,
      "mod_name": "introspection_table_mod",
      "nodes": [
        {
          "name": "sample_node_2"
        }
      ],
      "params": null,
      "path": null,
      "qualified_name": "introspection_table_mod.graph.sample_graph_1",
      "query": null,
      "resource_name": "sample_graph_1",
      "source_definition": "    graph \"sample_graph_1\" {\n      title = \"Sample graph 1\"\n      width = 5\n\n      node \"sample_node_2\" {\n        sql = <<-EOQ\n          select 1 as node\n        EOQ\n      }\n      edge \"sample_edge_2\" {\n        sql = <<-EOQ\n          select 1 as edge\n        EOQ\n      }\n    }",
      "sql": null,
      "start_line_number": 85,
      "tags": null,
      "title": "Sample graph 1",
      "type": null,
      "width": "5"
    }
  ],
  "metadata": {
    "Duration": 265750,
    "scans": [],
    "rows_returned": 1,
    "rows_fetched": 0,
    "hydrate_calls": 0
  }
}
@@ -1,33 +1,43 @@
{
  "rows": [
    {
      "args": null,
      "auto_generated": false,
      "description": null,
      "documentation": null,
      "edges": [
        {
          "name": "sample_edge_3"
        }
      ],
      "end_line_number": 115,
      "is_anonymous": false,
      "mod_name": "introspection_table_mod",
      "nodes": [
        {
          "name": "sample_node_3"
        }
      ],
      "params": null,
      "path": null,
      "qualified_name": "introspection_table_mod.hierarchy.sample_hierarchy_1",
      "query": null,
      "resource_name": "sample_hierarchy_1",
      "source_definition": "    hierarchy \"sample_hierarchy_1\" {\n      title = \"Sample hierarchy 1\"\n      width = 5\n\n      node \"sample_node_3\" {\n        sql = <<-EOQ\n          select 1 as node\n        EOQ\n      }\n      edge \"sample_edge_3\" {\n        sql = <<-EOQ\n          select 1 as edge\n        EOQ\n      }\n    }",
      "sql": null,
      "start_line_number": 101,
      "tags": null,
      "title": "Sample hierarchy 1",
      "type": null,
      "width": "5"
    }
  ],
  "metadata": {
    "Duration": 278250,
    "scans": [],
    "rows_returned": 1,
    "rows_fetched": 0,
    "hydrate_calls": 0
  }
}
@@ -1,31 +1,41 @@
[
{
"alt": "steampipe",
"args": null,
"auto_generated": false,
"description": null,
"documentation": null,
"end_line_number": 57,
"is_anonymous": false,
"mod_name": "introspection_table_mod",
"params": null,
"path": [
[
"mod.introspection_table_mod",
"introspection_table_mod.dashboard.sample_dashboard_1",
"introspection_table_mod.container.sample_conatiner_1",
"introspection_table_mod.text.sample_text_1"
]
{
"rows": [
{
"alt": "steampipe",
"args": null,
"auto_generated": false,
"description": null,
"documentation": null,
"end_line_number": 57,
"is_anonymous": false,
"mod_name": "introspection_table_mod",
"params": null,
"path": [
[
"mod.introspection_table_mod",
"introspection_table_mod.dashboard.sample_dashboard_1",
"introspection_table_mod.container.sample_conatiner_1",
"introspection_table_mod.text.sample_text_1"
]
],
"qualified_name": "introspection_table_mod.image.sample_image_1",
"query": null,
"resource_name": "sample_image_1",
"source_definition": "\t\timage \"sample_image_1\" {\n\t\t\ttitle = \"Sample image 1\"\n\t\t\twidth = 3\n  \t\tsrc = \"https://steampipe.io/images/logo.png\"\n  \t\talt = \"steampipe\"\n\t\t}",
"sql": null,
"src": "https://steampipe.io/images/logo.png",
"start_line_number": 52,
"tags": null,
"title": "Sample image 1",
"width": "3"
}
],
"qualified_name": "introspection_table_mod.image.sample_image_1",
"query": null,
"resource_name": "sample_image_1",
"source_definition": "\t\timage \"sample_image_1\" {\n\t\t\ttitle = \"Sample image 1\"\n\t\t\twidth = 3\n  \t\tsrc = \"https://steampipe.io/images/logo.png\"\n  \t\talt = \"steampipe\"\n\t\t}",
"sql": null,
"src": "https://steampipe.io/images/logo.png",
"start_line_number": 52,
"tags": null,
"title": "Sample image 1",
"width": "3"
"metadata": {
"Duration": 246792,
"scans": [],
"rows_returned": 1,
"rows_fetched": 0,
"hydrate_calls": 0
}
}
]

@@ -1,33 +1,43 @@
[
{
"args": null,
"auto_generated": false,
"dashboard": "introspection_table_mod.dashboard.sample_dashboard_1",
"description": null,
"documentation": null,
"end_line_number": 127,
"is_anonymous": false,
"label": null,
"mod_name": "introspection_table_mod",
"params": null,
"path": [
[
"mod.introspection_table_mod",
"introspection_table_mod.dashboard.sample_dashboard_1",
"introspection_table_mod.container.sample_conatiner_1",
"introspection_table_mod.text.sample_text_1"
]
{
"rows": [
{
"args": null,
"auto_generated": false,
"dashboard": "introspection_table_mod.dashboard.sample_dashboard_1",
"description": null,
"documentation": null,
"end_line_number": 127,
"is_anonymous": false,
"label": null,
"mod_name": "introspection_table_mod",
"params": null,
"path": [
[
"mod.introspection_table_mod",
"introspection_table_mod.dashboard.sample_dashboard_1",
"introspection_table_mod.container.sample_conatiner_1",
"introspection_table_mod.text.sample_text_1"
]
],
"placeholder": null,
"qualified_name": "introspection_table_mod.input.sample_input_1",
"query": null,
"resource_name": "sample_input_1",
"source_definition": "  input \"sample_input_1\" {\n    sql = \"select 1 as input\"\n    width = 2\n    title = \"Sample input 1\"\n  }",
"sql": "select 1 as input",
"start_line_number": 123,
"tags": null,
"title": "Sample input 1",
"type": null,
"width": "2"
}
],
"placeholder": null,
"qualified_name": "introspection_table_mod.input.sample_input_1",
"query": null,
"resource_name": "sample_input_1",
"source_definition": "  input \"sample_input_1\" {\n    sql = \"select 1 as input\"\n    width = 2\n    title = \"Sample input 1\"\n  }",
"sql": "select 1 as input",
"start_line_number": 123,
"tags": null,
"title": "Sample input 1",
"type": null,
"width": "2"
"metadata": {
"Duration": 253667,
"scans": [],
"rows_returned": 1,
"rows_fetched": 0,
"hydrate_calls": 0
}
}
]

@@ -1,31 +1,41 @@
[
{
"args": null,
"auto_generated": false,
"columns": null,
"description": null,
"documentation": null,
"end_line_number": 121,
"is_anonymous": false,
"mod_name": "introspection_table_mod",
"params": null,
"path": [
[
"mod.introspection_table_mod",
"introspection_table_mod.dashboard.sample_dashboard_1",
"introspection_table_mod.container.sample_conatiner_1",
"introspection_table_mod.text.sample_text_1"
]
{
"rows": [
{
"args": null,
"auto_generated": false,
"columns": null,
"description": null,
"documentation": null,
"end_line_number": 121,
"is_anonymous": false,
"mod_name": "introspection_table_mod",
"params": null,
"path": [
[
"mod.introspection_table_mod",
"introspection_table_mod.dashboard.sample_dashboard_1",
"introspection_table_mod.container.sample_conatiner_1",
"introspection_table_mod.text.sample_text_1"
]
],
"qualified_name": "introspection_table_mod.table.sample_table_1",
"query": null,
"resource_name": "sample_table_1",
"source_definition": "  table \"sample_table_1\" {\n    sql = \"select 1 as table\"\n    width = 4\n    title = \"Sample table 1\"\n  }",
"sql": "select 1 as table",
"start_line_number": 117,
"tags": null,
"title": "Sample table 1",
"type": null,
"width": "4"
}
],
"qualified_name": "introspection_table_mod.table.sample_table_1",
"query": null,
"resource_name": "sample_table_1",
"source_definition": "  table \"sample_table_1\" {\n    sql = \"select 1 as table\"\n    width = 4\n    title = \"Sample table 1\"\n  }",
"sql": "select 1 as table",
"start_line_number": 117,
"tags": null,
"title": "Sample table 1",
"type": null,
"width": "4"
"metadata": {
"Duration": 247458,
"scans": [],
"rows_returned": 1,
"rows_fetched": 0,
"hydrate_calls": 0
}
}
]

@@ -1,27 +1,37 @@
[
{
"auto_generated": false,
"description": null,
"documentation": null,
"end_line_number": 61,
"is_anonymous": false,
"mod_name": "introspection_table_mod",
"path": [
[
"mod.introspection_table_mod",
"introspection_table_mod.dashboard.sample_dashboard_1",
"introspection_table_mod.container.sample_conatiner_1",
"introspection_table_mod.text.sample_text_1"
]
{
"rows": [
{
"auto_generated": false,
"description": null,
"documentation": null,
"end_line_number": 61,
"is_anonymous": false,
"mod_name": "introspection_table_mod",
"path": [
[
"mod.introspection_table_mod",
"introspection_table_mod.dashboard.sample_dashboard_1",
"introspection_table_mod.container.sample_conatiner_1",
"introspection_table_mod.text.sample_text_1"
]
],
"qualified_name": "introspection_table_mod.text.sample_text_1",
"resource_name": "sample_text_1",
"source_definition": "\t\ttext \"sample_text_1\" {\n\t\t\ttitle = \"Sample text 1\"\n\t\t}",
"start_line_number": 59,
"tags": null,
"title": "Sample text 1",
"type": null,
"value": null,
"width": null
}
],
"qualified_name": "introspection_table_mod.text.sample_text_1",
"resource_name": "sample_text_1",
"source_definition": "\t\ttext \"sample_text_1\" {\n\t\t\ttitle = \"Sample text 1\"\n\t\t}",
"start_line_number": 59,
"tags": null,
"title": "Sample text 1",
"type": null,
"value": null,
"width": null
"metadata": {
"Duration": 286625,
"scans": [],
"rows_returned": 1,
"rows_fetched": 0,
"hydrate_calls": 0
}
}
]

@@ -1,41 +1,51 @@
[
{
"args": null,
"auto_generated": false,
"description": "query 1 - 3 params all with defaults",
"documentation": null,
"end_line_number": 23,
"is_anonymous": false,
"mod_name": "introspection_table_mod",
"params": [
{
"rows": [
{
"default": "steampipe_var",
"description": "p1",
"name": "p1"
},
{
"default": "because_def ",
"description": "p2",
"name": "p2"
},
{
"default": "string",
"description": "p3",
"name": "p3"
"args": null,
"auto_generated": false,
"description": "query 1 - 3 params all with defaults",
"documentation": null,
"end_line_number": 23,
"is_anonymous": false,
"mod_name": "introspection_table_mod",
"params": [
{
"default": "steampipe_var",
"description": "p1",
"name": "p1"
},
{
"default": "because_def ",
"description": "p2",
"name": "p2"
},
{
"default": "string",
"description": "p3",
"name": "p3"
}
],
"path": [
[
"mod.introspection_table_mod",
"introspection_table_mod.query.sample_query_1"
]
],
"qualified_name": "introspection_table_mod.query.sample_query_1",
"resource_name": "sample_query_1",
"source_definition": "query \"sample_query_1\"{\n\ttitle =\"Sample query 1\"\n\tdescription = \"query 1 - 3 params all with defaults\"\n\tsql = \"select 'ok' as status, 'steampipe' as resource, concat($1::text, $2::text, $3::text) as reason\"\n\tparam \"p1\"{\n\t\t\tdescription = \"p1\"\n\t\t\tdefault = var.sample_var_1\n\t}\n\tparam \"p2\"{\n\t\t\tdescription = \"p2\"\n\t\t\tdefault = \"because_def \"\n\t}\n\tparam \"p3\"{\n\t\t\tdescription = \"p3\"\n\t\t\tdefault = \"string\"\n\t}\n}",
"sql": "select 'ok' as status, 'steampipe' as resource, concat($1::text, $2::text, $3::text) as reason",
"start_line_number": 7,
"tags": null,
"title": "Sample query 1"
}
],
"path": [
[
"mod.introspection_table_mod",
"introspection_table_mod.query.sample_query_1"
]
],
"qualified_name": "introspection_table_mod.query.sample_query_1",
"resource_name": "sample_query_1",
"source_definition": "query \"sample_query_1\"{\n\ttitle =\"Sample query 1\"\n\tdescription = \"query 1 - 3 params all with defaults\"\n\tsql = \"select 'ok' as status, 'steampipe' as resource, concat($1::text, $2::text, $3::text) as reason\"\n\tparam \"p1\"{\n\t\t\tdescription = \"p1\"\n\t\t\tdefault = var.sample_var_1\n\t}\n\tparam \"p2\"{\n\t\t\tdescription = \"p2\"\n\t\t\tdefault = \"because_def \"\n\t}\n\tparam \"p3\"{\n\t\t\tdescription = \"p3\"\n\t\t\tdefault = \"string\"\n\t}\n}",
"sql": "select 'ok' as status, 'steampipe' as resource, concat($1::text, $2::text, $3::text) as reason",
"start_line_number": 7,
"tags": null,
"title": "Sample query 1"
"metadata": {
"Duration": 311375,
"scans": [],
"rows_returned": 1,
"rows_fetched": 0,
"hydrate_calls": 0
}
}
]

@@ -1,36 +1,46 @@
[
{
"auto_generated": false,
"default_value": "steampipe_var",
"description": "",
"documentation": null,
"end_line_number": 4,
"is_anonymous": false,
"mod_name": "introspection_table_mod",
"path": [
[
"mod.introspection_table_mod",
"introspection_table_mod.var.sample_var_1"
],
[
"mod.introspection_table_mod",
"introspection_table_mod.var.sample_var_1"
],
[
"mod.introspection_table_mod",
"introspection_table_mod.var.sample_var_1"
]
{
"rows": [
{
"auto_generated": false,
"default_value": "steampipe_var",
"description": "",
"documentation": null,
"end_line_number": 4,
"is_anonymous": false,
"mod_name": "introspection_table_mod",
"path": [
[
"mod.introspection_table_mod",
"introspection_table_mod.var.sample_var_1"
],
[
"mod.introspection_table_mod",
"introspection_table_mod.var.sample_var_1"
],
[
"mod.introspection_table_mod",
"introspection_table_mod.var.sample_var_1"
]
],
"qualified_name": "introspection_table_mod.var.sample_var_1",
"resource_name": "sample_var_1",
"source_definition": "variable \"sample_var_1\"{\n\ttype = string\n\tdefault = \"steampipe_var\"\n}",
"start_line_number": 1,
"tags": null,
"title": null,
"value": "steampipe_var",
"value_source": "config",
"value_source_end_line_number": 4,
"value_source_start_line_number": 1,
"var_type": "string"
}
],
"qualified_name": "introspection_table_mod.var.sample_var_1",
"resource_name": "sample_var_1",
"source_definition": "variable \"sample_var_1\"{\n\ttype = string\n\tdefault = \"steampipe_var\"\n}",
"start_line_number": 1,
"tags": null,
"title": null,
"value": "steampipe_var",
"value_source": "config",
"value_source_end_line_number": 4,
"value_source_start_line_number": 1,
"var_type": "string"
"metadata": {
"Duration": 249000,
"scans": [],
"rows_returned": 1,
"rows_fetched": 0,
"hydrate_calls": 0
}
}
]

@@ -1,16 +1,47 @@
[
{
"id": 0,
"json_column": {
"Id": 0,
"Name": "stringValuesomething-0",
"Statement": {
"Action": "iam:GetContextKeysForCustomPolicy",
"Effect": "Allow"
{
"rows": [
{
"id": 0,
"json_column": {
"Id": 0,
"Name": "stringValuesomething-0",
"Statement": {
"Action": "iam:GetContextKeysForCustomPolicy",
"Effect": "Allow"
}
},
"string_column": "stringValuesomething-0"
}
},
"string_column": "stringValuesomething-0"
],
"metadata": {
"Duration": 51849333,
"scans": [
{
"connection": "chaos",
"table": "chaos_all_column_types",
"cache_hit": false,
"rows_fetched": 100,
"hydrate_calls": 100,
"start_time": "2024-04-11T11:53:34+05:30",
"duration": 13,
"columns": [
"id",
"string_column",
"json_column"
],
"limit": null,
"quals": [
{
"column": "id",
"operator": "=",
"value": 0
}
]
}
],
"rows_returned": 1,
"rows_fetched": 100,
"hydrate_calls": 100
}
}
]

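The fixture above shows the full shape of the new JSON output: row data sits under `rows`, while per-query timing and per-scan detail sit under `metadata`. A minimal sketch of consuming it from a script, assuming only fields that appear in the fixture:

    steampipe query --output json "select 1 as val" > out.json
    jq '.rows[0].val' out.json              # row data, e.g. 1
    jq '.metadata.rows_fetched' out.json    # rows fetched across all scans
    jq '.metadata.scans[0].table' out.json  # per-scan detail; null when no scans ran
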
@@ -1,3 +1,11 @@
[]


{
"rows": [],
"metadata": {
"Duration": 377083,
"scans": [],
"rows_returned": 0,
"rows_fetched": 0,
"hydrate_calls": 0
}
}

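An empty result is now an object with an empty `rows` array rather than a bare `[]`, so an emptiness check reads the array length, e.g.:

    jq '.rows | length' out.json   # 0 for an empty result
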
@@ -1,6 +1,16 @@
[
{
"col": 2,
"val": 1
{
"rows": [
{
"col": 2,
"val": 1
}
],
"metadata": {
"Duration": 149041,
"scans": [],
"rows_returned": 1,
"rows_fetched": 0,
"hydrate_calls": 0
}
}
]

@@ -55,9 +55,13 @@ load "$LIB_BATS_SUPPORT/load.bash"
steampipe service stop

# verify that the json contents of output1 and output2 files are the same
run jd output1.json output2.json
run jd -f patch output1.json output2.json
echo $output
assert_success

diff=$($FILE_PATH/json_patch.sh $output)
echo $diff
# check if there is no diff returned by the script
assert_equal "$diff" ""

rm -f output1.json
rm -f output2.json
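The tests now invoke `jd -f patch`, which emits an RFC 6902 JSON Patch rather than jd's native diff format, and hand that patch to `json_patch.sh`. The script itself is not part of this diff; a hypothetical sketch of the idea is to discard patch operations that touch volatile fields (such as the timing values under `metadata`) and require that nothing else remains:

    # hypothetical illustration only; the real filtering lives in json_patch.sh
    echo "$patch" | jq '[ .[] | select(.path // "" | test("Duration") | not) ]'
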
@@ -84,9 +88,9 @@ load "$LIB_BATS_SUPPORT/load.bash"
# stop the service
steampipe service stop

unique1=$(cat out1.json | jq '.[].unique_col')
unique2=$(cat out2.json | jq '.[].unique_col')
unique3=$(cat out3.json | jq '.[].unique_col')
unique1=$(cat out1.json | jq '.rows[0].unique_col')
unique2=$(cat out2.json | jq '.rows[0].unique_col')
unique3=$(cat out3.json | jq '.rows[0].unique_col')
# remove the output and the config files
rm -f out*.json
rm -f $STEAMPIPE_INSTALL_DIR/config/chaos_no_options.spc

@@ -160,9 +164,9 @@ load "$LIB_BATS_SUPPORT/load.bash"
# stop the service
steampipe service stop

unique1=$(cat out1.json | jq '.[].unique_col')
unique2=$(cat out2.json | jq '.[].unique_col')
unique3=$(cat out3.json | jq '.[].unique_col')
unique1=$(cat out1.json | jq '.rows[0].unique_col')
unique2=$(cat out2.json | jq '.rows[0].unique_col')
unique3=$(cat out3.json | jq '.rows[0].unique_col')

cat $STEAMPIPE_INSTALL_DIR/config/default.spc
cat $STEAMPIPE_INSTALL_DIR/config/chaos_no_options.spc

@@ -250,9 +254,9 @@ load "$LIB_BATS_SUPPORT/load.bash"
# stop the service
steampipe service stop

unique1=$(cat out1.json | jq '.[].unique_col')
unique2=$(cat out2.json | jq '.[].unique_col')
unique3=$(cat out3.json | jq '.[].unique_col')
unique1=$(cat out1.json | jq '.rows[0].unique_col')
unique2=$(cat out2.json | jq '.rows[0].unique_col')
unique3=$(cat out3.json | jq '.rows[0].unique_col')

# remove the output and the config files
rm -f out*.json

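The jq path change mirrors the format change: the old output was a bare array, so `.[].unique_col` walked every row, while the new output nests rows under `rows`. For example:

    echo '{"rows":[{"unique_col":"abc"}],"metadata":{}}' | jq '.rows[0].unique_col'   # "abc"
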
@@ -4,24 +4,65 @@ load "$LIB_BATS_SUPPORT/load.bash"
@test "select from chaos.chaos_high_row_count order by column_0" {
run steampipe query --output json "select column_0,column_1,column_2,column_3,column_4,column_5,column_6,column_7,column_8,column_9,id from chaos.chaos_high_row_count order by column_0 limit 10"
echo $output > $TEST_DATA_DIR/actual_1.json
run jd $TEST_DATA_DIR/actual_1.json $TEST_DATA_DIR/expected_1.json

# verify that the json contents of actual_1 and expected_1 files are the same
run jd -f patch $TEST_DATA_DIR/actual_1.json $TEST_DATA_DIR/expected_1.json
echo $output

diff=$($FILE_PATH/json_patch.sh $output)
echo $diff
# check if there is no diff returned by the script
assert_equal "$diff" ""

rm -f $TEST_DATA_DIR/actual_1.json
assert_success
}

@test "select id, string_column, json_column, boolean_column from chaos.chaos_all_column_types where id='0'" {
run steampipe query --output json "select id, string_column, json_column, boolean_column from chaos.chaos_all_column_types where id='0'"
assert_equal "$output" "$(cat $TEST_DATA_DIR/expected_2.json)"
echo $output > $TEST_DATA_DIR/actual_1.json

# verify that the json contents of actual_1 and expected_2 files are the same
run jd -f patch $TEST_DATA_DIR/actual_1.json $TEST_DATA_DIR/expected_2.json
echo $output

diff=$($FILE_PATH/json_patch.sh $output)
echo $diff
# check if there is no diff returned by the script
assert_equal "$diff" ""

rm -f $TEST_DATA_DIR/actual_1.json
}

@test "select from chaos.chaos_high_column_count order by column_0" {
run steampipe query --output json "select * from chaos.chaos_high_column_count order by column_0 limit 10"
assert_equal "$output" "$(cat $TEST_DATA_DIR/expected_3.json)"
echo $output > $TEST_DATA_DIR/actual_1.json

# verify that the json contents of actual_1 and expected_3 files are the same
run jd -f patch $TEST_DATA_DIR/actual_1.json $TEST_DATA_DIR/expected_3.json
echo $output

diff=$($FILE_PATH/json_patch.sh $output)
echo $diff
# check if there is no diff returned by the script
assert_equal "$diff" ""

rm -f $TEST_DATA_DIR/actual_1.json
}

@test "select from chaos.chaos_hydrate_columns_dependency where id='0'" {
run steampipe query --output json "select hydrate_column_1,hydrate_column_2,hydrate_column_3,hydrate_column_4,hydrate_column_5,id from chaos.chaos_hydrate_columns_dependency where id='0'"
assert_equal "$output" "$(cat $TEST_DATA_DIR/expected_5.json)"
echo $output > $TEST_DATA_DIR/actual_1.json

# verify that the json contents of actual_1 and expected_5 files are the same
run jd -f patch $TEST_DATA_DIR/actual_1.json $TEST_DATA_DIR/expected_5.json
echo $output

diff=$($FILE_PATH/json_patch.sh $output)
echo $diff
# check if there is no diff returned by the script
assert_equal "$diff" ""

rm -f $TEST_DATA_DIR/actual_1.json
}

@test "select from chaos.chaos_list_error" {
@@ -47,17 +88,50 @@ load "$LIB_BATS_SUPPORT/load.bash"

@test "select from chaos.chaos_parallel_hydrate_columns where id='0'" {
run steampipe query --output json "select column_1,column_10,column_11,column_12,column_13,column_14,column_15,column_16,column_17,column_18,column_19,column_2,column_20,column_3,column_4,column_5,column_6,column_7,column_8,column_9,id from chaos.chaos_parallel_hydrate_columns where id='0'"
assert_equal "$output" "$(cat $TEST_DATA_DIR/expected_11.json)"
echo $output > $TEST_DATA_DIR/actual_1.json

# verify that the json contents of actual_1 and expected_11 files are the same
run jd -f patch $TEST_DATA_DIR/actual_1.json $TEST_DATA_DIR/expected_11.json
echo $output

diff=$($FILE_PATH/json_patch.sh $output)
echo $diff
# check if there is no diff returned by the script
assert_equal "$diff" ""

rm -f $TEST_DATA_DIR/actual_1.json
}

@test "select float32_data, id, int64_data, uint16_data from chaos.chaos_all_numeric_column where id='31'" {
run steampipe query --output json "select float32_data, id, int64_data, uint16_data from chaos.chaos_all_numeric_column where id='31'"
assert_equal "$output" "$(cat $TEST_DATA_DIR/expected_12.json)"
echo $output > $TEST_DATA_DIR/actual_1.json

# verify that the json contents of actual_1 and expected_12 files are the same
run jd -f patch $TEST_DATA_DIR/actual_1.json $TEST_DATA_DIR/expected_12.json
echo $output

diff=$($FILE_PATH/json_patch.sh $output)
echo $diff
# check if there is no diff returned by the script
assert_equal "$diff" ""

rm -f $TEST_DATA_DIR/actual_1.json
}

@test "select transform_method_column from chaos_transforms order by id" {
run steampipe query --output json "select transform_method_column from chaos_transforms order by id"
assert_equal "$output" "$(cat $TEST_DATA_DIR/expected_14.json)"
echo $output > $TEST_DATA_DIR/actual_1.json

# verify that the json contents of actual_1 and expected_14 files are the same
run jd -f patch $TEST_DATA_DIR/actual_1.json $TEST_DATA_DIR/expected_14.json
echo $output

diff=$($FILE_PATH/json_patch.sh $output)
echo $diff
# check if there is no diff returned by the script
assert_equal "$diff" ""

rm -f $TEST_DATA_DIR/actual_1.json
}

@test "select parent_should_ignore_error from chaos.chaos_list_parent_child" {
@@ -67,21 +141,55 @@ load "$LIB_BATS_SUPPORT/load.bash"

@test "select from_qual_column from chaos_transforms where id=2" {
run steampipe query --output json "select from_qual_column from chaos_transforms where id=2"
assert_equal "$output" "$(cat $TEST_DATA_DIR/expected_13.json)"
echo $output > $TEST_DATA_DIR/actual_1.json

# verify that the json contents of actual_1 and expected_13 files are the same
run jd -f patch $TEST_DATA_DIR/actual_1.json $TEST_DATA_DIR/expected_13.json
echo $output

diff=$($FILE_PATH/json_patch.sh $output)
echo $diff
# check if there is no diff returned by the script
assert_equal "$diff" ""

rm -f $TEST_DATA_DIR/actual_1.json
}

@test "public schema insert select all types" {
skip
steampipe query "drop table if exists all_columns"
steampipe query "create table all_columns (nullcolumn CHAR(2), booleancolumn boolean, textcolumn1 CHAR(20), textcolumn2 VARCHAR(20), textcolumn3 text, integercolumn1 smallint, integercolumn2 int, integercolumn3 SERIAL, integercolumn4 bigint, integercolumn5 bigserial, numericColumn numeric(6,4), realColumn real, floatcolumn float, date1 DATE, time1 TIME, timestamp1 TIMESTAMP, timestamp2 TIMESTAMPTZ, interval1 INTERVAL, array1 text[], jsondata jsonb, jsondata2 json, uuidcolumn UUID, ipAddress inet, macAddress macaddr, cidrRange cidr, xmlData xml, currency money)"
steampipe query "INSERT INTO all_columns (nullcolumn, booleancolumn, textcolumn1, textcolumn2, textcolumn3, integercolumn1, integercolumn2, integercolumn3, integercolumn4, integercolumn5, numericColumn, realColumn, floatcolumn, date1, time1, timestamp1, timestamp2, interval1, array1, jsondata, jsondata2, uuidcolumn, ipAddress, macAddress, cidrRange, xmlData, currency) VALUES (NULL, TRUE, 'Yes', 'test for varchar', 'This is a very long text for the PostgreSQL text column', 3278, 21445454, 2147483645, 92233720368547758, 922337203685477580, 23.5141543, 4660.33777, 4.6816421254887534, '1978-02-05', '08:00:00', '2016-06-22 19:10:25-07', '2016-06-22 19:10:25-07', '1 year 2 months 3 days', '{\"(408)-589-5841\"}','{ \"customer\": \"John Doe\", \"items\": {\"product\": \"Beer\",\"qty\": 6}}', '{ \"customer\": \"John Doe\", \"items\": {\"product\": \"Beer\",\"qty\": 6}}', '6948DF80-14BD-4E04-8842-7668D9C001F5', '192.168.0.0', '08:00:2b:01:02:03', '10.1.2.3/32', '<book><title>Manual</title><chapter>...</chapter></book>', 922337203685477.57)"
run steampipe query "select * from all_columns" --output json
assert_equal "$output" "$(cat $TEST_DATA_DIR/expected_6.json)"
echo $output > $TEST_DATA_DIR/actual_1.json

# verify that the json contents of actual_1 and expected_6 files are the same
run jd -f patch $TEST_DATA_DIR/actual_1.json $TEST_DATA_DIR/expected_6.json
echo $output

diff=$($FILE_PATH/json_patch.sh $output)
echo $diff
# check if there is no diff returned by the script
assert_equal "$diff" ""

rm -f $TEST_DATA_DIR/actual_1.json
run steampipe query "drop table all_columns"
}

@test "query json" {
run steampipe query "select 1 as val, 2 as col" --output json
assert_equal "$output" "$(cat $TEST_DATA_DIR/expected_query_json.json)"
echo $output > $TEST_DATA_DIR/actual_1.json

# verify that the json contents of actual_1 and expected_query_json files are the same
run jd -f patch $TEST_DATA_DIR/actual_1.json $TEST_DATA_DIR/expected_query_json.json
echo $output

diff=$($FILE_PATH/json_patch.sh $output)
echo $diff
# check if there is no diff returned by the script
assert_equal "$diff" ""

rm -f $TEST_DATA_DIR/actual_1.json
}

@test "query csv" {
@@ -155,7 +263,18 @@ load "$LIB_BATS_SUPPORT/load.bash"

@test "json" {
run steampipe query --output json "select id, string_column, json_column from chaos.chaos_all_column_types where id='0'"
assert_equal "$output" "$(cat $TEST_DATA_DIR/expected_json.json)"
echo $output > $TEST_DATA_DIR/actual_1.json

# verify that the json contents of actual_1 and expected_json files are the same
run jd -f patch $TEST_DATA_DIR/actual_1.json $TEST_DATA_DIR/expected_json.json
echo $output

diff=$($FILE_PATH/json_patch.sh $output)
echo $diff
# check if there is no diff returned by the script
assert_equal "$diff" ""

rm -f $TEST_DATA_DIR/actual_1.json
}

@test "line" {
@@ -203,29 +322,38 @@ load "$LIB_BATS_SUPPORT/load.bash"

@test "verify empty json result is empty list and not null" {
run steampipe query "select * from steampipe_connection where plugin = 'random'" --output json
assert_output "$output" "$(cat $TEST_DATA_DIR/expected_query_empty_json.json)"
echo $output > $TEST_DATA_DIR/actual_1.json

# verify that the json contents of actual_1 and expected_query_empty_json files are the same
run jd -f patch $TEST_DATA_DIR/actual_1.json $TEST_DATA_DIR/expected_query_empty_json.json
echo $output

diff=$($FILE_PATH/json_patch.sh $output)
echo $diff
# check if there is no diff returned by the script
assert_equal "$diff" ""

rm -f $TEST_DATA_DIR/actual_1.json
}

#@test "sql glob" {
#  cd $FILE_PATH/test_files
#  run steampipe query *.sql
#  assert_equal "$output" "$(cat $TEST_DATA_DIR/expected_sql_glob.txt)"
#}

#@test "sql glob csv no header" {
#  cd $FILE_PATH/test_files
#  run steampipe query *.sql --header=false --output csv
#  assert_equal "$output" "$(cat $TEST_DATA_DIR/expected_sql_glob_csv_no_header.txt)"
#}


@test "migrate legacy lock file" {
cd $FILE_PATH/test_data/mods/dependent_mod_with_legacy_lock
steampipe mod install
# run steampipe query twice - the bug we are testing for caused the workspace lock to be deleted after the first query
steampipe query "select 1 as a" --output json
run steampipe query "select 1 as a" --output json
assert_equal "$output" "$(cat $TEST_DATA_DIR/expected_15.json)"
echo $output > $TEST_DATA_DIR/actual_1.json

# verify that the json contents of actual_1 and expected_15 files are the same
run jd -f patch $TEST_DATA_DIR/actual_1.json $TEST_DATA_DIR/expected_15.json
echo $output

diff=$($FILE_PATH/json_patch.sh $output)
echo $diff
# check if there is no diff returned by the script
assert_equal "$diff" ""

rm -f $TEST_DATA_DIR/actual_1.json
}

function teardown_file() {

@@ -11,7 +11,7 @@ load "$LIB_BATS_SUPPORT/load.bash"
echo $output

# fetch the value of account_alias to compare
op=$(echo $output | jq '.[0].account_aliases[0]')
op=$(echo $output | jq '.rows[0].account_aliases[0]')
echo $op

# check if values match
@@ -24,7 +24,7 @@ load "$LIB_BATS_SUPPORT/load.bash"
echo $output

# fetch the value of account_alias to compare
op=$(echo $output | jq '.[0].account_aliases[0]')
op=$(echo $output | jq '.rows[0].account_aliases[0]')
echo $op

# check if values match
@@ -37,7 +37,7 @@ load "$LIB_BATS_SUPPORT/load.bash"
echo $output

# fetch the value of account_alias to compare
op=$(echo $output | jq '.[0].account_aliases[0]')
op=$(echo $output | jq '.rows[0].account_aliases[0]')
echo $op

# check if values match

@@ -4,6 +4,7 @@ load "$LIB_BATS_SUPPORT/load.bash"
## workspace tests

@test "generic config precedence test" {
skip 'disabled for now'
cp $FILE_PATH/test_data/source_files/config_tests/default.spc $STEAMPIPE_INSTALL_DIR/config/default.spc

# setup test folder and read the test-cases file

@@ -15,16 +15,21 @@ load "$LIB_BATS_SUPPORT/load.bash"
steampipe query "select * from steampipe_query" --output json > output.json

# checking for OS type, since sed command is different for linux and OSX
# removing the 8th line, since it contains file location which would differ in github runners
# removing the 9th line, since it contains file location which would differ in github runners
if [[ "$OSTYPE" == "darwin"* ]]; then
run sed -i ".json" "8d" output.json
run sed -i ".json" "9d" output.json
else
run sed -i "8d" output.json
run sed -i "9d" output.json
fi

run jd "$TEST_DATA_DIR/expected_introspection_info_query.json" output.json
run jd -f patch "$TEST_DATA_DIR/expected_introspection_info_query.json" output.json
echo $output
assert_success

diff=$($FILE_PATH/json_patch.sh $output)
echo $diff

# check if there is no diff returned by the script
assert_equal "$diff" ""
rm -f output.json
}

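These OS branches exist because BSD sed on macOS requires an explicit backup suffix after `-i`, while GNU sed on Linux takes the script directly; for example:

    sed -i ".json" "9d" output.json   # macOS: writes output.json.json as a backup, deletes line 9
    sed -i "9d" output.json           # Linux: edits in place, deletes line 9
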
@@ -34,16 +39,21 @@ load "$LIB_BATS_SUPPORT/load.bash"
steampipe query "select * from steampipe_control" --output json > output.json

# checking for OS type, since sed command is different for linux and OSX
# removing the 11th line, since it contains file location which would differ in github runners
# removing the 12th line, since it contains file location which would differ in github runners
if [[ "$OSTYPE" == "darwin"* ]]; then
run sed -i ".json" "11d" output.json
run sed -i ".json" "12d" output.json
else
run sed -i "11d" output.json
run sed -i "12d" output.json
fi

run jd "$TEST_DATA_DIR/expected_introspection_info_control.json" output.json
run jd -f patch "$TEST_DATA_DIR/expected_introspection_info_control.json" output.json
echo $output
assert_success

diff=$($FILE_PATH/json_patch.sh $output)
echo $diff

# check if there is no diff returned by the script
assert_equal "$diff" ""
rm -f output.json
}

@@ -55,16 +65,21 @@ load "$LIB_BATS_SUPPORT/load.bash"
# checking for OS type, since sed command is different for linux and OSX
# removing the 8th line, since it contains file location which would differ in github runners
if [[ "$OSTYPE" == "darwin"* ]]; then
run sed -i ".json" "8d" output.json
run sed -i ".json" "33d" output.json
run sed -i ".json" "9d" output.json
run sed -i ".json" "34d" output.json
else
run sed -i "8d" output.json
run sed -i "33d" output.json
run sed -i "9d" output.json
run sed -i "34d" output.json
fi

run jd "$TEST_DATA_DIR/expected_introspection_info_variable.json" output.json
run jd -f patch "$TEST_DATA_DIR/expected_introspection_info_variable.json" output.json
echo $output
assert_success

diff=$($FILE_PATH/json_patch.sh $output)
echo $diff

# check if there is no diff returned by the script
assert_equal "$diff" ""
rm -f output.json
}

@@ -74,16 +89,21 @@ load "$LIB_BATS_SUPPORT/load.bash"
steampipe query "select * from steampipe_benchmark" --output json > output.json

# checking for OS type, since sed command is different for linux and OSX
# removing the 10th line, since it contains file location which would differ in github runners
# removing the 11th line, since it contains file location which would differ in github runners
if [[ "$OSTYPE" == "darwin"* ]]; then
run sed -i ".json" "10d" output.json
run sed -i ".json" "11d" output.json
else
run sed -i "10d" output.json
run sed -i "11d" output.json
fi

run jd "$TEST_DATA_DIR/expected_introspection_info_benchmark.json" output.json
run jd -f patch "$TEST_DATA_DIR/expected_introspection_info_benchmark.json" output.json
echo $output
assert_success

diff=$($FILE_PATH/json_patch.sh $output)
echo $diff

# check if there is no diff returned by the script
assert_equal "$diff" ""
rm -f output.json
}

@@ -93,16 +113,21 @@ load "$LIB_BATS_SUPPORT/load.bash"
steampipe query "select * from steampipe_dashboard" --output json > output.json

# checking for OS type, since sed command is different for linux and OSX
# removing the 11th line, since it contains file location which would differ in github runners
# removing the 12th line, since it contains file location which would differ in github runners
if [[ "$OSTYPE" == "darwin"* ]]; then
run sed -i ".json" "11d" output.json
run sed -i ".json" "12d" output.json
else
run sed -i "11d" output.json
run sed -i "12d" output.json
fi

run jd "$TEST_DATA_DIR/expected_introspection_info_dashboard.json" output.json
run jd -f patch "$TEST_DATA_DIR/expected_introspection_info_dashboard.json" output.json
echo $output
assert_success

diff=$($FILE_PATH/json_patch.sh $output)
echo $diff

# check if there is no diff returned by the script
assert_equal "$diff" ""
rm -f output.json
}

@@ -114,14 +139,19 @@ load "$LIB_BATS_SUPPORT/load.bash"
# checking for OS type, since sed command is different for linux and OSX
# removing the 8th line, since it contains file location which would differ in github runners
if [[ "$OSTYPE" == "darwin"* ]]; then
run sed -i ".json" "8d" output.json
run sed -i ".json" "9d" output.json
else
run sed -i "8d" output.json
run sed -i "9d" output.json
fi

run jd "$TEST_DATA_DIR/expected_introspection_info_dashboard_card.json" output.json
run jd -f patch "$TEST_DATA_DIR/expected_introspection_info_dashboard_card.json" output.json
echo $output
assert_success

diff=$($FILE_PATH/json_patch.sh $output)
echo $diff

# check if there is no diff returned by the script
assert_equal "$diff" ""
rm -f output.json
}

@@ -131,16 +161,21 @@ load "$LIB_BATS_SUPPORT/load.bash"
steampipe query "select * from steampipe_dashboard_image" --output json > output.json

# checking for OS type, since sed command is different for linux and OSX
# removing the 9th line, since it contains file location which would differ in github runners
# removing the 10th line, since it contains file location which would differ in github runners
if [[ "$OSTYPE" == "darwin"* ]]; then
run sed -i ".json" "9d" output.json
run sed -i ".json" "10d" output.json
else
run sed -i "9d" output.json
run sed -i "10d" output.json
fi

run jd "$TEST_DATA_DIR/expected_introspection_info_dashboard_image.json" output.json
run jd -f patch "$TEST_DATA_DIR/expected_introspection_info_dashboard_image.json" output.json
echo $output
assert_success

diff=$($FILE_PATH/json_patch.sh $output)
echo $diff

# check if there is no diff returned by the script
assert_equal "$diff" ""
rm -f output.json
}

@@ -150,16 +185,21 @@ load "$LIB_BATS_SUPPORT/load.bash"
steampipe query "select * from steampipe_dashboard_text" --output json > output.json

# checking for OS type, since sed command is different for linux and OSX
# removing the 7th line, since it contains file location which would differ in github runners
# removing the 8th line, since it contains file location which would differ in github runners
if [[ "$OSTYPE" == "darwin"* ]]; then
run sed -i ".json" "7d" output.json
run sed -i ".json" "8d" output.json
else
run sed -i "7d" output.json
run sed -i "8d" output.json
fi

run jd "$TEST_DATA_DIR/expected_introspection_info_dashboard_text.json" output.json
run jd -f patch "$TEST_DATA_DIR/expected_introspection_info_dashboard_text.json" output.json
echo $output
assert_success

diff=$($FILE_PATH/json_patch.sh $output)
echo $diff

# check if there is no diff returned by the script
assert_equal "$diff" ""
rm -f output.json
}

@@ -169,16 +209,21 @@ load "$LIB_BATS_SUPPORT/load.bash"
steampipe query "select * from steampipe_dashboard_chart" --output json > output.json

# checking for OS type, since sed command is different for linux and OSX
# removing the 9th line, since it contains file location which would differ in github runners
# removing the 10th line, since it contains file location which would differ in github runners
if [[ "$OSTYPE" == "darwin"* ]]; then
run sed -i ".json" "9d" output.json
run sed -i ".json" "10d" output.json
else
run sed -i "9d" output.json
run sed -i "10d" output.json
fi

run jd "$TEST_DATA_DIR/expected_introspection_info_dashboard_chart.json" output.json
run jd -f patch "$TEST_DATA_DIR/expected_introspection_info_dashboard_chart.json" output.json
echo $output
assert_success

diff=$($FILE_PATH/json_patch.sh $output)
echo $diff

# check if there is no diff returned by the script
assert_equal "$diff" ""
rm -f output.json
}

@@ -188,16 +233,21 @@ load "$LIB_BATS_SUPPORT/load.bash"
steampipe query "select * from steampipe_dashboard_flow" --output json > output.json

# checking for OS type, since sed command is different for linux and OSX
# removing the 13th line, since it contains file location which would differ in github runners
# removing the 14th line, since it contains file location which would differ in github runners
if [[ "$OSTYPE" == "darwin"* ]]; then
run sed -i ".json" "13d" output.json
run sed -i ".json" "14d" output.json
else
run sed -i "13d" output.json
run sed -i "14d" output.json
fi

run jd "$TEST_DATA_DIR/expected_introspection_info_dashboard_flow.json" output.json
run jd -f patch "$TEST_DATA_DIR/expected_introspection_info_dashboard_flow.json" output.json
echo $output
assert_success

diff=$($FILE_PATH/json_patch.sh $output)
echo $diff

# check if there is no diff returned by the script
assert_equal "$diff" ""
rm -f output.json
}

@@ -207,16 +257,21 @@ load "$LIB_BATS_SUPPORT/load.bash"
steampipe query "select * from steampipe_dashboard_graph" --output json > output.json

# checking for OS type, since sed command is different for linux and OSX
# removing the 14th line, since it contains file location which would differ in github runners
# removing the 15th line, since it contains file location which would differ in github runners
if [[ "$OSTYPE" == "darwin"* ]]; then
run sed -i ".json" "14d" output.json
run sed -i ".json" "15d" output.json
else
run sed -i "14d" output.json
run sed -i "15d" output.json
fi

run jd "$TEST_DATA_DIR/expected_introspection_info_dashboard_graph.json" output.json
run jd -f patch "$TEST_DATA_DIR/expected_introspection_info_dashboard_graph.json" output.json
echo $output
assert_success

diff=$($FILE_PATH/json_patch.sh $output)
echo $diff

# check if there is no diff returned by the script
assert_equal "$diff" ""
rm -f output.json
}

@@ -226,16 +281,21 @@ load "$LIB_BATS_SUPPORT/load.bash"
steampipe query "select * from steampipe_dashboard_hierarchy" --output json > output.json

# checking for OS type, since sed command is different for linux and OSX
# removing the 13th line, since it contains file location which would differ in github runners
# removing the 14th line, since it contains file location which would differ in github runners
if [[ "$OSTYPE" == "darwin"* ]]; then
run sed -i ".json" "13d" output.json
run sed -i ".json" "14d" output.json
else
run sed -i "13d" output.json
run sed -i "14d" output.json
fi

run jd "$TEST_DATA_DIR/expected_introspection_info_dashboard_hierarchy.json" output.json
run jd -f patch "$TEST_DATA_DIR/expected_introspection_info_dashboard_hierarchy.json" output.json
echo $output
assert_success

diff=$($FILE_PATH/json_patch.sh $output)
echo $diff

# check if there is no diff returned by the script
assert_equal "$diff" ""
rm -f output.json
}

@@ -245,16 +305,21 @@ load "$LIB_BATS_SUPPORT/load.bash"
steampipe query "select * from steampipe_dashboard_input" --output json > output.json

# checking for OS type, since sed command is different for linux and OSX
# removing the 9th line, since it contains file location which would differ in github runners
# removing the 10th line, since it contains file location which would differ in github runners
if [[ "$OSTYPE" == "darwin"* ]]; then
run sed -i ".json" "9d" output.json
run sed -i ".json" "10d" output.json
else
run sed -i "9d" output.json
run sed -i "10d" output.json
fi

run jd "$TEST_DATA_DIR/expected_introspection_info_dashboard_input.json" output.json
run jd -f patch "$TEST_DATA_DIR/expected_introspection_info_dashboard_input.json" output.json
echo $output
assert_success

diff=$($FILE_PATH/json_patch.sh $output)
echo $diff

# check if there is no diff returned by the script
assert_equal "$diff" ""
rm -f output.json
}

@@ -264,16 +329,21 @@ load "$LIB_BATS_SUPPORT/load.bash"
steampipe query "select * from steampipe_dashboard_table" --output json > output.json

# checking for OS type, since sed command is different for linux and OSX
# removing the 9th line, since it contains file location which would differ in github runners
# removing the 10th line, since it contains file location which would differ in github runners
if [[ "$OSTYPE" == "darwin"* ]]; then
run sed -i ".json" "9d" output.json
run sed -i ".json" "10d" output.json
else
run sed -i "9d" output.json
run sed -i "10d" output.json
fi

run jd "$TEST_DATA_DIR/expected_introspection_info_dashboard_table.json" output.json

run jd -f patch "$TEST_DATA_DIR/expected_introspection_info_dashboard_table.json" output.json
echo $output
assert_success

diff=$($FILE_PATH/json_patch.sh $output)
echo $diff

# check if there is no diff returned by the script
assert_equal "$diff" ""
rm -f output.json
}

@@ -282,7 +352,7 @@ load "$LIB_BATS_SUPPORT/load.bash"
run steampipe query "select * from steampipe_query" --output json

# extract the first mod_name from the list
mod_name=$(echo $output | jq '.[0].mod_name')
mod_name=$(echo $output | jq '.rows[0].mod_name')

# check if mod_name starts with "mod."
if [[ "$mod_name" == *"mod."* ]];
@@ -299,10 +369,10 @@ load "$LIB_BATS_SUPPORT/load.bash"
run steampipe query "select * from steampipe_query" --output json

# extract the first encountered sql file's file_name from the list
sql_file_name=$(echo $output | jq '.[].file_name' | grep ".sql" | head -1)
sql_file_name=$(echo $output | jq '.rows[0].file_name' | grep ".sql" | head -1)

#extract the resource_name of the above extracted file_name
resource_name=$(echo $output | jq --arg FILENAME "$sql_file_name" '.[] | select(.file_name=="$FILENAME") | .resource_name')
resource_name=$(echo $output | jq --arg FILENAME "$sql_file_name" '.rows[0] | select(.file_name=="$FILENAME") | .resource_name')

# check if resource_name starts with "query."
if [[ "$resource_name" == *"query."* ]];
@@ -315,11 +385,12 @@ load "$LIB_BATS_SUPPORT/load.bash"
}

@test "ensure the reference_from column is populated correctly" {
skip
cd $SIMPLE_MOD_DIR
run steampipe query "select * from steampipe_reference" --output json

# extract the refs and the referenced_by of the variable `sample_var_1`
refs=$(echo $output | jq '.[] | select(.reference_to=="var.sample_var_1") | .reference_from')
refs=$(echo $output | jq '.rows[0] | select(.reference_to=="var.sample_var_1") | .reference_from')
echo $refs

assert_equal "$refs" '"query.sample_query_1"'
@@ -330,7 +401,7 @@ load "$LIB_BATS_SUPPORT/load.bash"
run steampipe query "select * from steampipe_query" --output json

# extracting only description from the list, which is enough to prove that there is an output
description=$(echo $output | jq '.[].description')
description=$(echo $output | jq '.rows[0].description')
assert_equal "$description" '"query 1 - 3 params all with defaults"'
}

@@ -340,16 +411,21 @@ load "$LIB_BATS_SUPPORT/load.bash"
steampipe query "select * from steampipe_control" --output json > output.json

# checking for OS type, since sed command is different for linux and OSX
# removing the 11th line, since it contains file location which would differ in github runners
# removing the 12th line, since it contains file location which would differ in github runners
if [[ "$OSTYPE" == "darwin"* ]]; then
run sed -i ".json" "11d" output.json
run sed -i ".json" "12d" output.json
else
run sed -i "11d" output.json
run sed -i "12d" output.json
fi

run jd "$TEST_DATA_DIR/expected_introspection_info_control.json" output.json
run jd -f patch "$TEST_DATA_DIR/expected_introspection_info_control.json" output.json
echo $output
assert_success

diff=$($FILE_PATH/json_patch.sh $output)
echo $diff

# check if there is no diff returned by the script
assert_equal "$diff" ""
rm -f output.json
}

@@ -48,7 +48,7 @@ load "$LIB_BATS_SUPPORT/load.bash"

# store the result of the verification statements(0.13.6)
for ((i = 0; i < ${#verify_sql[@]}; i++)); do
$tmpdir/steampipe --install-dir $tmpdir query "${verify_sql[$i]}" --output json > verify$i.json
$tmpdir/steampipe --install-dir $tmpdir query "${verify_sql[$i]}" > verify$i.txt
done

# stop the service
@@ -60,7 +60,7 @@ load "$LIB_BATS_SUPPORT/load.bash"
# store the result of the verification statements(0.14.*)
for ((i = 0; i < ${#verify_sql[@]}; i++)); do
echo "VerifySQL: ${verify_sql[$i]}"
steampipe --install-dir $tmpdir query "${verify_sql[$i]}" --output json > verify$i$i.json
steampipe --install-dir $tmpdir query "${verify_sql[$i]}" > verify$i$i.txt
done

# stop the service
@@ -68,7 +68,7 @@ load "$LIB_BATS_SUPPORT/load.bash"

# verify data is migrated correctly
for ((i = 0; i < ${#verify_sql[@]}; i++)); do
assert_equal "$(cat verify$i.json)" "$(cat verify$i$i.json)"
assert_equal "$(cat verify$i.txt)" "$(cat verify$i$i.txt)"
done

rm -rf $tmpdir

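A plausible reading of this change, not stated in the diff itself: the JSON output now carries per-run timing under `metadata` (for example the `Duration` values in the fixtures above), so byte-for-byte JSON comparison across versions would no longer be stable; comparing the plain table output keeps the migration check deterministic:

    # the timing value differs between otherwise identical runs
    steampipe query "select 1" --output json | jq '.metadata.Duration'
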
@@ -8,7 +8,7 @@ load "$LIB_BATS_SUPPORT/load.bash"
run steampipe query query.query_params_with_all_defaults --output json

# store the reason field in `content`
content=$(echo $output | jq '.[].reason')
content=$(echo $output | jq '.rows[0].reason')

assert_equal "$content" '"default_parameter_1 default_parameter_2 default_parameter_3"'
}
@@ -18,7 +18,7 @@ load "$LIB_BATS_SUPPORT/load.bash"
run steampipe query "query.query_params_with_all_defaults(\"command_param_1\")" --output json

# store the reason field in `content`
content=$(echo $output | jq '.[].reason')
content=$(echo $output | jq '.rows[0].reason')

assert_equal "$content" '"command_param_1 default_parameter_2 default_parameter_3"'
}
@@ -28,7 +28,7 @@ load "$LIB_BATS_SUPPORT/load.bash"
run steampipe query "query.query_params_with_all_defaults(p1 => \"command_param_1\")" --output json

# store the reason field in `content`
content=$(echo $output | jq '.[].reason')
content=$(echo $output | jq '.rows[0].reason')

assert_equal "$content" '"command_param_1 default_parameter_2 default_parameter_3"'
}
@@ -45,7 +45,7 @@ load "$LIB_BATS_SUPPORT/load.bash"
run steampipe query "query.query_params_with_all_defaults(\"command_param_1\",\"command_param_2\",\"command_param_3\")" --output json

# store the reason field in `content`
content=$(echo $output | jq '.[].reason')
content=$(echo $output | jq '.rows[0].reason')

assert_equal "$content" '"command_param_1 command_param_2 command_param_3"'
}
@@ -55,7 +55,7 @@ load "$LIB_BATS_SUPPORT/load.bash"
# run steampipe query query.query_array_params_with_default --output json

# # store the reason field in `content`
# content=$(echo $output | jq '.[].reason')
# content=$(echo $output | jq '.rows[0].reason')

# assert_equal "$content" '"default_p1_element_02"'
}
@@ -74,7 +74,7 @@ load "$LIB_BATS_SUPPORT/load.bash"
run steampipe query query.query_map_params_with_default --output json

# store the reason field in `content`
content=$(echo $output | jq '.[].reason')
content=$(echo $output | jq '.rows[0].reason')

assert_equal "$content" '"default_property_value_01"'
}

@@ -218,7 +218,7 @@ load "$LIB_BATS_SUPPORT/load.bash"
run steampipe query "select * from steampipe_internal.steampipe_server_settings" --output=json

# extract the first mod_name from the list
fdw_version=$(echo $output | jq '.[0].fdw_version')
fdw_version=$(echo $output | jq '.rows[0].fdw_version')
desired_fdw_version=$(cat $STEAMPIPE_INSTALL_DIR/db/versions.json | jq '.fdw_extension.version')

assert_equal "$fdw_version" "$desired_fdw_version"

@@ -44,6 +44,7 @@ function setup() {
}

@test "snapshot mode - query output json" {
skip
cd $FILE_PATH/test_data/mods/functionality_test_mod

steampipe query query.static_query_2 --snapshot --output json --pipes-token $SPIPETOOLS_TOKEN --snapshot-location turbot-ops/clitesting > output.json
