Commit f04e97c

It works! Needs polish, though
1 parent 65dd0c5 commit f04e97c

File tree: 9 files changed, +398 -36 lines changed

README.md

Lines changed: 7 additions & 0 deletions

@@ -94,6 +94,13 @@ If you are using VS Code, this is a good read: https://github.com/golang/vscode-
 * `Watchpack Error (watcher): Error: ENOSPC: System limit for number of file watchers reached, watch`
 * https://stackoverflow.com/a/55543310/5434860
 
+### Example repos
+
+Some examples of data source plugin source code:
+
+* https://github.com/grafana/grafana-infinity-datasource/tree/main/pkg
+* https://github.com/cnosdb/grafana-datasource-plugin/blob/main/cnosdb/pkg/plugin/query_model.go
+
 ## Dependency Notes
 
 This section contains notes about dependencies.

pkg/plugin/datasource.go

Lines changed: 28 additions & 30 deletions

@@ -5,14 +5,12 @@ import (
 	"encoding/json"
 	"errors"
 	"fmt"
+	"github.com/grafana/grafana-plugin-sdk-go/backend"
 	"github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
+	"github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
 	"github.com/grafana/grafana-plugin-sdk-go/backend/log"
+	"github.com/wildmountainfarms/wild-graphql-datasource/pkg/plugin/util"
 	"net/http"
-	"time"
-
-	"github.com/grafana/grafana-plugin-sdk-go/backend"
-	"github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
-	"github.com/grafana/grafana-plugin-sdk-go/data"
 )
 
 // Make sure Datasource implements required interfaces. This is important to do
@@ -93,7 +91,8 @@ type queryModel struct {
 	// The name of the operation, or a blank string to let the GraphQL server infer the operation name
 	OperationName string `json:"operationName"`
 	// The variables for the operation. May either be a string or a map[string]interface{}
-	Variables interface{} `json:"variables"`
+	Variables      interface{}          `json:"variables"`
+	ParsingOptions []util.ParsingOption `json:"parsingOptions"`
 }
 
 func statusFromResponse(response http.Response) backend.Status {
@@ -110,7 +109,6 @@ func statusFromResponse(response http.Response) backend.Status {
 // In some cases that are never expected to happen, error is returned and the DataResponse is nil.
 // In these cases, you can assume that something is seriously wrong, as we didn't intend to recover from that specific situation.
 func (d *Datasource) query(ctx context.Context, pCtx backend.PluginContext, query backend.DataQuery) (*backend.DataResponse, error) {
-	var response backend.DataResponse
 
 	log.DefaultLogger.Info(fmt.Sprintf("JSON is: %s", query.JSON))
 
@@ -146,7 +144,7 @@ func (d *Datasource) query(ctx context.Context, pCtx backend.PluginContext, query backend.DataQuery) (*backend.DataResponse, error) {
 
 		err := json.Unmarshal([]byte(value), &variables)
 		if err != nil {
-			log.DefaultLogger.Error(fmt.Sprintf("Got error while parsing variables! Error: %s", err.Error()))
+			log.DefaultLogger.Error("Got error while parsing variables! Error", err)
 			log.DefaultLogger.Info(fmt.Sprintf("Value of variables from parsing error is: %s", value))
 
 			// continue executing query without interpolated variables
@@ -161,9 +159,9 @@ func (d *Datasource) query(ctx context.Context, pCtx backend.PluginContext, query backend.DataQuery) (*backend.DataResponse, error) {
 	default:
 		log.DefaultLogger.Error("Unable to parse variables for ref ID:" + query.RefID)
 	}
-	AutoPopulateVariables(query, &variables)
+	util.AutoPopulateVariables(query, &variables)
 
-	graphQLRequest := GraphQLRequest{
+	graphQLRequest := util.GraphQLRequest{
 		Query:         qm.QueryText,
 		OperationName: qm.OperationName,
 		Variables:     variables,
@@ -185,7 +183,7 @@ func (d *Datasource) query(ctx context.Context, pCtx backend.PluginContext, query backend.DataQuery) (*backend.DataResponse, error) {
 	}
 	status := statusFromResponse(*resp)
 
-	graphQLResponse, responseParseError := ParseGraphQLResponse(resp.Body)
+	graphQLResponse, responseParseError := util.ParseGraphQLResponse(resp.Body)
 	if responseParseError != nil {
 		return &backend.DataResponse{
 			Error: err,
@@ -212,27 +210,27 @@ func (d *Datasource) query(ctx context.Context, pCtx backend.PluginContext, query backend.DataQuery) (*backend.DataResponse, error) {
 		}, nil
 	}
 
-	dataBytes, serializeError := json.Marshal(graphQLResponse.Data)
-	if serializeError != nil {
-		return nil, serializeError // this should not happen
-	}
+	//dataBytes, serializeError := json.Marshal(graphQLResponse.Data)
+	//if serializeError != nil {
+	//	return nil, serializeError // this should not happen
+	//}
+	//log.DefaultLogger.Info(fmt.Sprintf("result is: %s", dataBytes))
 
-	log.DefaultLogger.Info("Successful query!")
-	log.DefaultLogger.Info(fmt.Sprintf("result is: %s", dataBytes))
+	log.DefaultLogger.Debug("Successful query!")
 
-	// create data frame response.
-	// For an overview on data frames and how grafana handles them:
-	// https://grafana.com/developers/plugin-tools/introduction/data-frames
-	frame := data.NewFrame("response")
-
-	// add fields.
-	frame.Fields = append(frame.Fields,
-		data.NewField("time", nil, []time.Time{query.TimeRange.From, query.TimeRange.To}),
-		data.NewField("values", nil, []int64{10, 20}),
-	)
+	var response backend.DataResponse
 
 	// add the frames to the response.
-	response.Frames = append(response.Frames, frame)
+	for _, parsingOption := range qm.ParsingOptions {
+		frame, err := util.ParseData(
+			graphQLResponse.Data,
+			parsingOption,
+		)
+		if err != nil {
+			return nil, err
+		}
+		response.Frames = append(response.Frames, frame)
+	}
 
 	return &response, nil
 }
@@ -244,7 +242,7 @@ func (d *Datasource) query(ctx context.Context, pCtx backend.PluginContext, query backend.DataQuery) (*backend.DataResponse, error) {
 func (d *Datasource) CheckHealth(ctx context.Context, req *backend.CheckHealthRequest) (*backend.CheckHealthResult, error) {
 	// test command to do the same thing:
 	// curl -X POST -H "Content-Type: application/json" -d '{"query":"{\n\t\t __schema{\n\t\t\tqueryType{name}\n\t\t }\n\t\t}"}' https://swapi-graphql.netlify.app/.netlify/functions/index
-	graphQLRequest := GraphQLRequest{
+	graphQLRequest := util.GraphQLRequest{
 		Query: `{
 		  __schema{
 			queryType{name}
@@ -262,7 +260,7 @@ func (d *Datasource) CheckHealth(ctx context.Context, req *backend.CheckHealthRequest) (*backend.CheckHealthResult, error) {
 		return nil, err
 	}
 
-	graphQLResponse, responseParseError := ParseGraphQLResponse(resp.Body)
+	graphQLResponse, responseParseError := util.ParseGraphQLResponse(resp.Body)
 	if responseParseError != nil {
 		if resp.StatusCode == 200 {
 			return &backend.CheckHealthResult{
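
For reference, here is a minimal sketch of the query JSON that the updated `queryModel` can now unmarshal, with the new `parsingOptions` array. The GraphQL query, the `dataPath`/`timePath` values, and the `queryText` tag are illustrative assumptions; only `operationName`, `variables`, `parsingOptions`, `dataPath`, and `timePath` appear in this diff.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Local mirrors of ParsingOption and queryModel for illustration only.
// The "queryText" tag is an assumption; the other tags are taken from the diff above.
type parsingOption struct {
	DataPath string `json:"dataPath"`
	TimePath string `json:"timePath"`
}

type queryModel struct {
	QueryText      string          `json:"queryText"`
	OperationName  string          `json:"operationName"`
	Variables      interface{}     `json:"variables"`
	ParsingOptions []parsingOption `json:"parsingOptions"`
}

func main() {
	// Hypothetical query.JSON payload as the frontend might send it.
	raw := []byte(`{
		"queryText": "query { viewer { posts { publishedAt stats { likeCount } } } }",
		"operationName": "",
		"variables": {"first": 10},
		"parsingOptions": [
			{"dataPath": "viewer.posts", "timePath": "publishedAt"}
		]
	}`)

	var qm queryModel
	if err := json.Unmarshal(raw, &qm); err != nil {
		panic(err)
	}
	// Each parsing option selects one array in the response and produces one frame.
	fmt.Println(qm.ParsingOptions[0].DataPath, qm.ParsingOptions[0].TimePath)
}
```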

pkg/plugin/graphql.go renamed to pkg/plugin/util/graphql.go

Lines changed: 1 addition & 1 deletion

@@ -1,4 +1,4 @@
-package plugin
+package util
 
 import (
 	"bytes"

pkg/plugin/util/parsing.go

Lines changed: 212 additions & 0 deletions

@@ -0,0 +1,212 @@
+package util
+
+import (
+	"errors"
+	"fmt"
+	"github.com/grafana/grafana-plugin-sdk-go/data"
+	"reflect"
+	"strings"
+	"time"
+)
+
+// the purpose of this file is to parse JSON data with configuration from a ParsingOption
+
+type ParsingOption struct {
+	// The path from the root to the array. This is dot-delimited
+	DataPath string `json:"dataPath"`
+	// the time path relative to the data path.
+	TimePath string `json:"timePath"`
+}
+
+func ParseData(graphQlResponseData map[string]interface{}, parsingOption ParsingOption) (*data.Frame, error) {
+	if len(parsingOption.DataPath) == 0 {
+		return nil, errors.New("data path cannot be empty")
+	}
+	split := strings.Split(parsingOption.DataPath, ".")
+
+	var currentData map[string]interface{} = graphQlResponseData
+	for _, part := range split[:len(split)-1] {
+		newData, exists := currentData[part]
+		if !exists {
+			return nil, errors.New(fmt.Sprintf("Part of data path: %s does not exist! dataPath: %s", part, parsingOption.DataPath))
+		}
+		switch value := newData.(type) {
+		case map[string]interface{}:
+			currentData = value
+		default:
+			return nil, errors.New(fmt.Sprintf("Part of data path: %s is not a nested object! dataPath: %s", part, parsingOption.DataPath))
+		}
+	}
+	// after this for loop, currentData should be an array if everything is going well
+	finalData, finalDataExists := currentData[split[len(split)-1]]
+	if !finalDataExists {
+		return nil, errors.New(fmt.Sprintf("Final part of data path: %s does not exist! dataPath: %s", split[len(split)-1], parsingOption.DataPath))
+	}
+
+	var dataArray []map[string]interface{}
+	switch value := finalData.(type) {
+	case []interface{}:
+		dataArray = make([]map[string]interface{}, len(value))
+		for i, element := range value {
+			switch typedElement := element.(type) {
+			case map[string]interface{}:
+				dataArray[i] = typedElement
+			default:
+				return nil, errors.New(fmt.Sprintf("One of the elements inside the data array is not an object! element: %d is of type: %v", i, reflect.TypeOf(element)))
+			}
+		}
+	default:
+		return nil, errors.New(fmt.Sprintf("Final part of data path: is not an array! dataPath: %s type of result: %v", parsingOption.DataPath, reflect.TypeOf(value)))
+	}
+
+	// fieldMap is a map of keys to array of data points. Upon first initialization of a particular key's value,
+	// an array should be chosen corresponding to the first value of that key.
+	// Upon subsequent element insertion, if the type of the array does not match that elements type, an error is thrown.
+	// This error is never expected to occur because a correct GraphQL response should never have a particular field be of different types
+	fieldMap := map[string]interface{}{}
+
+	for _, dataElement := range dataArray {
+		flatData := map[string]interface{}{}
+		flattenData(dataElement, "", flatData)
+		for key, value := range flatData {
+			existingFieldValues, fieldValuesExist := fieldMap[key]
+
+			if key == parsingOption.TimePath {
+				var timeValue time.Time
+				switch valueValue := value.(type) {
+				case string:
+					// TODO allow user to customize time format
+					parsedTime, err := time.Parse(time.RFC3339, valueValue)
+					if err != nil {
+						return nil, errors.New(fmt.Sprintf("Time could not be parsed! Time: %s", valueValue))
+					}
+					timeValue = parsedTime
+				case float64:
+					timeValue = time.UnixMilli(int64(valueValue))
+				case bool:
+					return nil, errors.New("time field is a bool")
+				default:
+					// This case should never happen because we never expect other types to pop up here
+					return nil, errors.New(fmt.Sprintf("Unsupported time type! Time: %s type: %v", valueValue, reflect.TypeOf(valueValue)))
+				}
+				var fieldValues []time.Time
+				if fieldValuesExist {
+					switch typedExistingFieldValues := existingFieldValues.(type) {
+					case []time.Time:
+						fieldValues = typedExistingFieldValues
+					default:
+						return nil, errors.New(fmt.Sprintf("This error should never occur. The existing array for time field values is of the type: %v", reflect.TypeOf(existingFieldValues)))
+					}
+				} else {
+					fieldValues = []time.Time{}
+				}
+				fieldValues = append(fieldValues, timeValue)
+				fieldMap[key] = fieldValues
+			} else {
+				if fieldValuesExist {
+					switch typedExistingFieldValues := existingFieldValues.(type) {
+					case []float64:
+						switch typedValue := value.(type) {
+						case float64:
+							fieldMap[key] = append(typedExistingFieldValues, typedValue)
+						default:
+							return nil, errors.New(fmt.Sprintf("Existing field values for key: %s is float64, but got value with type: %v", key, reflect.TypeOf(value)))
+						}
+					case []string:
+						switch typedValue := value.(type) {
+						case string:
+							fieldMap[key] = append(typedExistingFieldValues, typedValue)
+						default:
+							return nil, errors.New(fmt.Sprintf("Existing field values for key: %s is string, but got value with type: %v", key, reflect.TypeOf(value)))
+						}
+					case []bool:
+						switch typedValue := value.(type) {
+						case bool:
+							fieldMap[key] = append(typedExistingFieldValues, typedValue)
+						default:
+							return nil, errors.New(fmt.Sprintf("Existing field values for key: %s is bool, but got value with type: %v", key, reflect.TypeOf(value)))
+						}
+					default:
+						return nil, errors.New(fmt.Sprintf("This error should never occur. The existing array for time field values is of the type: %v", reflect.TypeOf(existingFieldValues)))
+					}
+				} else {
+					switch typedValue := value.(type) {
+					case float64:
+						fieldMap[key] = []float64{typedValue}
+					case string:
+						fieldMap[key] = []string{typedValue}
+					case bool:
+						fieldMap[key] = []bool{typedValue}
+					default:
+						return nil, errors.New(fmt.Sprintf("Unsupported and unexpected type for key: %s. Type is: %v", key, reflect.TypeOf(value)))
+					}
+				}
+			}
+		}
+	}
+
+	// create data frame response.
+	// For an overview on data frames and how grafana handles them:
+	// https://grafana.com/developers/plugin-tools/introduction/data-frames
+	// The goal here is to output a long format. If needed, prepare time series can transform it
+	// https://grafana.com/docs/grafana/latest/panels-visualizations/query-transform-data/transform-data/#prepare-time-series
+
+	frame := data.NewFrame("response")
+
+	for key, values := range fieldMap {
+		frame.Fields = append(frame.Fields,
+			data.NewField(key, nil, values),
+		)
+	}
+
+	return frame, nil
+}
+
+func flattenArray[T interface{}](array []T, prefix string, flattenedData map[string]interface{}) {
+	for key, value := range array {
+		flattenedData[fmt.Sprintf("%s%d", prefix, key)] = value
+	}
+}
+
+func flattenData(originalData map[string]interface{}, prefix string, flattenedData map[string]interface{}) {
+	for key, value := range originalData {
+		switch valueValue := value.(type) {
+		case map[string]interface{}: // an object
+			flattenData(valueValue, prefix+key+".", flattenedData)
+		case []map[string]interface{}: // an array of objects
+			for subKey, subValue := range valueValue {
+				flattenData(subValue, fmt.Sprintf("%s%s.%d", prefix, key, subKey), flattenedData)
+			}
+		case []int:
+			flattenArray(valueValue, prefix+key+".", flattenedData)
+		case []int64:
+			flattenArray(valueValue, prefix+key+".", flattenedData)
+		case []float32:
+			flattenArray(valueValue, prefix+key+".", flattenedData)
+		case []float64:
+			flattenArray(valueValue, prefix+key+".", flattenedData)
+		case []bool:
+			flattenArray(valueValue, prefix+key+".", flattenedData)
+		case []uint:
+			flattenArray(valueValue, prefix+key+".", flattenedData)
+		case []uint64:
+			flattenArray(valueValue, prefix+key+".", flattenedData)
+		case []*int:
+			flattenArray(valueValue, prefix+key+".", flattenedData)
+		case []*int64:
+			flattenArray(valueValue, prefix+key+".", flattenedData)
+		case []*float32:
+			flattenArray(valueValue, prefix+key+".", flattenedData)
+		case []*float64:
+			flattenArray(valueValue, prefix+key+".", flattenedData)
+		case []*bool:
+			flattenArray(valueValue, prefix+key+".", flattenedData)
+		case []*uint:
+			flattenArray(valueValue, prefix+key+".", flattenedData)
+		case []*uint64:
+			flattenArray(valueValue, prefix+key+".", flattenedData)
+		default:
+			flattenedData[prefix+key] = valueValue
+		}
+	}
+}
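
To make the ParseData/flattenData behavior concrete, here is a minimal sketch of feeding a decoded GraphQL response through the new parser. The response shape (`viewer.posts` with `publishedAt` and `stats.likeCount`) is a made-up assumption for illustration. Numbers decode to float64 and objects to map[string]interface{} via encoding/json, which is what the type switches above expect; millisecond timestamps go through time.UnixMilli, RFC3339 strings through time.Parse, and nested objects are flattened into dot-delimited field names.

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/wildmountainfarms/wild-graphql-datasource/pkg/plugin/util"
)

func main() {
	// Hypothetical "data" object from a GraphQL response body.
	body := []byte(`{
		"viewer": {
			"posts": [
				{"publishedAt": 1700000000000, "stats": {"likeCount": 3}},
				{"publishedAt": 1700000060000, "stats": {"likeCount": 5}}
			]
		}
	}`)

	var responseData map[string]interface{}
	if err := json.Unmarshal(body, &responseData); err != nil {
		panic(err)
	}

	// dataPath walks viewer -> posts; timePath matches the flattened "publishedAt" key.
	frame, err := util.ParseData(responseData, util.ParsingOption{
		DataPath: "viewer.posts",
		TimePath: "publishedAt",
	})
	if err != nil {
		panic(err)
	}

	// Expect a long-format frame with fields "publishedAt" ([]time.Time) and the
	// flattened "stats.likeCount" ([]float64); field order follows map iteration.
	for _, field := range frame.Fields {
		fmt.Println(field.Name, field.Len())
	}
}
```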
