From f221b35602c3abcfa31ad1198008645906cdc328 Mon Sep 17 00:00:00 2001 From: Joseph Makar Date: Wed, 4 May 2022 11:40:42 -0400 Subject: [PATCH 1/2] Misc, minor cleanups/changes --- .gitignore | 1 + Magefile.go | 6 -- go.mod | 2 +- pkg/main.go | 5 +- pkg/plugin/client.go | 161 +++++++++++--------------------------- pkg/plugin/plugin.go | 133 +++++++++++++------------------ pkg/plugin/plugin_test.go | 3 +- pkg/plugin/resources.go | 31 +++++--- src/plugin.json | 12 +-- 9 files changed, 137 insertions(+), 217 deletions(-) diff --git a/.gitignore b/.gitignore index b315310..7ab98da 100644 --- a/.gitignore +++ b/.gitignore @@ -28,3 +28,4 @@ e2e-results/ # Editor .idea +.*.swp diff --git a/Magefile.go b/Magefile.go index f23b1e1..9260457 100644 --- a/Magefile.go +++ b/Magefile.go @@ -3,15 +3,9 @@ package main import ( - "fmt" // mage:import build "github.com/grafana/grafana-plugin-sdk-go/build" ) -// Hello prints a message (shows that you can define custom Mage targets). -func Hello() { - fmt.Println("hello plugin developer!") -} - // Default configures the default target. var Default = build.BuildAll diff --git a/go.mod b/go.mod index 3be33d9..8425cbb 100644 --- a/go.mod +++ b/go.mod @@ -1,4 +1,4 @@ -module github.com/grafana/grafana-starter-datasource-backend +module github.com/scalyr/scalyr-grafana-datasource-plugin go 1.16 diff --git a/pkg/main.go b/pkg/main.go index c5e77cc..44fb6bb 100644 --- a/pkg/main.go +++ b/pkg/main.go @@ -5,7 +5,8 @@ import ( "github.com/grafana/grafana-plugin-sdk-go/backend/datasource" "github.com/grafana/grafana-plugin-sdk-go/backend/log" - "github.com/grafana/grafana-starter-datasource-backend/pkg/plugin" + + "github.com/scalyr/scalyr-grafana-datasource-plugin/pkg/plugin" ) func main() { @@ -17,7 +18,7 @@ func main() { // from Grafana to create different instances of DataSetDatasource (per datasource // ID). When datasource configuration changed Dispose method will be called and // new datasource instance created using NewDataSetDatasource factory. 
- if err := datasource.Manage("myorgid-simple-backend-datasource", plugin.NewDataSetDatasource, datasource.ManageOpts{}); err != nil { + if err := datasource.Manage("sentinelone-dataset-datasource", plugin.NewDataSetDatasource, datasource.ManageOpts{}); err != nil { log.DefaultLogger.Error(err.Error()) os.Exit(1) } diff --git a/pkg/plugin/client.go b/pkg/plugin/client.go index fbedc63..387c3f1 100644 --- a/pkg/plugin/client.go +++ b/pkg/plugin/client.go @@ -6,6 +6,7 @@ import ( "fmt" "io" "net/http" + "net/url" "time" "github.com/grafana/grafana-plugin-sdk-go/backend/log" @@ -36,173 +37,105 @@ func NewDataSetClient(dataSetUrl string, apiKey string) *DataSetClient { } } -func (d *DataSetClient) DoLRQRequest(req LRQRequest) (LRQResult, error) { - var body []byte - body, _ = json.Marshal(req) - - request, err := http.NewRequest("POST", d.dataSetUrl+"/v2/api/queries", bytes.NewBuffer(body)) +func (d *DataSetClient) doPingRequest(req interface{}) (*LRQResult, error) { + body, err := json.Marshal(req) if err != nil { - log.DefaultLogger.Warn("error constructing request to DataSet", "err", err) - return LRQResult{}, err - } - request.Header.Set("Authorization", "Bearer "+d.apiKey) - request.Header.Set("Content-Type", "application/json") - - var responseBody LRQResult - stepsComplete, stepsTotal := 0, 1 - // Repeat ping requests for our query until we get a result with all steps steps complete - // TODO: A timeout or some other way of escaping besides an error - for stepsComplete < stepsTotal { - resp, err := d.netClient.Do(request) - if err != nil { - return LRQResult{}, err - } - defer resp.Body.Close() - responseBytes, err := io.ReadAll(resp.Body) - if err != nil { - log.DefaultLogger.Warn("error reading response from DataSet", "err", err) - return LRQResult{}, err - } - responseBody = LRQResult{} - err = json.Unmarshal(responseBytes, &responseBody) - if err != nil { - log.DefaultLogger.Warn(" error unmarshaling response from DataSet", "err", err) - return LRQResult{}, err - } - stepsTotal = responseBody.StepsTotal - stepsComplete = responseBody.StepsCompleted - // Build next ping request (which we might not use) - request, err = http.NewRequest("GET", fmt.Sprintf("%s/v2/api/queries/%s?lastStepSeen=%d", d.dataSetUrl, responseBody.Id, responseBody.StepsCompleted), nil) - if err != nil { - log.DefaultLogger.Warn("error constructing request to DataSet", "err", err) - return LRQResult{}, err - } - request.Header.Set("Authorization", "Bearer "+d.apiKey) - request.Header.Set("Content-Type", "application/json") + log.DefaultLogger.Warn("error marshalling request to DataSet", "err", err) + return nil, err } - return responseBody, nil -} - -func (d *DataSetClient) DoFacetValuesRequest(req FacetQuery) (LRQResult, error) { - var body []byte - body, _ = json.Marshal(req) - request, err := http.NewRequest("POST", d.dataSetUrl+"/v2/api/queries", bytes.NewBuffer(body)) if err != nil { log.DefaultLogger.Warn("error constructing request to DataSet", "err", err) - return LRQResult{}, err + return nil, err } request.Header.Set("Authorization", "Bearer "+d.apiKey) request.Header.Set("Content-Type", "application/json") var responseBody LRQResult stepsComplete, stepsTotal := 0, 1 + // Repeat ping requests for our query until we get a result with all steps steps complete - // TODO: A timeout or some other way of escaping besides an error for stepsComplete < stepsTotal { resp, err := d.netClient.Do(request) if err != nil { - log.DefaultLogger.Warn("error sending request to DataSet", "err", err) - return LRQResult{}, 
err + if e, ok := err.(*url.Error); ok && e.Timeout() { + log.DefaultLogger.Warn("request to DataSet timed out") + return nil, e + } else { + return nil, err + } } - defer resp.Body.Close() + responseBytes, err := io.ReadAll(resp.Body) + resp.Body.Close() if err != nil { log.DefaultLogger.Warn("error reading response from DataSet", "err", err) - return LRQResult{}, err + return nil, err } - responseBody = LRQResult{} - err = json.Unmarshal(responseBytes, &responseBody) - if err != nil { + + if err = json.Unmarshal(responseBytes, &responseBody); err != nil { log.DefaultLogger.Warn(" error unmarshaling response from DataSet", "err", err) - return LRQResult{}, err + return nil, err } + stepsTotal = responseBody.StepsTotal stepsComplete = responseBody.StepsCompleted + // Build next ping request (which we might not use) - request, err = http.NewRequest("GET", fmt.Sprintf("%s/v2/api/queries/%s?lastStepSeen=%d", d.dataSetUrl, responseBody.Id, responseBody.StepsCompleted), nil) + url := fmt.Sprintf("%s/v2/api/queries/%s?lastStepSeen=%d", d.dataSetUrl, responseBody.Id, responseBody.StepsCompleted) + request, err = http.NewRequest("GET", url, nil) if err != nil { log.DefaultLogger.Warn("error constructing request to DataSet", "err", err) - return LRQResult{}, err + return nil, err } request.Header.Set("Authorization", "Bearer "+d.apiKey) request.Header.Set("Content-Type", "application/json") } - return responseBody, nil + return &responseBody, nil } -func (d *DataSetClient) DoTopFacetRequest(req TopFacetRequest) (LRQResult, error) { - var body []byte - body, _ = json.Marshal(req) - - request, err := http.NewRequest("POST", d.dataSetUrl+"/v2/api/queries", bytes.NewBuffer(body)) - if err != nil { - log.DefaultLogger.Warn("error constructing request to DataSet", "err", err) - return LRQResult{}, err - } - request.Header.Set("Authorization", "Bearer "+d.apiKey) - request.Header.Set("Content-Type", "application/json") +func (d *DataSetClient) DoLRQRequest(req LRQRequest) (*LRQResult, error) { + return d.doPingRequest(req) +} - var responseBody LRQResult - stepsComplete, stepsTotal := 0, 1 - // Repeat ping requests for our query until we get a result with all steps steps complete - // TODO: A timeout or some other way of escaping besides an error - for stepsComplete < stepsTotal { - resp, err := d.netClient.Do(request) - if err != nil { - log.DefaultLogger.Warn("error sending request to DataSet", "err", err) - return LRQResult{}, err - } - defer resp.Body.Close() - responseBytes, err := io.ReadAll(resp.Body) - if err != nil { - log.DefaultLogger.Warn("error reading response from DataSet", "err", err) - return LRQResult{}, err - } - responseBody = LRQResult{} - err = json.Unmarshal(responseBytes, &responseBody) - if err != nil { - log.DefaultLogger.Warn(" error unmarshaling response from DataSet", "err", err) - return LRQResult{}, err - } - stepsTotal = responseBody.StepsTotal - stepsComplete = responseBody.StepsCompleted - // Build next ping request (which we might not use) - request, err = http.NewRequest("GET", fmt.Sprintf("%s/v2/api/queries/%s?lastStepSeen=%d", d.dataSetUrl, responseBody.Id, responseBody.StepsCompleted), nil) - if err != nil { - log.DefaultLogger.Warn("error constructing request to DataSet", "err", err) - return LRQResult{}, err - } - request.Header.Set("Authorization", "Bearer "+d.apiKey) - request.Header.Set("Content-Type", "application/json") - } +func (d *DataSetClient) DoFacetValuesRequest(req FacetQuery) (*LRQResult, error) { + return d.doPingRequest(req) +} - return 
responseBody, nil +func (d *DataSetClient) DoTopFacetRequest(req TopFacetRequest) (*LRQResult, error) { + return d.doPingRequest(req) } -func (d *DataSetClient) DoFacetRequest(req FacetRequest) int { - var body []byte - body, _ = json.Marshal(req) +func (d *DataSetClient) DoFacetRequest(req FacetRequest) (int, error) { + body, err := json.Marshal(req) + if err != nil { + log.DefaultLogger.Warn("error marshalling request to DataSet", "err", err) + return 0, err + } request, err := http.NewRequest("POST", d.dataSetUrl+"/api/facetQuery", bytes.NewBuffer(body)) + if err != nil { + log.DefaultLogger.Warn("error constructing request to DataSet", "err", err) + return 0, err + } request.Header.Set("Authorization", "Bearer "+d.apiKey) request.Header.Set("Content-Type", "application/json") resp, err := d.netClient.Do(request) if err != nil { log.DefaultLogger.Warn("error sending request to DataSet", "err", err) - return 0 + return 0, err } defer resp.Body.Close() responseBytes, err := io.ReadAll(resp.Body) if err != nil { log.DefaultLogger.Warn("error reading response from DataSet", "err", err) + return 0, err } - responseString := string(responseBytes) - log.DefaultLogger.Info("Result of request to facet", "body", responseString) + log.DefaultLogger.Info("Result of request to facet", "body", string(responseBytes)) - return resp.StatusCode + return resp.StatusCode, nil } diff --git a/pkg/plugin/plugin.go b/pkg/plugin/plugin.go index 0bdb180..3e3f041 100644 --- a/pkg/plugin/plugin.go +++ b/pkg/plugin/plugin.go @@ -3,9 +3,9 @@ package plugin import ( "context" "encoding/json" + "errors" "fmt" "math" - "strconv" "strings" "time" @@ -15,29 +15,11 @@ import ( "github.com/grafana/grafana-plugin-sdk-go/data" ) -// Make sure the datasource implements required interfaces. This is important to do -// since otherwise we will only get a not implemented error response from plugin in -// runtime. We implement backend.QueryDataHandler, -// backend.CheckHealthHandler interfaces. Plugin should not -// implement all these interfaces - only those which are required for a particular task. -// For example if plugin does not need streaming functionality then you are free to remove -// methods that implement backend.StreamHandler. Implementing instancemgmt.InstanceDisposer -// is useful to clean up resources used by previous datasource instance when a new datasource -// instance created upon datasource settings changed. 
-var ( - _ backend.QueryDataHandler = (*DataSetDatasource)(nil) - _ backend.CheckHealthHandler = (*DataSetDatasource)(nil) - _ backend.CallResourceHandler = (*DataSetDatasource)(nil) - _ instancemgmt.InstanceDisposer = (*DataSetDatasource)(nil) -) - func NewDataSetDatasource(settings backend.DataSourceInstanceSettings) (instancemgmt.Instance, error) { - type jsonData struct { + var unsecure struct { ScalyrUrl string `json:"scalyrUrl"` } - var unsecure jsonData - err := json.Unmarshal(settings.JSONData, &unsecure) - if err != nil { + if err := json.Unmarshal(settings.JSONData, &unsecure); err != nil { return nil, err } url := unsecure.ScalyrUrl @@ -45,10 +27,13 @@ func NewDataSetDatasource(settings backend.DataSourceInstanceSettings) (instance url = url[:len(url)-1] } - secure := settings.DecryptedSecureJSONData + apiKey, ok := settings.DecryptedSecureJSONData["apiKey"] + if !ok { + return nil, errors.New("apiKey not found") + } return &DataSetDatasource{ - dataSetClient: NewDataSetClient(url, secure["apiKey"]), + dataSetClient: NewDataSetClient(url, apiKey), }, nil } @@ -92,20 +77,24 @@ type queryModel struct { } func (d *DataSetDatasource) query(_ context.Context, pCtx backend.PluginContext, query backend.DataQuery) backend.DataResponse { + response := backend.DataResponse{} + // Unmarshal the JSON into our queryModel. var qm queryModel - response := backend.DataResponse{} + response.Error = json.Unmarshal(query.JSON, &qm) if response.Error != nil { return response } - buckets := int64(float64(query.TimeRange.To.Unix()-query.TimeRange.From.Unix()) / (query.Interval.Seconds())) + + buckets := int64(query.TimeRange.Duration().Seconds() / query.Interval.Seconds()) if buckets > 5000 { buckets = 5000 } if buckets < 1 { buckets = 1 } + var request LRQRequest if qm.QueryType == "Power Query" { request = LRQRequest{ @@ -146,7 +135,12 @@ func (d *DataSetDatasource) query(_ context.Context, pCtx backend.PluginContext, } } - result, _ := d.dataSetClient.DoLRQRequest(request) + var result *LRQResult + result, response.Error = d.dataSetClient.DoLRQRequest(request) + if response.Error != nil { + return response + } + if qm.QueryType == "Power Query" { return displayPQData(result, response) } else { @@ -154,19 +148,18 @@ func (d *DataSetDatasource) query(_ context.Context, pCtx backend.PluginContext, } } -func displayPlotData(result LRQResult, response backend.DataResponse) backend.DataResponse { +func displayPlotData(result *LRQResult, response backend.DataResponse) backend.DataResponse { resultData := PlotResultData{} - err := json.Unmarshal(result.Data, &resultData) - if err != nil { - log.DefaultLogger.Warn("error unmarshaling response from DataSet", "err", err) + response.Error = json.Unmarshal(result.Data, &resultData) + if response.Error != nil { + log.DefaultLogger.Warn("error unmarshaling response from DataSet", "err", response.Error) return response } if len(resultData.Plots) < 1 { - // No usable data return response } - // create data frame response. + frame := data.NewFrame("response") for i, plot := range resultData.Plots { @@ -180,59 +173,63 @@ func displayPlotData(result LRQResult, response backend.DataResponse) backend.Da ) for pIdx, point := range plot.Samples { if i == 0 { - times := time.Unix((resultData.XAxis[pIdx])/1000, 0) - frame.Set(i, pIdx, times) + sec := resultData.XAxis[pIdx] / 1000 + nsec := (resultData.XAxis[pIdx] % 1000) * 1000000 + frame.Set(i, pIdx, time.Unix(sec, nsec)) } frame.Set(i+1, pIdx, point) } } - // add the frames to the response. 
response.Frames = append(response.Frames, frame) return response } -func displayPQData(result LRQResult, response backend.DataResponse) backend.DataResponse { +func displayPQData(result *LRQResult, response backend.DataResponse) backend.DataResponse { resultData := TableResultData{} - err := json.Unmarshal(result.Data, &resultData) - if err != nil { + + response.Error = json.Unmarshal(result.Data, &resultData) + if response.Error != nil { return response } if len(resultData.Values) < 1 { return response } + frame := data.NewFrame("response") + // Iterate over the data to modify the result into dataframe acceptable format for idx, col := range resultData.Columns { - switch cellType := col.Type; { - case cellType == TIMESTAMP: + if cellType := col.Type; cellType == TIMESTAMP { res := make([]time.Time, len(resultData.Values)) for i, val := range resultData.Values { - timeInInt := int64(val[idx].(float64)) - res[i] = time.Unix(timeInInt/1000000000, 0) // convert nanoseconds to Time.time format + if w, ok := val[idx].(float64); ok { + sec := int64(w / 1000000000) + nsec := int64(math.Mod(w, 1000000000)) + res[i] = time.Unix(sec, nsec) + } } frame.Fields = append(frame.Fields, data.NewField(col.Name, nil, res), ) - break - case cellType == PERCENTAGE: + } else if cellType == PERCENTAGE { res := make([]string, len(resultData.Values)) for i, val := range resultData.Values { if w, ok := val[idx].(int); ok { - res[i] = strconv.FormatInt(int64(w), 10) + "%" + res[i] = fmt.Sprintf("%d%%", w) } } frame.Fields = append(frame.Fields, data.NewField(col.Name, nil, res), ) - break - case cellType == NUMBER && col.DecimalPlaces > 0: + } else if cellType == NUMBER && col.DecimalPlaces > 0 { res := make([]float64, len(resultData.Values)) for i, val := range resultData.Values { switch val[idx].(type) { case float32: res[i] = float64(val[idx].(float32)) - break + case float64: + res[i] = val[idx].(float64) case string: if val[idx] == "Infinity" { res[i] = math.Inf(1) @@ -241,57 +238,36 @@ func displayPQData(result LRQResult, response backend.DataResponse) backend.Data } else if val[idx] == "NaN" { res[i] = math.NaN() } - break - default: - res[i] = val[idx].(float64) } } frame.Fields = append(frame.Fields, data.NewField(col.Name, nil, res), ) - break - case cellType == NUMBER && col.DecimalPlaces <= 0: + } else if cellType == NUMBER && col.DecimalPlaces <= 0 { res := make([]int64, len(resultData.Values)) for i, val := range resultData.Values { switch val[idx].(type) { case int: res[i] = int64(val[idx].(int)) - break case int16: res[i] = int64(val[idx].(int16)) - break case int32: res[i] = int64(val[idx].(int32)) - break + case int64: + res[i] = val[idx].(int64) case float32: res[i] = int64(val[idx].(float32)) - break case float64: res[i] = int64(val[idx].(float64)) - break - default: - res[i] = val[idx].(int64) } } frame.Fields = append(frame.Fields, data.NewField(col.Name, nil, res), ) - break - default: + } else { res := make([]string, len(resultData.Values)) for i, val := range resultData.Values { - switch val[idx].(type) { - case string: - res[i] = val[idx].(string) - break - case bool: - if w, ok := val[idx].(bool); ok { - res[i] = strconv.FormatBool(w) - } - break - default: - - } + res[i] = fmt.Sprintf("%v", val[idx]) } frame.Fields = append(frame.Fields, data.NewField(col.Name, nil, res), @@ -307,12 +283,14 @@ func displayPQData(result LRQResult, response backend.DataResponse) backend.Data // datasource configuration page which allows users to verify that // a datasource is working as expected. 
func (d *DataSetDatasource) CheckHealth(_ context.Context, req *backend.CheckHealthRequest) (*backend.CheckHealthResult, error) { - request := FacetRequest{ + statusCode, err := d.dataSetClient.DoFacetRequest(FacetRequest{ QueryType: "facet", MaxCount: 1, Field: "test", + }) + if err != nil { + return nil, err } - statusCode := d.dataSetClient.DoFacetRequest(request) if statusCode != 200 { return &backend.CheckHealthResult{ @@ -328,8 +306,7 @@ func (d *DataSetDatasource) CheckHealth(_ context.Context, req *backend.CheckHea } func (d *DataSetDatasource) CollectMetrics(_ context.Context, req *backend.CollectMetricsRequest) (*backend.CollectMetricsResult, error) { - var prometheusMetrics []byte return &backend.CollectMetricsResult{ - PrometheusMetrics: prometheusMetrics, + PrometheusMetrics: []byte{}, }, nil } diff --git a/pkg/plugin/plugin_test.go b/pkg/plugin/plugin_test.go index 19dcd3c..9d458e4 100644 --- a/pkg/plugin/plugin_test.go +++ b/pkg/plugin/plugin_test.go @@ -5,7 +5,8 @@ import ( "testing" "github.com/grafana/grafana-plugin-sdk-go/backend" - "github.com/grafana/grafana-starter-datasource-backend/pkg/plugin" + + "github.com/scalyr/scalyr-grafana-datasource-plugin/pkg/plugin" ) // This is where the tests for the datasource backend live. diff --git a/pkg/plugin/resources.go b/pkg/plugin/resources.go index f2439df..eb5e746 100644 --- a/pkg/plugin/resources.go +++ b/pkg/plugin/resources.go @@ -17,8 +17,7 @@ func (d *DataSetDatasource) CallResource(ctx context.Context, req *backend.CallR switch req.Path { case "facet-query": var fm filterModal - err1 := json.Unmarshal(req.Body, &fm) - if err1 != nil { + if err := json.Unmarshal(req.Body, &fm); err != nil { return sender.Send(&backend.CallResourceResponse{ Status: http.StatusBadRequest, }) @@ -30,13 +29,17 @@ func (d *DataSetDatasource) CallResource(ctx context.Context, req *backend.CallR MaxValues: "100", }, } - result, _ := d.dataSetClient.DoFacetValuesRequest(request) - facetResultData := FacetList{} - err := json.Unmarshal(result.Data, &facetResultData) + result, err := d.dataSetClient.DoFacetValuesRequest(request) if err != nil { + return sender.Send(&backend.CallResourceResponse{ + Status: http.StatusInternalServerError, + }) + } + var facetResultData FacetList + if err := json.Unmarshal(result.Data, &facetResultData); err != nil { log.DefaultLogger.Warn("error unmarshaling response from FACETS LIST query", "err", err) return sender.Send(&backend.CallResourceResponse{ - Status: http.StatusNotFound, + Status: http.StatusInternalServerError, }) } finalResponse := make([]string, len(facetResultData.Facet.Values)) @@ -47,6 +50,9 @@ func (d *DataSetDatasource) CallResource(ctx context.Context, req *backend.CallR jsonStr, err := json.Marshal(pb) if err != nil { log.DefaultLogger.Warn("could not marshal facets JSON: %s", err) + return sender.Send(&backend.CallResourceResponse{ + Status: http.StatusInternalServerError, + }) } return sender.Send(&backend.CallResourceResponse{ Status: http.StatusOK, @@ -61,10 +67,14 @@ func (d *DataSetDatasource) CallResource(ctx context.Context, req *backend.CallR Filter: "tag", }, } - result, _ := d.dataSetClient.DoTopFacetRequest(request) - topFacets := TopFacets{} - err := json.Unmarshal(result.Data, &topFacets) + result, err := d.dataSetClient.DoTopFacetRequest(request) if err != nil { + return sender.Send(&backend.CallResourceResponse{ + Status: http.StatusInternalServerError, + }) + } + var topFacets TopFacets + if err := json.Unmarshal(result.Data, &topFacets); err != nil { 
log.DefaultLogger.Warn("error unmarshaling response from TOP FACETS query", "err", err) return sender.Send(&backend.CallResourceResponse{ Status: http.StatusNotFound, @@ -73,6 +83,9 @@ func (d *DataSetDatasource) CallResource(ctx context.Context, req *backend.CallR jsonStr, err := json.Marshal(topFacets) if err != nil { log.DefaultLogger.Warn("could not marshal JSON: %s", err) + return sender.Send(&backend.CallResourceResponse{ + Status: http.StatusInternalServerError, + }) } return sender.Send(&backend.CallResourceResponse{ Status: http.StatusOK, diff --git a/src/plugin.json b/src/plugin.json index b73da60..0d35632 100644 --- a/src/plugin.json +++ b/src/plugin.json @@ -11,7 +11,7 @@ "info": { "description": "Scalyr Observability Platform", "author": { - "name": "Yan Shnayder", + "name": "DataSet", "url": "https://www.dataset.com/" }, "keywords": ["database"], @@ -22,11 +22,11 @@ "links": [ { "name": "Website", - "url": "https://github.com/grafana/grafana-starter-datasource-backend" + "url": "https://github.com/scalyr/scalyr-grafana-datasource-plugin" }, { "name": "License", - "url": "https://github.com/grafana/grafana-starter-datasource-backend/blob/master/LICENSE" + "url": "https://github.com/scalyr/scalyr-grafana-datasource-plugin/blob/master/LICENSE" } ], "screenshots": [ @@ -43,11 +43,11 @@ "path": "img/DatasetConfig.png" } ], - "version": "1.0.0", - "updated": "2022-02-16" + "version": "3.0.1", + "updated": "2022-05-04" }, "dependencies": { "grafanaDependency": ">=7.0.0", "plugins": [] } -} \ No newline at end of file +} From 588854bdd05a50aa3d4d82f384ddb2e48005062c Mon Sep 17 00:00:00 2001 From: Joseph Makar Date: Wed, 4 May 2022 14:21:09 -0400 Subject: [PATCH 2/2] Feedback resolutions from Grafana support --- CHANGELOG.md | 2 +- README.md | 100 +++++++++++++++++++++++++++------------- pkg/plugin/client.go | 22 +++++---- pkg/plugin/plugin.go | 8 +--- pkg/plugin/resources.go | 8 ++-- 5 files changed, 85 insertions(+), 55 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9d63e78..542f92c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,5 @@ # Changelog -## 1.0.0 (Unreleased) +## 3.0.0 (Unreleased) Initial release. diff --git a/README.md b/README.md index 656fb86..c3c66ff 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,8 @@ and dashboards in Grafana using data in Dataset. You may want to use this plugin to allow you to visualize Dataset data next to other data sources, for instance when you want to monitor many feeds on a single dashboard. -![SystemDashboard](https://github.com/scalyr/scalyr-grafana-datasource-plugin/blob/go-rewrite-v2/src/img/SystemDashboard.png) + +![SystemDashboard](https://raw.githubusercontent.com/scalyr/scalyr-grafana-datasource-plugin/go-rewrite-v2/src/img/SystemDashboard.png) With the Dataset plugin, you will be able to create and visualize your log-based metrics along side all of your other data sources. It's a great way to have a @@ -27,7 +28,7 @@ can find documentation on API Keys [here](https://www.scalyr.com/help/api#scalyr ## Getting started -### Installing with grafana-cli +### Installing the latest / stable version with grafana-cli 1. To install the stable version of the plugin using grafana-cli, run the following command: @@ -37,13 +38,9 @@ can find documentation on API Keys [here](https://www.scalyr.com/help/api#scalyr plugins install sentinelone-dataset-datasource ``` -2. 
Update your Grafana configuration in the `grafana.ini` file to allow this plugin by adding the following line: + Older versions can be downloaded from [github releases](https://github.com/scalyr/scalyr-grafana-datasource-plugin/releases). - ```bash - allow_loading_unsigned_plugins = sentinelone-dataset-datasource - ``` - -3. Adding plugins requires a restart of your grafana server. +2. Adding plugins requires a restart of your grafana server. For init.d based services you can use the command: @@ -57,41 +54,60 @@ can find documentation on API Keys [here](https://www.scalyr.com/help/api#scalyr systemctl restart grafana-server ``` -If you require the development version, use the manual installation instructions. -### Installing manually +### Building a development version and installing manually -1. If you want a stable version of plugin, download the desired version from -[github releases](https://github.com/scalyr/scalyr-grafana-datasource-plugin/releases). -If you want the `development` version of the plugin, -clone the [plugin repository](https://github.com/scalyr/scalyr-grafana-datasource-plugin) -from GitHub. Switch to branch `go-rewrite-v2` +1. To build and install the `development` version of the plugin, clone the +[plugin repository](https://github.com/scalyr/scalyr-grafana-datasource-plugin) from GitHub. ```bash git clone https://github.com/scalyr/scalyr-grafana-datasource-plugin.git ``` -2. Grafana plugins exist in the directory: `/var/lib/grafana/plugins/`. Create a folder for the dataset plugin: +2. Build the Golang backend (with the version defined in go.mod, currently 1.16) using Mage ```bash - mkdir /var/lib/grafana/plugins/dataset + mage + ``` + + This will build the executables in `dist/` + + To install Mage (Golang make-like build tool): + + ```bash + git clone https://github.com/magefile/mage $GOPATH/src/github.com/magefile/mage + cd $GOPATH/src/github.com/magefile/mage + git checkout tags/v1.12.1 # Specified in go.mod + go run bootstrap.go ``` -3. Copy the contents of the Dataset plugin into grafana: + A `mage` executable should now be in `$GOPATH/bin/`. - Stable version: +3. Build the Typescript frontend using LTS Node (>= v14) and Yarn ```bash - tar -xvf scalyr_grafana_plugin_51057f6.tar.gz - cp -rf dist/ /var/lib/grafana/plugins/scalyr/ + yarn install --pure-lockfile # Install dependencies into node_modules + yarn build ``` - Development version: + This will build and the frontend files in `dist/` + + To install Yarn: `npm install --global yarn` + + +5. For development versions, simply copy the files to the Grafana server plugin directory ```bash - cp -r scalyr-grafana-datasource/dist/ /var/lib/grafana/plugins/scalyr/ + mkdir /var/lib/grafana/plugins/dataset + # copy files from dist/ into /var/lib/grafana/plugins/dataset ``` -4. Adding plugins requires a restart of your grafana server. + Note that this is an unsigned plugin, and you must update your `grafana.ini` file to allow it adding the following line: + + ```bash + allow_loading_unsigned_plugins = sentinelone-dataset-datasource + ``` + +6. Adding plugins requires a restart of your grafana server. For init.d based services you can use the command: @@ -104,26 +120,44 @@ from GitHub. 
Switch to branch `go-rewrite-v2`
+1. To build and install the `development` version of the plugin, clone the
+[plugin repository](https://github.com/scalyr/scalyr-grafana-datasource-plugin) from GitHub.

   ```bash
   git clone https://github.com/scalyr/scalyr-grafana-datasource-plugin.git
   ```

-2. Grafana plugins exist in the directory: `/var/lib/grafana/plugins/`. Create a folder for the dataset plugin:
+2. Build the Golang backend (with the version defined in go.mod, currently 1.16) using Mage

    ```bash
-   mkdir /var/lib/grafana/plugins/dataset
+   mage
+   ```
+
+   This will build the executables in `dist/`
+
+   To install Mage (a Golang make-like build tool):
+
+   ```bash
+   git clone https://github.com/magefile/mage $GOPATH/src/github.com/magefile/mage
+   cd $GOPATH/src/github.com/magefile/mage
+   git checkout tags/v1.12.1 # Specified in go.mod
+   go run bootstrap.go
    ```

-3. Copy the contents of the Dataset plugin into grafana:
+   A `mage` executable should now be in `$GOPATH/bin/`.

-   Stable version:
+3. Build the Typescript frontend using LTS Node (>= v14) and Yarn

    ```bash
-   tar -xvf scalyr_grafana_plugin_51057f6.tar.gz
-   cp -rf dist/ /var/lib/grafana/plugins/scalyr/
+   yarn install --pure-lockfile # Install dependencies into node_modules
+   yarn build
    ```

-   Development version:
+   This will build the frontend files in `dist/`
+
+   To install Yarn: `npm install --global yarn`
+
+4. For development versions, simply copy the files to the Grafana server plugin directory

    ```bash
-   cp -r scalyr-grafana-datasource/dist/ /var/lib/grafana/plugins/scalyr/
+   mkdir /var/lib/grafana/plugins/dataset
+   # copy files from dist/ into /var/lib/grafana/plugins/dataset
    ```

-4. Adding plugins requires a restart of your grafana server.
+   Note that this is an unsigned plugin, and you must update your `grafana.ini` file to allow it by adding the following line:
+
+   ```bash
+   allow_loading_unsigned_plugins = sentinelone-dataset-datasource
+   ```
+
+5. Adding plugins requires a restart of your grafana server.

    For init.d based services you can use the command:

    ```bash
    sudo service grafana-server restart
    ```

    For systemd based services you can use the following:

    ```bash
    systemctl restart grafana-server
    ```
+
+### Package and sign the plugin
+
+To sign and package the plugin for distribution:
+
+```bash
+export GRAFANA_API_KEY=
+npx @grafana/toolkit plugin:sign # This creates dist/MANIFEST.txt
+
+cp -r dist sentinelone-dataset-datasource
+zip -r sentinelone-dataset-datasource-$(jq -r .info.version sentinelone-dataset-datasource/plugin.json).zip sentinelone-dataset-datasource
+rm -rf sentinelone-dataset-datasource # Cleanup
+```
+
+References
+- https://grafana.com/docs/grafana/latest/developers/plugins/package-a-plugin/
+- https://grafana.com/docs/grafana/latest/developers/plugins/sign-a-plugin/
+
 ### Verify the Plugin was Installed

 1. In order to verify proper installation you must log in to your grafana instance
    and navigate to **Configuration Settings -> Data Sources**.

-   ![ConfigDataSource](https://github.com/scalyr/scalyr-grafana-datasource-plugin/blob/go-rewrite-v2/src/img/ConfigDataSource.png)
+   ![ConfigDataSource](https://raw.githubusercontent.com/scalyr/scalyr-grafana-datasource-plugin/go-rewrite-v2/src/img/ConfigDataSource.png)

 2. This will take you into the configuration page. If you already have other data
    sources installed, you will see them show up here. Click on the **Add data
    source** button:

-   ![DatasetConfig](https://github.com/scalyr/scalyr-grafana-datasource-plugin/blob/go-rewrite-v2/src/img/DatasetConfig.png)
+   ![DatasetConfig](https://raw.githubusercontent.com/scalyr/scalyr-grafana-datasource-plugin/go-rewrite-v2/src/img/DatasetConfig.png)

 3. If you enter "Dataset" in the search bar on the resulting page you should see
    the "Dataset" Grafana plugin show up as an option.

-   ![SearchForPlugin](https://github.com/scalyr/scalyr-grafana-datasource-plugin/blob/go-rewrite-v2/src/img/SearchForPlugin.png)
+   ![SearchForPlugin](https://raw.githubusercontent.com/scalyr/scalyr-grafana-datasource-plugin/go-rewrite-v2/src/img/SearchForPlugin.png)

 4. Click on ***“Select”***. This will take you to a configuration page where you
    insert your API key mentioned in the prerequisite section.

-   ![PluginConfig](https://github.com/scalyr/scalyr-grafana-datasource-plugin/blob/go-rewrite-v2/src/img/PluginConfig.png)
+   ![PluginConfig](https://raw.githubusercontent.com/scalyr/scalyr-grafana-datasource-plugin/go-rewrite-v2/src/img/PluginConfig.png)

 5. Enter these settings:

@@ -141,17 +175,17 @@ using Scalyr data.

 1. Create a new dashboard by clicking Create > dashboard

-   ![CreateDashboard](https://github.com/scalyr/scalyr-grafana-datasource-plugin/blob/go-rewrite-v2/src/img/CreateDashboard.png)
+   ![CreateDashboard](https://raw.githubusercontent.com/scalyr/scalyr-grafana-datasource-plugin/go-rewrite-v2/src/img/CreateDashboard.png)

 2. In the **“New dashboard”** box, select the **“Add a new panel”** icon

 3. From the Data source dropdown, select **"Dataset"**.

-   ![DataSetPlugin](https://github.com/scalyr/scalyr-grafana-datasource-plugin/blob/go-rewrite-v2/src/img/DatasetPlugin.png)
+   ![DataSetPlugin](https://raw.githubusercontent.com/scalyr/scalyr-grafana-datasource-plugin/go-rewrite-v2/src/img/DatasetPlugin.png)

 4. A 'Query Type' field allows you to choose the type of query you want to run

    ![QueryType](https://raw.githubusercontent.com/scalyr/scalyr-grafana-datasource-plugin/go-rewrite-v2/src/img/QueryType.png)

 5.
'Standard Query' - A standard query allows you to search in the Graph view. You can enter Graph Functions
    into the expression box and visualize the results. You can even enter and visualize Complex Expressions

    Visit [this documentation](https://app.scalyr.com/help/query-language) for more information on building queries.

    Enter an expression and click the save button. In the image below, we've added a query that
    visualizes the number of log messages containing the word "error"

    ![StandardQuery](https://raw.githubusercontent.com/scalyr/scalyr-grafana-datasource-plugin/go-rewrite-v2/src/img/StandardQuery.png)

 6. 'Power Query' - Works similarly to PQ search in the Dataset app. You can enter a rich set of commands
    for transforming and manipulating data, and the results can be viewed in table format. Visit
    [this documentation](https://app.scalyr.com/help/power-queries) for more information on building Power Queries

    ![PowerQuery](https://raw.githubusercontent.com/scalyr/scalyr-grafana-datasource-plugin/go-rewrite-v2/src/img/PowerQuery.png)

 You’ve successfully installed, configured and created a graph in Grafana using Dataset data!
diff --git a/pkg/plugin/client.go b/pkg/plugin/client.go
index 387c3f1..a6d8193 100644
--- a/pkg/plugin/client.go
+++ b/pkg/plugin/client.go
@@ -26,6 +26,8 @@ type DataSetClient struct {
 }
 
 func NewDataSetClient(dataSetUrl string, apiKey string) *DataSetClient {
+	// Consider using the backend.httpclient package provided by the Grafana SDK.
+	// This would allow a per-instance configurable timeout, rather than the hardcoded value here.
 	var netClient = &http.Client{
 		Timeout: time.Second * 10,
 	}
@@ -40,13 +42,13 @@ func NewDataSetClient(dataSetUrl string, apiKey string) *DataSetClient {
 func (d *DataSetClient) doPingRequest(req interface{}) (*LRQResult, error) {
 	body, err := json.Marshal(req)
 	if err != nil {
-		log.DefaultLogger.Warn("error marshalling request to DataSet", "err", err)
+		log.DefaultLogger.Error("error marshalling request to DataSet", "err", err)
 		return nil, err
 	}
 
 	request, err := http.NewRequest("POST", d.dataSetUrl+"/v2/api/queries", bytes.NewBuffer(body))
 	if err != nil {
-		log.DefaultLogger.Warn("error constructing request to DataSet", "err", err)
+		log.DefaultLogger.Error("error constructing request to DataSet", "err", err)
 		return nil, err
 	}
 	request.Header.Set("Authorization", "Bearer "+d.apiKey)
@@ -60,7 +62,7 @@ func (d *DataSetClient) doPingRequest(req interface{}) (*LRQResult,
error) { url := fmt.Sprintf("%s/v2/api/queries/%s?lastStepSeen=%d", d.dataSetUrl, responseBody.Id, responseBody.StepsCompleted) request, err = http.NewRequest("GET", url, nil) if err != nil { - log.DefaultLogger.Warn("error constructing request to DataSet", "err", err) + log.DefaultLogger.Error("error constructing request to DataSet", "err", err) return nil, err } request.Header.Set("Authorization", "Bearer "+d.apiKey) @@ -111,13 +113,13 @@ func (d *DataSetClient) DoTopFacetRequest(req TopFacetRequest) (*LRQResult, erro func (d *DataSetClient) DoFacetRequest(req FacetRequest) (int, error) { body, err := json.Marshal(req) if err != nil { - log.DefaultLogger.Warn("error marshalling request to DataSet", "err", err) + log.DefaultLogger.Error("error marshalling request to DataSet", "err", err) return 0, err } request, err := http.NewRequest("POST", d.dataSetUrl+"/api/facetQuery", bytes.NewBuffer(body)) if err != nil { - log.DefaultLogger.Warn("error constructing request to DataSet", "err", err) + log.DefaultLogger.Error("error constructing request to DataSet", "err", err) return 0, err } request.Header.Set("Authorization", "Bearer "+d.apiKey) @@ -125,14 +127,14 @@ func (d *DataSetClient) DoFacetRequest(req FacetRequest) (int, error) { resp, err := d.netClient.Do(request) if err != nil { - log.DefaultLogger.Warn("error sending request to DataSet", "err", err) + log.DefaultLogger.Error("error sending request to DataSet", "err", err) return 0, err } defer resp.Body.Close() responseBytes, err := io.ReadAll(resp.Body) if err != nil { - log.DefaultLogger.Warn("error reading response from DataSet", "err", err) + log.DefaultLogger.Error("error reading response from DataSet", "err", err) return 0, err } log.DefaultLogger.Info("Result of request to facet", "body", string(responseBytes)) diff --git a/pkg/plugin/plugin.go b/pkg/plugin/plugin.go index 3e3f041..97fe232 100644 --- a/pkg/plugin/plugin.go +++ b/pkg/plugin/plugin.go @@ -153,7 +153,7 @@ func displayPlotData(result *LRQResult, response backend.DataResponse) backend.D response.Error = json.Unmarshal(result.Data, &resultData) if response.Error != nil { - log.DefaultLogger.Warn("error unmarshaling response from DataSet", "err", response.Error) + log.DefaultLogger.Error("error unmarshaling response from DataSet", "err", response.Error) return response } if len(resultData.Plots) < 1 { @@ -304,9 +304,3 @@ func (d *DataSetDatasource) CheckHealth(_ context.Context, req *backend.CheckHea Message: "Successfully connected to DataSet", }, nil } - -func (d *DataSetDatasource) CollectMetrics(_ context.Context, req *backend.CollectMetricsRequest) (*backend.CollectMetricsResult, error) { - return &backend.CollectMetricsResult{ - PrometheusMetrics: []byte{}, - }, nil -} diff --git a/pkg/plugin/resources.go b/pkg/plugin/resources.go index eb5e746..6d2bb9b 100644 --- a/pkg/plugin/resources.go +++ b/pkg/plugin/resources.go @@ -37,7 +37,7 @@ func (d *DataSetDatasource) CallResource(ctx context.Context, req *backend.CallR } var facetResultData FacetList if err := json.Unmarshal(result.Data, &facetResultData); err != nil { - log.DefaultLogger.Warn("error unmarshaling response from FACETS LIST query", "err", err) + log.DefaultLogger.Error("error unmarshaling response from FACETS LIST query", "err", err) return sender.Send(&backend.CallResourceResponse{ Status: http.StatusInternalServerError, }) @@ -49,7 +49,7 @@ func (d *DataSetDatasource) CallResource(ctx context.Context, req *backend.CallR pb := &FacetResponse{Value: finalResponse} jsonStr, err := json.Marshal(pb) if 
err != nil { - log.DefaultLogger.Warn("could not marshal facets JSON: %s", err) + log.DefaultLogger.Error("could not marshal facets JSON", "err", err) return sender.Send(&backend.CallResourceResponse{ Status: http.StatusInternalServerError, }) @@ -75,14 +75,14 @@ func (d *DataSetDatasource) CallResource(ctx context.Context, req *backend.CallR } var topFacets TopFacets if err := json.Unmarshal(result.Data, &topFacets); err != nil { - log.DefaultLogger.Warn("error unmarshaling response from TOP FACETS query", "err", err) + log.DefaultLogger.Error("error unmarshaling response from TOP FACETS query", "err", err) return sender.Send(&backend.CallResourceResponse{ Status: http.StatusNotFound, }) } jsonStr, err := json.Marshal(topFacets) if err != nil { - log.DefaultLogger.Warn("could not marshal JSON: %s", err) + log.DefaultLogger.Error("could not marshal JSON", "err", err) return sender.Send(&backend.CallResourceResponse{ Status: http.StatusInternalServerError, })