Skip to content

Commit

Permalink
feat(api): update via SDK Studio (#21)
Browse files Browse the repository at this point in the history
  • Loading branch information
stainless-app[bot] authored and stainless-bot committed Jul 17, 2024
1 parent b142e08 commit f83d4cf
Show file tree
Hide file tree
Showing 5 changed files with 186 additions and 1 deletion.
2 changes: 1 addition & 1 deletion .stats.yml
Original file line number Diff line number Diff line change
@@ -1 +1 @@
configured_endpoints: 8
configured_endpoints: 9
10 changes: 10 additions & 0 deletions api.md
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,16 @@ Methods:

- <code title="post /inference-pipelines/{inferencePipelineId}/data-stream">client.InferencePipelines.Data.<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go#InferencePipelineDataService.Stream">Stream</a>(ctx <a href="https://pkg.go.dev/context">context</a>.<a href="https://pkg.go.dev/context#Context">Context</a>, inferencePipelineID <a href="https://pkg.go.dev/builtin#string">string</a>, body <a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go">openlayer</a>.<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go#InferencePipelineDataStreamParams">InferencePipelineDataStreamParams</a>) (<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go">openlayer</a>.<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go#InferencePipelineDataStreamResponse">InferencePipelineDataStreamResponse</a>, <a href="https://pkg.go.dev/builtin#error">error</a>)</code>

## Rows

Response Types:

- <a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go">openlayer</a>.<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go#InferencePipelineRowStreamResponse">InferencePipelineRowStreamResponse</a>

Methods:

- <code title="put /inference-pipelines/{inferencePipelineId}/rows">client.InferencePipelines.Rows.<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go#InferencePipelineRowService.Stream">Stream</a>(ctx <a href="https://pkg.go.dev/context">context</a>.<a href="https://pkg.go.dev/context#Context">Context</a>, inferencePipelineID <a href="https://pkg.go.dev/builtin#string">string</a>, params <a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go">openlayer</a>.<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go#InferencePipelineRowStreamParams">InferencePipelineRowStreamParams</a>) (<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go">openlayer</a>.<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go#InferencePipelineRowStreamResponse">InferencePipelineRowStreamResponse</a>, <a href="https://pkg.go.dev/builtin#error">error</a>)</code>

## TestResults

Response Types:
Expand Down
2 changes: 2 additions & 0 deletions inferencepipeline.go
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ import (
type InferencePipelineService struct {
	// Options holds request options applied to every call made through this service.
	Options []option.RequestOption
	// Data is the sub-service for the data-stream endpoints.
	Data *InferencePipelineDataService
	// Rows is the sub-service for streaming row updates (PUT .../rows).
	Rows *InferencePipelineRowService
	// TestResults is the sub-service for test results — see api.md; TODO confirm scope.
	TestResults *InferencePipelineTestResultService
}

Expand All @@ -25,6 +26,7 @@ func NewInferencePipelineService(opts ...option.RequestOption) (r *InferencePipe
r = &InferencePipelineService{}
r.Options = opts
r.Data = NewInferencePipelineDataService(opts...)
r.Rows = NewInferencePipelineRowService(opts...)
r.TestResults = NewInferencePipelineTestResultService(opts...)
return
}
123 changes: 123 additions & 0 deletions inferencepipelinerow.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,123 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

package openlayer

import (
"context"
"errors"
"fmt"
"net/http"
"net/url"

"github.com/openlayer-ai/openlayer-go/internal/apijson"
"github.com/openlayer-ai/openlayer-go/internal/apiquery"
"github.com/openlayer-ai/openlayer-go/internal/param"
"github.com/openlayer-ai/openlayer-go/internal/requestconfig"
"github.com/openlayer-ai/openlayer-go/option"
)

// InferencePipelineRowService contains methods and other services that help with
// interacting with the openlayer API.
//
// Note, unlike clients, this service does not read variables from the environment
// automatically. You should not instantiate this service directly; use the
// [NewInferencePipelineRowService] method instead.
type InferencePipelineRowService struct {
	// Options holds request options applied to every call made through this service.
	Options []option.RequestOption
}

// NewInferencePipelineRowService generates a new service that applies the given
// options to each request. These options are applied after the parent client's
// options (if there is one), and before any request-specific options.
func NewInferencePipelineRowService(opts ...option.RequestOption) (r *InferencePipelineRowService) {
	return &InferencePipelineRowService{Options: opts}
}

// Stream updates an inference data point in an inference pipeline.
func (r *InferencePipelineRowService) Stream(ctx context.Context, inferencePipelineID string, params InferencePipelineRowStreamParams, opts ...option.RequestOption) (res *InferencePipelineRowStreamResponse, err error) {
	// Refuse to build a malformed path from an empty required parameter.
	if inferencePipelineID == "" {
		err = errors.New("missing required inferencePipelineId parameter")
		return
	}
	// Request-specific options take effect after the service-level ones.
	combined := append(r.Options[:], opts...)
	path := fmt.Sprintf("inference-pipelines/%s/rows", inferencePipelineID)
	err = requestconfig.ExecuteNewRequest(ctx, http.MethodPut, path, params, &res, combined...)
	return
}

// InferencePipelineRowStreamResponse is the response body returned by
// [InferencePipelineRowService.Stream].
type InferencePipelineRowStreamResponse struct {
	Success InferencePipelineRowStreamResponseSuccess `json:"success,required"`
	// JSON holds metadata about the raw JSON this struct was decoded from.
	JSON inferencePipelineRowStreamResponseJSON `json:"-"`
}

// inferencePipelineRowStreamResponseJSON contains the JSON metadata for the struct
// [InferencePipelineRowStreamResponse]
type inferencePipelineRowStreamResponseJSON struct {
	Success     apijson.Field
	raw         string
	ExtraFields map[string]apijson.Field
}

// UnmarshalJSON implements json.Unmarshaler via the internal apijson decoder.
func (r *InferencePipelineRowStreamResponse) UnmarshalJSON(data []byte) (err error) {
	return apijson.UnmarshalRoot(data, r)
}

// RawJSON returns the raw JSON string the response was decoded from.
func (r inferencePipelineRowStreamResponseJSON) RawJSON() string {
	return r.raw
}

// InferencePipelineRowStreamResponseSuccess is the value of the response's
// "success" field; true is the only value declared by the API spec.
type InferencePipelineRowStreamResponseSuccess bool

const (
	InferencePipelineRowStreamResponseSuccessTrue InferencePipelineRowStreamResponseSuccess = true
)

// IsKnown reports whether r matches one of the declared
// InferencePipelineRowStreamResponseSuccess constants.
func (r InferencePipelineRowStreamResponseSuccess) IsKnown() bool {
	return r == InferencePipelineRowStreamResponseSuccessTrue
}

// InferencePipelineRowStreamParams holds the parameters for
// [InferencePipelineRowService.Stream]. InferenceID is serialized as a query
// parameter; Row and Config travel in the JSON request body.
type InferencePipelineRowStreamParams struct {
	// Specify the inference id as a query param.
	InferenceID param.Field[string] `query:"inferenceId,required"`
	// Row is the data point payload to store — shape is API-defined; TODO confirm schema.
	Row param.Field[interface{}] `json:"row,required"`
	// Config describes how the columns in Row are named (see
	// InferencePipelineRowStreamParamsConfig).
	Config param.Field[InferencePipelineRowStreamParamsConfig] `json:"config"`
}

// MarshalJSON implements json.Marshaler via the internal apijson encoder.
func (r InferencePipelineRowStreamParams) MarshalJSON() (data []byte, err error) {
	return apijson.MarshalRoot(r)
}

// URLQuery serializes [InferencePipelineRowStreamParams]'s query parameters as
// `url.Values`.
func (r InferencePipelineRowStreamParams) URLQuery() (v url.Values) {
	settings := apiquery.QuerySettings{
		ArrayFormat:  apiquery.ArrayQueryFormatComma,
		NestedFormat: apiquery.NestedQueryFormatBrackets,
	}
	return apiquery.MarshalWithSettings(r, settings)
}

// InferencePipelineRowStreamParamsConfig names the columns of the uploaded row
// that Openlayer should interpret specially (ground truths, latencies, ids,
// timestamps, human feedback). All fields are optional.
type InferencePipelineRowStreamParamsConfig struct {
	// Name of the column with the ground truths.
	GroundTruthColumnName param.Field[string] `json:"groundTruthColumnName"`
	// Name of the column with human feedback.
	HumanFeedbackColumnName param.Field[string] `json:"humanFeedbackColumnName"`
	// Name of the column with the inference ids. This is useful if you want to update
	// rows at a later point in time. If not provided, a unique id is generated by
	// Openlayer.
	InferenceIDColumnName param.Field[string] `json:"inferenceIdColumnName"`
	// Name of the column with the latencies.
	LatencyColumnName param.Field[string] `json:"latencyColumnName"`
	// Name of the column with the timestamps. Timestamps must be in UNIX sec format.
	// If not provided, the upload timestamp is used.
	TimestampColumnName param.Field[string] `json:"timestampColumnName"`
}

// MarshalJSON implements json.Marshaler via the internal apijson encoder.
func (r InferencePipelineRowStreamParamsConfig) MarshalJSON() (data []byte, err error) {
	return apijson.MarshalRoot(r)
}
50 changes: 50 additions & 0 deletions inferencepipelinerow_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

package openlayer_test

import (
"context"
"errors"
"os"
"testing"

"github.com/openlayer-ai/openlayer-go"
"github.com/openlayer-ai/openlayer-go/internal/testutil"
"github.com/openlayer-ai/openlayer-go/option"
)

// TestInferencePipelineRowStreamWithOptionalParams exercises Rows.Stream
// against the local mock server with every optional config field populated.
func TestInferencePipelineRowStreamWithOptionalParams(t *testing.T) {
	// Allow the environment to override the default mock-server address.
	baseURL := "http://localhost:4010"
	if envURL, ok := os.LookupEnv("TEST_API_BASE_URL"); ok {
		baseURL = envURL
	}
	if !testutil.CheckTestServer(t, baseURL) {
		return
	}

	client := openlayer.NewClient(
		option.WithBaseURL(baseURL),
		option.WithAPIKey("My API Key"),
	)

	params := openlayer.InferencePipelineRowStreamParams{
		InferenceID: openlayer.F("inferenceId"),
		Row:         openlayer.F[any](map[string]interface{}{}),
		Config: openlayer.F(openlayer.InferencePipelineRowStreamParamsConfig{
			GroundTruthColumnName:   openlayer.F("ground_truth"),
			HumanFeedbackColumnName: openlayer.F("human_feedback"),
			InferenceIDColumnName:   openlayer.F("id"),
			LatencyColumnName:       openlayer.F("latency"),
			TimestampColumnName:     openlayer.F("timestamp"),
		}),
	}

	_, err := client.InferencePipelines.Rows.Stream(
		context.TODO(),
		"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
		params,
	)
	if err != nil {
		var apierr *openlayer.Error
		if errors.As(err, &apierr) {
			t.Log(string(apierr.DumpRequest(true)))
		}
		t.Fatalf("err should be nil: %s", err.Error())
	}
}

0 comments on commit f83d4cf

Please sign in to comment.