feat(api): update via SDK Studio (#21)
1 parent b142e08 · commit f83d4cf
Showing 5 changed files with 186 additions and 1 deletion.
@@ -1 +1 @@
-configured_endpoints: 8
+configured_endpoints: 9
@@ -0,0 +1,123 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

package openlayer

import (
	"context"
	"errors"
	"fmt"
	"net/http"
	"net/url"

	"github.com/openlayer-ai/openlayer-go/internal/apijson"
	"github.com/openlayer-ai/openlayer-go/internal/apiquery"
	"github.com/openlayer-ai/openlayer-go/internal/param"
	"github.com/openlayer-ai/openlayer-go/internal/requestconfig"
	"github.com/openlayer-ai/openlayer-go/option"
)

// InferencePipelineRowService contains methods and other services that help with
// interacting with the openlayer API.
//
// Note, unlike clients, this service does not read variables from the
// environment automatically. You should not instantiate this service directly;
// use the [NewInferencePipelineRowService] method instead.
type InferencePipelineRowService struct {
	Options []option.RequestOption
}

// NewInferencePipelineRowService generates a new service that applies the given
// options to each request. These options are applied after the parent client's
// options (if there is one), and before any request-specific options.
func NewInferencePipelineRowService(opts ...option.RequestOption) (r *InferencePipelineRowService) {
	r = &InferencePipelineRowService{}
	r.Options = opts
	return
}

// Update an inference data point in an inference pipeline.
func (r *InferencePipelineRowService) Stream(ctx context.Context, inferencePipelineID string, params InferencePipelineRowStreamParams, opts ...option.RequestOption) (res *InferencePipelineRowStreamResponse, err error) {
	opts = append(r.Options[:], opts...)
	if inferencePipelineID == "" {
		err = errors.New("missing required inferencePipelineId parameter")
		return
	}
	path := fmt.Sprintf("inference-pipelines/%s/rows", inferencePipelineID)
	err = requestconfig.ExecuteNewRequest(ctx, http.MethodPut, path, params, &res, opts...)
	return
}

type InferencePipelineRowStreamResponse struct {
	Success InferencePipelineRowStreamResponseSuccess `json:"success,required"`
	JSON    inferencePipelineRowStreamResponseJSON    `json:"-"`
}

// inferencePipelineRowStreamResponseJSON contains the JSON metadata for the
// struct [InferencePipelineRowStreamResponse]
type inferencePipelineRowStreamResponseJSON struct {
	Success     apijson.Field
	raw         string
	ExtraFields map[string]apijson.Field
}

func (r *InferencePipelineRowStreamResponse) UnmarshalJSON(data []byte) (err error) {
	return apijson.UnmarshalRoot(data, r)
}

func (r inferencePipelineRowStreamResponseJSON) RawJSON() string {
	return r.raw
}

type InferencePipelineRowStreamResponseSuccess bool

const (
	InferencePipelineRowStreamResponseSuccessTrue InferencePipelineRowStreamResponseSuccess = true
)

// IsKnown reports whether r matches a value that was known when this SDK was
// generated; newer server-side values will report false.
func (r InferencePipelineRowStreamResponseSuccess) IsKnown() bool {
	switch r {
	case InferencePipelineRowStreamResponseSuccessTrue:
		return true
	}
	return false
}

type InferencePipelineRowStreamParams struct {
	// Specify the inference id as a query param.
	InferenceID param.Field[string]                                 `query:"inferenceId,required"`
	Row         param.Field[interface{}]                            `json:"row,required"`
	Config      param.Field[InferencePipelineRowStreamParamsConfig] `json:"config"`
}

func (r InferencePipelineRowStreamParams) MarshalJSON() (data []byte, err error) {
	return apijson.MarshalRoot(r)
}

// URLQuery serializes [InferencePipelineRowStreamParams]'s query parameters as
// `url.Values`.
func (r InferencePipelineRowStreamParams) URLQuery() (v url.Values) {
	return apiquery.MarshalWithSettings(r, apiquery.QuerySettings{
		ArrayFormat:  apiquery.ArrayQueryFormatComma,
		NestedFormat: apiquery.NestedQueryFormatBrackets,
	})
}

type InferencePipelineRowStreamParamsConfig struct {
	// Name of the column with the ground truths.
	GroundTruthColumnName param.Field[string] `json:"groundTruthColumnName"`
	// Name of the column with human feedback.
	HumanFeedbackColumnName param.Field[string] `json:"humanFeedbackColumnName"`
	// Name of the column with the inference ids. This is useful if you want to update
	// rows at a later point in time. If not provided, a unique id is generated by
	// Openlayer.
	InferenceIDColumnName param.Field[string] `json:"inferenceIdColumnName"`
	// Name of the column with the latencies.
	LatencyColumnName param.Field[string] `json:"latencyColumnName"`
	// Name of the column with the timestamps. Timestamps must be in UNIX sec format.
	// If not provided, the upload timestamp is used.
	TimestampColumnName param.Field[string] `json:"timestampColumnName"`
}

func (r InferencePipelineRowStreamParamsConfig) MarshalJSON() (data []byte, err error) {
	return apijson.MarshalRoot(r)
}
@@ -0,0 +1,50 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

package openlayer_test

import (
	"context"
	"errors"
	"os"
	"testing"

	"github.com/openlayer-ai/openlayer-go"
	"github.com/openlayer-ai/openlayer-go/internal/testutil"
	"github.com/openlayer-ai/openlayer-go/option"
)

func TestInferencePipelineRowStreamWithOptionalParams(t *testing.T) {
	baseURL := "http://localhost:4010"
	if envURL, ok := os.LookupEnv("TEST_API_BASE_URL"); ok {
		baseURL = envURL
	}
	if !testutil.CheckTestServer(t, baseURL) {
		return
	}
	client := openlayer.NewClient(
		option.WithBaseURL(baseURL),
		option.WithAPIKey("My API Key"),
	)
	_, err := client.InferencePipelines.Rows.Stream(
		context.TODO(),
		"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
		openlayer.InferencePipelineRowStreamParams{
			InferenceID: openlayer.F("inferenceId"),
			Row:         openlayer.F[any](map[string]interface{}{}),
			Config: openlayer.F(openlayer.InferencePipelineRowStreamParamsConfig{
				InferenceIDColumnName:   openlayer.F("id"),
				LatencyColumnName:       openlayer.F("latency"),
				TimestampColumnName:     openlayer.F("timestamp"),
				GroundTruthColumnName:   openlayer.F("ground_truth"),
				HumanFeedbackColumnName: openlayer.F("human_feedback"),
			}),
		},
	)
	if err != nil {
		var apierr *openlayer.Error
		if errors.As(err, &apierr) {
			t.Log(string(apierr.DumpRequest(true)))
		}
		t.Fatalf("err should be nil: %s", err.Error())
	}
}
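
A closing note on the response type: Success is a bool-backed enum, and IsKnown reports whether the value was known when the SDK was generated. Below is a short sketch of defensive handling; checkStreamResponse is a hypothetical helper, and it assumes resp came from a Stream call like the one above, in a file that imports log and the SDK package.

// checkStreamResponse is a hypothetical helper, not part of the SDK.
func checkStreamResponse(resp *openlayer.InferencePipelineRowStreamResponse) {
	if !resp.Success.IsKnown() {
		// A newer server may return a value this SDK version was not
		// generated with; fall back to the raw body for debugging.
		log.Printf("unrecognized success value; raw response: %s", resp.JSON.RawJSON())
		return
	}
	if resp.Success == openlayer.InferencePipelineRowStreamResponseSuccessTrue {
		log.Println("row accepted")
	}
}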