feat(api): OpenAPI spec update via Stainless API (#13)
stainless-app[bot] authored and stainless-bot committed Jul 5, 2024
1 parent 25f7816 commit bb83bc0
Showing 16 changed files with 87 additions and 1,832 deletions.
2 changes: 1 addition & 1 deletion .stats.yml
@@ -1 +1 @@
-configured_endpoints: 6
+configured_endpoints: 2
96 changes: 19 additions & 77 deletions README.md
@@ -52,30 +52,14 @@ func main() {
 	client := openlayer.NewClient(
 		option.WithAPIKey("My API Key"), // defaults to os.LookupEnv("OPENLAYER_API_KEY")
 	)
-	inferencePipelineDataStreamResponse, err := client.InferencePipelines.Data.Stream(
-		context.TODO(),
-		"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
-		openlayer.InferencePipelineDataStreamParams{
-			Config: openlayer.F[openlayer.InferencePipelineDataStreamParamsConfigUnion](openlayer.InferencePipelineDataStreamParamsConfigLlmData{
-				InputVariableNames:   openlayer.F([]string{"user_query"}),
-				OutputColumnName:     openlayer.F("output"),
-				NumOfTokenColumnName: openlayer.F("tokens"),
-				CostColumnName:       openlayer.F("cost"),
-				TimestampColumnName:  openlayer.F("timestamp"),
-			}),
-			Rows: openlayer.F([]map[string]interface{}{{
-				"user_query": "what's the meaning of life?",
-				"output":     "42",
-				"tokens":     map[string]interface{}{},
-				"cost":       map[string]interface{}{},
-				"timestamp":  map[string]interface{}{},
-			}}),
-		},
-	)
+	projectNewResponse, err := client.Projects.New(context.TODO(), openlayer.ProjectNewParams{
+		Name:     openlayer.F("My Project"),
+		TaskType: openlayer.F(openlayer.ProjectNewParamsTaskTypeLlmBase),
+	})
 	if err != nil {
 		panic(err.Error())
 	}
-	fmt.Printf("%+v\n", inferencePipelineDataStreamResponse.Success)
+	fmt.Printf("%+v\n", projectNewResponse.ID)
 }

 ```
@@ -164,7 +148,7 @@ client := openlayer.NewClient(
 	option.WithHeader("X-Some-Header", "custom_header_info"),
 )

-client.InferencePipelines.Data.Stream(context.TODO(), ...,
+client.Projects.New(context.TODO(), ...,
 	// Override the header
 	option.WithHeader("X-Some-Header", "some_other_custom_header_info"),
 	// Add an undocumented field to the request body, using sjson syntax
@@ -193,33 +177,17 @@ When the API returns a non-success status code, we return an error with type
 To handle errors, we recommend that you use the `errors.As` pattern:

 ```go
-_, err := client.InferencePipelines.Data.Stream(
-	context.TODO(),
-	"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
-	openlayer.InferencePipelineDataStreamParams{
-		Config: openlayer.F[openlayer.InferencePipelineDataStreamParamsConfigUnion](openlayer.InferencePipelineDataStreamParamsConfigLlmData{
-			InputVariableNames:   openlayer.F([]string{"user_query"}),
-			OutputColumnName:     openlayer.F("output"),
-			NumOfTokenColumnName: openlayer.F("tokens"),
-			CostColumnName:       openlayer.F("cost"),
-			TimestampColumnName:  openlayer.F("timestamp"),
-		}),
-		Rows: openlayer.F([]map[string]interface{}{{
-			"user_query": "what's the meaning of life?",
-			"output":     "42",
-			"tokens":     map[string]interface{}{},
-			"cost":       map[string]interface{}{},
-			"timestamp":  map[string]interface{}{},
-		}}),
-	},
-)
+_, err := client.Projects.New(context.TODO(), openlayer.ProjectNewParams{
+	Name:     openlayer.F("My Project"),
+	TaskType: openlayer.F(openlayer.ProjectNewParamsTaskTypeLlmBase),
+})
 if err != nil {
 	var apierr *openlayer.Error
 	if errors.As(err, &apierr) {
 		println(string(apierr.DumpRequest(true)))  // Prints the serialized HTTP request
 		println(string(apierr.DumpResponse(true))) // Prints the serialized HTTP response
 	}
-	panic(err.Error()) // GET "/inference-pipelines/{id}/data-stream": 400 Bad Request { ... }
+	panic(err.Error()) // GET "/projects": 400 Bad Request { ... }
 }
 ```

@@ -237,24 +205,11 @@ To set a per-retry timeout, use `option.WithRequestTimeout()`.
 // This sets the timeout for the request, including all the retries.
 ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute)
 defer cancel()
-client.InferencePipelines.Data.Stream(
+client.Projects.New(
 	ctx,
-	"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
-	openlayer.InferencePipelineDataStreamParams{
-		Config: openlayer.F[openlayer.InferencePipelineDataStreamParamsConfigUnion](openlayer.InferencePipelineDataStreamParamsConfigLlmData{
-			InputVariableNames:   openlayer.F([]string{"user_query"}),
-			OutputColumnName:     openlayer.F("output"),
-			NumOfTokenColumnName: openlayer.F("tokens"),
-			CostColumnName:       openlayer.F("cost"),
-			TimestampColumnName:  openlayer.F("timestamp"),
-		}),
-		Rows: openlayer.F([]map[string]interface{}{{
-			"user_query": "what's the meaning of life?",
-			"output":     "42",
-			"tokens":     map[string]interface{}{},
-			"cost":       map[string]interface{}{},
-			"timestamp":  map[string]interface{}{},
-		}}),
+	openlayer.ProjectNewParams{
+		Name:     openlayer.F("My Project"),
+		TaskType: openlayer.F(openlayer.ProjectNewParamsTaskTypeLlmBase),
 	},
 	// This sets the per-retry timeout
 	option.WithRequestTimeout(20*time.Second),
@@ -289,24 +244,11 @@ client := openlayer.NewClient(
 )

 // Override per-request:
-client.InferencePipelines.Data.Stream(
+client.Projects.New(
 	context.TODO(),
-	"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
-	openlayer.InferencePipelineDataStreamParams{
-		Config: openlayer.F[openlayer.InferencePipelineDataStreamParamsConfigUnion](openlayer.InferencePipelineDataStreamParamsConfigLlmData{
-			InputVariableNames:   openlayer.F([]string{"user_query"}),
-			OutputColumnName:     openlayer.F("output"),
-			NumOfTokenColumnName: openlayer.F("tokens"),
-			CostColumnName:       openlayer.F("cost"),
-			TimestampColumnName:  openlayer.F("timestamp"),
-		}),
-		Rows: openlayer.F([]map[string]interface{}{{
-			"user_query": "what's the meaning of life?",
-			"output":     "42",
-			"tokens":     map[string]interface{}{},
-			"cost":       map[string]interface{}{},
-			"timestamp":  map[string]interface{}{},
-		}}),
+	openlayer.ProjectNewParams{
+		Name:     openlayer.F("My Project"),
+		TaskType: openlayer.F(openlayer.ProjectNewParamsTaskTypeLlmBase),
 	},
 	option.WithMaxRetries(5),
 )
32 changes: 0 additions & 32 deletions api.md
@@ -12,46 +12,14 @@ Methods:

 ## Commits
-
-Response Types:
-
-- <a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go">openlayer</a>.<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go#ProjectCommitListResponse">ProjectCommitListResponse</a>
-
-Methods:
-
-- <code title="get /projects/{id}/versions">client.Projects.Commits.<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go#ProjectCommitService.List">List</a>(ctx <a href="https://pkg.go.dev/context">context</a>.<a href="https://pkg.go.dev/context#Context">Context</a>, id <a href="https://pkg.go.dev/builtin#string">string</a>, query <a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go">openlayer</a>.<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go#ProjectCommitListParams">ProjectCommitListParams</a>) (<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go">openlayer</a>.<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go#ProjectCommitListResponse">ProjectCommitListResponse</a>, <a href="https://pkg.go.dev/builtin#error">error</a>)</code>

 ## InferencePipelines

 # Commits

 ## TestResults
-
-Response Types:
-
-- <a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go">openlayer</a>.<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go#CommitTestResultListResponse">CommitTestResultListResponse</a>
-
-Methods:
-
-- <code title="get /versions/{id}/results">client.Commits.TestResults.<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go#CommitTestResultService.List">List</a>(ctx <a href="https://pkg.go.dev/context">context</a>.<a href="https://pkg.go.dev/context#Context">Context</a>, id <a href="https://pkg.go.dev/builtin#string">string</a>, query <a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go">openlayer</a>.<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go#CommitTestResultListParams">CommitTestResultListParams</a>) (<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go">openlayer</a>.<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go#CommitTestResultListResponse">CommitTestResultListResponse</a>, <a href="https://pkg.go.dev/builtin#error">error</a>)</code>

 # InferencePipelines

 ## Data
-
-Response Types:
-
-- <a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go">openlayer</a>.<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go#InferencePipelineDataStreamResponse">InferencePipelineDataStreamResponse</a>
-
-Methods:
-
-- <code title="post /inference-pipelines/{id}/data-stream">client.InferencePipelines.Data.<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go#InferencePipelineDataService.Stream">Stream</a>(ctx <a href="https://pkg.go.dev/context">context</a>.<a href="https://pkg.go.dev/context#Context">Context</a>, id <a href="https://pkg.go.dev/builtin#string">string</a>, body <a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go">openlayer</a>.<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go#InferencePipelineDataStreamParams">InferencePipelineDataStreamParams</a>) (<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go">openlayer</a>.<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go#InferencePipelineDataStreamResponse">InferencePipelineDataStreamResponse</a>, <a href="https://pkg.go.dev/builtin#error">error</a>)</code>

 ## TestResults
-
-Response Types:
-
-- <a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go">openlayer</a>.<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go#InferencePipelineTestResultListResponse">InferencePipelineTestResultListResponse</a>
-
-Methods:
-
-- <code title="get /inference-pipelines/{id}/results">client.InferencePipelines.TestResults.<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go#InferencePipelineTestResultService.List">List</a>(ctx <a href="https://pkg.go.dev/context">context</a>.<a href="https://pkg.go.dev/context#Context">Context</a>, id <a href="https://pkg.go.dev/builtin#string">string</a>, query <a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go">openlayer</a>.<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go#InferencePipelineTestResultListParams">InferencePipelineTestResultListParams</a>) (<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go">openlayer</a>.<a href="https://pkg.go.dev/github.com/openlayer-ai/openlayer-go#InferencePipelineTestResultListResponse">InferencePipelineTestResultListResponse</a>, <a href="https://pkg.go.dev/builtin#error">error</a>)</code>
144 changes: 24 additions & 120 deletions client_test.go
@@ -36,26 +36,10 @@ func TestUserAgentHeader(t *testing.T) {
 			},
 		}),
 	)
-	client.InferencePipelines.Data.Stream(
-		context.Background(),
-		"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
-		openlayer.InferencePipelineDataStreamParams{
-			Config: openlayer.F[openlayer.InferencePipelineDataStreamParamsConfigUnion](openlayer.InferencePipelineDataStreamParamsConfigLlmData{
-				InputVariableNames:   openlayer.F([]string{"user_query"}),
-				OutputColumnName:     openlayer.F("output"),
-				NumOfTokenColumnName: openlayer.F("tokens"),
-				CostColumnName:       openlayer.F("cost"),
-				TimestampColumnName:  openlayer.F("timestamp"),
-			}),
-			Rows: openlayer.F([]map[string]interface{}{{
-				"user_query": "what's the meaning of life?",
-				"output":     "42",
-				"tokens":     map[string]interface{}{},
-				"cost":       map[string]interface{}{},
-				"timestamp":  map[string]interface{}{},
-			}}),
-		},
-	)
+	client.Projects.New(context.Background(), openlayer.ProjectNewParams{
+		Name:     openlayer.F("My Project"),
+		TaskType: openlayer.F(openlayer.ProjectNewParamsTaskTypeLlmBase),
+	})
 	if userAgent != fmt.Sprintf("Openlayer/Go %s", internal.PackageVersion) {
 		t.Errorf("Expected User-Agent to be correct, but got: %#v", userAgent)
 	}
@@ -78,26 +62,10 @@ func TestRetryAfter(t *testing.T) {
 			},
 		}),
 	)
-	res, err := client.InferencePipelines.Data.Stream(
-		context.Background(),
-		"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
-		openlayer.InferencePipelineDataStreamParams{
-			Config: openlayer.F[openlayer.InferencePipelineDataStreamParamsConfigUnion](openlayer.InferencePipelineDataStreamParamsConfigLlmData{
-				InputVariableNames:   openlayer.F([]string{"user_query"}),
-				OutputColumnName:     openlayer.F("output"),
-				NumOfTokenColumnName: openlayer.F("tokens"),
-				CostColumnName:       openlayer.F("cost"),
-				TimestampColumnName:  openlayer.F("timestamp"),
-			}),
-			Rows: openlayer.F([]map[string]interface{}{{
-				"user_query": "what's the meaning of life?",
-				"output":     "42",
-				"tokens":     map[string]interface{}{},
-				"cost":       map[string]interface{}{},
-				"timestamp":  map[string]interface{}{},
-			}}),
-		},
-	)
+	res, err := client.Projects.New(context.Background(), openlayer.ProjectNewParams{
+		Name:     openlayer.F("My Project"),
+		TaskType: openlayer.F(openlayer.ProjectNewParamsTaskTypeLlmBase),
+	})
 	if err == nil || res != nil {
 		t.Error("Expected there to be a cancel error and for the response to be nil")
 	}
@@ -123,26 +91,10 @@ func TestRetryAfterMs(t *testing.T) {
 			},
 		}),
 	)
-	res, err := client.InferencePipelines.Data.Stream(
-		context.Background(),
-		"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
-		openlayer.InferencePipelineDataStreamParams{
-			Config: openlayer.F[openlayer.InferencePipelineDataStreamParamsConfigUnion](openlayer.InferencePipelineDataStreamParamsConfigLlmData{
-				InputVariableNames:   openlayer.F([]string{"user_query"}),
-				OutputColumnName:     openlayer.F("output"),
-				NumOfTokenColumnName: openlayer.F("tokens"),
-				CostColumnName:       openlayer.F("cost"),
-				TimestampColumnName:  openlayer.F("timestamp"),
-			}),
-			Rows: openlayer.F([]map[string]interface{}{{
-				"user_query": "what's the meaning of life?",
-				"output":     "42",
-				"tokens":     map[string]interface{}{},
-				"cost":       map[string]interface{}{},
-				"timestamp":  map[string]interface{}{},
-			}}),
-		},
-	)
+	res, err := client.Projects.New(context.Background(), openlayer.ProjectNewParams{
+		Name:     openlayer.F("My Project"),
+		TaskType: openlayer.F(openlayer.ProjectNewParamsTaskTypeLlmBase),
+	})
 	if err == nil || res != nil {
 		t.Error("Expected there to be a cancel error and for the response to be nil")
 	}
@@ -164,26 +116,10 @@ func TestContextCancel(t *testing.T) {
 	)
 	cancelCtx, cancel := context.WithCancel(context.Background())
 	cancel()
-	res, err := client.InferencePipelines.Data.Stream(
-		cancelCtx,
-		"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
-		openlayer.InferencePipelineDataStreamParams{
-			Config: openlayer.F[openlayer.InferencePipelineDataStreamParamsConfigUnion](openlayer.InferencePipelineDataStreamParamsConfigLlmData{
-				InputVariableNames:   openlayer.F([]string{"user_query"}),
-				OutputColumnName:     openlayer.F("output"),
-				NumOfTokenColumnName: openlayer.F("tokens"),
-				CostColumnName:       openlayer.F("cost"),
-				TimestampColumnName:  openlayer.F("timestamp"),
-			}),
-			Rows: openlayer.F([]map[string]interface{}{{
-				"user_query": "what's the meaning of life?",
-				"output":     "42",
-				"tokens":     map[string]interface{}{},
-				"cost":       map[string]interface{}{},
-				"timestamp":  map[string]interface{}{},
-			}}),
-		},
-	)
+	res, err := client.Projects.New(cancelCtx, openlayer.ProjectNewParams{
+		Name:     openlayer.F("My Project"),
+		TaskType: openlayer.F(openlayer.ProjectNewParamsTaskTypeLlmBase),
+	})
 	if err == nil || res != nil {
 		t.Error("Expected there to be a cancel error and for the response to be nil")
 	}
@@ -202,26 +138,10 @@ func TestContextCancelDelay(t *testing.T) {
 	)
 	cancelCtx, cancel := context.WithTimeout(context.Background(), 2*time.Millisecond)
 	defer cancel()
-	res, err := client.InferencePipelines.Data.Stream(
-		cancelCtx,
-		"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
-		openlayer.InferencePipelineDataStreamParams{
-			Config: openlayer.F[openlayer.InferencePipelineDataStreamParamsConfigUnion](openlayer.InferencePipelineDataStreamParamsConfigLlmData{
-				InputVariableNames:   openlayer.F([]string{"user_query"}),
-				OutputColumnName:     openlayer.F("output"),
-				NumOfTokenColumnName: openlayer.F("tokens"),
-				CostColumnName:       openlayer.F("cost"),
-				TimestampColumnName:  openlayer.F("timestamp"),
-			}),
-			Rows: openlayer.F([]map[string]interface{}{{
-				"user_query": "what's the meaning of life?",
-				"output":     "42",
-				"tokens":     map[string]interface{}{},
-				"cost":       map[string]interface{}{},
-				"timestamp":  map[string]interface{}{},
-			}}),
-		},
-	)
+	res, err := client.Projects.New(cancelCtx, openlayer.ProjectNewParams{
+		Name:     openlayer.F("My Project"),
+		TaskType: openlayer.F(openlayer.ProjectNewParamsTaskTypeLlmBase),
+	})
 	if err == nil || res != nil {
 		t.Error("expected there to be a cancel error and for the response to be nil")
 	}
@@ -246,26 +166,10 @@ func TestContextDeadline(t *testing.T) {
 			},
 		}),
 	)
-	res, err := client.InferencePipelines.Data.Stream(
-		deadlineCtx,
-		"182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
-		openlayer.InferencePipelineDataStreamParams{
-			Config: openlayer.F[openlayer.InferencePipelineDataStreamParamsConfigUnion](openlayer.InferencePipelineDataStreamParamsConfigLlmData{
-				InputVariableNames:   openlayer.F([]string{"user_query"}),
-				OutputColumnName:     openlayer.F("output"),
-				NumOfTokenColumnName: openlayer.F("tokens"),
-				CostColumnName:       openlayer.F("cost"),
-				TimestampColumnName:  openlayer.F("timestamp"),
-			}),
-			Rows: openlayer.F([]map[string]interface{}{{
-				"user_query": "what's the meaning of life?",
-				"output":     "42",
-				"tokens":     map[string]interface{}{},
-				"cost":       map[string]interface{}{},
-				"timestamp":  map[string]interface{}{},
-			}}),
-		},
-	)
+	res, err := client.Projects.New(deadlineCtx, openlayer.ProjectNewParams{
+		Name:     openlayer.F("My Project"),
+		TaskType: openlayer.F(openlayer.ProjectNewParamsTaskTypeLlmBase),
+	})
 	if err == nil || res != nil {
 		t.Error("expected there to be a deadline error and for the response to be nil")
 	}
(12 of the 16 changed files are not shown.)
