From d5fa2c238b72262b13e9c0d121dc61155d3eec30 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
<142633134+stainless-app[bot]@users.noreply.github.com>
Date: Wed, 1 Apr 2026 22:14:21 +0000
Subject: [PATCH 1/2] feat: Cid/fetching endpoints
---
.stats.yml | 6 +-
api.md | 4 +-
.../inference_pipelines.py | 246 +++++++++++++++++-
src/openlayer/types/__init__.py | 6 +
...rence_pipeline_retrieve_sessions_params.py | 77 ++++++
...nce_pipeline_retrieve_sessions_response.py | 53 ++++
...nference_pipeline_retrieve_users_params.py | 64 ++++-
.../api_resources/test_inference_pipelines.py | 155 +++++++++++
8 files changed, 601 insertions(+), 10 deletions(-)
create mode 100644 src/openlayer/types/inference_pipeline_retrieve_sessions_params.py
create mode 100644 src/openlayer/types/inference_pipeline_retrieve_sessions_response.py
diff --git a/.stats.yml b/.stats.yml
index a47d0ba9..9f47e6b1 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1,3 +1,3 @@
-configured_endpoints: 28
-openapi_spec_hash: 5f7962599290c70cb47c05c3b29fdbd8
-config_hash: f1c0c034bd832878eb47146c51afdd55
+configured_endpoints: 29
+openapi_spec_hash: 20caa671b1b1a87c0a5cb6ebd7e4b064
+config_hash: aaf12ae1da71c62ca5195fc2b66f657c
diff --git a/api.md b/api.md
index 64d5c73b..6b64c75b 100644
--- a/api.md
+++ b/api.md
@@ -122,6 +122,7 @@ Types:
from openlayer.types import (
InferencePipelineRetrieveResponse,
InferencePipelineUpdateResponse,
+ InferencePipelineRetrieveSessionsResponse,
InferencePipelineRetrieveUsersResponse,
)
```
@@ -131,7 +132,8 @@ Methods:
- client.inference_pipelines.retrieve(inference_pipeline_id, \*\*params) -> InferencePipelineRetrieveResponse
- client.inference_pipelines.update(inference_pipeline_id, \*\*params) -> InferencePipelineUpdateResponse
- client.inference_pipelines.delete(inference_pipeline_id) -> None
-- client.inference_pipelines.retrieve_users(inference_pipeline_id, \*\*params) -> InferencePipelineRetrieveUsersResponse
+- client.inference_pipelines.retrieve_sessions(inference_pipeline_id, \*\*params) -> InferencePipelineRetrieveSessionsResponse
+- client.inference_pipelines.retrieve_users(inference_pipeline_id, \*\*params) -> InferencePipelineRetrieveUsersResponse
## Data
diff --git a/src/openlayer/resources/inference_pipelines/inference_pipelines.py b/src/openlayer/resources/inference_pipelines/inference_pipelines.py
index 9615453f..4336c552 100644
--- a/src/openlayer/resources/inference_pipelines/inference_pipelines.py
+++ b/src/openlayer/resources/inference_pipelines/inference_pipelines.py
@@ -2,7 +2,7 @@
from __future__ import annotations
-from typing import List, Optional
+from typing import List, Iterable, Optional
from typing_extensions import Literal
import httpx
@@ -27,8 +27,9 @@
inference_pipeline_update_params,
inference_pipeline_retrieve_params,
inference_pipeline_retrieve_users_params,
+ inference_pipeline_retrieve_sessions_params,
)
-from ..._types import Body, Omit, Query, Headers, NoneType, NotGiven, omit, not_given
+from ..._types import Body, Omit, Query, Headers, NoneType, NotGiven, SequenceNotStr, omit, not_given
from ..._utils import path_template, maybe_transform, async_maybe_transform
from ..._compat import cached_property
from ..._resource import SyncAPIResource, AsyncAPIResource
@@ -50,6 +51,7 @@
from ...types.inference_pipeline_update_response import InferencePipelineUpdateResponse
from ...types.inference_pipeline_retrieve_response import InferencePipelineRetrieveResponse
from ...types.inference_pipeline_retrieve_users_response import InferencePipelineRetrieveUsersResponse
+from ...types.inference_pipeline_retrieve_sessions_response import InferencePipelineRetrieveSessionsResponse
__all__ = ["InferencePipelinesResource", "AsyncInferencePipelinesResource"]
@@ -219,12 +221,106 @@ def delete(
cast_to=NoneType,
)
+ def retrieve_sessions(
+ self,
+ inference_pipeline_id: str,
+ *,
+ asc: bool | Omit = omit,
+ page: int | Omit = omit,
+ per_page: int | Omit = omit,
+ sort_column: str | Omit = omit,
+ column_filters: Optional[Iterable[inference_pipeline_retrieve_sessions_params.ColumnFilter]] | Omit = omit,
+ exclude_row_id_list: Optional[Iterable[int]] | Omit = omit,
+ not_search_query_and: Optional[SequenceNotStr[str]] | Omit = omit,
+ not_search_query_or: Optional[SequenceNotStr[str]] | Omit = omit,
+ row_id_list: Optional[Iterable[int]] | Omit = omit,
+ search_query_and: Optional[SequenceNotStr[str]] | Omit = omit,
+ search_query_or: Optional[SequenceNotStr[str]] | Omit = omit,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> InferencePipelineRetrieveSessionsResponse:
+ """
+ Get aggregated session data for an inference pipeline with pagination and
+ metadata.
+
+ Returns a list of sessions for the inference pipeline, including activity
+ statistics such as record counts, token usage, cost, latency, and the first and
+ last records.
+
+ Args:
+ asc: Whether or not to sort on the sortColumn in ascending order.
+
+ page: The page to return in a paginated query.
+
+ per_page: Maximum number of items to return per page.
+
+      sort_column: Name of the column to sort on.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not inference_pipeline_id:
+ raise ValueError(
+ f"Expected a non-empty value for `inference_pipeline_id` but received {inference_pipeline_id!r}"
+ )
+ return self._post(
+ path_template(
+ "/inference-pipelines/{inference_pipeline_id}/sessions", inference_pipeline_id=inference_pipeline_id
+ ),
+ body=maybe_transform(
+ {
+ "column_filters": column_filters,
+ "exclude_row_id_list": exclude_row_id_list,
+ "not_search_query_and": not_search_query_and,
+ "not_search_query_or": not_search_query_or,
+ "row_id_list": row_id_list,
+ "search_query_and": search_query_and,
+ "search_query_or": search_query_or,
+ },
+ inference_pipeline_retrieve_sessions_params.InferencePipelineRetrieveSessionsParams,
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers,
+ extra_query=extra_query,
+ extra_body=extra_body,
+ timeout=timeout,
+ query=maybe_transform(
+ {
+ "asc": asc,
+ "page": page,
+ "per_page": per_page,
+ "sort_column": sort_column,
+ },
+ inference_pipeline_retrieve_sessions_params.InferencePipelineRetrieveSessionsParams,
+ ),
+ ),
+ cast_to=InferencePipelineRetrieveSessionsResponse,
+ )
+
def retrieve_users(
self,
inference_pipeline_id: str,
*,
+ asc: bool | Omit = omit,
page: int | Omit = omit,
per_page: int | Omit = omit,
+ sort_column: str | Omit = omit,
+ column_filters: Optional[Iterable[inference_pipeline_retrieve_users_params.ColumnFilter]] | Omit = omit,
+ exclude_row_id_list: Optional[Iterable[int]] | Omit = omit,
+ not_search_query_and: Optional[SequenceNotStr[str]] | Omit = omit,
+ not_search_query_or: Optional[SequenceNotStr[str]] | Omit = omit,
+ row_id_list: Optional[Iterable[int]] | Omit = omit,
+ search_query_and: Optional[SequenceNotStr[str]] | Omit = omit,
+ search_query_or: Optional[SequenceNotStr[str]] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -240,10 +336,14 @@ def retrieve_users(
usage, and costs.
Args:
+ asc: Whether or not to sort on the sortColumn in ascending order.
+
page: The page to return in a paginated query.
per_page: Maximum number of items to return per page.
+      sort_column: Name of the column to sort on.
+
extra_headers: Send extra headers
extra_query: Add additional query parameters to the request
@@ -256,10 +356,22 @@ def retrieve_users(
raise ValueError(
f"Expected a non-empty value for `inference_pipeline_id` but received {inference_pipeline_id!r}"
)
- return self._get(
+ return self._post(
path_template(
"/inference-pipelines/{inference_pipeline_id}/users", inference_pipeline_id=inference_pipeline_id
),
+ body=maybe_transform(
+ {
+ "column_filters": column_filters,
+ "exclude_row_id_list": exclude_row_id_list,
+ "not_search_query_and": not_search_query_and,
+ "not_search_query_or": not_search_query_or,
+ "row_id_list": row_id_list,
+ "search_query_and": search_query_and,
+ "search_query_or": search_query_or,
+ },
+ inference_pipeline_retrieve_users_params.InferencePipelineRetrieveUsersParams,
+ ),
options=make_request_options(
extra_headers=extra_headers,
extra_query=extra_query,
@@ -267,8 +379,10 @@ def retrieve_users(
timeout=timeout,
query=maybe_transform(
{
+ "asc": asc,
"page": page,
"per_page": per_page,
+ "sort_column": sort_column,
},
inference_pipeline_retrieve_users_params.InferencePipelineRetrieveUsersParams,
),
@@ -442,12 +556,106 @@ async def delete(
cast_to=NoneType,
)
+ async def retrieve_sessions(
+ self,
+ inference_pipeline_id: str,
+ *,
+ asc: bool | Omit = omit,
+ page: int | Omit = omit,
+ per_page: int | Omit = omit,
+ sort_column: str | Omit = omit,
+ column_filters: Optional[Iterable[inference_pipeline_retrieve_sessions_params.ColumnFilter]] | Omit = omit,
+ exclude_row_id_list: Optional[Iterable[int]] | Omit = omit,
+ not_search_query_and: Optional[SequenceNotStr[str]] | Omit = omit,
+ not_search_query_or: Optional[SequenceNotStr[str]] | Omit = omit,
+ row_id_list: Optional[Iterable[int]] | Omit = omit,
+ search_query_and: Optional[SequenceNotStr[str]] | Omit = omit,
+ search_query_or: Optional[SequenceNotStr[str]] | Omit = omit,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = not_given,
+ ) -> InferencePipelineRetrieveSessionsResponse:
+ """
+ Get aggregated session data for an inference pipeline with pagination and
+ metadata.
+
+ Returns a list of sessions for the inference pipeline, including activity
+ statistics such as record counts, token usage, cost, latency, and the first and
+ last records.
+
+ Args:
+ asc: Whether or not to sort on the sortColumn in ascending order.
+
+ page: The page to return in a paginated query.
+
+ per_page: Maximum number of items to return per page.
+
+      sort_column: Name of the column to sort on.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not inference_pipeline_id:
+ raise ValueError(
+ f"Expected a non-empty value for `inference_pipeline_id` but received {inference_pipeline_id!r}"
+ )
+ return await self._post(
+ path_template(
+ "/inference-pipelines/{inference_pipeline_id}/sessions", inference_pipeline_id=inference_pipeline_id
+ ),
+ body=await async_maybe_transform(
+ {
+ "column_filters": column_filters,
+ "exclude_row_id_list": exclude_row_id_list,
+ "not_search_query_and": not_search_query_and,
+ "not_search_query_or": not_search_query_or,
+ "row_id_list": row_id_list,
+ "search_query_and": search_query_and,
+ "search_query_or": search_query_or,
+ },
+ inference_pipeline_retrieve_sessions_params.InferencePipelineRetrieveSessionsParams,
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers,
+ extra_query=extra_query,
+ extra_body=extra_body,
+ timeout=timeout,
+ query=await async_maybe_transform(
+ {
+ "asc": asc,
+ "page": page,
+ "per_page": per_page,
+ "sort_column": sort_column,
+ },
+ inference_pipeline_retrieve_sessions_params.InferencePipelineRetrieveSessionsParams,
+ ),
+ ),
+ cast_to=InferencePipelineRetrieveSessionsResponse,
+ )
+
async def retrieve_users(
self,
inference_pipeline_id: str,
*,
+ asc: bool | Omit = omit,
page: int | Omit = omit,
per_page: int | Omit = omit,
+ sort_column: str | Omit = omit,
+ column_filters: Optional[Iterable[inference_pipeline_retrieve_users_params.ColumnFilter]] | Omit = omit,
+ exclude_row_id_list: Optional[Iterable[int]] | Omit = omit,
+ not_search_query_and: Optional[SequenceNotStr[str]] | Omit = omit,
+ not_search_query_or: Optional[SequenceNotStr[str]] | Omit = omit,
+ row_id_list: Optional[Iterable[int]] | Omit = omit,
+ search_query_and: Optional[SequenceNotStr[str]] | Omit = omit,
+ search_query_or: Optional[SequenceNotStr[str]] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
@@ -463,10 +671,14 @@ async def retrieve_users(
usage, and costs.
Args:
+ asc: Whether or not to sort on the sortColumn in ascending order.
+
page: The page to return in a paginated query.
per_page: Maximum number of items to return per page.
+      sort_column: Name of the column to sort on.
+
extra_headers: Send extra headers
extra_query: Add additional query parameters to the request
@@ -479,10 +691,22 @@ async def retrieve_users(
raise ValueError(
f"Expected a non-empty value for `inference_pipeline_id` but received {inference_pipeline_id!r}"
)
- return await self._get(
+ return await self._post(
path_template(
"/inference-pipelines/{inference_pipeline_id}/users", inference_pipeline_id=inference_pipeline_id
),
+ body=await async_maybe_transform(
+ {
+ "column_filters": column_filters,
+ "exclude_row_id_list": exclude_row_id_list,
+ "not_search_query_and": not_search_query_and,
+ "not_search_query_or": not_search_query_or,
+ "row_id_list": row_id_list,
+ "search_query_and": search_query_and,
+ "search_query_or": search_query_or,
+ },
+ inference_pipeline_retrieve_users_params.InferencePipelineRetrieveUsersParams,
+ ),
options=make_request_options(
extra_headers=extra_headers,
extra_query=extra_query,
@@ -490,8 +714,10 @@ async def retrieve_users(
timeout=timeout,
query=await async_maybe_transform(
{
+ "asc": asc,
"page": page,
"per_page": per_page,
+ "sort_column": sort_column,
},
inference_pipeline_retrieve_users_params.InferencePipelineRetrieveUsersParams,
),
@@ -513,6 +739,9 @@ def __init__(self, inference_pipelines: InferencePipelinesResource) -> None:
self.delete = to_raw_response_wrapper(
inference_pipelines.delete,
)
+ self.retrieve_sessions = to_raw_response_wrapper(
+ inference_pipelines.retrieve_sessions,
+ )
self.retrieve_users = to_raw_response_wrapper(
inference_pipelines.retrieve_users,
)
@@ -543,6 +772,9 @@ def __init__(self, inference_pipelines: AsyncInferencePipelinesResource) -> None
self.delete = async_to_raw_response_wrapper(
inference_pipelines.delete,
)
+ self.retrieve_sessions = async_to_raw_response_wrapper(
+ inference_pipelines.retrieve_sessions,
+ )
self.retrieve_users = async_to_raw_response_wrapper(
inference_pipelines.retrieve_users,
)
@@ -573,6 +805,9 @@ def __init__(self, inference_pipelines: InferencePipelinesResource) -> None:
self.delete = to_streamed_response_wrapper(
inference_pipelines.delete,
)
+ self.retrieve_sessions = to_streamed_response_wrapper(
+ inference_pipelines.retrieve_sessions,
+ )
self.retrieve_users = to_streamed_response_wrapper(
inference_pipelines.retrieve_users,
)
@@ -603,6 +838,9 @@ def __init__(self, inference_pipelines: AsyncInferencePipelinesResource) -> None
self.delete = async_to_streamed_response_wrapper(
inference_pipelines.delete,
)
+ self.retrieve_sessions = async_to_streamed_response_wrapper(
+ inference_pipelines.retrieve_sessions,
+ )
self.retrieve_users = async_to_streamed_response_wrapper(
inference_pipelines.retrieve_users,
)
diff --git a/src/openlayer/types/__init__.py b/src/openlayer/types/__init__.py
index f9100ab9..a11cd774 100644
--- a/src/openlayer/types/__init__.py
+++ b/src/openlayer/types/__init__.py
@@ -24,3 +24,9 @@
from .inference_pipeline_retrieve_users_response import (
InferencePipelineRetrieveUsersResponse as InferencePipelineRetrieveUsersResponse,
)
+from .inference_pipeline_retrieve_sessions_params import (
+ InferencePipelineRetrieveSessionsParams as InferencePipelineRetrieveSessionsParams,
+)
+from .inference_pipeline_retrieve_sessions_response import (
+ InferencePipelineRetrieveSessionsResponse as InferencePipelineRetrieveSessionsResponse,
+)
diff --git a/src/openlayer/types/inference_pipeline_retrieve_sessions_params.py b/src/openlayer/types/inference_pipeline_retrieve_sessions_params.py
new file mode 100644
index 00000000..f09be37d
--- /dev/null
+++ b/src/openlayer/types/inference_pipeline_retrieve_sessions_params.py
@@ -0,0 +1,77 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing import Union, Iterable, Optional
+from typing_extensions import Literal, Required, Annotated, TypeAlias, TypedDict
+
+from .._types import SequenceNotStr
+from .._utils import PropertyInfo
+
+__all__ = [
+ "InferencePipelineRetrieveSessionsParams",
+ "ColumnFilter",
+ "ColumnFilterSetColumnFilter",
+ "ColumnFilterNumericColumnFilter",
+ "ColumnFilterStringColumnFilter",
+]
+
+
+class InferencePipelineRetrieveSessionsParams(TypedDict, total=False):
+ asc: bool
+ """Whether or not to sort on the sortColumn in ascending order."""
+
+ page: int
+ """The page to return in a paginated query."""
+
+ per_page: Annotated[int, PropertyInfo(alias="perPage")]
+ """Maximum number of items to return per page."""
+
+ sort_column: Annotated[str, PropertyInfo(alias="sortColumn")]
+ """Name of the column to sort on"""
+
+ column_filters: Annotated[Optional[Iterable[ColumnFilter]], PropertyInfo(alias="columnFilters")]
+
+ exclude_row_id_list: Annotated[Optional[Iterable[int]], PropertyInfo(alias="excludeRowIdList")]
+
+ not_search_query_and: Annotated[Optional[SequenceNotStr[str]], PropertyInfo(alias="notSearchQueryAnd")]
+
+ not_search_query_or: Annotated[Optional[SequenceNotStr[str]], PropertyInfo(alias="notSearchQueryOr")]
+
+ row_id_list: Annotated[Optional[Iterable[int]], PropertyInfo(alias="rowIdList")]
+
+ search_query_and: Annotated[Optional[SequenceNotStr[str]], PropertyInfo(alias="searchQueryAnd")]
+
+ search_query_or: Annotated[Optional[SequenceNotStr[str]], PropertyInfo(alias="searchQueryOr")]
+
+
+class ColumnFilterSetColumnFilter(TypedDict, total=False):
+ measurement: Required[str]
+ """The name of the column."""
+
+ operator: Required[Literal["contains_none", "contains_any", "contains_all", "one_of", "none_of"]]
+
+ value: Required[SequenceNotStr[Union[str, float]]]
+
+
+class ColumnFilterNumericColumnFilter(TypedDict, total=False):
+ measurement: Required[str]
+ """The name of the column."""
+
+ operator: Required[Literal[">", ">=", "is", "<", "<=", "!="]]
+
+ value: Required[Optional[float]]
+
+
+class ColumnFilterStringColumnFilter(TypedDict, total=False):
+ measurement: Required[str]
+ """The name of the column."""
+
+ operator: Required[Literal["is", "!="]]
+
+ value: Required[Union[str, bool]]
+
+
+ColumnFilter: TypeAlias = Union[
+ ColumnFilterSetColumnFilter, ColumnFilterNumericColumnFilter, ColumnFilterStringColumnFilter
+]
diff --git a/src/openlayer/types/inference_pipeline_retrieve_sessions_response.py b/src/openlayer/types/inference_pipeline_retrieve_sessions_response.py
new file mode 100644
index 00000000..7c07cd73
--- /dev/null
+++ b/src/openlayer/types/inference_pipeline_retrieve_sessions_response.py
@@ -0,0 +1,53 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Dict, List
+from datetime import datetime
+
+from pydantic import Field as FieldInfo
+
+from .._models import BaseModel
+
+__all__ = ["InferencePipelineRetrieveSessionsResponse", "Item"]
+
+
+class Item(BaseModel):
+ id: str
+ """The unique session identifier"""
+
+ cost: float
+ """Total cost for the session"""
+
+ date_created: datetime = FieldInfo(alias="dateCreated")
+ """Latest/most recent timestamp in the session"""
+
+ date_of_first_record: datetime = FieldInfo(alias="dateOfFirstRecord")
+ """Timestamp of the first request in the session"""
+
+ date_of_last_record: datetime = FieldInfo(alias="dateOfLastRecord")
+ """Timestamp of the last request in the session"""
+
+ duration: float
+ """Duration between first and last request (in milliseconds)"""
+
+ first_record: Dict[str, object] = FieldInfo(alias="firstRecord")
+ """The complete first record in the session"""
+
+ last_record: Dict[str, object] = FieldInfo(alias="lastRecord")
+ """The complete last record in the session"""
+
+ latency: float
+ """Total latency for the session (in milliseconds)"""
+
+ records: int
+ """Total number of records/traces in the session"""
+
+ tokens: float
+ """Total token count for the session"""
+
+ user_ids: List[str] = FieldInfo(alias="userIds")
+ """List of unique user IDs that participated in this session"""
+
+
+class InferencePipelineRetrieveSessionsResponse(BaseModel):
+ items: List[Item]
+ """Array of session aggregation data"""
diff --git a/src/openlayer/types/inference_pipeline_retrieve_users_params.py b/src/openlayer/types/inference_pipeline_retrieve_users_params.py
index b14f5eac..2a394ca7 100644
--- a/src/openlayer/types/inference_pipeline_retrieve_users_params.py
+++ b/src/openlayer/types/inference_pipeline_retrieve_users_params.py
@@ -2,16 +2,76 @@
from __future__ import annotations
-from typing_extensions import Annotated, TypedDict
+from typing import Union, Iterable, Optional
+from typing_extensions import Literal, Required, Annotated, TypeAlias, TypedDict
+from .._types import SequenceNotStr
from .._utils import PropertyInfo
-__all__ = ["InferencePipelineRetrieveUsersParams"]
+__all__ = [
+ "InferencePipelineRetrieveUsersParams",
+ "ColumnFilter",
+ "ColumnFilterSetColumnFilter",
+ "ColumnFilterNumericColumnFilter",
+ "ColumnFilterStringColumnFilter",
+]
class InferencePipelineRetrieveUsersParams(TypedDict, total=False):
+ asc: bool
+ """Whether or not to sort on the sortColumn in ascending order."""
+
page: int
"""The page to return in a paginated query."""
per_page: Annotated[int, PropertyInfo(alias="perPage")]
"""Maximum number of items to return per page."""
+
+ sort_column: Annotated[str, PropertyInfo(alias="sortColumn")]
+ """Name of the column to sort on"""
+
+ column_filters: Annotated[Optional[Iterable[ColumnFilter]], PropertyInfo(alias="columnFilters")]
+
+ exclude_row_id_list: Annotated[Optional[Iterable[int]], PropertyInfo(alias="excludeRowIdList")]
+
+ not_search_query_and: Annotated[Optional[SequenceNotStr[str]], PropertyInfo(alias="notSearchQueryAnd")]
+
+ not_search_query_or: Annotated[Optional[SequenceNotStr[str]], PropertyInfo(alias="notSearchQueryOr")]
+
+ row_id_list: Annotated[Optional[Iterable[int]], PropertyInfo(alias="rowIdList")]
+
+ search_query_and: Annotated[Optional[SequenceNotStr[str]], PropertyInfo(alias="searchQueryAnd")]
+
+ search_query_or: Annotated[Optional[SequenceNotStr[str]], PropertyInfo(alias="searchQueryOr")]
+
+
+class ColumnFilterSetColumnFilter(TypedDict, total=False):
+ measurement: Required[str]
+ """The name of the column."""
+
+ operator: Required[Literal["contains_none", "contains_any", "contains_all", "one_of", "none_of"]]
+
+ value: Required[SequenceNotStr[Union[str, float]]]
+
+
+class ColumnFilterNumericColumnFilter(TypedDict, total=False):
+ measurement: Required[str]
+ """The name of the column."""
+
+ operator: Required[Literal[">", ">=", "is", "<", "<=", "!="]]
+
+ value: Required[Optional[float]]
+
+
+class ColumnFilterStringColumnFilter(TypedDict, total=False):
+ measurement: Required[str]
+ """The name of the column."""
+
+ operator: Required[Literal["is", "!="]]
+
+ value: Required[Union[str, bool]]
+
+
+ColumnFilter: TypeAlias = Union[
+ ColumnFilterSetColumnFilter, ColumnFilterNumericColumnFilter, ColumnFilterStringColumnFilter
+]
diff --git a/tests/api_resources/test_inference_pipelines.py b/tests/api_resources/test_inference_pipelines.py
index 1d95fa9c..9872ef72 100644
--- a/tests/api_resources/test_inference_pipelines.py
+++ b/tests/api_resources/test_inference_pipelines.py
@@ -13,6 +13,7 @@
InferencePipelineUpdateResponse,
InferencePipelineRetrieveResponse,
InferencePipelineRetrieveUsersResponse,
+ InferencePipelineRetrieveSessionsResponse,
)
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -153,6 +154,68 @@ def test_path_params_delete(self, client: Openlayer) -> None:
"",
)
+ @parametrize
+ def test_method_retrieve_sessions(self, client: Openlayer) -> None:
+ inference_pipeline = client.inference_pipelines.retrieve_sessions(
+ inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ )
+ assert_matches_type(InferencePipelineRetrieveSessionsResponse, inference_pipeline, path=["response"])
+
+ @parametrize
+ def test_method_retrieve_sessions_with_all_params(self, client: Openlayer) -> None:
+ inference_pipeline = client.inference_pipelines.retrieve_sessions(
+ inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ asc=True,
+ page=1,
+ per_page=1,
+ sort_column="sortColumn",
+ column_filters=[
+ {
+ "measurement": "openlayer_token_set",
+ "operator": "contains_none",
+ "value": ["cat"],
+ }
+ ],
+ exclude_row_id_list=[0],
+ not_search_query_and=["string"],
+ not_search_query_or=["string"],
+ row_id_list=[0],
+ search_query_and=["string"],
+ search_query_or=["string"],
+ )
+ assert_matches_type(InferencePipelineRetrieveSessionsResponse, inference_pipeline, path=["response"])
+
+ @parametrize
+ def test_raw_response_retrieve_sessions(self, client: Openlayer) -> None:
+ response = client.inference_pipelines.with_raw_response.retrieve_sessions(
+ inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ inference_pipeline = response.parse()
+ assert_matches_type(InferencePipelineRetrieveSessionsResponse, inference_pipeline, path=["response"])
+
+ @parametrize
+ def test_streaming_response_retrieve_sessions(self, client: Openlayer) -> None:
+ with client.inference_pipelines.with_streaming_response.retrieve_sessions(
+ inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ inference_pipeline = response.parse()
+ assert_matches_type(InferencePipelineRetrieveSessionsResponse, inference_pipeline, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ def test_path_params_retrieve_sessions(self, client: Openlayer) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `inference_pipeline_id` but received ''"):
+ client.inference_pipelines.with_raw_response.retrieve_sessions(
+ inference_pipeline_id="",
+ )
+
@parametrize
def test_method_retrieve_users(self, client: Openlayer) -> None:
inference_pipeline = client.inference_pipelines.retrieve_users(
@@ -164,8 +227,23 @@ def test_method_retrieve_users(self, client: Openlayer) -> None:
def test_method_retrieve_users_with_all_params(self, client: Openlayer) -> None:
inference_pipeline = client.inference_pipelines.retrieve_users(
inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ asc=True,
page=1,
per_page=1,
+ sort_column="sortColumn",
+ column_filters=[
+ {
+ "measurement": "openlayer_token_set",
+ "operator": "contains_none",
+ "value": ["cat"],
+ }
+ ],
+ exclude_row_id_list=[0],
+ not_search_query_and=["string"],
+ not_search_query_or=["string"],
+ row_id_list=[0],
+ search_query_and=["string"],
+ search_query_or=["string"],
)
assert_matches_type(InferencePipelineRetrieveUsersResponse, inference_pipeline, path=["response"])
@@ -338,6 +416,68 @@ async def test_path_params_delete(self, async_client: AsyncOpenlayer) -> None:
"",
)
+ @parametrize
+ async def test_method_retrieve_sessions(self, async_client: AsyncOpenlayer) -> None:
+ inference_pipeline = await async_client.inference_pipelines.retrieve_sessions(
+ inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ )
+ assert_matches_type(InferencePipelineRetrieveSessionsResponse, inference_pipeline, path=["response"])
+
+ @parametrize
+ async def test_method_retrieve_sessions_with_all_params(self, async_client: AsyncOpenlayer) -> None:
+ inference_pipeline = await async_client.inference_pipelines.retrieve_sessions(
+ inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ asc=True,
+ page=1,
+ per_page=1,
+ sort_column="sortColumn",
+ column_filters=[
+ {
+ "measurement": "openlayer_token_set",
+ "operator": "contains_none",
+ "value": ["cat"],
+ }
+ ],
+ exclude_row_id_list=[0],
+ not_search_query_and=["string"],
+ not_search_query_or=["string"],
+ row_id_list=[0],
+ search_query_and=["string"],
+ search_query_or=["string"],
+ )
+ assert_matches_type(InferencePipelineRetrieveSessionsResponse, inference_pipeline, path=["response"])
+
+ @parametrize
+ async def test_raw_response_retrieve_sessions(self, async_client: AsyncOpenlayer) -> None:
+ response = await async_client.inference_pipelines.with_raw_response.retrieve_sessions(
+ inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ inference_pipeline = await response.parse()
+ assert_matches_type(InferencePipelineRetrieveSessionsResponse, inference_pipeline, path=["response"])
+
+ @parametrize
+ async def test_streaming_response_retrieve_sessions(self, async_client: AsyncOpenlayer) -> None:
+ async with async_client.inference_pipelines.with_streaming_response.retrieve_sessions(
+ inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ inference_pipeline = await response.parse()
+ assert_matches_type(InferencePipelineRetrieveSessionsResponse, inference_pipeline, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ async def test_path_params_retrieve_sessions(self, async_client: AsyncOpenlayer) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `inference_pipeline_id` but received ''"):
+ await async_client.inference_pipelines.with_raw_response.retrieve_sessions(
+ inference_pipeline_id="",
+ )
+
@parametrize
async def test_method_retrieve_users(self, async_client: AsyncOpenlayer) -> None:
inference_pipeline = await async_client.inference_pipelines.retrieve_users(
@@ -349,8 +489,23 @@ async def test_method_retrieve_users(self, async_client: AsyncOpenlayer) -> None
async def test_method_retrieve_users_with_all_params(self, async_client: AsyncOpenlayer) -> None:
inference_pipeline = await async_client.inference_pipelines.retrieve_users(
inference_pipeline_id="182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ asc=True,
page=1,
per_page=1,
+ sort_column="sortColumn",
+ column_filters=[
+ {
+ "measurement": "openlayer_token_set",
+ "operator": "contains_none",
+ "value": ["cat"],
+ }
+ ],
+ exclude_row_id_list=[0],
+ not_search_query_and=["string"],
+ not_search_query_or=["string"],
+ row_id_list=[0],
+ search_query_and=["string"],
+ search_query_or=["string"],
)
assert_matches_type(InferencePipelineRetrieveUsersResponse, inference_pipeline, path=["response"])
From 9f310531b0a6a79dfa1328ba2be0827cb80522d9 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
<142633134+stainless-app[bot]@users.noreply.github.com>
Date: Wed, 1 Apr 2026 22:14:42 +0000
Subject: [PATCH 2/2] release: 0.23.0
---
.release-please-manifest.json | 2 +-
CHANGELOG.md | 8 ++++++++
pyproject.toml | 2 +-
src/openlayer/_version.py | 2 +-
4 files changed, 11 insertions(+), 3 deletions(-)
diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index cb9d2541..7f3f5c84 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
{
- ".": "0.22.0"
+ ".": "0.23.0"
}
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 81f70284..d23def51 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,14 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
+## 0.23.0 (2026-04-01)
+
+Full Changelog: [v0.22.0...v0.23.0](https://github.com/openlayer-ai/openlayer-python/compare/v0.22.0...v0.23.0)
+
+### Features
+
+* Cid/fetching endpoints ([d5fa2c2](https://github.com/openlayer-ai/openlayer-python/commit/d5fa2c238b72262b13e9c0d121dc61155d3eec30))
+
## 0.22.0 (2026-04-01)
Full Changelog: [v0.21.0...v0.22.0](https://github.com/openlayer-ai/openlayer-python/compare/v0.21.0...v0.22.0)
diff --git a/pyproject.toml b/pyproject.toml
index e2557d0f..965aa42d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name = "openlayer"
-version = "0.22.0"
+version = "0.23.0"
description = "The official Python library for the openlayer API"
dynamic = ["readme"]
license = "Apache-2.0"
diff --git a/src/openlayer/_version.py b/src/openlayer/_version.py
index 92451487..115db620 100644
--- a/src/openlayer/_version.py
+++ b/src/openlayer/_version.py
@@ -1,4 +1,4 @@
# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
__title__ = "openlayer"
-__version__ = "0.22.0" # x-release-please-version
+__version__ = "0.23.0" # x-release-please-version