Clients#

class unify.clients.AsyncUnify(endpoint=None, model=None, provider=None, api_key=None)#

Bases: object

Class for interacting asynchronously with the Unify API.

__init__(endpoint=None, model=None, provider=None, api_key=None)#

Initialize the AsyncUnify client.

Args:

endpoint (str, optional): Endpoint name in OpenAI API format: <uploaded_by>/<model_name>@<provider_name>. Defaults to None.

model (str, optional): Name of the model. If None, endpoint must be provided.

provider (str, optional): Name of the provider. If None, endpoint must be provided.

api_key (str, optional): API key for accessing the Unify API. If None, it attempts to retrieve the API key from the environment variable UNIFY_KEY. Defaults to None.

Raises:

UnifyError: If the API key is missing.
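
A minimal construction sketch, assuming the UNIFY_KEY environment variable is set; the endpoint, model, and provider names below are illustrative placeholders rather than guaranteed-available values:

    import os

    from unify.clients import AsyncUnify

    # Option 1: a single endpoint string plus an explicit API key.
    client = AsyncUnify(
        endpoint="llama-2-70b-chat@anyscale",  # placeholder endpoint name
        api_key=os.environ.get("UNIFY_KEY"),
    )

    # Option 2: model and provider given separately; the API key is read
    # from the UNIFY_KEY environment variable.
    client = AsyncUnify(model="llama-2-70b-chat", provider="anyscale")

If neither an explicit api_key nor the UNIFY_KEY environment variable is available, the constructor raises UnifyError.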

property endpoint: str#

Get the endpoint name.

Returns:

str: The endpoint name.

async generate(user_prompt=None, system_prompt=None, messages=None, max_tokens=None, temperature=1.0, stop=None, stream=False)#

Generate content asynchronously using the Unify API.

Return type:

Union[AsyncGenerator[str, None], str]

Args:

user_prompt (Optional[str]): A string containing the user prompt. If provided, messages must be None.

system_prompt (Optional[str]): An optional string containing the system prompt.

messages (List[Dict[str, str]]): A list of dictionaries containing the conversation history. If provided, user_prompt must be None.

max_tokens (Optional[int]): The maximum number of output tokens. Defaults to the provider’s default max_tokens when the value is None.

temperature (Optional[float]): The sampling temperature to use, between 0 and 2. Higher values like 0.8 make the output more random, while lower values like 0.2 make it more focused and deterministic. Defaults to the provider’s default temperature when the value is None.

stop (Optional[List[str]]): Up to 4 sequences where the API will stop generating further tokens.

stream (bool): If True, generates content as a stream. If False, generates content as a single response. Defaults to False.

Returns:

Union[AsyncGenerator[str, None], str]: If stream is True, returns an asynchronous generator yielding chunks of content. If stream is False, returns a single string response.

Raises:

UnifyError: If an error occurs during content generation.
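
A usage sketch based on the documented return types (a single string when stream is False, an asynchronous generator of content chunks when stream is True); the endpoint name is a placeholder:

    import asyncio

    from unify.clients import AsyncUnify

    async def main():
        client = AsyncUnify(endpoint="llama-2-70b-chat@anyscale")  # placeholder endpoint

        # Non-streaming call: the awaited result is a single string.
        reply = await client.generate(
            user_prompt="Summarize what an async generator is in one sentence.",
            system_prompt="You are a concise assistant.",
            max_tokens=128,
        )
        print(reply)

        # Streaming call: the awaited result is an async generator of content chunks.
        stream = await client.generate(
            user_prompt="Write a haiku about event loops.",
            stream=True,
        )
        async for chunk in stream:
            print(chunk, end="", flush=True)

    asyncio.run(main())

Note that user_prompt and messages are mutually exclusive: pass one or the other, not both.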

get_credit_balance()#

Get the remaining credits left on your account.

Return type:

Optional[int]

Returns:

int or None: The remaining credits on the account if successful, otherwise None.

Raises:

BadRequestError: If there was an HTTP error.

ValueError: If there was an error parsing the JSON response.
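
A short balance-check sketch, reusing the client constructed above and following the documented int-or-None return:

    balance = client.get_credit_balance()
    if balance is None:
        print("Could not retrieve the credit balance.")
    else:
        print(f"Remaining credits: {balance}")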

property model: str#

Get the model name.

Returns:

str: The model name.

property provider: str | None#

Get the provider name.

Returns:

str or None: The provider name.

set_endpoint(value)#

Set the endpoint name.

Return type:

None

Args:

value (str): The endpoint name.

set_model(value)#

Set the model name.

Return type:

None

Args:

value (str): The model name.

set_provider(value)#

Set the provider name.

Return type:

None

Args:

value (str): The provider name.
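
The setters make it possible to repoint an existing client instead of constructing a new one. A sketch reusing the client from the examples above; all names are placeholders:

    # Switch the whole endpoint in one call...
    client.set_endpoint("mistral-7b-instruct@together-ai")  # placeholder endpoint

    # ...or adjust the model and provider individually.
    client.set_model("mistral-7b-instruct")  # placeholder model name
    client.set_provider("together-ai")       # placeholder provider name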

class unify.clients.Unify(endpoint=None, model=None, provider=None, api_key=None)#

Bases: object

Class for interacting with the Unify API.

__init__(endpoint=None, model=None, provider=None, api_key=None)#

Initialize the Unify client.

Args:

endpoint (str, optional): Endpoint name in OpenAI API format: <uploaded_by>/<model_name>@<provider_name>. Defaults to None.

model (str, optional): Name of the model. If None, endpoint must be provided.

provider (str, optional): Name of the provider. If None, endpoint must be provided.

api_key (str, optional): API key for accessing the Unify API. If None, it attempts to retrieve the API key from the environment variable UNIFY_KEY. Defaults to None.

Raises:

UnifyError: If the API key is missing.

property endpoint: str#

Get the endpoint name.

Returns:

str: The endpoint name.

generate(user_prompt=None, system_prompt=None, messages=None, max_tokens=1024, temperature=1.0, stop=None, stream=False)#

Generate content using the Unify API.

Return type:

Union[Generator[str, None, None], str]

Args:

user_prompt (Optional[str]): A string containing the user prompt. If provided, messages must be None.

system_prompt (Optional[str]): An optional string containing the system prompt.

messages (List[Dict[str, str]]): A list of dictionaries containing the conversation history. If provided, user_prompt must be None.

max_tokens (Optional[int]): The maximum number of output tokens. Defaults to the provider’s default max_tokens when the value is None.

temperature (Optional[float]): The sampling temperature to use, between 0 and 2. Higher values like 0.8 make the output more random, while lower values like 0.2 make it more focused and deterministic. Defaults to the provider’s default temperature when the value is None.

stop (Optional[List[str]]): Up to 4 sequences where the API will stop generating further tokens.

stream (bool): If True, generates content as a stream. If False, generates content as a single response. Defaults to False.

Returns:

Union[Generator[str, None, None], str]: If stream is True, returns a generator yielding chunks of content. If stream is False, returns a single string response.

Raises:

UnifyError: If an error occurs during content generation.
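
A synchronous usage sketch mirroring the asynchronous example above; the endpoint name is again a placeholder:

    from unify.clients import Unify

    client = Unify(endpoint="llama-2-70b-chat@anyscale")  # placeholder endpoint

    # Non-streaming call: returns a single string response.
    reply = client.generate(user_prompt="What is retrieval-augmented generation?")
    print(reply)

    # Streaming call: returns a generator yielding chunks of content.
    for chunk in client.generate(user_prompt="Write a limerick about HTTP.", stream=True):
        print(chunk, end="", flush=True)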

get_credit_balance()#

Get the remaining credits left on your account.

Return type:

float

Returns:

float or None: The remaining credits on the account if successful, otherwise None.

Raises:

BadRequestError: If there was an HTTP error.

ValueError: If there was an error parsing the JSON response.

property model: str#

Get the model name.

Returns:

str: The model name.

property provider: str | None#

Get the provider name.

Returns:

str or None: The provider name.

set_endpoint(value)#

Set the endpoint name.

Return type:

None

Args:

value (str): The endpoint name.

set_model(value)#

Set the model name.

Return type:

None

Args:

value (str): The model name.

set_provider(value)#

Set the provider name.

Return type:

None

Args:

value (str): The provider name.
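
Because the setters mutate the client in place, the same Unify instance can be repointed between calls, for example to compare providers for the same prompt. A sketch assuming a client like the one constructed above; the provider names are placeholders:

    prompt = "Explain the difference between latency and throughput."
    for provider in ("anyscale", "together-ai"):  # placeholder provider names
        client.set_provider(provider)
        print(f"--- {provider} ---")
        print(client.generate(user_prompt=prompt, max_tokens=128))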