Utils#

dependency_control#

gptcache.utils.dependency_control.prompt_install(package: str, warn: bool = False)[source]#

Function used to prompt the user to install a package.

cache_func#

gptcache.utils.cache_func.cache_all(*_, **__)[source]#

error#

exception gptcache.utils.error.CacheError[source]#

Bases: Exception

GPTCache base error

exception gptcache.utils.error.NotInitError[source]#

Bases: gptcache.utils.error.CacheError

Raise when the cache has been used before it is initialized.

exception gptcache.utils.error.NotFoundError(store_type, current_type_name)[source]#

Bases: gptcache.utils.error.CacheError

Raise when getting an unsupported store.

exception gptcache.utils.error.ParamError[source]#

Bases: gptcache.utils.error.CacheError

Raise when receiving an invalid param.

exception gptcache.utils.error.PipInstallError(package)[source]#

Bases: gptcache.utils.error.CacheError

Raise when failed to install package.

gptcache.utils.error.wrap_error(e: Exception) → Exception[source]#

Add a type to exception e while ensuring that the original type is not changed.

Example

import openai

from gptcache.utils.error import wrap_error


def raise_error():
    try:
        raise openai.error.OpenAIError(message="test")
    except openai.error.OpenAIError as e:
        raise wrap_error(e)


try:
    raise_error()
except openai.error.OpenAIError as e:
    print("exception:")
    print(e)

print("over")

token#

gptcache.utils.token.token_counter(text)[source]#

Token Counter

response#

gptcache.utils.response.get_message_from_openai_answer(openai_resp)[source]#
gptcache.utils.response.get_stream_message_from_openai_answer(openai_data)[source]#
gptcache.utils.response.get_text_from_openai_answer(openai_resp)[source]#
gptcache.utils.response.get_image_from_openai_b64(openai_resp)[source]#
gptcache.utils.response.get_image_from_openai_url(openai_resp)[source]#
gptcache.utils.response.get_image_from_path(openai_resp)[source]#
gptcache.utils.response.get_audio_text_from_openai_answer(openai_resp)[source]#

lazy_import#

class gptcache.utils.lazy_import.LazyImport(local_name, parent_module_globals, name)[source]#

Bases: module

Lazily import a module.

softmax#

gptcache.utils.softmax.softmax(x: list)[source]#

log#

time#

gptcache.utils.time.time_cal(func, func_name=None, report_func=None)[source]#