18 changes: 16 additions & 2 deletions .env_example
@@ -19,6 +19,9 @@ PLATFORM_OPENAI_CHAT_GPT4O_MODEL="gpt-4o"
AZURE_OPENAI_GPT4O_ENDPOINT="https://xxxx.openai.azure.com/openai/v1"
AZURE_OPENAI_GPT4O_KEY="xxxxx"
AZURE_OPENAI_GPT4O_MODEL="deployment-name"
# Since the deployment name may be custom and differ from the actual underlying model,
# you can specify the underlying model here for identification purposes
AZURE_OPENAI_GPT4O_UNDERLYING_MODEL="gpt-4o"

AZURE_OPENAI_INTEGRATION_TEST_ENDPOINT="https://xxxxx.openai.azure.com/openai/v1"
AZURE_OPENAI_INTEGRATION_TEST_KEY="xxxxx"
@@ -59,6 +62,9 @@ DEFAULT_OPENAI_FRONTEND_MODEL = "gpt-4o"
OPENAI_CHAT_ENDPOINT=${PLATFORM_OPENAI_CHAT_ENDPOINT}
OPENAI_CHAT_KEY=${PLATFORM_OPENAI_CHAT_API_KEY}
OPENAI_CHAT_MODEL=${PLATFORM_OPENAI_CHAT_GPT4O_MODEL}
# The following line can be populated if using an Azure OpenAI deployment
# where the deployment name differs from the actual underlying model
OPENAI_CHAT_UNDERLYING_MODEL=""

##################################
# OPENAI RESPONSES TARGET SECRETS
@@ -80,6 +86,7 @@ AZURE_OPENAI_RESPONSES_MODEL="o4-mini"
OPENAI_RESPONSES_ENDPOINT=${PLATFORM_OPENAI_RESPONSES_ENDPOINT}
OPENAI_RESPONSES_KEY=${PLATFORM_OPENAI_RESPONSES_KEY}
OPENAI_RESPONSES_MODEL=${PLATFORM_OPENAI_RESPONSES_MODEL}
OPENAI_RESPONSES_UNDERLYING_MODEL=""

##################################
# OPENAI REALTIME TARGET SECRETS
@@ -99,6 +106,7 @@ AZURE_OPENAI_REALTIME_MODEL = "gpt-4o-realtime-preview"
OPENAI_REALTIME_ENDPOINT = ${PLATFORM_OPENAI_REALTIME_ENDPOINT}
OPENAI_REALTIME_API_KEY = ${PLATFORM_OPENAI_REALTIME_API_KEY}
OPENAI_REALTIME_MODEL = ${PLATFORM_OPENAI_REALTIME_MODEL}
OPENAI_REALTIME_UNDERLYING_MODEL = ""

##################################
# IMAGE TARGET SECRETS
@@ -115,8 +123,10 @@ OPENAI_IMAGE_ENDPOINT2 = "https://api.openai.com/v1"
OPENAI_IMAGE_API_KEY2 = "sk-xxxxx"
OPENAI_IMAGE_MODEL2 = "dall-e-3"

OPENAI_IMAGE_ENDPOINT = ${OPENAI_IMAGE_ENDPOINT2}
OPENAI_IMAGE_ENDPOINT = ${OPENAI_IMAGE_ENDPOINT2}
OPENAI_IMAGE_API_KEY = ${OPENAI_IMAGE_API_KEY2}
OPENAI_IMAGE_MODEL = ${OPENAI_IMAGE_MODEL2}
OPENAI_IMAGE_UNDERLYING_MODEL = ""


##################################
@@ -136,6 +146,8 @@ OPENAI_TTS_MODEL2 = "tts-1"

OPENAI_TTS_ENDPOINT = ${OPENAI_TTS_ENDPOINT2}
OPENAI_TTS_KEY = ${OPENAI_TTS_KEY2}
OPENAI_TTS_MODEL = ${OPENAI_TTS_MODEL2}
OPENAI_TTS_UNDERLYING_MODEL = ""

##################################
# VIDEO TARGET SECRETS
@@ -147,10 +159,12 @@ OPENAI_TTS_KEY = ${OPENAI_TTS_KEY2}
# Note: Use the base URL without API path
AZURE_OPENAI_VIDEO_ENDPOINT="https://xxxxx.cognitiveservices.azure.com/openai/v1"
AZURE_OPENAI_VIDEO_KEY="xxxxxxx"
AZURE_OPENAI_VIDEO_MODEL="sora-2"

OPENAI_VIDEO_ENDPOINT = ${AZURE_OPENAI_VIDEO_ENDPOINT}
OPENAI_VIDEO_KEY = ${AZURE_OPENAI_VIDEO_KEY}
OPENAI_VIDEO_MODEL = "sora-2"
OPENAI_VIDEO_MODEL = ${AZURE_OPENAI_VIDEO_MODEL}
OPENAI_VIDEO_UNDERLYING_MODEL = ""


##################################
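For context on how these new `*_UNDERLYING_MODEL` variables are meant to be consumed, here is a minimal, hypothetical sketch. It assumes the `OPENAI_CHAT_*` values above are set in `.env` with an Azure deployment name, and that PyRIT memory has already been initialized; the exact identifier keys come from `PromptTarget.get_identifier` further down in this diff.

```python
# Hypothetical sketch only; assumes .env contains, e.g.:
#   OPENAI_CHAT_MODEL="my-gpt4o-deployment"       (Azure deployment name)
#   OPENAI_CHAT_UNDERLYING_MODEL="gpt-4o"         (actual model behind it)
# and that PyRIT memory has been initialized (e.g. initialize_pyrit(memory_db_type=IN_MEMORY)).
from pyrit.prompt_target import OpenAIChatTarget

target = OpenAIChatTarget()  # endpoint, key, model, and underlying model come from .env

# With the underlying model configured, the identifier reports the real model
# rather than the deployment name:
print(target.get_identifier()["model_name"])  # expected: "gpt-4o"
```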
11 changes: 10 additions & 1 deletion pyrit/prompt_target/common/prompt_chat_target.py
@@ -25,6 +25,7 @@ def __init__(
max_requests_per_minute: Optional[int] = None,
endpoint: str = "",
model_name: str = "",
underlying_model: Optional[str] = None,
) -> None:
"""
Initialize the PromptChatTarget.
@@ -33,8 +34,16 @@ def __init__(
max_requests_per_minute (int, Optional): Maximum number of requests per minute.
endpoint (str): The endpoint URL. Defaults to empty string.
model_name (str): The model name. Defaults to empty string.
underlying_model (str, Optional): The underlying model name (e.g., "gpt-4o") for
identification purposes. This is useful when the deployment name in Azure differs
from the actual model. Defaults to None.
"""
super().__init__(max_requests_per_minute=max_requests_per_minute, endpoint=endpoint, model_name=model_name)
super().__init__(
max_requests_per_minute=max_requests_per_minute,
endpoint=endpoint,
model_name=model_name,
underlying_model=underlying_model,
)

def set_system_prompt(
self,
17 changes: 16 additions & 1 deletion pyrit/prompt_target/common/prompt_target.py
@@ -31,6 +31,7 @@ def __init__(
max_requests_per_minute: Optional[int] = None,
endpoint: str = "",
model_name: str = "",
underlying_model: Optional[str] = None,
) -> None:
"""
Initialize the PromptTarget.
@@ -40,12 +41,17 @@ def __init__(
max_requests_per_minute (int, Optional): Maximum number of requests per minute.
endpoint (str): The endpoint URL. Defaults to empty string.
model_name (str): The model name. Defaults to empty string.
underlying_model (str, Optional): The underlying model name (e.g., "gpt-4o") for
identification purposes. This is useful when the deployment name in Azure differs
from the actual model. If not provided, `model_name` will be used for the identifier.
Defaults to None.
"""
self._memory = CentralMemory.get_memory_instance()
self._verbose = verbose
self._max_requests_per_minute = max_requests_per_minute
self._endpoint = endpoint
self._model_name = model_name
self._underlying_model = underlying_model

if self._verbose:
logging.basicConfig(level=logging.INFO)
@@ -94,13 +100,22 @@ def get_identifier(self) -> Dict[str, Any]:

Returns:
Dict[str, Any]: A dictionary containing identification attributes.

Note:
If the underlying model is specified, whether passed at instantiation or via an environment variable,
it is used as the model_name for the identifier. Otherwise, self._model_name (which is often the
deployment name in Azure) is used.
"""
public_attributes: Dict[str, Any] = {}
public_attributes["__type__"] = self.__class__.__name__
public_attributes["__module__"] = self.__class__.__module__
if self._endpoint:
public_attributes["endpoint"] = self._endpoint
if self._model_name:
# if the underlying model is specified, use it as the model name for identification
# otherwise, use self._model_name (which is often the deployment name in Azure)
if self._underlying_model:
public_attributes["model_name"] = self._underlying_model
elif self._model_name:
public_attributes["model_name"] = self._model_name
# Include temperature and top_p if available (set by subclasses)
if hasattr(self, "_temperature") and self._temperature is not None:
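The precedence the updated `get_identifier` establishes can be summarized in a small standalone sketch; `resolve_identifier_model` below is illustrative only and not part of the PR.

```python
from typing import Dict, Optional


def resolve_identifier_model(model_name: str = "", underlying_model: Optional[str] = None) -> Dict[str, str]:
    """Illustrative helper mirroring the model_name precedence in PromptTarget.get_identifier."""
    attributes: Dict[str, str] = {}
    if underlying_model:
        # The actual model (e.g. "gpt-4o") wins over an Azure deployment name.
        attributes["model_name"] = underlying_model
    elif model_name:
        attributes["model_name"] = model_name
    return attributes


# Azure deployment whose name differs from the model it serves:
assert resolve_identifier_model("my-gpt4o-deployment", "gpt-4o") == {"model_name": "gpt-4o"}
# No underlying model configured: fall back to model_name, matching previous behavior:
assert resolve_identifier_model("gpt-4o") == {"model_name": "gpt-4o"}
```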
10 changes: 7 additions & 3 deletions pyrit/prompt_target/openai/openai_chat_target.py
@@ -36,8 +36,7 @@ class OpenAIChatTarget(OpenAITarget, PromptChatTarget):
Args:
api_key (str): The api key for the OpenAI API
endpoint (str): The endpoint for the OpenAI API
model_name (str): The model name for the OpenAI API
deployment_name (str): For Azure, the deployment name
model_name (str): The model name for the OpenAI API (or deployment name in Azure)
temperature (float): The temperature for the completion
max_completion_tokens (int): The maximum number of tokens to be returned by the model.
The total length of input tokens and generated tokens is limited by
@@ -149,10 +148,15 @@ def __init__(
self._n = n
self._extra_body_parameters = extra_body_parameters

def _set_openai_env_configuration_vars(self):
def _set_openai_env_configuration_vars(self) -> None:
"""
Set model_name_environment_variable, endpoint_environment_variable, api_key_environment_variable,
and underlying_model_environment_variable, which are read from the .env file.
"""
self.model_name_environment_variable = "OPENAI_CHAT_MODEL"
self.endpoint_environment_variable = "OPENAI_CHAT_ENDPOINT"
self.api_key_environment_variable = "OPENAI_CHAT_KEY"
self.underlying_model_environment_variable = "OPENAI_CHAT_UNDERLYING_MODEL"

def _get_target_api_paths(self) -> list[str]:
"""Return API paths that should not be in the URL."""
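An explicit-argument variant, again as a hedged sketch: the endpoint, key, and deployment values below are placeholders, and `underlying_model` is the keyword this change threads from `OpenAIChatTarget` through `OpenAITarget` into `PromptChatTarget`.

```python
# Placeholder credentials; assumes PyRIT memory has already been initialized.
from pyrit.prompt_target import OpenAIChatTarget

target = OpenAIChatTarget(
    endpoint="https://xxxx.openai.azure.com/openai/v1",
    api_key="xxxxx",
    model_name="my-gpt4o-deployment",  # Azure deployment name used for API calls
    underlying_model="gpt-4o",         # actual model recorded in the identifier
)
assert target.get_identifier()["model_name"] == "gpt-4o"
```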
3 changes: 2 additions & 1 deletion pyrit/prompt_target/openai/openai_completion_target.py
@@ -31,7 +31,7 @@ def __init__(
Initialize the OpenAICompletionTarget with the given parameters.

Args:
model_name (str, Optional): The name of the model.
model_name (str, Optional): The name of the model (or deployment name in Azure).
If no value is provided, the OPENAI_COMPLETION_MODEL environment variable will be used.
endpoint (str, Optional): The target URL for the OpenAI service.
api_key (str | Callable[[], str], Optional): The API key for accessing the OpenAI service,
@@ -75,6 +75,7 @@ def _set_openai_env_configuration_vars(self):
self.model_name_environment_variable = "OPENAI_COMPLETION_MODEL"
self.endpoint_environment_variable = "OPENAI_COMPLETION_ENDPOINT"
self.api_key_environment_variable = "OPENAI_COMPLETION_API_KEY"
self.underlying_model_environment_variable = "OPENAI_COMPLETION_UNDERLYING_MODEL"

def _get_target_api_paths(self) -> list[str]:
"""Return API paths that should not be in the URL."""
3 changes: 2 additions & 1 deletion pyrit/prompt_target/openai/openai_image_target.py
@@ -33,7 +33,7 @@ def __init__(
Initialize the image target with specified parameters.

Args:
model_name (str, Optional): The name of the model.
model_name (str, Optional): The name of the model (or deployment name in Azure).
If no value is provided, the OPENAI_IMAGE_MODEL environment variable will be used.
endpoint (str, Optional): The target URL for the OpenAI service.
api_key (str | Callable[[], str], Optional): The API key for accessing the OpenAI service,
@@ -72,6 +72,7 @@ def _set_openai_env_configuration_vars(self):
self.model_name_environment_variable = "OPENAI_IMAGE_MODEL"
self.endpoint_environment_variable = "OPENAI_IMAGE_ENDPOINT"
self.api_key_environment_variable = "OPENAI_IMAGE_API_KEY"
self.underlying_model_environment_variable = "OPENAI_IMAGE_UNDERLYING_MODEL"

def _get_target_api_paths(self) -> list[str]:
"""Return API paths that should not be in the URL."""
3 changes: 2 additions & 1 deletion pyrit/prompt_target/openai/openai_realtime_target.py
@@ -72,7 +72,7 @@ def __init__(
Initialize the Realtime target with specified parameters.

Args:
model_name (str, Optional): The name of the model.
model_name (str, Optional): The name of the model (or deployment name in Azure).
If no value is provided, the OPENAI_REALTIME_MODEL environment variable will be used.
endpoint (str, Optional): The target URL for the OpenAI service.
Defaults to the `OPENAI_REALTIME_ENDPOINT` environment variable.
@@ -101,6 +101,7 @@ def _set_openai_env_configuration_vars(self):
self.model_name_environment_variable = "OPENAI_REALTIME_MODEL"
self.endpoint_environment_variable = "OPENAI_REALTIME_ENDPOINT"
self.api_key_environment_variable = "OPENAI_REALTIME_API_KEY"
self.underlying_model_environment_variable = "OPENAI_REALTIME_UNDERLYING_MODEL"

def _get_target_api_paths(self) -> list[str]:
"""Return API paths that should not be in the URL."""
3 changes: 2 additions & 1 deletion pyrit/prompt_target/openai/openai_response_target.py
@@ -84,7 +84,7 @@ def __init__(

Args:
custom_functions: Mapping of user-defined function names (e.g., "my_func").
model_name (str, Optional): The name of the model.
model_name (str, Optional): The name of the model (or deployment name in Azure).
If no value is provided, the OPENAI_RESPONSES_MODEL environment variable will be used.
endpoint (str, Optional): The target URL for the OpenAI service.
api_key (str, Optional): The API key for accessing the Azure OpenAI service.
@@ -161,6 +161,7 @@ def _set_openai_env_configuration_vars(self):
self.model_name_environment_variable = "OPENAI_RESPONSES_MODEL"
self.endpoint_environment_variable = "OPENAI_RESPONSES_ENDPOINT"
self.api_key_environment_variable = "OPENAI_RESPONSES_KEY"
self.underlying_model_environment_variable = "OPENAI_RESPONSES_UNDERLYING_MODEL"

def _get_target_api_paths(self) -> list[str]:
"""Return API paths that should not be in the URL."""
20 changes: 18 additions & 2 deletions pyrit/prompt_target/openai/openai_target.py
@@ -53,6 +53,7 @@ class OpenAITarget(PromptChatTarget):
model_name_environment_variable: str
endpoint_environment_variable: str
api_key_environment_variable: str
underlying_model_environment_variable: str

_async_client: Optional[AsyncOpenAI] = None

@@ -65,12 +66,13 @@ def __init__(
headers: Optional[str] = None,
max_requests_per_minute: Optional[int] = None,
httpx_client_kwargs: Optional[dict] = None,
underlying_model: Optional[str] = None,
) -> None:
"""
Initialize an instance of OpenAITarget.

Args:
model_name (str, Optional): The name of the model.
model_name (str, Optional): The name of the model (or deployment name in Azure).
If no value is provided, the environment variable will be used (set by subclass).
endpoint (str, Optional): The target URL for the OpenAI service.
api_key (str | Callable[[], str], Optional): The API key for accessing the OpenAI service,
@@ -83,6 +85,11 @@ def __init__(
will be capped at the value provided.
httpx_client_kwargs (dict, Optional): Additional kwargs to be passed to the
`httpx.AsyncClient()` constructor.
underlying_model (str, Optional): The underlying model name (e.g., "gpt-4o") used solely for
target identifier purposes. This is useful when the deployment name in Azure differs
from the actual model. If not provided, the value is read from the corresponding environment
variable set by the subclass; if that is also unset, the identifier's "model_name" attribute
falls back to model_name.
Defaults to None.
"""
self._headers: dict = {}
self._httpx_client_kwargs = httpx_client_kwargs or {}
@@ -103,9 +110,18 @@ def __init__(
env_var_name=self.endpoint_environment_variable, passed_value=endpoint
)

# Get underlying_model from passed value or environment variable
underlying_model_value = default_values.get_non_required_value(
env_var_name=self.underlying_model_environment_variable, passed_value=underlying_model
)

# Initialize parent with endpoint, model_name, and underlying_model
PromptChatTarget.__init__(
self, max_requests_per_minute=max_requests_per_minute, endpoint=endpoint_value, model_name=self._model_name
self,
max_requests_per_minute=max_requests_per_minute,
endpoint=endpoint_value,
model_name=self._model_name,
underlying_model=underlying_model_value,
)

# API key is required - either from parameter or environment variable
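The resolution order introduced in `OpenAITarget.__init__` (explicit argument, then the subclass-specific environment variable, then nothing) behaves roughly like the sketch below. `resolve_underlying_model` is a stand-in for `default_values.get_non_required_value`, assuming that helper prefers the passed value and otherwise falls back to the environment.

```python
import os
from typing import Optional


def resolve_underlying_model(passed_value: Optional[str], env_var_name: str) -> Optional[str]:
    """Illustrative stand-in for default_values.get_non_required_value as used here."""
    # Explicit constructor argument wins; otherwise fall back to the subclass-specific
    # environment variable; if neither is set, get_identifier keeps using model_name.
    return passed_value or os.environ.get(env_var_name) or None


os.environ["OPENAI_CHAT_UNDERLYING_MODEL"] = "gpt-4o"
# Explicit argument takes precedence over the environment variable:
assert resolve_underlying_model("gpt-4o-mini", "OPENAI_CHAT_UNDERLYING_MODEL") == "gpt-4o-mini"
# With no argument, the environment variable supplies the value:
assert resolve_underlying_model(None, "OPENAI_CHAT_UNDERLYING_MODEL") == "gpt-4o"
```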
3 changes: 2 additions & 1 deletion pyrit/prompt_target/openai/openai_tts_target.py
@@ -37,7 +37,7 @@ def __init__(
Initialize the TTS target with specified parameters.

Args:
model_name (str, Optional): The name of the model.
model_name (str, Optional): The name of the model (or deployment name in Azure).
If no value is provided, the OPENAI_TTS_MODEL environment variable will be used.
endpoint (str, Optional): The target URL for the OpenAI service.
api_key (str | Callable[[], str], Optional): The API key for accessing the OpenAI service,
@@ -67,6 +67,7 @@ def _set_openai_env_configuration_vars(self):
self.model_name_environment_variable = "OPENAI_TTS_MODEL"
self.endpoint_environment_variable = "OPENAI_TTS_ENDPOINT"
self.api_key_environment_variable = "OPENAI_TTS_KEY"
self.underlying_model_environment_variable = "OPENAI_TTS_UNDERLYING_MODEL"

def _get_target_api_paths(self) -> list[str]:
"""Return API paths that should not be in the URL."""
6 changes: 4 additions & 2 deletions pyrit/prompt_target/openai/openai_video_target.py
@@ -48,8 +48,9 @@ def __init__(
Initialize the OpenAI Video Target.

Args:
model_name (str, Optional): The video model to use (e.g., "sora-2", "sora-2-pro").
If no value is provided, the OPENAI_VIDEO_MODEL environment variable will be used.
model_name (str, Optional): The video model to use (e.g., "sora-2", "sora-2-pro"),
or the deployment name in Azure. If no value is provided, the OPENAI_VIDEO_MODEL
environment variable will be used.
endpoint (str, Optional): The target URL for the OpenAI service.
api_key (str | Callable[[], str], Optional): The API key for accessing the OpenAI service,
or a callable that returns an access token. For Azure endpoints with Entra authentication,
@@ -80,6 +81,7 @@ def _set_openai_env_configuration_vars(self) -> None:
self.model_name_environment_variable = "OPENAI_VIDEO_MODEL"
self.endpoint_environment_variable = "OPENAI_VIDEO_ENDPOINT"
self.api_key_environment_variable = "OPENAI_VIDEO_KEY"
self.underlying_model_environment_variable = "OPENAI_VIDEO_UNDERLYING_MODEL"

def _get_target_api_paths(self) -> list[str]:
"""Return API paths that should not be in the URL."""