31 changes: 31 additions & 0 deletions google/genai/tests/tunings/test_tune.py
@@ -65,6 +65,37 @@
),
exception_if_mldev="is not supported in Gemini API",
),
pytest_helper.TestTableItem(
name="test_tune_simple_dpo",
parameters=genai_types.CreateTuningJobParameters(
base_model="gemini-2.5-flash",
training_dataset=genai_types.TuningDataset(
gcs_uri="gs://cloud-samples-data/ai-platform/generative_ai/gemini-1_5/text/sft_train_data.jsonl",
),
config=genai_types.CreateTuningJobConfig(
tuned_model_display_name="Model display name",
epoch_count=1,
method="PREFERENCE_TUNING",
),
),
exception_if_mldev="parameter is not supported in Gemini API.",
),
pytest_helper.TestTableItem(
name="test_tune_dpo_with_beta",
parameters=genai_types.CreateTuningJobParameters(
base_model="gemini-2.5-flash",
training_dataset=genai_types.TuningDataset(
gcs_uri="gs://cloud-samples-data/ai-platform/generative_ai/gemini-1_5/text/sft_train_data.jsonl",
),
config=genai_types.CreateTuningJobConfig(
tuned_model_display_name="Model display name",
epoch_count=1,
method=genai_types.TuningMethod.PREFERENCE_TUNING,
beta=0.5,
),
),
exception_if_mldev="parameter is not supported in Gemini API.",
),
pytest_helper.TestTableItem(
name="test_non_pretuned_model_with_checkpoint_id",
parameters=genai_types.CreateTuningJobParameters(
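For orientation, a minimal sketch of the user-facing call these two table entries exercise (the client construction, project, and location are placeholder assumptions, not part of this diff):

from google import genai
from google.genai import types as genai_types

# Vertex AI client; project and location are placeholders.
client = genai.Client(vertexai=True, project="my-project", location="us-central1")

# Mirrors the test_tune_dpo_with_beta table entry above.
tuning_job = client.tunings.tune(
    base_model="gemini-2.5-flash",
    training_dataset=genai_types.TuningDataset(
        gcs_uri="gs://cloud-samples-data/ai-platform/generative_ai/gemini-1_5/text/sft_train_data.jsonl",
    ),
    config=genai_types.CreateTuningJobConfig(
        tuned_model_display_name="Model display name",
        epoch_count=1,
        method=genai_types.TuningMethod.PREFERENCE_TUNING,  # DPO path added here
        beta=0.5,  # DPO hyperparameter added here
    ),
)
print(tuning_job.name)
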
242 changes: 180 additions & 62 deletions google/genai/tunings.py
@@ -128,6 +128,9 @@ def _CreateTuningJobConfig_to_mldev(
if getv(from_object, ['labels']) is not None:
raise ValueError('labels parameter is not supported in Gemini API.')

if getv(from_object, ['beta']) is not None:
raise ValueError('beta parameter is not supported in Gemini API.')

return to_object


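On the Gemini Developer API path this guard makes the new hyperparameter fail fast on the client; a sketch of the observable behavior (the non-Vertex client and the placeholder dataset are assumptions):

from google import genai
from google.genai import types as genai_types

# Gemini Developer API client, i.e. no vertexai=True; the key is a placeholder.
client = genai.Client(api_key="YOUR_API_KEY")

try:
    client.tunings.tune(
        base_model="gemini-2.5-flash",
        training_dataset=genai_types.TuningDataset(examples=[]),  # placeholder
        config=genai_types.CreateTuningJobConfig(beta=0.5),
    )
except ValueError as err:
    print(err)  # beta parameter is not supported in Gemini API.
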
@@ -138,14 +141,28 @@ def _CreateTuningJobConfig_to_vertex(
) -> dict[str, Any]:
to_object: dict[str, Any] = {}

if getv(from_object, ['validation_dataset']) is not None:
setv(
parent_object,
['supervisedTuningSpec'],
_TuningValidationDataset_to_vertex(
getv(from_object, ['validation_dataset']), to_object, root_object
),
)
discriminator = getv(root_object, ['config', 'method'])
if discriminator is None:
discriminator = 'SUPERVISED_FINE_TUNING'
if discriminator == 'SUPERVISED_FINE_TUNING':
if getv(from_object, ['validation_dataset']) is not None:
setv(
parent_object,
['supervisedTuningSpec'],
_TuningValidationDataset_to_vertex(
getv(from_object, ['validation_dataset']), to_object, root_object
),
)

elif discriminator == 'PREFERENCE_TUNING':
if getv(from_object, ['validation_dataset']) is not None:
setv(
parent_object,
['preferenceOptimizationSpec'],
_TuningValidationDataset_to_vertex(
getv(from_object, ['validation_dataset']), to_object, root_object
),
)

if getv(from_object, ['tuned_model_display_name']) is not None:
setv(
@@ -157,52 +174,125 @@ def _CreateTuningJobConfig_to_vertex(
if getv(from_object, ['description']) is not None:
setv(parent_object, ['description'], getv(from_object, ['description']))

if getv(from_object, ['epoch_count']) is not None:
setv(
parent_object,
['supervisedTuningSpec', 'hyperParameters', 'epochCount'],
getv(from_object, ['epoch_count']),
)
discriminator = getv(root_object, ['config', 'method'])
if discriminator is None:
discriminator = 'SUPERVISED_FINE_TUNING'
if discriminator == 'SUPERVISED_FINE_TUNING':
if getv(from_object, ['epoch_count']) is not None:
setv(
parent_object,
['supervisedTuningSpec', 'hyperParameters', 'epochCount'],
getv(from_object, ['epoch_count']),
)

if getv(from_object, ['learning_rate_multiplier']) is not None:
setv(
parent_object,
['supervisedTuningSpec', 'hyperParameters', 'learningRateMultiplier'],
getv(from_object, ['learning_rate_multiplier']),
)
elif discriminator == 'PREFERENCE_TUNING':
if getv(from_object, ['epoch_count']) is not None:
setv(
parent_object,
['preferenceOptimizationSpec', 'hyperParameters', 'epochCount'],
getv(from_object, ['epoch_count']),
)

if getv(from_object, ['export_last_checkpoint_only']) is not None:
setv(
parent_object,
['supervisedTuningSpec', 'exportLastCheckpointOnly'],
getv(from_object, ['export_last_checkpoint_only']),
)
discriminator = getv(root_object, ['config', 'method'])
if discriminator is None:
discriminator = 'SUPERVISED_FINE_TUNING'
if discriminator == 'SUPERVISED_FINE_TUNING':
if getv(from_object, ['learning_rate_multiplier']) is not None:
setv(
parent_object,
['supervisedTuningSpec', 'hyperParameters', 'learningRateMultiplier'],
getv(from_object, ['learning_rate_multiplier']),
)

if getv(from_object, ['adapter_size']) is not None:
setv(
parent_object,
['supervisedTuningSpec', 'hyperParameters', 'adapterSize'],
getv(from_object, ['adapter_size']),
)
elif discriminator == 'PREFERENCE_TUNING':
if getv(from_object, ['learning_rate_multiplier']) is not None:
setv(
parent_object,
[
'preferenceOptimizationSpec',
'hyperParameters',
'learningRateMultiplier',
],
getv(from_object, ['learning_rate_multiplier']),
)

discriminator = getv(root_object, ['config', 'method'])
if discriminator is None:
discriminator = 'SUPERVISED_FINE_TUNING'
if discriminator == 'SUPERVISED_FINE_TUNING':
if getv(from_object, ['export_last_checkpoint_only']) is not None:
setv(
parent_object,
['supervisedTuningSpec', 'exportLastCheckpointOnly'],
getv(from_object, ['export_last_checkpoint_only']),
)

elif discriminator == 'PREFERENCE_TUNING':
if getv(from_object, ['export_last_checkpoint_only']) is not None:
setv(
parent_object,
['preferenceOptimizationSpec', 'exportLastCheckpointOnly'],
getv(from_object, ['export_last_checkpoint_only']),
)

discriminator = getv(root_object, ['config', 'method'])
if discriminator is None:
discriminator = 'SUPERVISED_FINE_TUNING'
if discriminator == 'SUPERVISED_FINE_TUNING':
if getv(from_object, ['adapter_size']) is not None:
setv(
parent_object,
['supervisedTuningSpec', 'hyperParameters', 'adapterSize'],
getv(from_object, ['adapter_size']),
)

elif discriminator == 'PREFERENCE_TUNING':
if getv(from_object, ['adapter_size']) is not None:
setv(
parent_object,
['preferenceOptimizationSpec', 'hyperParameters', 'adapterSize'],
getv(from_object, ['adapter_size']),
)

if getv(from_object, ['batch_size']) is not None:
raise ValueError('batch_size parameter is not supported in Vertex AI.')

if getv(from_object, ['learning_rate']) is not None:
raise ValueError('learning_rate parameter is not supported in Vertex AI.')

if getv(from_object, ['evaluation_config']) is not None:
setv(
parent_object,
['supervisedTuningSpec', 'evaluationConfig'],
_EvaluationConfig_to_vertex(
getv(from_object, ['evaluation_config']), to_object, root_object
),
)
discriminator = getv(root_object, ['config', 'method'])
if discriminator is None:
discriminator = 'SUPERVISED_FINE_TUNING'
if discriminator == 'SUPERVISED_FINE_TUNING':
if getv(from_object, ['evaluation_config']) is not None:
setv(
parent_object,
['supervisedTuningSpec', 'evaluationConfig'],
_EvaluationConfig_to_vertex(
getv(from_object, ['evaluation_config']), to_object, root_object
),
)

elif discriminator == 'PREFERENCE_TUNING':
if getv(from_object, ['evaluation_config']) is not None:
setv(
parent_object,
['preferenceOptimizationSpec', 'evaluationConfig'],
_EvaluationConfig_to_vertex(
getv(from_object, ['evaluation_config']), to_object, root_object
),
)

if getv(from_object, ['labels']) is not None:
setv(parent_object, ['labels'], getv(from_object, ['labels']))

if getv(from_object, ['beta']) is not None:
setv(
parent_object,
['preferenceOptimizationSpec', 'hyperParameters', 'beta'],
getv(from_object, ['beta']),
)

return to_object


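Taken together, the repeated discriminator blocks route every hyperparameter and dataset field under a single spec key per method; a sketch of the Vertex request body this converter would assemble for the preference-tuning config from the tests above (reconstructed from the setv() paths in this diff, not captured from a live request):

# method == 'PREFERENCE_TUNING'; the bucket path is a placeholder.
expected_body = {
    "tunedModelDisplayName": "Model display name",
    "preferenceOptimizationSpec": {
        "trainingDatasetUri": "gs://bucket/preference_data.jsonl",
        "hyperParameters": {
            "epochCount": 1,
            "beta": 0.5,
        },
    },
}
# With method unset or 'SUPERVISED_FINE_TUNING', the same fields land under
# 'supervisedTuningSpec' instead, preserving the pre-change behavior.
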
@@ -219,12 +309,8 @@ def _CreateTuningJobParametersPrivate_to_mldev(
setv(to_object, ['preTunedModel'], getv(from_object, ['pre_tuned_model']))

if getv(from_object, ['training_dataset']) is not None:
setv(
to_object,
['tuningTask', 'trainingData'],
_TuningDataset_to_mldev(
getv(from_object, ['training_dataset']), to_object, root_object
),
_TuningDataset_to_mldev(
getv(from_object, ['training_dataset']), to_object, root_object
)

if getv(from_object, ['config']) is not None:
@@ -501,19 +587,44 @@ def _TuningDataset_to_vertex(
root_object: Optional[Union[dict[str, Any], object]] = None,
) -> dict[str, Any]:
to_object: dict[str, Any] = {}
if getv(from_object, ['gcs_uri']) is not None:
setv(
parent_object,
['supervisedTuningSpec', 'trainingDatasetUri'],
getv(from_object, ['gcs_uri']),
)

if getv(from_object, ['vertex_dataset_resource']) is not None:
setv(
parent_object,
['supervisedTuningSpec', 'trainingDatasetUri'],
getv(from_object, ['vertex_dataset_resource']),
)
discriminator = getv(root_object, ['config', 'method'])
if discriminator is None:
discriminator = 'SUPERVISED_FINE_TUNING'
if discriminator == 'SUPERVISED_FINE_TUNING':
if getv(from_object, ['gcs_uri']) is not None:
setv(
parent_object,
['supervisedTuningSpec', 'trainingDatasetUri'],
getv(from_object, ['gcs_uri']),
)

elif discriminator == 'PREFERENCE_TUNING':
if getv(from_object, ['gcs_uri']) is not None:
setv(
parent_object,
['preferenceOptimizationSpec', 'trainingDatasetUri'],
getv(from_object, ['gcs_uri']),
)

discriminator = getv(root_object, ['config', 'method'])
if discriminator is None:
discriminator = 'SUPERVISED_FINE_TUNING'
if discriminator == 'SUPERVISED_FINE_TUNING':
if getv(from_object, ['vertex_dataset_resource']) is not None:
setv(
parent_object,
['supervisedTuningSpec', 'trainingDatasetUri'],
getv(from_object, ['vertex_dataset_resource']),
)

elif discriminator == 'PREFERENCE_TUNING':
if getv(from_object, ['vertex_dataset_resource']) is not None:
setv(
parent_object,
['preferenceOptimizationSpec', 'trainingDatasetUri'],
getv(from_object, ['vertex_dataset_resource']),
)

if getv(from_object, ['examples']) is not None:
raise ValueError('examples parameter is not supported in Vertex AI.')
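The same discriminator drives dataset routing; a minimal sketch of exercising the private converter directly (internal helper; the positional signature is inferred from the call sites in this diff):

parent: dict = {}
_TuningDataset_to_vertex(
    {"gcs_uri": "gs://bucket/preference_data.jsonl"},  # from_object (placeholder URI)
    parent,                                            # parent_object
    {"config": {"method": "PREFERENCE_TUNING"}},       # root_object
)
# parent now holds:
# {'preferenceOptimizationSpec':
#      {'trainingDatasetUri': 'gs://bucket/preference_data.jsonl'}}
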
@@ -635,6 +746,13 @@ def _TuningJob_from_vertex(
getv(from_object, ['supervisedTuningSpec']),
)

if getv(from_object, ['preferenceOptimizationSpec']) is not None:
setv(
to_object,
['preference_optimization_spec'],
getv(from_object, ['preferenceOptimizationSpec']),
)

if getv(from_object, ['tuningDataStats']) is not None:
setv(
to_object, ['tuning_data_stats'], getv(from_object, ['tuningDataStats'])
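Reading a job back, the response converter now mirrors the new spec onto the SDK object; a short sketch (client and tuning_job are reused from the first sketch above; the attribute name follows the setv() path in this hunk):

job = client.tunings.get(name=tuning_job.name)
if job.preference_optimization_spec is not None:
    print(job.preference_optimization_spec)  # populated for DPO jobs
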
@@ -950,7 +1068,7 @@ def _tune(
training_dataset: types.TuningDatasetOrDict,
config: Optional[types.CreateTuningJobConfigOrDict] = None,
) -> types.TuningJob:
"""Creates a supervised fine-tuning job and returns the TuningJob object.
"""Creates a tuning job and returns the TuningJob object.

Args:
base_model: The name of the model to tune.
@@ -1023,7 +1141,7 @@ def _tune_mldev(
training_dataset: types.TuningDatasetOrDict,
config: Optional[types.CreateTuningJobConfigOrDict] = None,
) -> types.TuningOperation:
"""Creates a supervised fine-tuning job and returns the TuningJob object.
"""Creates a tuning job and returns the TuningJob object.

Args:
base_model: The name of the model to tune.
@@ -1419,7 +1537,7 @@ async def _tune(
training_dataset: types.TuningDatasetOrDict,
config: Optional[types.CreateTuningJobConfigOrDict] = None,
) -> types.TuningJob:
"""Creates a supervised fine-tuning job and returns the TuningJob object.
"""Creates a tuning job and returns the TuningJob object.

Args:
base_model: The name of the model to tune.
@@ -1492,7 +1610,7 @@ async def _tune_mldev(
training_dataset: types.TuningDatasetOrDict,
config: Optional[types.CreateTuningJobConfigOrDict] = None,
) -> types.TuningOperation:
"""Creates a supervised fine-tuning job and returns the TuningJob object.
"""Creates a tuning job and returns the TuningJob object.

Args:
base_model: The name of the model to tune.