Moving config from ADS to UI for unverified models #976

Merged: 20 commits, Oct 29, 2024
Changes from 12 commits
3 changes: 1 addition & 2 deletions ads/aqua/extension/finetune_handler.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2024 Oracle and/or its affiliates.
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/

@@ -9,8 +8,8 @@
from tornado.web import HTTPError

from ads.aqua.common.decorator import handle_exceptions
from ads.aqua.extension.errors import Errors
from ads.aqua.extension.base_handler import AquaAPIhandler
from ads.aqua.extension.errors import Errors
from ads.aqua.extension.utils import validate_function_parameters
from ads.aqua.finetuning import AquaFineTuningApp
from ads.aqua.finetuning.entities import CreateFineTuningDetails
9 changes: 6 additions & 3 deletions ads/aqua/extension/ui_handler.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2024 Oracle and/or its affiliates.
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/

@@ -10,8 +9,8 @@

from ads.aqua.common.decorator import handle_exceptions
from ads.aqua.common.enums import Tags
from ads.aqua.extension.errors import Errors
from ads.aqua.extension.base_handler import AquaAPIhandler
from ads.aqua.extension.errors import Errors
from ads.aqua.extension.utils import validate_function_parameters
from ads.aqua.model.entities import ImportModelDetails
from ads.aqua.ui import AquaUIApp
@@ -180,10 +179,14 @@ def get_shape_availability(self, **kwargs):
with the given limit."""
compartment_id = self.get_argument("compartment_id", default=COMPARTMENT_OCID)
instance_shape = self.get_argument("instance_shape")
limit_name = self.get_argument("limit_name")

return self.finish(
AquaUIApp().get_shape_availability(
compartment_id=compartment_id, instance_shape=instance_shape, **kwargs
compartment_id=compartment_id,
instance_shape=instance_shape,
limit_name=limit_name,
**kwargs,
)
)
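
To illustrate the new required argument, here is a minimal sketch of the equivalent direct call to the app layer, using the import path and keyword arguments shown in the diff above (all values are placeholders, not part of this PR):

```python
from ads.aqua.ui import AquaUIApp  # same import path as in ui_handler.py above

# Placeholder values for illustration only; a real call needs a valid
# compartment OCID, instance shape, and OCI service limit name.
availability = AquaUIApp().get_shape_availability(
    compartment_id="ocid1.compartment.oc1..example",
    instance_shape="VM.GPU.A10.2",
    limit_name="gpu-a10-count",  # new argument surfaced by this handler change
)
```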

9 changes: 5 additions & 4 deletions ads/aqua/finetuning/entities.py
@@ -1,5 +1,4 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2024 Oracle and/or its affiliates.
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/
from dataclasses import dataclass, field
@@ -14,16 +13,18 @@ class AquaFineTuningParams(DataClassSerializable):
epochs: int
learning_rate: Optional[float] = None
sample_packing: Optional[bool] = "auto"
batch_size: Optional[
int
] = None # make it batch_size for user, but internally this is micro_batch_size
batch_size: Optional[int] = (
None # make it batch_size for user, but internally this is micro_batch_size
)
sequence_len: Optional[int] = None
pad_to_sequence_len: Optional[bool] = None
lora_r: Optional[int] = None
lora_alpha: Optional[int] = None
lora_dropout: Optional[float] = None
lora_target_linear: Optional[bool] = None
lora_target_modules: Optional[List] = None
early_stopping_patience: Optional[int] = None
early_stopping_threshold: Optional[float] = None


@dataclass(repr=False)
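As a usage illustration, a minimal sketch of populating the new early-stopping fields, assuming the dataclass is constructed directly with keyword arguments as the field list above suggests (all values are placeholders):

```python
from ads.aqua.finetuning.entities import AquaFineTuningParams  # module shown in this diff

# Placeholder values; per the field list, only `epochs` is required.
params = AquaFineTuningParams(
    epochs=3,
    learning_rate=2e-4,
    batch_size=1,                   # exposed as batch_size, used internally as micro_batch_size
    early_stopping_patience=3,      # new field added in this PR
    early_stopping_threshold=0.01,  # new field added in this PR
)
```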
21 changes: 13 additions & 8 deletions ads/aqua/finetuning/finetuning.py
@@ -1,11 +1,10 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2024 Oracle and/or its affiliates.
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/

import json
import os
from dataclasses import asdict, fields, MISSING
from dataclasses import MISSING, asdict, fields
from typing import Dict

from oci.data_science.models import (
@@ -14,7 +13,7 @@
UpdateModelProvenanceDetails,
)

from ads.aqua import ODSC_MODEL_COMPARTMENT_OCID, logger
from ads.aqua import logger
from ads.aqua.app import AquaApp
from ads.aqua.common.enums import Resource, Tags
from ads.aqua.common.errors import AquaFileExistsError, AquaValueError
@@ -31,7 +30,6 @@
UNKNOWN,
UNKNOWN_DICT,
)
from ads.aqua.config.config import get_finetuning_config_defaults
from ads.aqua.data import AquaResourceIdentifier
from ads.aqua.finetuning.constants import *
from ads.aqua.finetuning.entities import *
@@ -132,7 +130,7 @@ def create(
or create_fine_tuning_details.validation_set_size >= 1
):
raise AquaValueError(
f"Fine tuning validation set size should be a float number in between [0, 1)."
"Fine tuning validation set size should be a float number in between [0, 1)."
)

if create_fine_tuning_details.replica < DEFAULT_FT_REPLICA:
@@ -394,7 +392,7 @@ def create(
)
# track shapes that were used for fine-tune creation
self.telemetry.record_event_async(
category=f"aqua/service/finetune/create/shape/",
category="aqua/service/finetune/create/shape/",
action=f"{create_fine_tuning_details.shape_name}x{create_fine_tuning_details.replica}",
**telemetry_kwargs,
)
@@ -533,6 +531,12 @@ def _build_oci_launch_cmd(
oci_launch_cmd += f"--num_{key} {value} "
elif key == "lora_target_modules":
oci_launch_cmd += f"--{key} {','.join(str(k) for k in value)} "
elif key == "early_stopping_patience":
if value != 0:
oci_launch_cmd += f"--{key} {value} "
elif key == "early_stopping_threshold":
if "early_stopping_patience" in oci_launch_cmd:
oci_launch_cmd += f"--{key} {value} "
else:
oci_launch_cmd += f"--{key} {value} "
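
The two new branches make the early-stopping flags conditional: `--early_stopping_patience` is only emitted when the value is non-zero, and `--early_stopping_threshold` only when the patience flag is already present in the command. A simplified, self-contained sketch of just that behavior (not the actual ADS helper; handling of all other parameters is omitted, and the real code relies on the parameters being iterated in declaration order so patience is processed before threshold):

```python
def build_early_stopping_flags(params: dict) -> str:
    """Mimic the conditional flag logic added to _build_oci_launch_cmd."""
    cmd = ""
    patience = params.get("early_stopping_patience")
    if patience:  # zero (or missing) patience suppresses the flag
        cmd += f"--early_stopping_patience {patience} "
    threshold = params.get("early_stopping_threshold")
    # threshold is only meaningful if the patience flag made it into the command
    if threshold is not None and "early_stopping_patience" in cmd:
        cmd += f"--early_stopping_threshold {threshold} "
    return cmd


print(build_early_stopping_flags({"early_stopping_patience": 0, "early_stopping_threshold": 0.01}))
# -> "" (both flags suppressed)
print(build_early_stopping_flags({"early_stopping_patience": 3, "early_stopping_threshold": 0.01}))
# -> "--early_stopping_patience 3 --early_stopping_threshold 0.01 "
```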

@@ -558,8 +562,9 @@ def get_finetuning_config(self, model_id: str) -> Dict:

config = self.get_config(model_id, AQUA_MODEL_FINETUNING_CONFIG)
if not config:
logger.info(f"Fetching default fine-tuning config for model: {model_id}")
config = get_finetuning_config_defaults()
logger.info(
f"default fine-tuning config will be used for model: {model_id}"
)
return config

Member commented:
This info message can be misleading since a user might see this in the ads logs and expect some default to be returned, but the defaults are applied outside of ads. Maybe a better phrasing would be "Fine-tuning config for custom model {model_id} is not available." We could also use logger.debug instead; the user need not see this message every time.
@mrDzurb thoughts?

Member Author replied:
Updated
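
Per the review discussion above, when no model-specific fine-tuning config exists, ADS now returns an empty result and the defaults are applied outside of ADS, in the UI. A hypothetical caller-side sketch of that contract (the OCID is a placeholder, `UI_FINETUNING_DEFAULTS` is an illustrative name that is not part of this PR, and it assumes `get_finetuning_config` is exposed on `AquaFineTuningApp` as the diff suggests):

```python
from ads.aqua.finetuning import AquaFineTuningApp  # import path used by the handler above

# Illustrative stand-in for whatever defaults the UI now owns; not defined in ADS.
UI_FINETUNING_DEFAULTS: dict = {}

config = AquaFineTuningApp().get_finetuning_config(
    model_id="ocid1.datasciencemodel.oc1..example"  # placeholder OCID
)
if not config:
    # For unverified/custom models ADS only logs a message and returns an
    # empty config; the UI layer is now responsible for its own defaults.
    config = UI_FINETUNING_DEFAULTS
```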

@telemetry(