Skip to content

Commit f6e2258

Browse files
Arsh Zahed
authored and committed
Typos, type error
1 parent c17a405 commit f6e2258

File tree

3 files changed

+15
-15
lines changed

3 files changed

+15
-15
lines changed

src/together/cli/api/finetune.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -87,13 +87,13 @@ def fine_tuning(ctx: click.Context) -> None:
8787
"--num-cycles",
8888
type=float,
8989
default=0.5,
90-
help="Number of cycles for cosine learning rate scheduler.",
90+
help="Number of cycles for the cosine learning rate scheduler.",
9191
)
9292
@click.option(
9393
"--warmup-ratio",
9494
type=float,
9595
default=0.0,
96-
help="Warmup ratio for learning rate scheduler.",
96+
help="Warmup ratio for the learning rate scheduler.",
9797
)
9898
@click.option(
9999
"--max-grad-norm",

src/together/resources/finetune.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -298,8 +298,8 @@ def create(
298298
lr_scheduler_type (Literal["linear", "cosine"]): Learning rate scheduler type. Defaults to "linear".
299299
min_lr_ratio (float, optional): Min learning rate ratio of the initial learning rate for
300300
the learning rate scheduler. Defaults to 0.0.
301-
num_cycles (float, optional): Number of cycles for cosine learning rate scheduler. Defaults to 0.5.
302-
warmup_ratio (float, optional): Warmup ratio for learning rate scheduler.
301+
num_cycles (float, optional): Number of cycles for the cosine learning rate scheduler. Defaults to 0.5.
302+
warmup_ratio (float, optional): Warmup ratio for the learning rate scheduler.
303303
max_grad_norm (float, optional): Max gradient norm. Defaults to 1.0, set to 0 to disable.
304304
weight_decay (float, optional): Weight decay. Defaults to 0.0.
305305
lora (bool, optional): Whether to use LoRA adapters. Defaults to True.
@@ -677,8 +677,8 @@ async def create(
677677
lr_scheduler_type (Literal["linear", "cosine"]): Learning rate scheduler type. Defaults to "linear".
678678
min_lr_ratio (float, optional): Min learning rate ratio of the initial learning rate for
679679
the learning rate scheduler. Defaults to 0.0.
680-
num_cycles (float, optional): Number of cycles for cosine learning rate scheduler. Defaults to 0.5.
681-
warmup_ratio (float, optional): Warmup ratio for learning rate scheduler.
680+
num_cycles (float, optional): Number of cycles for the cosine learning rate scheduler. Defaults to 0.5.
681+
warmup_ratio (float, optional): Warmup ratio for the learning rate scheduler.
682682
max_grad_norm (float, optional): Max gradient norm. Defaults to 1.0, set to 0 to disable.
683683
weight_decay (float, optional): Weight decay. Defaults to 0.0.
684684
lora (bool, optional): Whether to use LoRA adapters. Defaults to True.

src/together/types/finetune.py

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -370,28 +370,28 @@ class FinetuneLRScheduler(BaseModel):
370370

371371
@field_validator("lr_scheduler_type")
372372
@classmethod
373-
def validate_scheduler_type(cls, v: str) -> str:
374-
if v not in LRSchedulerTypeToArgs:
373+
def validate_scheduler_type(cls, scheduler_type: str) -> str:
374+
if scheduler_type not in LRSchedulerTypeToArgs:
375375
raise ValueError(
376376
f"Scheduler type must be one of: {LRSchedulerTypeToArgs.keys()}"
377377
)
378-
return v
378+
return scheduler_type
379379

380380
@field_validator("lr_scheduler_args")
381381
@classmethod
382382
def validate_scheduler_args(
383-
cls, v: FinetuneLRSchedulerArgs, info: ValidationInfo
383+
cls, args: FinetuneLRSchedulerArgs, info: ValidationInfo
384384
) -> FinetuneLRSchedulerArgs:
385385
scheduler_type = str(info.data.get("lr_scheduler_type"))
386386

387-
if v is None:
388-
return v
387+
if args is None:
388+
return args
389389

390390
expected_type = LRSchedulerTypeToArgs[scheduler_type]
391-
if not isinstance(v, expected_type):
392-
raise ValueError(f"Expected {expected_type}, got {type(v)}")
391+
if not isinstance(args, expected_type):
392+
raise TypeError(f"Expected {expected_type}, got {type(args)}")
393393

394-
return v
394+
return args
395395

396396

397397
class FinetuneCheckpoint(BaseModel):

0 commit comments

Comments (0)