@@ -301,7 +301,7 @@ def create(
301301 lr_scheduler_type (Literal["linear", "cosine"]): Learning rate scheduler type. Defaults to "linear".
302302 min_lr_ratio (float, optional): Minimum learning rate, as a ratio of the initial learning
303303 rate, for the learning rate scheduler. Defaults to 0.0.
304- num_cycles (float, optional): Number of cycles for the cosine learning rate scheduler. Defaults to 0.5.
304+ num_cycles (float, optional): Number or fraction of cycles for the cosine learning rate scheduler. Defaults to 0.5.
305305 warmup_ratio (float, optional): Warmup ratio for the learning rate scheduler.
306306 max_grad_norm (float, optional): Max gradient norm. Defaults to 1.0; set to 0 to disable.
307307 weight_decay (float, optional): Weight decay. Defaults to 0.0.
@@ -680,7 +680,7 @@ async def create(
680680 lr_scheduler_type (Literal["linear", "cosine"]): Learning rate scheduler type. Defaults to "linear".
681681 min_lr_ratio (float, optional): Minimum learning rate, as a ratio of the initial learning
682682 rate, for the learning rate scheduler. Defaults to 0.0.
683- num_cycles (float, optional): Number of cycles for the cosine learning rate scheduler. Defaults to 0.5.
683+ num_cycles (float, optional): Number or fraction of cycles for the cosine learning rate scheduler. Defaults to 0.5.
684684 warmup_ratio (float, optional): Warmup ratio for the learning rate scheduler.
685685 max_grad_norm (float, optional): Max gradient norm. Defaults to 1.0; set to 0 to disable.
686686 weight_decay (float, optional): Weight decay. Defaults to 0.0.
0 commit comments