dlk.core.schedulers package

Submodules

dlk.core.schedulers.constant module

class dlk.core.schedulers.constant.ConstantSchedule(optimizer: torch.optim.optimizer.Optimizer, config: dlk.core.schedulers.constant.ConstantScheduleConfig)[source]

Bases: dlk.core.schedulers.BaseScheduler

no schedule: keep the learning rate constant

get_scheduler()[source]

return the initialized constant scheduler

Returns

Schedule

class dlk.core.schedulers.constant.ConstantScheduleConfig(config: Dict)[source]

Bases: dlk.utils.config.BaseConfig

Config for ConstantSchedule

Config Example:
>>> {
>>>     "config": {
>>>         "last_epoch": -1
>>>     },
>>>     "_name": "constant",
>>> }
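
Usage sketch (relies only on the documented constructor and get_scheduler API; the model and optimizer are placeholders):
>>> import torch
>>> from dlk.core.schedulers.constant import ConstantSchedule, ConstantScheduleConfig
>>> model = torch.nn.Linear(4, 2)
>>> optimizer = torch.optim.AdamW(model.parameters(), lr=1e-3)
>>> config = ConstantScheduleConfig({"config": {"last_epoch": -1}, "_name": "constant"})
>>> scheduler = ConstantSchedule(optimizer, config).get_scheduler()
>>> optimizer.step()
>>> scheduler.step()  # lr stays at 1e-3 under a constant schedule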

dlk.core.schedulers.constant_warmup module

class dlk.core.schedulers.constant_warmup.ConstantWarmupSchedule(optimizer: torch.optim.optimizer.Optimizer, config: dlk.core.schedulers.constant_warmup.ConstantWarmupScheduleConfig)[source]

Bases: dlk.core.schedulers.BaseScheduler

get_scheduler() torch.optim.lr_scheduler.LambdaLR[source]

return the initialized linear warmup then constant scheduler

Returns

Schedule

class dlk.core.schedulers.constant_warmup.ConstantWarmupScheduleConfig(config: Dict)[source]

Bases: dlk.utils.config.BaseConfig

Config for ConstantWarmupSchedule

Config Example:
>>> {
>>>     "config": {
>>>         "last_epoch": -1,
>>>         "num_warmup_steps": 0,
>>>     },
>>>     "_name": "constant_warmup",
>>> }
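
Usage sketch (the step count is illustrative; only the documented API is used):
>>> import torch
>>> from dlk.core.schedulers.constant_warmup import (
>>>     ConstantWarmupSchedule,
>>>     ConstantWarmupScheduleConfig,
>>> )
>>> model = torch.nn.Linear(4, 2)
>>> optimizer = torch.optim.AdamW(model.parameters(), lr=1e-3)
>>> config = ConstantWarmupScheduleConfig({
>>>     "config": {"last_epoch": -1, "num_warmup_steps": 100},
>>>     "_name": "constant_warmup",
>>> })
>>> scheduler = ConstantWarmupSchedule(optimizer, config).get_scheduler()
>>> for _ in range(100):
>>>     optimizer.step()
>>>     scheduler.step()  # lr ramps linearly to 1e-3, then stays constant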

dlk.core.schedulers.cosine_warmup module

class dlk.core.schedulers.cosine_warmup.CosineWarmupSchedule(optimizer: torch.optim.optimizer.Optimizer, config: dlk.core.schedulers.cosine_warmup.CosineWarmupScheduleConfig)[source]

Bases: dlk.core.schedulers.BaseScheduler

get_scheduler() torch.optim.lr_scheduler.LambdaLR[source]

return the initialized linear warmup then cosine decay scheduler

Returns

Schedule

class dlk.core.schedulers.cosine_warmup.CosineWarmupScheduleConfig(config: Dict)[source]

Bases: dlk.utils.config.BaseConfig

Config for CosineWarmupSchedule

Config Example:
>>> {
>>>     "config": {
>>>         "last_epoch": -1,
>>>         "num_warmup_steps": 0,
>>>         "num_training_steps": -1,
>>>         "num_cycles": 0.5,
>>>     },
>>>     "_name": "cosine_warmup",
>>> }
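
Usage sketch (step counts are illustrative; that num_cycles=0.5 yields a single half-cosine decay to zero is an assumption based on the common formulation, not confirmed by this page):
>>> import torch
>>> from dlk.core.schedulers.cosine_warmup import (
>>>     CosineWarmupSchedule,
>>>     CosineWarmupScheduleConfig,
>>> )
>>> model = torch.nn.Linear(4, 2)
>>> optimizer = torch.optim.AdamW(model.parameters(), lr=1e-3)
>>> config = CosineWarmupScheduleConfig({
>>>     "config": {
>>>         "last_epoch": -1,
>>>         "num_warmup_steps": 100,
>>>         "num_training_steps": 1000,
>>>         "num_cycles": 0.5,
>>>     },
>>>     "_name": "cosine_warmup",
>>> })
>>> scheduler = CosineWarmupSchedule(optimizer, config).get_scheduler()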

dlk.core.schedulers.linear_warmup module

class dlk.core.schedulers.linear_warmup.LinearWarmupSchedule(optimizer: torch.optim.optimizer.Optimizer, config: dlk.core.schedulers.linear_warmup.LinearWarmupScheduleConfig)[source]

Bases: dlk.core.schedulers.BaseScheduler

linear warmup then linear decay

get_scheduler() torch.optim.lr_scheduler.LambdaLR[source]

return the initialized linear warmup then linear decay scheduler

Returns

Schedule

class dlk.core.schedulers.linear_warmup.LinearWarmupScheduleConfig(config: Dict)[source]

Bases: dlk.utils.config.BaseConfig

Config for LinearWarmupSchedule

Config Example:
>>> {
>>>     "config": {
>>>         "last_epoch": -1,
>>>         "num_warmup_steps": 0,
>>>         "num_training_steps": -1,
>>>     },
>>>     "_name": "linear_warmup",
>>> }
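
For reference, a sketch of the standard per-step multiplier for linear warmup followed by linear decay (this mirrors the common formulation; it is not necessarily the exact code inside get_scheduler):
>>> def linear_warmup_then_decay(step, num_warmup_steps, num_training_steps):
>>>     # ramp from 0 to 1 over the warmup steps ...
>>>     if step < num_warmup_steps:
>>>         return step / max(1, num_warmup_steps)
>>>     # ... then decay linearly from 1 to 0 over the remaining steps
>>>     return max(0.0, (num_training_steps - step)
>>>                / max(1, num_training_steps - num_warmup_steps))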

dlk.core.schedulers.multi_group_schedule module

dlk.core.schedulers.rec_decay module

class dlk.core.schedulers.rec_decay.RecDecaySchedule(optimizer: torch.optim.optimizer.Optimizer, config: dlk.core.schedulers.rec_decay.RecDecayScheduleConfig)[source]

Bases: dlk.core.schedulers.BaseScheduler

lr = lr / (1 + decay)

get_scheduler()[source]

return the initialized rec_decay scheduler

lr = lr / (1 + decay)

Returns

Schedule

class dlk.core.schedulers.rec_decay.RecDecayScheduleConfig(config: Dict)[source]

Bases: dlk.utils.config.BaseConfig

Config for RecDecaySchedule

Config Example:
>>> {
>>>     "config": {
>>>         "last_epoch": -1,
>>>         "num_training_steps": -1,
>>>         "decay": 0.05,
>>>         "epoch_training_steps": -1,
>>>     },
>>>     "_name": "rec_decay",
>>> }

where lr = lr / (1 + decay)
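
Usage sketch (step counts are illustrative; decay=0.05 shrinks the learning rate by the factor above):
>>> import torch
>>> from dlk.core.schedulers.rec_decay import RecDecaySchedule, RecDecayScheduleConfig
>>> model = torch.nn.Linear(4, 2)
>>> optimizer = torch.optim.AdamW(model.parameters(), lr=1e-3)
>>> config = RecDecayScheduleConfig({
>>>     "config": {
>>>         "last_epoch": -1,
>>>         "num_training_steps": 1000,
>>>         "decay": 0.05,
>>>         "epoch_training_steps": 100,
>>>     },
>>>     "_name": "rec_decay",
>>> })
>>> scheduler = RecDecaySchedule(optimizer, config).get_scheduler()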

Module contents

schedulers

class dlk.core.schedulers.BaseScheduler[source]

Bases: object

interface for Schedule

get_scheduler() torch.optim.lr_scheduler.LambdaLR[source]

return the initialized scheduler

Returns

Schedule
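
A sketch of implementing the interface in a custom scheduler (the subclass, its constructor, and the halving rule are hypothetical; only BaseScheduler and get_scheduler come from this page):
>>> from torch.optim.lr_scheduler import LambdaLR
>>> from dlk.core.schedulers import BaseScheduler
>>>
>>> class HalvingSchedule(BaseScheduler):  # hypothetical subclass
>>>     def __init__(self, optimizer):
>>>         self.optimizer = optimizer
>>>     def get_scheduler(self) -> LambdaLR:
>>>         # halve the multiplier each epoch: lr * 0.5**epoch (illustrative rule)
>>>         return LambdaLR(self.optimizer, lambda epoch: 0.5 ** epoch)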

dlk.core.schedulers.import_schedulers(schedulers_dir, namespace)[source]
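
Presumably a helper that imports every scheduler module found under schedulers_dir so their classes become available under the given namespace; a hedged call sketch (the argument values are assumptions):
>>> import os
>>> import dlk.core.schedulers as schedulers
>>> schedulers.import_schedulers(os.path.dirname(schedulers.__file__), "dlk.core.schedulers")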