added e2e tests for NORMAL and LOG_NORMAL
Signed-off-by: Shashank Mittal <shashank.mittal.mec22@itbhu.ac.in>

sigma calculation fixed

fix

parse new arguments to mnist.py
shashank-iitbhu committed Sep 10, 2024
1 parent fb60028 commit fddb763
Showing 3 changed files with 47 additions and 10 deletions.
21 changes: 21 additions & 0 deletions examples/v1beta1/hp-tuning/hyperopt-distribution.yaml
@@ -28,6 +28,19 @@ spec:
min: "0.5"
max: "0.9"
distribution: "logUniform"
- name: weight_decay
parameterType: double
feasibleSpace:
min: "0.01"
max: "0.05"
distribution: "normal"
- name: dropout_rate
parameterType: double
feasibleSpace:
min: "0.1"
max: "0.5"
step: "0.001"
distribution: "logNormal"
trialTemplate:
primaryContainerName: training-container
trialParameters:
@@ -37,6 +50,12 @@ spec:
       - name: momentum
         description: Momentum for the training model
         reference: momentum
+      - name: weightDecay
+        description: Weight decay for the training model
+        reference: weight_decay
+      - name: dropoutRate
+        description: Dropout rate for the training model
+        reference: dropout_rate
     trialSpec:
       apiVersion: batch/v1
       kind: Job
@@ -53,6 +72,8 @@ spec:
- "--batch-size=16"
- "--lr=${trialParameters.learningRate}"
- "--momentum=${trialParameters.momentum}"
- "--weight-decay=${trialParameters.weightDecay}"
- "--dropout-rate=${trialParameters.dropoutRate}"
resources:
limits:
memory: "1Gi"
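For context, the two new parameters map onto hyperopt distributions via the mu/sigma convention introduced in base_service.py below: the mean sits at the midpoint of the feasible range and sigma is one sixth of its width. A minimal sketch of the resulting search space (illustration only, not part of this commit; the variable names are ours):

import hyperopt

# Mirrors the new base_service.py logic: midpoint mean, range/6 sigma.
mu_wd = (0.01 + 0.05) / 2     # 0.03
sigma_wd = (0.05 - 0.01) / 6  # ~0.00667, so [min, max] covers mu +/- 3*sigma

search_space = {
    "weight_decay": hyperopt.hp.normal("weight_decay", mu_wd, sigma_wd),
    # dropout_rate sets step: "0.001", so the quantized variant is used
    "dropout_rate": hyperopt.hp.qlognormal(
        "dropout_rate", (0.1 + 0.5) / 2, (0.5 - 0.1) / 6, 0.001
    ),
}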
14 changes: 14 additions & 0 deletions examples/v1beta1/trial-images/pytorch-mnist/mnist.py
@@ -150,6 +150,20 @@ def main():
metavar="M",
help="SGD momentum (default: 0.5)",
)
parser.add_argument(
"--weight-decay",
type=float,
default=0.01,
metavar="WD",
help="Weight decay for regularization (default: 0.01)",
)
parser.add_argument(
"--dropout-rate",
type=float,
default=0.5,
metavar="DR",
help="Dropout rate for the model (default: 0.5)",
)
parser.add_argument(
"--no-cuda", action="store_true", default=False, help="disables CUDA training"
)
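The hunk above only registers the new flags; how they feed the optimizer and model is up to the rest of mnist.py. A hedged, self-contained sketch of the typical wiring (the Sequential stand-in network is our assumption, not the script's actual model):

import argparse

import torch
import torch.nn as nn

parser = argparse.ArgumentParser()
parser.add_argument("--lr", type=float, default=0.01)
parser.add_argument("--momentum", type=float, default=0.5)
parser.add_argument("--weight-decay", type=float, default=0.01)
parser.add_argument("--dropout-rate", type=float, default=0.5)
args = parser.parse_args([])  # defaults stand in for Katib-injected values

# Dropout rate configures the model; weight decay is the optimizer's L2 penalty.
model = nn.Sequential(
    nn.Linear(784, 128),
    nn.ReLU(),
    nn.Dropout(p=args.dropout_rate),
    nn.Linear(128, 10),
)
optimizer = torch.optim.SGD(
    model.parameters(),
    lr=args.lr,
    momentum=args.momentum,
    weight_decay=args.weight_decay,
)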
22 changes: 12 additions & 10 deletions pkg/suggestion/v1beta1/hyperopt/base_service.py
@@ -93,34 +93,36 @@ def create_hyperopt_domain(self):
                         param.name, float(param.min), float(param.max)
                     )
                 elif param.distribution == api_pb2.NORMAL:
-                    sigma = 1
+                    mu = (float(param.min) + float(param.max)) / 2
+                    sigma = (float(param.max) - float(param.min)) / 6
                     if param.step:
                         hyperopt_search_space[param.name] = hyperopt.hp.qnormal(
                             param.name,
-                            float((float(param.min) + float(param.max)) / 2),
-                            float(sigma),
+                            mu,
+                            sigma,
                             float(param.step),
                         )
                     else:
                         hyperopt_search_space[param.name] = hyperopt.hp.normal(
                             param.name,
-                            float((float(param.min) + float(param.max)) / 2),
-                            float(sigma),
+                            mu,
+                            sigma,
                         )
                 elif param.distribution == api_pb2.LOG_NORMAL:
-                    sigma = 1
+                    mu = (float(param.min) + float(param.max)) / 2
+                    sigma = (float(param.max) - float(param.min)) / 6
                     if param.step:
                         hyperopt_search_space[param.name] = hyperopt.hp.qlognormal(
                             param.name,
-                            float((float(param.min) + float(param.max)) / 2),
-                            float(sigma),
+                            mu,
+                            sigma,
                             float(param.step),
                         )
                     else:
                         hyperopt_search_space[param.name] = hyperopt.hp.lognormal(
                             param.name,
-                            float((float(param.min) + float(param.max)) / 2),
-                            float(sigma),
+                            mu,
+                            sigma,
                         )
             elif param.type == CATEGORICAL or param.type == DISCRETE:
                 hyperopt_search_space[param.name] = hyperopt.hp.choice(
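A quick sanity check of why the fix matters (illustration only): with the old hard-coded sigma = 1, draws for a narrow range like [0.01, 0.05] would land outside the feasible space almost every time, whereas sigma = (max - min) / 6 puts the range at mu +/- 3*sigma, so roughly 99.7% of samples fall inside it:

from hyperopt import hp
from hyperopt.pyll.stochastic import sample

lo, hi = 0.01, 0.05
mu, sigma = (lo + hi) / 2, (hi - lo) / 6  # 0.03, ~0.00667

draws = [sample(hp.normal("weight_decay", mu, sigma)) for _ in range(10_000)]
inside = sum(lo <= d <= hi for d in draws) / len(draws)
print(f"fraction inside [min, max]: {inside:.3f}")  # ~0.997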
