
Commit 8a992e6
fix: incorporate smdebug_ruleconfigs pkg until availability in PyPI (#291)

1 parent: 3d5e180
13 files changed: +503, -6 lines

setup.py (1 addition, 0 deletions)

@@ -102,4 +102,5 @@ def read_version():
     install_requires=required_packages,
     extras_require=extras,
     entry_points={"console_scripts": ["sagemaker=sagemaker.cli.main:main"]},
+    include_package_data=True,  # TODO-reinvent-2019 [knakad]: Remove after rule_configs is in PyPI
 )

src/sagemaker/debugger.py (1 addition, 2 deletions)

@@ -21,8 +21,7 @@
 """
 from __future__ import absolute_import

-# TODO-reinvent-2019 [knakad]: Uncomment this once PyPI integration is complete post-re:Invent-2019
-# import smdebug_rulesconfig as rule_configs  # noqa: F401 # pylint: disable=unused-import
+import smdebug_rulesconfig as rule_configs  # noqa: F401 # pylint: disable=unused-import


 RULES_ECR_REPO_NAME = "sagemaker-debugger-rules"
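
With the vendored package importable, sagemaker.debugger re-exports it as rule_configs. A minimal sketch of the intended call pattern, assuming the Rule.sagemaker helper from sagemaker.debugger; the estimator wiring in the comment is illustrative only:

```python
from sagemaker.debugger import Rule, rule_configs

# Build a built-in debugger rule from its packaged JSON config ("VanishingGradient").
vanishing_gradient_rule = Rule.sagemaker(rule_configs.vanishing_gradient())

# The rule object would then be passed to an estimator, e.g.
# Estimator(..., rules=[vanishing_gradient_rule]).
```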

src/smdebug_rulesconfig/__init__.py (22 additions, 0 deletions)

@@ -0,0 +1,22 @@
+from __future__ import absolute_import
+
+from .builtin_rules import vanishing_gradient  # noqa: F401 # pylint: disable=unused-import
+from .builtin_rules import all_zero  # noqa: F401 # pylint: disable=unused-import
+from .builtin_rules import check_input_images  # noqa: F401 # pylint: disable=unused-import
+from .builtin_rules import similar_across_runs  # noqa: F401 # pylint: disable=unused-import
+from .builtin_rules import weight_update_ratio  # noqa: F401 # pylint: disable=unused-import
+from .builtin_rules import exploding_tensor  # noqa: F401 # pylint: disable=unused-import
+from .builtin_rules import unchanged_tensor  # noqa: F401 # pylint: disable=unused-import
+from .builtin_rules import loss_not_decreasing  # noqa: F401 # pylint: disable=unused-import
+from .builtin_rules import dead_relu  # noqa: F401 # pylint: disable=unused-import
+from .builtin_rules import confusion  # noqa: F401 # pylint: disable=unused-import
+from .builtin_rules import class_imbalance  # noqa: F401 # pylint: disable=unused-import
+from .builtin_rules import overfit  # noqa: F401 # pylint: disable=unused-import
+from .builtin_rules import tree_depth  # noqa: F401 # pylint: disable=unused-import
+from .builtin_rules import tensor_variance  # noqa: F401 # pylint: disable=unused-import
+from .builtin_rules import overtraining  # noqa: F401 # pylint: disable=unused-import
+from .builtin_rules import poor_weight_initialization  # noqa: F401 # pylint: disable=unused-import
+from .builtin_rules import saturated_activation  # noqa: F401 # pylint: disable=unused-import
+from .builtin_rules import nlp_sequence_ratio  # noqa: F401 # pylint: disable=unused-import
+
+from ._collections import get_collection  # noqa: F401 # pylint: disable=unused-import
src/smdebug_rulesconfig/_collections.py (7 additions, 0 deletions)

@@ -0,0 +1,7 @@
+from __future__ import absolute_import
+
+from ._utils import _get_collection_config
+
+
+def get_collection(collection_name):
+    return _get_collection_config(collection_name)
src/smdebug_rulesconfig/_constants.py (5 additions, 0 deletions)

@@ -0,0 +1,5 @@
+from __future__ import absolute_import
+
+RULE_CONFIG_FILE = "rule_config_jsons/ruleConfigs.json"
+RULE_GROUPS_CONFIG_FILE = "rule_config_jsons/ruleGroups.json"
+COLLECTION_CONFIG_FILE = "rule_config_jsons/collections.json"
Lines changed: 30 additions & 0 deletions

@@ -0,0 +1,30 @@
+from __future__ import absolute_import
+
+from enum import Enum
+from ._utils import _get_config_for_group
+
+
+class MXNET(Enum):
+    SIMPLE = ["VanishingGradient", "LossNotDecreasing", "WeightUpdateRatio"]
+    ALL = []
+
+
+class TENSORFLOW(Enum):
+    SIMPLE = ["VanishingGradient", "LossNotDecreasing", "WeightUpdateRatio"]
+    ALL = []
+
+
+class PYTORCH(Enum):
+    SIMPLE = ["VanishingGradient", "LossNotDecreasing", "WeightUpdateRatio"]
+    ALL = []
+
+
+class XGBOOST(Enum):
+    SIMPLE = ["TreeDepth", "ClassImbalance"]
+    ALL = []
+
+
+def get_rule_groups(ruleEnum):
+    ruleEnumVal = ruleEnum.value
+    rules_config = _get_config_for_group(ruleEnumVal)
+    return rules_config
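
The file name for this module is not shown in this view, so the import path below is a hypothetical placeholder; the sketch only illustrates how the enums feed get_rule_groups:

```python
# "_rule_groups" is a hypothetical module name standing in for wherever the
# MXNET/TENSORFLOW/PYTORCH/XGBOOST enums and get_rule_groups are defined.
from smdebug_rulesconfig._rule_groups import MXNET, get_rule_groups

# MXNET.SIMPLE.value is ["VanishingGradient", "LossNotDecreasing", "WeightUpdateRatio"];
# get_rule_groups returns the matching entries from ruleConfigs.json
# (an empty list if the config file or the rule names are missing).
simple_mxnet_rules = get_rule_groups(MXNET.SIMPLE)
```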

src/smdebug_rulesconfig/_utils.py (60 additions, 0 deletions)

@@ -0,0 +1,60 @@
+from __future__ import absolute_import
+
+import json
+import os
+from ._constants import RULE_CONFIG_FILE, RULE_GROUPS_CONFIG_FILE, COLLECTION_CONFIG_FILE
+
+
+def _get_rule_config(rule_name):
+    rule_config = None
+    print(rule_name)
+    config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/" + RULE_CONFIG_FILE
+    print(config_file_path)
+
+    if os.path.exists(config_file_path):
+        with open(config_file_path) as json_data:
+            configs = json.load(json_data)
+            if rule_name in configs:
+                rule_config = configs[rule_name]
+    return rule_config
+
+
+def _get_rule_list(framework, type):
+    rules_list = []
+
+    config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/" + RULE_GROUPS_CONFIG_FILE
+
+    if os.path.exists(config_file_path):
+        with open(config_file_path) as json_data:
+            configs = json.load(json_data)
+            if framework in configs:
+                if type in configs[framework]:
+                    rules_list = configs[framework][type]
+    return rules_list
+
+
+def _get_config_for_group(rules):
+    rules_config = []
+
+    config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/" + RULE_CONFIG_FILE
+
+    if os.path.exists(config_file_path):
+        with open(config_file_path) as json_data:
+            configs = json.load(json_data)
+            for rule_name in rules:
+                if rule_name in configs:
+                    rules_config.append(configs[rule_name])
+    return rules_config
+
+
+def _get_collection_config(collection_name):
+    coll_config = None
+
+    config_file_path = os.path.dirname(os.path.abspath(__file__)) + "/" + COLLECTION_CONFIG_FILE
+
+    if os.path.exists(config_file_path):
+        with open(config_file_path) as json_data:
+            configs = json.load(json_data)
+            if collection_name in configs:
+                coll_config = configs[collection_name]
+    return coll_config
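
All four helpers share one pattern: build an absolute path next to the module, load the JSON if the file exists, and fall back to None or an empty list otherwise. A standalone sketch of that lookup pattern under made-up paths and data, just to show the silent-fallback behavior:

```python
import json
import os
import tempfile


def lookup(config_file_path, key):
    # Mirrors _get_rule_config: a missing file or missing key silently yields None.
    if os.path.exists(config_file_path):
        with open(config_file_path) as json_data:
            configs = json.load(json_data)
            if key in configs:
                return configs[key]
    return None


with tempfile.TemporaryDirectory() as tmp:
    path = os.path.join(tmp, "ruleConfigs.json")
    with open(path, "w") as f:
        json.dump({"VanishingGradient": {"example": "value"}}, f)

    assert lookup(path, "VanishingGradient") == {"example": "value"}
    assert lookup(path, "MissingRule") is None                     # key not present
    assert lookup(os.path.join(tmp, "absent.json"), "x") is None   # file not present
```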

src/smdebug_rulesconfig/_version.py (3 additions, 0 deletions)

@@ -0,0 +1,3 @@
+from __future__ import absolute_import
+
+__version__ = "0.1.2"
src/smdebug_rulesconfig/builtin_rules.py (92 additions, 0 deletions)

@@ -0,0 +1,92 @@
+from __future__ import absolute_import
+from ._utils import _get_rule_config
+
+
+def vanishing_gradient():
+    rule_config = _get_rule_config("VanishingGradient")
+    return rule_config
+
+
+def similar_across_runs():
+    rule_config = _get_rule_config("SimilarAcrossRuns")
+    return rule_config
+
+
+def weight_update_ratio():
+    rule_config = _get_rule_config("WeightUpdateRatio")
+    return rule_config
+
+
+def all_zero():
+    rule_config = _get_rule_config("AllZero")
+    return rule_config
+
+
+def exploding_tensor():
+    rule_config = _get_rule_config("ExplodingTensor")
+    return rule_config
+
+
+def unchanged_tensor():
+    rule_config = _get_rule_config("UnchangedTensor")
+    return rule_config
+
+
+def loss_not_decreasing():
+    rule_config = _get_rule_config("LossNotDecreasing")
+    return rule_config
+
+
+def check_input_images():
+    rule_config = _get_rule_config("CheckInputImages")
+    return rule_config
+
+
+def dead_relu():
+    rule_config = _get_rule_config("DeadRelu")
+    return rule_config
+
+
+def confusion():
+    rule_config = _get_rule_config("Confusion")
+    return rule_config
+
+
+def tree_depth():
+    rule_config = _get_rule_config("TreeDepth")
+    return rule_config
+
+
+def class_imbalance():
+    rule_config = _get_rule_config("ClassImbalance")
+    return rule_config
+
+
+def overfit():
+    rule_config = _get_rule_config("Overfit")
+    return rule_config
+
+
+def tensor_variance():
+    rule_config = _get_rule_config("TensorVariance")
+    return rule_config
+
+
+def overtraining():
+    rule_config = _get_rule_config("Overtraining")
+    return rule_config
+
+
+def poor_weight_initialization():
+    rule_config = _get_rule_config("PoorWeightInitialization")
+    return rule_config
+
+
+def saturated_activation():
+    rule_config = _get_rule_config("SaturatedActivation")
+    return rule_config
+
+
+def nlp_sequence_ratio():
+    rule_config = _get_rule_config("NLPSequenceRatio")
+    return rule_config
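
Each builtin rule function is a thin wrapper that fetches its entry from ruleConfigs.json by rule name, so they are interchangeable at the call site. A short sketch of direct use; the exact shape of the returned dict depends on ruleConfigs.json, which is not part of this diff:

```python
import smdebug_rulesconfig as rule_configs

# Each call returns that rule's entry from rule_config_jsons/ruleConfigs.json,
# or None if the shipped file lacks that rule name.
for rule in (rule_configs.vanishing_gradient,
             rule_configs.loss_not_decreasing,
             rule_configs.tree_depth):
    config = rule()
    print(rule.__name__, config)
```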
src/smdebug_rulesconfig/rule_config_jsons/collections.json (42 additions, 0 deletions)

@@ -0,0 +1,42 @@
+{
+  "weights": {
+    "CollectionName": "weights",
+    "CollectionParameters": {
+      "save_interval": "500"
+    }
+  },
+  "gradients": {
+    "CollectionName": "gradients",
+    "CollectionParameters": {
+      "save_interval": "500"
+    }
+  },
+  "losses": {
+    "CollectionName": "losses",
+    "CollectionParameters": {
+      "save_interval": "500"
+    }
+  },
+  "input_image": {
+    "CollectionName": "input_image",
+    "CollectionParameters": {
+      "include_regex": ".*hybridsequential0_input_0",
+      "save_interval": "500"
+    }
+  },
+  "relu_output": {
+    "include_regex": ".*relu_output",
+    "save_interval": "500"
+  },
+  "relu_input": {
+    "include_regex": ".*relu_input",
+    "save_interval": "500"
+  },
+  "input_sequence": {
+    "CollectionName": "input_sequence",
+    "CollectionParameters": {
+      "include_regex": ".*embedding0_input_0",
+      "save_interval": "500"
+    }
+  }
+}
