
Commit b911d8d

Jingjing Tang authored and committed
revert test cases for msa and hrr
1 parent 003ac0c commit b911d8d

File tree

1 file changed: +34 −34 lines changed


jhu/tests/test_geo.py

Lines changed: 34 additions & 34 deletions
@@ -152,30 +152,30 @@ def test_hrr(self):
             }
         )
 
-        df_mega = pd.DataFrame(
-            {
-                "fips": ["90013", "90001"],
-                "timestamp": ["2020-02-15", "2020-02-15"],
-                "new_counts": [8, 2],
-                "cumulative_counts": [80, 12],
-                "population": [np.nan, np.nan],
-            }
-        )
+        # df_mega = pd.DataFrame(
+        #     {
+        #         "fips": ["90013", "90001"],
+        #         "timestamp": ["2020-02-15", "2020-02-15"],
+        #         "new_counts": [8, 2],
+        #         "cumulative_counts": [80, 12],
+        #         "population": [np.nan, np.nan],
+        #     }
+        # )
 
-        df = df.append(df_mega)
+        # df = df.append(df_mega)
 
         new_df = geo_map(df, "hrr", MAP_DF, 'new_counts')
 
         exp_incidence = np.array([13, 27]) / np.array([25, 2500]) * 100000
         exp_cprop = np.array([60, 165]) / np.array([25, 2500]) * 100000
 
-        assert (new_df["geo_id"].values == [110, 147]).all()
-        assert (new_df["timestamp"].values == ["2020-02-15", "2020-02-15"]).all()
-        assert new_df["new_counts"].values == pytest.approx([13.0, 27.0])
-        assert new_df["cumulative_counts"].values == pytest.approx([60, 165])
-        assert new_df["population"].values == pytest.approx([25, 2500])
-        assert new_df["incidence"].values == pytest.approx(exp_incidence)
-        assert new_df["cumulative_prop"].values == pytest.approx(exp_cprop)
+        assert (new_df["geo_id"].values == [110, 147]).all()
+        assert (new_df["timestamp"].values == ["2020-02-15", "2020-02-15"]).all()
+        assert new_df["new_counts"].values == pytest.approx([13.0, 27.0])
+        assert new_df["cumulative_counts"].values == pytest.approx([60, 165])
+        assert new_df["population"].values == pytest.approx([25, 2500])
+        assert new_df["incidence"].values == pytest.approx(exp_incidence)
+        assert new_df["cumulative_prop"].values == pytest.approx(exp_cprop)
 
     def test_msa(self):
 
@@ -189,27 +189,27 @@ def test_msa(self):
             }
         )
 
-        df_mega = pd.DataFrame(
-            {
-                "fips": ["90013", "90001"],
-                "timestamp": ["2020-02-15", "2020-02-15"],
-                "new_counts": [8, 2],
-                "cumulative_counts": [80, 12],
-                "population": [np.nan, np.nan],
-            }
-        )
+        # df_mega = pd.DataFrame(
+        #     {
+        #         "fips": ["90013", "90001"],
+        #         "timestamp": ["2020-02-15", "2020-02-15"],
+        #         "new_counts": [8, 2],
+        #         "cumulative_counts": [80, 12],
+        #         "population": [np.nan, np.nan],
+        #     }
+        # )
 
-        df = df.append(df_mega)
+        # df = df.append(df_mega)
 
         new_df = geo_map(df, "msa", MAP_DF, 'new_counts')
 
         exp_incidence = np.array([2, 13]) / np.array([300, 25]) * 100000
         exp_cprop = np.array([45, 60]) / np.array([300, 25]) * 100000
 
-        assert (new_df["geo_id"].values == [31420, 49340]).all()
-        assert (new_df["timestamp"].values == ["2020-02-15", "2020-02-15"]).all()
-        assert new_df["new_counts"].values == pytest.approx([2.0, 13.0])
-        assert new_df["cumulative_counts"].values == pytest.approx([45, 60])
-        assert new_df["population"].values == pytest.approx([300, 25])
-        assert new_df["incidence"].values == pytest.approx(exp_incidence)
-        assert new_df["cumulative_prop"].values == pytest.approx(exp_cprop)
+        assert (new_df["geo_id"].values == [31420, 49340]).all()
+        assert (new_df["timestamp"].values == ["2020-02-15", "2020-02-15"]).all()
+        assert new_df["new_counts"].values == pytest.approx([2.0, 13.0])
+        assert new_df["cumulative_counts"].values == pytest.approx([45, 60])
+        assert new_df["population"].values == pytest.approx([300, 25])
+        assert new_df["incidence"].values == pytest.approx(exp_incidence)
+        assert new_df["cumulative_prop"].values == pytest.approx(exp_cprop)
