diff --git a/pandas/formats/style.py b/pandas/formats/style.py
index 472fd958d35eb..dee9540dde0f9 100644
--- a/pandas/formats/style.py
+++ b/pandas/formats/style.py
@@ -155,7 +155,7 @@ def __init__(self, data, precision=None, table_styles=None, uuid=None,
 
         def default_display_func(x):
             if is_float(x):
-                return '{:>.{precision}g}'.format(x, precision=self.precision)
+                return '{:>.{precision}f}'.format(x, precision=self.precision)
             else:
                 return x
 
diff --git a/pandas/tests/formats/test_style.py b/pandas/tests/formats/test_style.py
index 9a34f545bd119..a1054834edd1a 100644
--- a/pandas/tests/formats/test_style.py
+++ b/pandas/tests/formats/test_style.py
@@ -507,37 +507,37 @@ def test_display_subset(self):
         ctx = df.style.format({"a": "{:0.1f}", "b": "{0:.2%}"},
                               subset=pd.IndexSlice[0, :])._translate()
         expected = '0.1'
+        precise_11 = '1.123400'  # with the default precision
         self.assertEqual(ctx['body'][0][1]['display_value'], expected)
-        self.assertEqual(ctx['body'][1][1]['display_value'], '1.1234')
+        self.assertEqual(ctx['body'][1][1]['display_value'], precise_11)
         self.assertEqual(ctx['body'][0][2]['display_value'], '12.34%')
 
-        raw_11 = '1.1234'
         ctx = df.style.format("{:0.1f}",
                               subset=pd.IndexSlice[0, :])._translate()
         self.assertEqual(ctx['body'][0][1]['display_value'], expected)
-        self.assertEqual(ctx['body'][1][1]['display_value'], raw_11)
+        self.assertEqual(ctx['body'][1][1]['display_value'], precise_11)
 
         ctx = df.style.format("{:0.1f}",
                               subset=pd.IndexSlice[0, :])._translate()
         self.assertEqual(ctx['body'][0][1]['display_value'], expected)
-        self.assertEqual(ctx['body'][1][1]['display_value'], raw_11)
+        self.assertEqual(ctx['body'][1][1]['display_value'], precise_11)
 
         ctx = df.style.format("{:0.1f}",
                               subset=pd.IndexSlice['a'])._translate()
         self.assertEqual(ctx['body'][0][1]['display_value'], expected)
-        self.assertEqual(ctx['body'][0][2]['display_value'], '0.1234')
+        self.assertEqual(ctx['body'][0][2]['display_value'], '0.123400')
 
         ctx = df.style.format("{:0.1f}",
                               subset=pd.IndexSlice[0, 'a'])._translate()
         self.assertEqual(ctx['body'][0][1]['display_value'], expected)
-        self.assertEqual(ctx['body'][1][1]['display_value'], raw_11)
+        self.assertEqual(ctx['body'][1][1]['display_value'], precise_11)
 
         ctx = df.style.format("{:0.1f}",
                               subset=pd.IndexSlice[[0, 1], ['a']])._translate()
         self.assertEqual(ctx['body'][0][1]['display_value'], expected)
         self.assertEqual(ctx['body'][1][1]['display_value'], '1.1')
-        self.assertEqual(ctx['body'][0][2]['display_value'], '0.1234')
-        self.assertEqual(ctx['body'][1][2]['display_value'], '1.1234')
+        self.assertEqual(ctx['body'][0][2]['display_value'], '0.123400')
+        self.assertEqual(ctx['body'][1][2]['display_value'], '1.123400')
 
     def test_display_dict(self):
         df = pd.DataFrame([[.1234, .1234], [1.1234, 1.1234]],