From 85ab21be8342264f2b816fffb216ea406d1cbfb8 Mon Sep 17 00:00:00 2001
From: eksperimental
Date: Fri, 8 Apr 2016 02:27:47 +0700
Subject: [PATCH] Formatting: add white space after comma

Standardizes comma usage by leaving a white space after each comma
wherever applicable; for example, `[1,2,3]` becomes `[1, 2, 3]`.

Note: it does not enforce this in quantifiers in regular expressions,
such as `x{1,3}`.
---
 lib/eex/test/eex_test.exs                    |   2 +-
 lib/elixir/lib/code.ex                       |   6 +-
 lib/elixir/lib/kernel/special_forms.ex       |   4 +-
 lib/elixir/src/elixir_errors.erl             |   2 +-
 lib/elixir/src/elixir_interpolation.erl      |  26 +--
 lib/elixir/src/elixir_tokenizer.erl          |   2 +-
 lib/elixir/src/elixir_utils.erl              |   6 +-
 lib/elixir/test/elixir/enum_test.exs         |   2 +-
 lib/elixir/test/elixir/file_test.exs         |  34 ++--
 lib/elixir/test/erlang/string_test.erl       |  22 +-
 lib/elixir/test/erlang/tokenizer_test.erl    | 204 +++++++++----------
 lib/ex_unit/lib/ex_unit/doc_test.ex          |   2 +-
 lib/ex_unit/test/ex_unit/assertions_test.exs |   2 +-
 lib/iex/test/iex/helpers_test.exs            |   6 +-
 lib/iex/test/test_helper.exs                 |   2 +-
 lib/logger/lib/logger/utils.ex               |  12 +-
 lib/mix/test/mix/rebar_test.exs              |   2 +-
 lib/mix/test/mix/shell_test.exs              |   4 +-
 18 files changed, 170 insertions(+), 170 deletions(-)

diff --git a/lib/eex/test/eex_test.exs b/lib/eex/test/eex_test.exs
index d741deefb64..ea955e2cd51 100644
--- a/lib/eex/test/eex_test.exs
+++ b/lib/eex/test/eex_test.exs
@@ -99,7 +99,7 @@ defmodule EExTest do
   end
 
   test "evaluates with parentheses after end in end token" do
-    assert_eval " 101 102 103 ", "<%= Enum.map([1,2,3], (fn x -> %> <%= 100 + x %> <% end) ) %>"
+    assert_eval " 101 102 103 ", "<%= Enum.map([1, 2, 3], (fn x -> %> <%= 100 + x %> <% end) ) %>"
   end
 
   test "evaluates with defined variable" do
diff --git a/lib/elixir/lib/code.ex b/lib/elixir/lib/code.ex
index cc52fb42978..b318f8354a8 100644
--- a/lib/elixir/lib/code.ex
+++ b/lib/elixir/lib/code.ex
@@ -311,7 +311,7 @@ defmodule Code do
 
   ## Examples
 
-      Code.load_file("eex_test.exs","../eex/test") |> List.first
+      Code.load_file("eex_test.exs", "../eex/test") |> List.first
       #=> {EExTest.Compiled, <<70, 79, 82, 49, ...>>}
 
   """
@@ -343,11 +343,11 @@ defmodule Code do
 
   If the code is already loaded, it returns `nil`:
 
-      Code.require_file("eex_test.exs","../eex/test") #=> nil
+      Code.require_file("eex_test.exs", "../eex/test") #=> nil
 
   If the code is not loaded yet, it returns the same as `load_file/2`:
 
-      Code.require_file("eex_test.exs","../eex/test") |> List.first
+      Code.require_file("eex_test.exs", "../eex/test") |> List.first
       #=> {EExTest.Compiled, <<70, 79, 82, 49, ...>>}
 
   """
diff --git a/lib/elixir/lib/kernel/special_forms.ex b/lib/elixir/lib/kernel/special_forms.ex
index 529780c888f..a4d7cbe4f55 100644
--- a/lib/elixir/lib/kernel/special_forms.ex
+++ b/lib/elixir/lib/kernel/special_forms.ex
@@ -1074,7 +1074,7 @@ defmodule Kernel.SpecialForms do
       defmodule Hygiene do
         defmacrop get_length do
           quote do
-            length([1,2,3])
+            length([1, 2, 3])
           end
         end
 
@@ -1617,7 +1617,7 @@ defmodule Kernel.SpecialForms do
   evaluates to a truthy value.
cond do - hd([1,2,3]) -> + hd([1, 2, 3]) -> "1 is considered as true" end #=> "1 is considered as true" diff --git a/lib/elixir/src/elixir_errors.erl b/lib/elixir/src/elixir_errors.erl index 2387cabdb12..c3ece3d2657 100644 --- a/lib/elixir/src/elixir_errors.erl +++ b/lib/elixir/src/elixir_errors.erl @@ -66,7 +66,7 @@ parse_error(Line, File, <<"syntax error before: ">>, <<"{sigil,", _Rest/binary>> true -> Content; false -> <<>> end, - Message = <<"syntax error before: sigil ~", Sigil," starting with content '", Content2/binary, "'">>, + Message = <<"syntax error before: sigil ~", Sigil, " starting with content '", Content2/binary, "'">>, do_raise(Line, File, 'Elixir.SyntaxError', Message); %% Aliases are wrapped in [''] diff --git a/lib/elixir/src/elixir_interpolation.erl b/lib/elixir/src/elixir_interpolation.erl index 071309125e7..af4e102a557 100644 --- a/lib/elixir/src/elixir_interpolation.erl +++ b/lib/elixir/src/elixir_interpolation.erl @@ -111,27 +111,27 @@ unescape_hex(<>, Map, Acc) when ?is_hex(A) -> io:format(standard_error, "warning: \\xH inside strings/sigils/chars is deprecated, please use \\xHH (byte) or \\uHHHH (codepoint) instead~n", []), append_codepoint(Rest, Map, [A], Acc, 16); -unescape_hex(<<${,A,$}, Rest/binary>>, Map, Acc) when ?is_hex(A) -> +unescape_hex(<<${, A, $}, Rest/binary>>, Map, Acc) when ?is_hex(A) -> io:format(standard_error, "warning: \\x{H*} inside strings/sigils/chars is deprecated, please use \\xHH (byte) or \\uHHHH (codepoint) instead~n", []), append_codepoint(Rest, Map, [A], Acc, 16); -unescape_hex(<<${,A,B,$}, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B) -> +unescape_hex(<<${, A, B, $}, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B) -> io:format(standard_error, "warning: \\x{H*} inside strings/sigils/chars is deprecated, please use \\xHH (byte) or \\uHHHH (codepoint) instead~n", []), append_codepoint(Rest, Map, [A, B], Acc, 16); -unescape_hex(<<${,A,B,C,$}, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B), ?is_hex(C) -> +unescape_hex(<<${, A, B, C, $}, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B), ?is_hex(C) -> io:format(standard_error, "warning: \\x{H*} inside strings/sigils/chars is deprecated, please use \\xHH (byte) or \\uHHHH (codepoint) instead~n", []), append_codepoint(Rest, Map, [A, B, C], Acc, 16); -unescape_hex(<<${,A,B,C,D,$}, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D) -> +unescape_hex(<<${, A, B, C, D, $}, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D) -> io:format(standard_error, "warning: \\x{H*} inside strings/sigils/chars is deprecated, please use \\xHH (byte) or \\uHHHH (codepoint) instead~n", []), append_codepoint(Rest, Map, [A, B, C, D], Acc, 16); -unescape_hex(<<${,A,B,C,D,E,$}, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D), ?is_hex(E) -> +unescape_hex(<<${, A, B, C, D, E, $}, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D), ?is_hex(E) -> io:format(standard_error, "warning: \\x{H*} inside strings/sigils/chars is deprecated, please use \\xHH (byte) or \\uHHHH (codepoint) instead~n", []), append_codepoint(Rest, Map, [A, B, C, D, E], Acc, 16); -unescape_hex(<<${,A,B,C,D,E,F,$}, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D), ?is_hex(E), ?is_hex(F) -> +unescape_hex(<<${, A, B, C, D, E, F, $}, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D), ?is_hex(E), ?is_hex(F) -> io:format(standard_error, "warning: \\x{H*} inside 
strings/sigils/chars is deprecated, please use \\xHH (byte) or \\uHHHH (codepoint) instead~n", []), append_codepoint(Rest, Map, [A, B, C, D, E, F], Acc, 16); @@ -141,25 +141,25 @@ unescape_hex(<<_/binary>>, _Map, _Acc) -> %% Finish deprecated sequences -unescape_unicode(<>, Map, Acc) when ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D) -> +unescape_unicode(<>, Map, Acc) when ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D) -> append_codepoint(Rest, Map, [A, B, C, D], Acc, 16); -unescape_unicode(<<${,A,$}, Rest/binary>>, Map, Acc) when ?is_hex(A) -> +unescape_unicode(<<${, A, $}, Rest/binary>>, Map, Acc) when ?is_hex(A) -> append_codepoint(Rest, Map, [A], Acc, 16); -unescape_unicode(<<${,A,B,$}, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B) -> +unescape_unicode(<<${, A, B, $}, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B) -> append_codepoint(Rest, Map, [A, B], Acc, 16); -unescape_unicode(<<${,A,B,C,$}, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B), ?is_hex(C) -> +unescape_unicode(<<${, A, B, C, $}, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B), ?is_hex(C) -> append_codepoint(Rest, Map, [A, B, C], Acc, 16); -unescape_unicode(<<${,A,B,C,D,$}, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D) -> +unescape_unicode(<<${, A, B, C, D, $}, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D) -> append_codepoint(Rest, Map, [A, B, C, D], Acc, 16); -unescape_unicode(<<${,A,B,C,D,E,$}, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D), ?is_hex(E) -> +unescape_unicode(<<${, A, B, C, D, E, $}, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D), ?is_hex(E) -> append_codepoint(Rest, Map, [A, B, C, D, E], Acc, 16); -unescape_unicode(<<${,A,B,C,D,E,F,$}, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D), ?is_hex(E), ?is_hex(F) -> +unescape_unicode(<<${, A, B, C, D, E, F, $}, Rest/binary>>, Map, Acc) when ?is_hex(A), ?is_hex(B), ?is_hex(C), ?is_hex(D), ?is_hex(E), ?is_hex(F) -> append_codepoint(Rest, Map, [A, B, C, D, E, F], Acc, 16); unescape_unicode(<<_/binary>>, _Map, _Acc) -> diff --git a/lib/elixir/src/elixir_tokenizer.erl b/lib/elixir/src/elixir_tokenizer.erl index 7d26069b569..dd1752fbfd3 100644 --- a/lib/elixir/src/elixir_tokenizer.erl +++ b/lib/elixir/src/elixir_tokenizer.erl @@ -401,7 +401,7 @@ tokenize([T|Rest], Line, Column, Scope, Tokens) when ?pipe_op(T) -> % Others -tokenize([$%,${|T], Line, Column, Scope, Tokens) -> +tokenize([$%, ${|T], Line, Column, Scope, Tokens) -> tokenize([${|T], Line, Column + 1, Scope, [{'%{}', {Line, Column, Column + 1}}|Tokens]); tokenize([$%|T], Line, Column, Scope, Tokens) -> diff --git a/lib/elixir/src/elixir_utils.erl b/lib/elixir/src/elixir_utils.erl index 9ded5ad2f5a..03a0390a1b4 100644 --- a/lib/elixir/src/elixir_utils.erl +++ b/lib/elixir/src/elixir_utils.erl @@ -30,11 +30,11 @@ get_line(Opts) when is_list(Opts) -> get_ann(Opts) when is_list(Opts) -> get_ann(Opts, [], 0). -get_ann([{generated,Gen}|T], Acc, Line) -> get_ann(T, [{generated,Gen}|Acc], Line); -get_ann([{line,Line}|T], Acc, _) -> get_ann(T, Acc, Line); +get_ann([{generated, Gen}|T], Acc, Line) -> get_ann(T, [{generated, Gen}|Acc], Line); +get_ann([{line, Line}|T], Acc, _) -> get_ann(T, Acc, Line); get_ann([_|T], Acc, Line) -> get_ann(T, Acc, Line); get_ann([], [], Line) -> Line; -get_ann([], Acc, Line) -> [{location,Line}|Acc]. +get_ann([], Acc, Line) -> [{location, Line}|Acc]. 
split_last([]) -> {[], []}; split_last(List) -> split_last(List, []). diff --git a/lib/elixir/test/elixir/enum_test.exs b/lib/elixir/test/elixir/enum_test.exs index 889ba04d6d7..626585361f5 100644 --- a/lib/elixir/test/elixir/enum_test.exs +++ b/lib/elixir/test/elixir/enum_test.exs @@ -211,7 +211,7 @@ defmodule EnumTest do assert Enum.into(%{a: 1, b: 2}, []) == [a: 1, b: 2] assert Enum.into([1, 2, 3], "numbers: ", &to_string/1) == "numbers: 123" assert Enum.into(1..3, []) == [1, 2, 3] - assert Enum.into(["H","i"], "") == "Hi" + assert Enum.into(["H", "i"], "") == "Hi" assert_raise FunctionClauseError, fn -> Enum.into([2, 3], %{}, &(&1)) end diff --git a/lib/elixir/test/elixir/file_test.exs b/lib/elixir/test/elixir/file_test.exs index 31425629c72..9fdeda8b1b3 100644 --- a/lib/elixir/test/elixir/file_test.exs +++ b/lib/elixir/test/elixir/file_test.exs @@ -25,22 +25,22 @@ defmodule FileTest do # # Renaming files # :ok -> rename file to existing file default behaviour - # {:error,:eisdir} -> rename file to existing empty dir - # {:error,:eisdir} -> rename file to existing non empty dir + # {:error, :eisdir} -> rename file to existing empty dir + # {:error, :eisdir} -> rename file to existing non empty dir # :ok -> rename file to non existing location - # {:error,:eexist} -> rename file to existing file + # {:error, :eexist} -> rename file to existing file # :ok -> rename file to itself # Renaming dirs - # {:error,:enotdir} -> rename dir to existing file - # :ok -> rename dir to non existing leaf location - # {:error,??} -> rename dir to non existing parent location - # :ok -> rename dir to itself - # :ok -> rename dir to existing empty dir default behaviour - # {:error,:eexist} -> rename dir to existing empty dir - # {:error, :einval} -> rename parent dir to existing sub dir - # {:error, :einval} -> rename parent dir to non existing sub dir - # {:error,:eexist} -> rename dir to existing non empty dir + # {:error, :enotdir} -> rename dir to existing file + # :ok -> rename dir to non existing leaf location + # {:error, ??} -> rename dir to non existing parent location + # :ok -> rename dir to itself + # :ok -> rename dir to existing empty dir default behaviour + # {:error, :eexist} -> rename dir to existing empty dir + # {:error, :einval} -> rename parent dir to existing sub dir + # {:error, :einval} -> rename parent dir to non existing sub dir + # {:error, :eexist} -> rename dir to existing non empty dir # other tests # {:error, :enoent} -> rename unknown source @@ -70,7 +70,7 @@ defmodule FileTest do try do File.mkdir(dest) - assert File.rename(src, dest) == {:error,:eisdir} + assert File.rename(src, dest) == {:error, :eisdir} assert File.exists?(src) refute File.exists?(tmp_path("tmp/file.txt")) after @@ -84,8 +84,8 @@ defmodule FileTest do dest = tmp_path("tmp") try do - File.mkdir_p(Path.join(dest,"a")) - assert File.rename(src, dest) == {:error,:eisdir} + File.mkdir_p(Path.join(dest, "a")) + assert File.rename(src, dest) == {:error, :eisdir} assert File.exists?(src) refute File.exists?(Path.join(dest, "file.txt")) after @@ -293,7 +293,7 @@ defmodule FileTest do assert File.exists?(src) refute File.exists?(tmp_path("tmp/a")) - assert File.rename(src, dest) == {:error,:eexist} + assert File.rename(src, dest) == {:error, :eexist} assert File.exists?(tmp_path("tmp/x")) assert File.exists?(src) @@ -333,7 +333,7 @@ defmodule FileTest do def tmp_fixture_path(extra) do src = fixture_path(extra) dest = tmp_path(extra) - File.cp_r(src,dest) + File.cp_r(src, dest) dest end end diff --git 
a/lib/elixir/test/erlang/string_test.erl b/lib/elixir/test/erlang/string_test.erl index 20d1d2618a5..e6d7524b734 100644 --- a/lib/elixir/test/erlang/string_test.erl +++ b/lib/elixir/test/erlang/string_test.erl @@ -28,51 +28,51 @@ extract_interpolations_with_escaped_interpolation_test() -> extract_interpolations_with_interpolation_test() -> [<<"f">>, - {{1,2,7}, [{atom, {1,4,6}, o}]}, + {{1, 2, 7}, [{atom, {1, 4, 6}, o}]}, <<"o">>] = extract_interpolations("f#{:o}o"). extract_interpolations_with_two_interpolations_test() -> [<<"f">>, - {{1,2,7}, [{atom, {1,4,6}, o}]}, {{1,7,12}, [{atom, {1,9,11}, o}]}, + {{1, 2, 7}, [{atom, {1, 4, 6}, o}]}, {{1, 7, 12}, [{atom, {1, 9, 11}, o}]}, <<"o">>] = extract_interpolations("f#{:o}#{:o}o"). extract_interpolations_with_only_two_interpolations_test() -> - [{{1,1,6}, [{atom, {1,3,5}, o}]}, - {{1,6,11}, [{atom, {1,8,10}, o}]}] = extract_interpolations("#{:o}#{:o}"). + [{{1, 1, 6}, [{atom, {1, 3, 5}, o}]}, + {{1, 6, 11}, [{atom, {1, 8, 10}, o}]}] = extract_interpolations("#{:o}#{:o}"). extract_interpolations_with_tuple_inside_interpolation_test() -> [<<"f">>, - {{1,2,8}, [{'{', {1,4,5}}, {number, {1,5,6}, 1}, {'}', {1,6,7}}]}, + {{1, 2, 8}, [{'{', {1, 4, 5}}, {number, {1, 5, 6}, 1}, {'}', {1, 6, 7}}]}, <<"o">>] = extract_interpolations("f#{{1}}o"). extract_interpolations_with_many_expressions_inside_interpolation_test() -> [<<"f">>, - {{1,2,3}, [{number, {1,4,5}, 1}, {eol, {1,5,6}}, {number, {2,1,2}, 2}]}, + {{1, 2, 3}, [{number, {1, 4, 5}, 1}, {eol, {1, 5, 6}}, {number, {2, 1, 2}, 2}]}, <<"o">>] = extract_interpolations("f#{1\n2}o"). extract_interpolations_with_right_curly_inside_string_inside_interpolation_test() -> [<<"f">>, - {{1,2,10}, [{bin_string, {1,4,9}, [<<"f}o">>]}]}, + {{1, 2, 10}, [{bin_string, {1, 4, 9}, [<<"f}o">>]}]}, <<"o">>] = extract_interpolations("f#{\"f}o\"}o"). extract_interpolations_with_left_curly_inside_string_inside_interpolation_test() -> [<<"f">>, - {{1,2,10}, [{bin_string, {1,4,9}, [<<"f{o">>]}]}, + {{1, 2, 10}, [{bin_string, {1, 4, 9}, [<<"f{o">>]}]}, <<"o">>] = extract_interpolations("f#{\"f{o\"}o"). extract_interpolations_with_escaped_quote_inside_string_inside_interpolation_test() -> [<<"f">>, - {{1,2,11}, [{bin_string, {1,4,10}, [<<"f\"o">>]}]}, + {{1, 2, 11}, [{bin_string, {1, 4, 10}, [<<"f\"o">>]}]}, <<"o">>] = extract_interpolations("f#{\"f\\\"o\"}o"). extract_interpolations_with_less_than_operation_inside_interpolation_test() -> [<<"f">>, - {{1,2,8}, [{number, {1,4,5}, 1}, {rel_op, {1,5,6}, '<'}, {number, {1,6,7}, 2}]}, + {{1, 2, 8}, [{number, {1, 4, 5}, 1}, {rel_op, {1, 5, 6}, '<'}, {number, {1, 6, 7}, 2}]}, <<"o">>] = extract_interpolations("f#{1<2}o"). extract_interpolations_with_an_escaped_character_test() -> [<<"f">>, - {{1,2,17}, [{char, {1,4,7}, 7}, {rel_op, {1,8,9}, '>'}, {char, {1,10,13}, 7}]} + {{1, 2, 17}, [{char, {1, 4, 7}, 7}, {rel_op, {1, 8, 9}, '>'}, {char, {1, 10, 13}, 7}]} ] = extract_interpolations("f#{?\\a > ?\\a }"). extract_interpolations_with_invalid_expression_inside_interpolation_test() -> diff --git a/lib/elixir/test/erlang/tokenizer_test.erl b/lib/elixir/test/erlang/tokenizer_test.erl index 5db824bdde1..a28848690e7 100644 --- a/lib/elixir/test/erlang/tokenizer_test.erl +++ b/lib/elixir/test/erlang/tokenizer_test.erl @@ -10,162 +10,162 @@ tokenize_error(String) -> Error. 
type_test() -> - [{number, {1,1,2}, 1}, {type_op, {1,3,5}, '::'}, {number, {1,6,7}, 3}] = tokenize("1 :: 3"), - [{identifier, {1,1,5}, name}, - {'.', {1,5,6}}, - {paren_identifier, {1,6,8}, '::'}, - {'(', {1,8,9}}, - {number, {1,9,10}, 3}, - {')', {1,10,11}}] = tokenize("name.::(3)"). + [{number, {1, 1, 2}, 1}, {type_op, {1, 3, 5}, '::'}, {number, {1, 6, 7}, 3}] = tokenize("1 :: 3"), + [{identifier, {1, 1, 5}, name}, + {'.', {1, 5, 6}}, + {paren_identifier, {1, 6, 8}, '::'}, + {'(', {1, 8, 9}}, + {number, {1, 9, 10}, 3}, + {')', {1, 10, 11}}] = tokenize("name.::(3)"). arithmetic_test() -> - [{number, {1,1,2}, 1}, {dual_op, {1,3,4}, '+'}, {number, {1,5,6}, 2}, {dual_op, {1,7,8}, '+'}, {number, {1,9,10}, 3}] = tokenize("1 + 2 + 3"). + [{number, {1, 1, 2}, 1}, {dual_op, {1, 3, 4}, '+'}, {number, {1, 5, 6}, 2}, {dual_op, {1, 7, 8}, '+'}, {number, {1, 9, 10}, 3}] = tokenize("1 + 2 + 3"). op_kw_test() -> - [{atom, {1,1,5}, foo}, {dual_op, {1,5,6}, '+'}, {atom, {1,6,10}, bar}] = tokenize(":foo+:bar"). + [{atom, {1, 1, 5}, foo}, {dual_op, {1, 5, 6}, '+'}, {atom, {1, 6, 10}, bar}] = tokenize(":foo+:bar"). scientific_test() -> - [{number, {1,1,7}, 0.1}] = tokenize("1.0e-1"). + [{number, {1, 1, 7}, 0.1}] = tokenize("1.0e-1"). hex_bin_octal_test() -> - [{number, {1,1,5}, 255}] = tokenize("0xFF"), - [{number, {1,1,5}, 255}] = tokenize("0xF_F"), - [{number, {1,1,5}, 63}] = tokenize("0o77"), - [{number, {1,1,5}, 63}] = tokenize("0o7_7"), - [{number, {1,1,5}, 3}] = tokenize("0b11"), - [{number, {1,1,5}, 3}] = tokenize("0b1_1"). + [{number, {1, 1, 5}, 255}] = tokenize("0xFF"), + [{number, {1, 1, 5}, 255}] = tokenize("0xF_F"), + [{number, {1, 1, 5}, 63}] = tokenize("0o77"), + [{number, {1, 1, 5}, 63}] = tokenize("0o7_7"), + [{number, {1, 1, 5}, 3}] = tokenize("0b11"), + [{number, {1, 1, 5}, 3}] = tokenize("0b1_1"). unquoted_atom_test() -> - [{atom, {1,1,3}, '+'}] = tokenize(":+"), - [{atom, {1,1,3}, '-'}] = tokenize(":-"), - [{atom, {1,1,3}, '*'}] = tokenize(":*"), - [{atom, {1,1,3}, '/'}] = tokenize(":/"), - [{atom, {1,1,3}, '='}] = tokenize(":="), - [{atom, {1,1,4}, '&&'}] = tokenize(":&&"). + [{atom, {1, 1, 3}, '+'}] = tokenize(":+"), + [{atom, {1, 1, 3}, '-'}] = tokenize(":-"), + [{atom, {1, 1, 3}, '*'}] = tokenize(":*"), + [{atom, {1, 1, 3}, '/'}] = tokenize(":/"), + [{atom, {1, 1, 3}, '='}] = tokenize(":="), + [{atom, {1, 1, 4}, '&&'}] = tokenize(":&&"). quoted_atom_test() -> - [{atom_unsafe, {1,1,11}, [<<"foo bar">>]}] = tokenize(":\"foo bar\""). + [{atom_unsafe, {1, 1, 11}, [<<"foo bar">>]}] = tokenize(":\"foo bar\""). oversized_atom_test() -> OversizedAtom = [$:|string:copies("a", 256)], {1, "atom length must be less than system limit", ":"} = tokenize_error(OversizedAtom). op_atom_test() -> - [{atom, {1,1,6}, f0_1}] = tokenize(":f0_1"). + [{atom, {1, 1, 6}, f0_1}] = tokenize(":f0_1"). kw_test() -> - [{kw_identifier, {1,1,4}, do}] = tokenize("do: "), - [{kw_identifier, {1,1,4}, a@}] = tokenize("a@: "), - [{kw_identifier, {1,1,4}, 'A@'}] = tokenize("A@: "), - [{kw_identifier, {1,1,5}, a@b}] = tokenize("a@b: "), - [{kw_identifier, {1,1,5}, 'A@!'}] = tokenize("A@!: "), - [{kw_identifier, {1,1,5}, 'a@!'}] = tokenize("a@!: "), - [{kw_identifier_unsafe, {1,1,10}, [<<"foo bar">>]}] = tokenize("\"foo bar\": "). 
+ [{kw_identifier, {1, 1, 4}, do}] = tokenize("do: "), + [{kw_identifier, {1, 1, 4}, a@}] = tokenize("a@: "), + [{kw_identifier, {1, 1, 4}, 'A@'}] = tokenize("A@: "), + [{kw_identifier, {1, 1, 5}, a@b}] = tokenize("a@b: "), + [{kw_identifier, {1, 1, 5}, 'A@!'}] = tokenize("A@!: "), + [{kw_identifier, {1, 1, 5}, 'a@!'}] = tokenize("a@!: "), + [{kw_identifier_unsafe, {1, 1, 10}, [<<"foo bar">>]}] = tokenize("\"foo bar\": "). integer_test() -> - [{number, {1,1,4}, 123}] = tokenize("123"), - [{number, {1,1,4}, 123}, {';', {1,4,5}}] = tokenize("123;"), - [{eol, {1,1,2}}, {number, {3,1,4}, 123}] = tokenize("\n\n123"), - [{number, {1,3,6}, 123}, {number, {1,8,11}, 234}] = tokenize(" 123 234 "). + [{number, {1, 1, 4}, 123}] = tokenize("123"), + [{number, {1, 1, 4}, 123}, {';', {1, 4, 5}}] = tokenize("123;"), + [{eol, {1, 1, 2}}, {number, {3, 1, 4}, 123}] = tokenize("\n\n123"), + [{number, {1, 3, 6}, 123}, {number, {1, 8, 11}, 234}] = tokenize(" 123 234 "). float_test() -> - [{number, {1,1,5}, 12.3}] = tokenize("12.3"), - [{number, {1,1,5}, 12.3},{';', {1,5,6}}] = tokenize("12.3;"), - [{eol, {1,1,2}}, {number, {3,1,5}, 12.3}] = tokenize("\n\n12.3"), - [{number, {1,3,7}, 12.3}, {number, {1,9,13}, 23.4}] = tokenize(" 12.3 23.4 "). + [{number, {1, 1, 5}, 12.3}] = tokenize("12.3"), + [{number, {1, 1, 5}, 12.3}, {';', {1, 5, 6}}] = tokenize("12.3;"), + [{eol, {1, 1, 2}}, {number, {3, 1, 5}, 12.3}] = tokenize("\n\n12.3"), + [{number, {1, 3, 7}, 12.3}, {number, {1, 9, 13}, 23.4}] = tokenize(" 12.3 23.4 "). comments_test() -> - [{number, {1,1,2}, 1},{eol, {1,3,4}},{number,{2,1,2},2}] = tokenize("1 # Comment\n2"). + [{number, {1, 1, 2}, 1}, {eol, {1, 3, 4}}, {number, {2, 1, 2}, 2}] = tokenize("1 # Comment\n2"). identifier_test() -> - [{identifier,{1,1,4},abc}] = tokenize("abc "), - [{identifier,{1,1,5},'abc?'}] = tokenize("abc?"), - [{identifier,{1,1,5},'abc!'}] = tokenize("abc!"), - [{identifier,{1,1,5},'a0c!'}] = tokenize("a0c!"), - [{paren_identifier,{1,1,4},'a0c'},{'(',{1,4,5}},{')',{1,5,6}}] = tokenize("a0c()"), - [{paren_identifier,{1,1,5},'a0c!'},{'(',{1,5,6}},{')',{1,6,7}}] = tokenize("a0c!()"). + [{identifier, {1, 1, 4}, abc}] = tokenize("abc "), + [{identifier, {1, 1, 5}, 'abc?'}] = tokenize("abc?"), + [{identifier, {1, 1, 5}, 'abc!'}] = tokenize("abc!"), + [{identifier, {1, 1, 5}, 'a0c!'}] = tokenize("a0c!"), + [{paren_identifier, {1, 1, 4}, 'a0c'}, {'(', {1, 4, 5}}, {')', {1, 5, 6}}] = tokenize("a0c()"), + [{paren_identifier, {1, 1, 5}, 'a0c!'}, {'(', {1, 5, 6}}, {')', {1, 6, 7}}] = tokenize("a0c!()"). module_macro_test() -> - [{identifier, {1,1,11}, '__MODULE__'}] = tokenize("__MODULE__"). + [{identifier, {1, 1, 11}, '__MODULE__'}] = tokenize("__MODULE__"). triple_dot_test() -> - [{identifier, {1,1,4}, '...'}] = tokenize("..."), - [{'.', {1,1,2}}, {identifier, {1,3,5}, '..'}] = tokenize(". .."). + [{identifier, {1, 1, 4}, '...'}] = tokenize("..."), + [{'.', {1, 1, 2}}, {identifier, {1, 3, 5}, '..'}] = tokenize(". .."). dot_test() -> - [{identifier, {1,1,4}, foo}, - {'.', {1,4,5}}, - {identifier, {1,5,8}, bar}, - {'.', {1,8,9}}, - {identifier, {1,9,12}, baz}] = tokenize("foo.bar.baz"). + [{identifier, {1, 1, 4}, foo}, + {'.', {1, 4, 5}}, + {identifier, {1, 5, 8}, bar}, + {'.', {1, 8, 9}}, + {identifier, {1, 9, 12}, baz}] = tokenize("foo.bar.baz"). dot_keyword_test() -> - [{identifier, {1,1,4}, foo}, - {'.', {1,4,5}}, - {identifier, {1,5,7}, do}] = tokenize("foo.do"). + [{identifier, {1, 1, 4}, foo}, + {'.', {1, 4, 5}}, + {identifier, {1, 5, 7}, do}] = tokenize("foo.do"). 
newline_test() -> - [{identifier, {1,1,4}, foo}, - {'.', {2,1,2}}, - {identifier, {2,2,5}, bar}] = tokenize("foo\n.bar"), - [{number, {1,1,2}, 1}, - {two_op, {2,1,3}, '++'}, - {number, {2,3,4}, 2}] = tokenize("1\n++2"). + [{identifier, {1, 1, 4}, foo}, + {'.', {2, 1, 2}}, + {identifier, {2, 2, 5}, bar}] = tokenize("foo\n.bar"), + [{number, {1, 1, 2}, 1}, + {two_op, {2, 1, 3}, '++'}, + {number, {2, 3, 4}, 2}] = tokenize("1\n++2"). dot_newline_operator_test() -> - [{identifier,{1,1,4},foo}, - {'.',{2,4,5}}, - {identifier,{2,1,2},'+'}, - {number,{2,2,3},1}] = tokenize("foo.\n+1"), - [{identifier,{1,1,4},foo}, - {'.',{2,4,5}}, - {identifier,{2,1,2},'+'}, - {number,{2,2,3},1}] = tokenize("foo.#bar\n+1"). + [{identifier, {1, 1, 4}, foo}, + {'.', {2, 4, 5}}, + {identifier, {2, 1, 2}, '+'}, + {number, {2, 2, 3}, 1}] = tokenize("foo.\n+1"), + [{identifier, {1, 1, 4}, foo}, + {'.', {2, 4, 5}}, + {identifier, {2, 1, 2}, '+'}, + {number, {2, 2, 3}, 1}] = tokenize("foo.#bar\n+1"). aliases_test() -> - [{'aliases', {1,1,4}, ['Foo']}] = tokenize("Foo"), - [{'aliases', {1,1,4}, ['Foo']}, - {'.', {1,4,5}}, - {'aliases', {1,5,8}, ['Bar']}, - {'.', {1,8,9}}, - {'aliases', {1,9,12}, ['Baz']}] = tokenize("Foo.Bar.Baz"). + [{'aliases', {1, 1, 4}, ['Foo']}] = tokenize("Foo"), + [{'aliases', {1, 1, 4}, ['Foo']}, + {'.', {1, 4, 5}}, + {'aliases', {1, 5, 8}, ['Bar']}, + {'.', {1, 8, 9}}, + {'aliases', {1, 9, 12}, ['Baz']}] = tokenize("Foo.Bar.Baz"). string_test() -> - [{bin_string, {1,1,6}, [<<"foo">>]}] = tokenize("\"foo\""), - [{bin_string, {1,1,6}, [<<"f\"">>]}] = tokenize("\"f\\\"\""), - [{list_string, {1,1,6}, [<<"foo">>]}] = tokenize("'foo'"). + [{bin_string, {1, 1, 6}, [<<"foo">>]}] = tokenize("\"foo\""), + [{bin_string, {1, 1, 6}, [<<"f\"">>]}] = tokenize("\"f\\\"\""), + [{list_string, {1, 1, 6}, [<<"foo">>]}] = tokenize("'foo'"). empty_string_test() -> - [{bin_string, {1,1,3}, [<<>>]}] = tokenize("\"\""), - [{list_string, {1,1,3}, [<<>>]}] = tokenize("''"). + [{bin_string, {1, 1, 3}, [<<>>]}] = tokenize("\"\""), + [{list_string, {1, 1, 3}, [<<>>]}] = tokenize("''"). addadd_test() -> - [{identifier, {1,1,2}, x}, {two_op, {1,3,5}, '++'}, {identifier, {1,6,7}, y}] = tokenize("x ++ y"). + [{identifier, {1, 1, 2}, x}, {two_op, {1, 3, 5}, '++'}, {identifier, {1, 6, 7}, y}] = tokenize("x ++ y"). space_test() -> - [{op_identifier, {1,1,4}, foo}, {dual_op, {1,5,6}, '-'}, {number, {1,6,7}, 2}] = tokenize("foo -2"), - [{op_identifier, {1,1,4}, foo}, {dual_op, {1,6,7}, '-'}, {number, {1,7,8}, 2}] = tokenize("foo -2"). + [{op_identifier, {1, 1, 4}, foo}, {dual_op, {1, 5, 6}, '-'}, {number, {1, 6, 7}, 2}] = tokenize("foo -2"), + [{op_identifier, {1, 1, 4}, foo}, {dual_op, {1, 6, 7}, '-'}, {number, {1, 7, 8}, 2}] = tokenize("foo -2"). chars_test() -> - [{char, {1,1,3}, 97}] = tokenize("?a"), - [{char, {1,1,3}, 99}] = tokenize("?c"), - [{char, {1,1,4}, 0}] = tokenize("?\\0"), - [{char, {1,1,4}, 7}] = tokenize("?\\a"), - [{char, {1,1,4}, 10}] = tokenize("?\\n"), - [{char, {1,1,4}, 92}] = tokenize("?\\\\"). + [{char, {1, 1, 3}, 97}] = tokenize("?a"), + [{char, {1, 1, 3}, 99}] = tokenize("?c"), + [{char, {1, 1, 4}, 0}] = tokenize("?\\0"), + [{char, {1, 1, 4}, 7}] = tokenize("?\\a"), + [{char, {1, 1, 4}, 10}] = tokenize("?\\n"), + [{char, {1, 1, 4}, 92}] = tokenize("?\\\\"). 
interpolation_test() -> - [{bin_string, {1,1,9}, [<<"f">>, - {{1,3,8}, [{identifier, {1,5,7}, oo}]}]}, - {two_op, {1,10,12}, '<>'}, {bin_string, {1,13,15}, + [{bin_string, {1, 1, 9}, [<<"f">>, + {{1, 3, 8}, [{identifier, {1, 5, 7}, oo}]}]}, + {two_op, {1, 10, 12}, '<>'}, {bin_string, {1, 13, 15}, [<<>>]}] = tokenize("\"f#{oo}\" <> \"\""). capture_test() -> - [{capture_op, {1,1,2}, '&'}, - {identifier, {1,2,4}, '||'}, - {mult_op, {1,4,5}, '/'}, - {number, {1,5,6}, 2}] = tokenize("&||/2"), - [{capture_op, {1,1,2}, '&'}, - {identifier, {1,2,4}, 'or'}, - {mult_op, {1,4,5}, '/'}, - {number, {1,5,6}, 2}] = tokenize("&or/2"). + [{capture_op, {1, 1, 2}, '&'}, + {identifier, {1, 2, 4}, '||'}, + {mult_op, {1, 4, 5}, '/'}, + {number, {1, 5, 6}, 2}] = tokenize("&||/2"), + [{capture_op, {1, 1, 2}, '&'}, + {identifier, {1, 2, 4}, 'or'}, + {mult_op, {1, 4, 5}, '/'}, + {number, {1, 5, 6}, 2}] = tokenize("&or/2"). diff --git a/lib/ex_unit/lib/ex_unit/doc_test.ex b/lib/ex_unit/lib/ex_unit/doc_test.ex index 8ac6d70923c..8c68c65e7de 100644 --- a/lib/ex_unit/lib/ex_unit/doc_test.ex +++ b/lib/ex_unit/lib/ex_unit/doc_test.ex @@ -553,7 +553,7 @@ defmodule ExUnit.DocTest do end # Encountered an empty line, store pending test - defp extract_tests([{"",_}|lines], expr_acc, expected_acc, [test=%{exprs: exprs}|t], _) do + defp extract_tests([{"", _}|lines], expr_acc, expected_acc, [test=%{exprs: exprs}|t], _) do test = %{test | exprs: [{expr_acc, {:test, expected_acc}} | exprs]} extract_tests(lines, "", "", [test|t], true) end diff --git a/lib/ex_unit/test/ex_unit/assertions_test.exs b/lib/ex_unit/test/ex_unit/assertions_test.exs index 53d03da58bb..51b4663899f 100644 --- a/lib/ex_unit/test/ex_unit/assertions_test.exs +++ b/lib/ex_unit/test/ex_unit/assertions_test.exs @@ -427,7 +427,7 @@ defmodule ExUnit.AssertionsTest do rescue ExUnit.AssertionError -> stacktrace = System.stacktrace - [{Not.Defined, :function, [1,2,3], _}|_] = stacktrace + [{Not.Defined, :function, [1, 2, 3], _}|_] = stacktrace end test "assert raise with erlang error" do diff --git a/lib/iex/test/iex/helpers_test.exs b/lib/iex/test/iex/helpers_test.exs index 257180bbffb..b5d01512a4f 100644 --- a/lib/iex/test/iex/helpers_test.exs +++ b/lib/iex/test/iex/helpers_test.exs @@ -436,10 +436,10 @@ defmodule IEx.HelpersTest do end test "pid/3 helper" do - assert "#PID<0.32767.3276>" == capture_iex("pid(0,32767,3276)") - assert "#PID<0.5.6>" == capture_iex("pid(0,5,6)") + assert "#PID<0.32767.3276>" == capture_iex("pid(0, 32767, 3276)") + assert "#PID<0.5.6>" == capture_iex("pid(0, 5, 6)") assert "** (FunctionClauseError) no function clause matching in IEx.Helpers.pid/3" <> _ = - capture_iex("pid(0,6,-6)") + capture_iex("pid(0, 6, -6)") end test "i helper" do diff --git a/lib/iex/test/test_helper.exs b/lib/iex/test/test_helper.exs index 8e62791890f..84df2e65215 100644 --- a/lib/iex/test/test_helper.exs +++ b/lib/iex/test/test_helper.exs @@ -38,7 +38,7 @@ defmodule IEx.Case do setup do on_exit fn -> env = @iex_env - Enum.each(env, fn {k,_} -> Application.delete_env(:iex, k) end) + Enum.each(env, fn {k, _} -> Application.delete_env(:iex, k) end) IEx.configure(env) end :ok diff --git a/lib/logger/lib/logger/utils.ex b/lib/logger/lib/logger/utils.ex index 42ae9727662..29b826b3751 100644 --- a/lib/logger/lib/logger/utils.ex +++ b/lib/logger/lib/logger/utils.ex @@ -141,11 +141,11 @@ defmodule Logger.Utils do ## pad char - defp collect_cc(:pad_char, [?.,?*|t], [arg|args], used_format, used_args, opts), - do: collect_cc(:encoding, t, args, [?*,?.|used_format], 
[arg|used_args], opts) + defp collect_cc(:pad_char, [?., ?*|t], [arg|args], used_format, used_args, opts), + do: collect_cc(:encoding, t, args, [?*, ?.|used_format], [arg|used_args], opts) - defp collect_cc(:pad_char, [?.,p|t], args, used_format, used_args, opts), - do: collect_cc(:encoding, t, args, [p,?.|used_format], used_args, opts) + defp collect_cc(:pad_char, [?., p|t], args, used_format, used_args, opts), + do: collect_cc(:encoding, t, args, [p, ?.|used_format], used_args, opts) defp collect_cc(:pad_char, t, args, used_format, used_args, opts), do: collect_cc(:encoding, t, args, used_format, used_args, opts) @@ -180,8 +180,8 @@ defmodule Logger.Utils do {t, args, [h|used_format], used_args} end - defp collect_cc(?x, [a,prefix|args], used), do: {args, [prefix, a|used]} - defp collect_cc(?X, [a,prefix|args], used), do: {args, [prefix, a|used]} + defp collect_cc(?x, [a, prefix|args], used), do: {args, [prefix, a|used]} + defp collect_cc(?X, [a, prefix|args], used), do: {args, [prefix, a|used]} defp collect_cc(?s, [a|args], used), do: {args, [a|used]} defp collect_cc(?e, [a|args], used), do: {args, [a|used]} defp collect_cc(?f, [a|args], used), do: {args, [a|used]} diff --git a/lib/mix/test/mix/rebar_test.exs b/lib/mix/test/mix/rebar_test.exs index 72ee3131cad..a73819b3983 100644 --- a/lib/mix/test/mix/rebar_test.exs +++ b/lib/mix/test/mix/rebar_test.exs @@ -47,7 +47,7 @@ defmodule Mix.RebarTest do end test "parse rebar dependencies" do - config = [deps: [{:git_rebar, '~> 1.0',}]] + config = [deps: [{:git_rebar, '~> 1.0'}]] assert [{:git_rebar, "~> 1.0"}] == Mix.Rebar.deps(:foo, config, []) diff --git a/lib/mix/test/mix/shell_test.exs b/lib/mix/test/mix/shell_test.exs index ba4c5984cf1..6226e887aba 100644 --- a/lib/mix/test/mix/shell_test.exs +++ b/lib/mix/test/mix/shell_test.exs @@ -4,11 +4,11 @@ defmodule Mix.ShellTest do use MixTest.Case defp capture_io(somefunc) do - ExUnit.CaptureIO.capture_io(somefunc) |> String.replace("\r\n","\n") + ExUnit.CaptureIO.capture_io(somefunc) |> String.replace("\r\n", "\n") end defp capture_io(from, somefunc) do - ExUnit.CaptureIO.capture_io(from, somefunc) |> String.replace("\r\n","\n") + ExUnit.CaptureIO.capture_io(from, somefunc) |> String.replace("\r\n", "\n") end setup do