Erlang27 #4320

Closed
wants to merge 18 commits into from
253 changes: 128 additions & 125 deletions .circleci/template.yml

Large diffs are not rendered by default.

6 changes: 3 additions & 3 deletions .github/workflows/ci.yml
@@ -27,7 +27,7 @@ jobs:
## the full list of supported (prebuilt) OTP versions for ubuntu-22.04 runners
## can be found here:
## https://builds.hex.pm/builds/otp/ubuntu-22.04/builds.txt
otp: [ '25.3.2.9', '26.2.2' ]
otp: [ '26.2.2', '27.0' ]
runs-on: ubuntu-22.04
env:
PRESET: 'small_tests'
@@ -66,7 +66,7 @@ jobs:
matrix:
preset: [internal_mnesia, pgsql_mnesia, mysql_redis, odbc_mssql_mnesia,
ldap_mnesia, elasticsearch_and_cassandra_mnesia]
otp: [ '26.1.2' ]
otp: [ '27.0' ]
include:
- test-spec: "default.spec"
- preset: elasticsearch_and_cassandra_mnesia
@@ -101,7 +101,7 @@ jobs:
fail-fast: false
matrix:
preset: [pgsql_mnesia, mysql_redis, odbc_mssql_mnesia]
otp: [ '26.1.2' ]
otp: [ '27.0' ]
test-spec: ["dynamic_domains.spec"]
include:
- preset: pgsql_mnesia
12 changes: 9 additions & 3 deletions big_tests/rebar.config
@@ -16,11 +16,17 @@
{proper, "1.4.0"},
{gun, "2.1.0"},
{fusco, "0.1.1"},
{escalus, {git, "git@github.com:esl/escalus.git", {branch, "ws-stream-management"}}},
% {escalus, "4.2.12"},
{escalus, {git, "https://github.com/esl/escalus.git", {branch, "ws-stream-management"}}},
{fast_tls, "1.1.21"},
{cowboy, "2.12.0"},
{csv, "3.0.3", {pkg, csve}},
{amqp_client, "3.12.14"},
{amqp_client, "3.13.0-rc.2"},
{rabbit_common, "3.13.4"},
{esip, "1.0.52"},
{jid, "2.1.0", {pkg, mongoose_jid}}
]}.

{overrides, [
%% Disable warnings_as_errors
{override, worker_pool, [{erl_opts, []}]}
]}.
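
Note on the new overrides block above: in rebar3, an {override, App, Opts} entry replaces the listed keys in a dependency's own rebar.config, so setting erl_opts to [] clears worker_pool's warnings_as_errors flag (presumably to keep the build green under the stricter OTP 27 compiler warnings). A minimal, hypothetical rebar.config sketch of the mechanism — only the worker_pool override corresponds to this PR:

%% rebar.config (illustrative sketch, not part of the PR)
{deps, [
    {worker_pool, "6.2.0"}
]}.

{overrides, [
    %% {override, App, Opts} replaces the listed keys in App's own rebar.config;
    %% an empty erl_opts list drops worker_pool's warnings_as_errors flag.
    {override, worker_pool, [{erl_opts, []}]}
]}.

Without such an override, any new warning raised in worker_pool's sources would abort the dependency build while warnings_as_errors is in effect.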
24 changes: 12 additions & 12 deletions big_tests/rebar.lock
@@ -1,5 +1,5 @@
{"1.2.0",
[{<<"amqp_client">>,{pkg,<<"amqp_client">>,<<"3.12.14">>},0},
[{<<"amqp_client">>,{pkg,<<"amqp_client">>,<<"3.13.0-rc.2">>},0},
{<<"base16">>,{pkg,<<"base16">>,<<"2.0.1">>},0},
{<<"bbmustache">>,{pkg,<<"bbmustache">>,<<"1.12.2">>},0},
{<<"cowboy">>,{pkg,<<"cowboy">>,<<"2.12.0">>},0},
@@ -9,14 +9,14 @@
1},
{<<"csv">>,{pkg,<<"csve">>,<<"3.0.3">>},0},
{<<"escalus">>,
{git,"git@github.com:esl/escalus.git",
{git,"https://github.com/esl/escalus.git",
{ref,"f99d6efe1028f645d8a17c9ab3fb8e4adab69a76"}},
0},
{<<"esip">>,{pkg,<<"esip">>,<<"1.0.52">>},0},
{<<"exml">>,{pkg,<<"hexml">>,<<"3.4.1">>},0},
{<<"fast_pbkdf2">>,{pkg,<<"fast_pbkdf2">>,<<"1.0.5">>},2},
{<<"fast_scram">>,{pkg,<<"fast_scram">>,<<"0.6.0">>},1},
{<<"fast_tls">>,{pkg,<<"fast_tls">>,<<"1.1.19">>},1},
{<<"fast_tls">>,{pkg,<<"fast_tls">>,<<"1.1.21">>},0},
{<<"fusco">>,{pkg,<<"fusco">>,<<"0.1.1">>},0},
{<<"goldrush">>,{pkg,<<"goldrush">>,<<"0.1.9">>},1},
{<<"gun">>,{pkg,<<"gun">>,<<"2.1.0">>},0},
@@ -27,17 +27,17 @@
{<<"p1_utils">>,{pkg,<<"p1_utils">>,<<"1.0.25">>},1},
{<<"proper">>,{pkg,<<"proper">>,<<"1.4.0">>},0},
{<<"quickrand">>,{pkg,<<"quickrand">>,<<"2.0.7">>},2},
{<<"rabbit_common">>,{pkg,<<"rabbit_common">>,<<"3.12.14">>},1},
{<<"rabbit_common">>,{pkg,<<"rabbit_common">>,<<"3.13.4">>},0},
{<<"ranch">>,{pkg,<<"ranch">>,<<"1.8.0">>},1},
{<<"recon">>,{pkg,<<"recon">>,<<"2.5.3">>},2},
{<<"recon">>,{pkg,<<"recon">>,<<"2.5.3">>},1},
{<<"stringprep">>,{pkg,<<"stringprep">>,<<"1.0.29">>},1},
{<<"stun">>,{pkg,<<"stun">>,<<"1.2.12">>},1},
{<<"thoas">>,{pkg,<<"thoas">>,<<"1.0.0">>},2},
{<<"thoas">>,{pkg,<<"thoas">>,<<"1.0.0">>},1},
{<<"uuid">>,{pkg,<<"uuid_erl">>,<<"2.0.7">>},1},
{<<"worker_pool">>,{pkg,<<"worker_pool">>,<<"6.2.0">>},1}]}.
[
{pkg_hash,[
{<<"amqp_client">>, <<"2B677BC3F2E2234BA7517042B25D72071A79735042E91F9116BD3C176854B622">>},
{<<"amqp_client">>, <<"2C9975DA52319D8E613B5DBD3E931374B41AF6F571BD7A3F96CE50AD6C27441B">>},
{<<"base16">>, <<"F0549F732E03BE8124ED0D19FD5EE52146CC8BE24C48CBC3F23AB44B157F11A2">>},
{<<"bbmustache">>, <<"0CABDCE0DB9FE6D3318131174B9F2B351328A4C0AFBEB3E6E99BB0E02E9B621D">>},
{<<"cowboy">>, <<"F276D521A1FF88B2B9B4C54D0E753DA6C66DD7BE6C9FCA3D9418B561828A3731">>},
@@ -48,7 +48,7 @@
{<<"exml">>, <<"9581FE6512D9772C61BBE611CD4A8E5BB90B4D4481275325EC520F7A931A9393">>},
{<<"fast_pbkdf2">>, <<"6045138C4C209FC8222A0B18B2CB1D7BD7407EF4ADAD0F14C5E0F7F4726E3E41">>},
{<<"fast_scram">>, <<"70724F584A118DA147A51EE38DEE56203F217D58AD61E0BB2C2EF834C16B35B8">>},
{<<"fast_tls">>, <<"F52731A4B35259FA06CF23E2A0732920AD9EFCE7C3D68377F129A474998747BB">>},
{<<"fast_tls">>, <<"65D7D547A09EEFB37A1C0D04D8601FAC4F3E6E2C1EDE859A7787081670F9648D">>},
{<<"fusco">>, <<"3DD6A90151DFEF30EA1937CC44E9A59177C0094918388D9BCAA2F2DC5E2AE4AA">>},
{<<"goldrush">>, <<"F06E5D5F1277DA5C413E84D5A2924174182FB108DABB39D5EC548B27424CD106">>},
{<<"gun">>, <<"B4E4CBBF3026D21981C447E9E7CA856766046EFF693720BA43114D7F5DE36E87">>},
@@ -59,7 +59,7 @@
{<<"p1_utils">>, <<"2D39B5015A567BBD2CC7033EEB93A7C60D8C84EFE1EF69A3473FAA07FA268187">>},
{<<"proper">>, <<"89A44B8C39D28BB9B4BE8E4D715D534905B325470F2E0EC5E004D12484A79434">>},
{<<"quickrand">>, <<"D2BD76676A446E6A058D678444B7FDA1387B813710D1AF6D6E29BB92186C8820">>},
{<<"rabbit_common">>, <<"466123EE7346A3CDAC078C0C302BCD36DA4523E8ACD678C1B992F7B4DF1F7914">>},
{<<"rabbit_common">>, <<"68650FBCB8CE204CE28C116451C030E294BF916ED722B43A369D7586DA2A5605">>},
{<<"ranch">>, <<"8C7A100A139FD57F17327B6413E4167AC559FBC04CA7448E9BE9057311597A1D">>},
{<<"recon">>, <<"739107B9050EA683C30E96DE050BC59248FD27EC147696F79A8797FF9FA17153">>},
{<<"stringprep">>, <<"02F23E8C3A219A3DFE40A22E908BECE3A2F68AF0FF599EA8A7B714ECB21E62EE">>},
@@ -68,7 +68,7 @@
{<<"uuid">>, <<"B2078D2CC814F53AFA52D36C91E08962C7E7373585C623F4C0EA6DFB04B2AF94">>},
{<<"worker_pool">>, <<"506DE38C528A81ED2C6A80A419B83DDE6DA5E295BD320BDF4D35A69AFEB0247A">>}]},
{pkg_hash_ext,[
{<<"amqp_client">>, <<"5F70B6C3B1A739790080DA4FDDC94A867E99F033C4B1EDC20D6FF8B8FB4BD160">>},
{<<"amqp_client">>, <<"E86FD4B95291799BDE1571AF81077B5A5DB9653824FBE429F5BADAD831C52642">>},
{<<"base16">>, <<"06EA2D48343282E712160BA89F692B471DB8B36ABE8394F3445FF9032251D772">>},
{<<"bbmustache">>, <<"688B33A4D5CC2D51F575ADF0B3683FC40A38314A2F150906EDCFC77F5B577B3B">>},
{<<"cowboy">>, <<"8A7ABE6D183372CEB21CAA2709BEC928AB2B72E18A3911AA1771639BEF82651E">>},
@@ -79,7 +79,7 @@
{<<"exml">>, <<"D8E7894E2544402B4986EEB2443C15B51B14F686266F091DBF2777D1D99A2FA2">>},
{<<"fast_pbkdf2">>, <<"BC3B5A3CAB47AD114FF8BB815FEDE62A6187ACD14D8B37412F2AF8236A089CEF">>},
{<<"fast_scram">>, <<"771D034341599CFC6A6C5E56CF924B68D2C7478088CAF17419E3147B66914667">>},
{<<"fast_tls">>, <<"DB34322C8782D4C5139CCB80709D8EC8C38089B44262EDD0C2F660AC495BD389">>},
{<<"fast_tls">>, <<"131542913937025E48CD80AA81F00359686D5501B75621E72026A87B5229505B">>},
{<<"fusco">>, <<"6343551BD1E824F2A6CA85E1158C5B37C320FD449FBFEC7450A73F192AAF9022">>},
{<<"goldrush">>, <<"99CB4128CFFCB3227581E5D4D803D5413FA643F4EB96523F77D9E6937D994CEB">>},
{<<"gun">>, <<"52FC7FC246BFC3B00E01AEA1C2854C70A366348574AB50C57DFE796D24A0101D">>},
@@ -90,7 +90,7 @@
{<<"p1_utils">>, <<"9219214428F2C6E5D3187FF8EB9A8783695C2427420BE9A259840E07ADA32847">>},
{<<"proper">>, <<"18285842185BD33EFBDA97D134A5CB5A0884384DB36119FEE0E3CFA488568CBB">>},
{<<"quickrand">>, <<"B8ACBF89A224BC217C3070CA8BEBC6EB236DBE7F9767993B274084EA044D35F0">>},
{<<"rabbit_common">>, <<"70C31A51F7401CC0204DDEF2745D98680C2E0DF67E3B0C9E198916881FDE3293">>},
{<<"rabbit_common">>, <<"C0E8F2909EAECABC732990D26B75DE092E265D14E8AC9EAFD4813438F1CC2233">>},
{<<"ranch">>, <<"49FBCFD3682FAB1F5D109351B61257676DA1A2FDBE295904176D5E521A2DDFE5">>},
{<<"recon">>, <<"6C6683F46FD4A1DFD98404B9F78DCABC7FCD8826613A89DCB984727A8C3099D7">>},
{<<"stringprep">>, <<"928EBA304C3006EB1512110EBD7B87DB163B00859A09375A1E4466152C6C462A">>},
105 changes: 22 additions & 83 deletions big_tests/run_common_test.erl
@@ -177,7 +177,6 @@ save_count(Test, Configs) ->

run_test(Test, PresetsToRun, CoverOpts) ->
{ConfigFiles, Props} = get_ct_config(Test),
prepare_cover(Props, CoverOpts),
error_logger:info_msg("Presets to run ~p", [PresetsToRun]),
case get_presets(Props) of
{ok, Presets} ->
@@ -234,7 +233,6 @@ preset_names(Presets) ->
[Preset||{Preset, _} <- Presets].

do_run_quick_test(Test, CoverOpts) ->
prepare_cover(Test, CoverOpts),
load_test_modules(Test),
Result = ct:run_test(Test),
case Result of
@@ -344,46 +342,13 @@ call(Node, M, F, A) ->
Result
end.

prepare_cover(Props, true) ->
io:format("Preparing cover~n"),
prepare(Props);
prepare_cover(_, _) ->
ok.

analyze_coverage(Props, true) ->
analyze(Props, true);
analyze_coverage(Props, ModuleList) when is_list(ModuleList) ->
analyze(Props, ModuleList);
analyze_coverage(_, _) ->
ok.

prepare(Props) ->
Nodes = get_mongoose_nodes(Props),
maybe_compile_cover(Nodes).

maybe_compile_cover([]) ->
io:format("cover: skip cover compilation~n", []),
ok;
maybe_compile_cover(Nodes) ->
io:format("cover: compiling modules for nodes ~p~n", [Nodes]),
import_code_paths(hd(Nodes)),

cover:start(Nodes),
Dir = call(hd(Nodes), code, lib_dir, [mongooseim, ebin]),

%% Time is in microseconds
{Time, Compiled} = timer:tc(fun() ->
Results = cover:compile_beam_directory(Dir),
Ok = [X || X = {ok, _} <- Results],
NotOk = Results -- Ok,
#{ok => length(Ok), failed => NotOk}
end),
github_actions_fold("cover compiled output", fun() ->
io:format("cover: compiled ~p~n", [Compiled])
end),
report_progress("~nCover compilation took ~ts~n", [microseconds_to_string(Time)]),
ok.

analyze(Props, CoverOpts) ->
io:format("Coverage analyzing~n"),
Nodes = get_mongoose_nodes(Props),
@@ -392,33 +357,34 @@ analyze(Props, CoverOpts) ->
analyze(_Props, _CoverOpts, []) ->
ok;
analyze(_Props, CoverOpts, Nodes) ->
deduplicate_cover_server_console_prints(),
MainNode = hd(Nodes),
%% Import small tests cover
Files = filelib:wildcard(repo_dir() ++ "/_build/**/cover/*.coverdata"),
io:format("Files: ~p", [Files]),
Import = fun(File) ->
ok = rpc:call(MainNode, cover, import, [File])
end,
report_time("Import cover data into run_common_test node", fun() ->
[cover:import(File) || File <- Files]
[Import(File) || File <- Files]
end),
%% Gather cover data to the MainNode
{ExportedFiles, _} = report_time("Export cover data from each node", fun() ->
rpc:multicall(tl(Nodes), ejabberd_app, export_cover, ["/tmp"])
end),
report_time("Export cover data from each node", fun() ->
[Import(File) || File <- ExportedFiles]
end),
report_time("Export merged cover data", fun() ->
cover:export("/tmp/mongoose_combined.coverdata")
ok = rpc:call(MainNode, cover, export, ["/tmp/mongoose_combined.coverdata"])
end),
case os:getenv("GITHUB_RUN_ID") of
false ->
make_html(modules_to_analyze(CoverOpts));
make_html(MainNode, modules_to_analyze(MainNode, CoverOpts));
_ ->
ok
end,
case os:getenv("KEEP_COVER_RUNNING") of
"1" ->
io:format("Skip stopping cover~n"),
ok;
_ ->
report_time("Stopping cover on MongooseIM nodes", fun() ->
cover:stop([node()|Nodes])
end)
end.

make_html(Modules) ->
make_html(MainNode, Modules) ->
{ok, Root} = file:get_cwd(),
SortScript = Root ++ "/priv/sorttable.js",
os:cmd("cp " ++ SortScript ++ " " ++ ?CT_REPORT),
@@ -436,12 +402,11 @@ make_html(Modules) ->
Fun = fun(Module, {CAcc, NCAcc}) ->
FileName = lists:flatten(io_lib:format("~s.COVER.html",[Module])),

%% We assume that import_code_paths/1 was called earlier
case cover:analyse(Module, module) of
case rpc:call(MainNode, cover, analyse, [Module, module]) of
{ok, {Module, {C, NC}}} ->
file:write(File, row(atom_to_list(Module), C, NC, percent(C,NC),"coverage/"++FileName)),
FilePathC = filename:join([CoverageDir, FileName]),
catch cover:analyse_to_file(Module, FilePathC, [html]),
catch rpc:call(MainNode, cover, analyse_to_file, [Module, FilePathC, [html]]),
{CAcc + C, NCAcc + NC};
Reason ->
error_logger:error_msg("issue=cover_analyse_failed module=~p reason=~p",
@@ -503,9 +468,11 @@ module_list(undefined) ->
module_list(ModuleList) ->
[ list_to_atom(L) || L <- string:tokens(ModuleList, ", ") ].

modules_to_analyze(true) ->
lists:usort(cover:imported_modules() ++ cover:modules());
modules_to_analyze(ModuleList) when is_list(ModuleList) ->
modules_to_analyze(MainNode, true) ->
Mods1 = rpc:call(MainNode, cover, imported_modules, []),
Mods2 = rpc:call(MainNode, cover, modules, []),
lists:usort(Mods1 ++ Mods2);
modules_to_analyze(_MainNode, ModuleList) when is_list(ModuleList) ->
ModuleList.

add({X1, X2, X3, X4},
@@ -602,24 +569,6 @@ report_progress(Format, Args) ->
Message = io_lib:format(Format, Args),
file:write_file("/tmp/progress", Message, [append]).

github_actions_fold(Description, Fun) ->
case os:getenv("GITHUB_RUN_ID") of
false ->
Fun();
_ ->
io:format("github_actions_fold:start:~ts~n", [Description]),
Result = Fun(),
io:format("github_actions_fold:end:~ts~n", [Description]),
Result
end.

%% Import code paths from a running node.
%% It allows cover:analyse/2 to find source file by calling
%% Module:module_info(compiled).
import_code_paths(FromNode) when is_atom(FromNode) ->
Paths = call(FromNode, code, get_path, []),
code:add_paths(Paths).

%% Gets result of file operation and prints filename, if we have any issues.
handle_file_error(FileName, {error, Reason}) ->
error_logger:error_msg("issue=file_operation_error filename=~p reason=~p",
Expand All @@ -630,16 +579,6 @@ handle_file_error(_FileName, Other) ->

%% ------------------------------------------------------------------

%% cover_server process is using io:format too much.
%% This code removes duplicate io:formats.
%%
%% Example of a message we want to write only once:
%% "Analysis includes data from imported files" from cover.erl in Erlang/R19
deduplicate_cover_server_console_prints() ->
%% Set a new group leader for cover_server
CoverPid = whereis(cover_server),
dedup_proxy_group_leader:start_proxy_group_leader_for(CoverPid).

ct_run_dirs() ->
filelib:wildcard("ct_report/ct_run*").

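
Context for the run_common_test.erl changes above: cover import, analysis and per-module HTML generation now happen on the main MongooseIM node through rpc:call/4, the local cover-compilation step (prepare_cover/maybe_compile_cover) is dropped, and with it the import_code_paths/1, github_actions_fold/2 and dedup_proxy_group_leader workarounds. A minimal sketch of that remote-cover pattern, assuming Erlang distribution is already set up; the module name and argument shapes below are illustrative, not taken from the PR:

%% remote_cover_sketch.erl (illustrative)
-module(remote_cover_sketch).
-export([analyze/2]).

%% Import .coverdata files on MainNode and analyse every module its cover
%% server knows about, returning {Module, {Covered, NotCovered}} pairs.
analyze(MainNode, CoverDataFiles) ->
    [ok = rpc:call(MainNode, cover, import, [File]) || File <- CoverDataFiles],
    Mods = lists:usort(rpc:call(MainNode, cover, imported_modules, []) ++
                       rpc:call(MainNode, cover, modules, [])),
    [begin
         {ok, {Mod, {Cov, NotCov}}} = rpc:call(MainNode, cover, analyse, [Mod, module]),
         {Mod, {Cov, NotCov}}
     end || Mod <- Mods].

Because the analysis runs where the beam files and sources live, the test runner no longer has to copy the remote code path locally, which is what the deleted import_code_paths/1 helper existed for.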
73 changes: 0 additions & 73 deletions big_tests/src/dedup_proxy_group_leader.erl

This file was deleted.
