From 5a3a4114dc2083c79d4a6103707bc909502d27cc Mon Sep 17 00:00:00 2001 From: Shunichi Shinohara Date: Tue, 8 Sep 2015 15:48:25 +0900 Subject: [PATCH 1/7] Add OTP 17 compatibility --- include/riak_cs.hrl | 8 ++++++++ rebar.config | 9 ++++++--- src/riak_cs_get_fsm.erl | 8 +++++++- src/riak_cs_list_objects_ets_cache.erl | 16 +++++++++++----- src/riak_cs_oos_rewrite.erl | 6 +++--- src/riak_cs_s3_rewrite.erl | 8 ++++---- src/riak_cs_s3_rewrite_legacy.erl | 6 ++++-- src/riak_cs_utils.erl | 2 +- src/twop_set.erl | 12 +++++++++--- test/twop_set_eqc.erl | 14 ++++++++++---- 10 files changed, 63 insertions(+), 26 deletions(-) diff --git a/include/riak_cs.hrl b/include/riak_cs.hrl index 2b5883a5f..65872fa22 100644 --- a/include/riak_cs.hrl +++ b/include/riak_cs.hrl @@ -518,3 +518,11 @@ -define(BLOCK_BUCKET_PREFIX, <<"0b:">>). % Version # = 0 -define(MAX_S3_KEY_LENGTH, 1024). + +-ifdef(namespaced_types). +-type mochiweb_headers() :: gb_trees:tree(). +-type robj_md() :: dict:dict(). +-else. +-type mochiweb_headers() :: gb_tree(). +-type robj_md() :: dict(). +-endif. diff --git a/rebar.config b/rebar.config index fc2db7a97..3f50231df 100644 --- a/rebar.config +++ b/rebar.config @@ -1,6 +1,6 @@ {sub_dirs, ["rel"]}. -{require_otp_vsn, "R16"}. +{require_otp_vsn, "R16|17"}. {cover_enabled, false}. @@ -9,7 +9,10 @@ {lib_dirs, ["deps", "apps"]}. -{erl_opts, [debug_info, warnings_as_errors, {parse_transform, lager_transform}]}. +{erl_opts, [debug_info, + warnings_as_errors, + {parse_transform, lager_transform}, + {platform_define, "^[0-9]+", namespaced_types}]}. {xref_checks, []}. {xref_queries, @@ -46,7 +49,7 @@ {poolboy, "0.8.*", {git, "git://github.com/basho/poolboy", "0.8.1p3"}}, {exometer_core, ".*", {git, "git://github.com/Feuerlabs/exometer_core", {tag, "1.2"}}}, {cluster_info, ".*", {git, "git://github.com/basho/cluster_info", {tag, "2.0.3"}}}, - {xmerl, ".*", {git, "git://github.com/shino/xmerl", "b35bcb05abaf27f183cfc3d85d8bffdde0f59325"}}, + {xmerl, ".*", {git, "git://github.com/shino/xmerl", "1b016a05473e086abadbb3c12f63d167fe96c00f"}}, {erlcloud, ".*", {git, "git://github.com/basho/erlcloud.git", {tag, "0.4.5"}}}, {rebar_lock_deps_plugin, ".*", {git, "git://github.com/seth/rebar_lock_deps_plugin.git", {tag, "3.1.0"}}} ]}. diff --git a/src/riak_cs_get_fsm.erl b/src/riak_cs_get_fsm.erl index 980577b4d..52bb4176a 100644 --- a/src/riak_cs_get_fsm.erl +++ b/src/riak_cs_get_fsm.erl @@ -67,6 +67,12 @@ -type block_name() :: {binary(), integer()}. +-ifdef(namespaced_types). +-type block_queue() :: queue:queue(). +-else. +-type block_queue() :: queue(). +-endif. + -record(state, {from :: {pid(), reference()}, riak_client :: riak_client(), mani_fsm_pid :: pid(), @@ -78,7 +84,7 @@ got_blocks=orddict:new() :: orddict:orddict(), manifest :: term(), blocks_order :: [block_name()], - blocks_intransit=queue:new() :: queue(), + blocks_intransit=queue:new() :: block_queue(), test=false :: boolean(), total_blocks :: pos_integer(), num_sent=0 :: non_neg_integer(), diff --git a/src/riak_cs_list_objects_ets_cache.erl b/src/riak_cs_list_objects_ets_cache.erl index da1808d6e..b91ed48cd 100644 --- a/src/riak_cs_list_objects_ets_cache.erl +++ b/src/riak_cs_list_objects_ets_cache.erl @@ -52,9 +52,15 @@ -define(DICTMODULE, dict). +-ifdef(namespaced_types). +-type dictionary() :: dict:dict(). +-else. +-type dictionary() :: dict(). +-endif. + -record(state, {tid :: ets:tid(), - monitor_to_timer = ?DICTMODULE:new() :: ?DICTMODULE(), - key_to_monitor = ?DICTMODULE:new() :: ?DICTMODULE()}). 
+ monitor_to_timer = ?DICTMODULE:new() :: dictionary(), + key_to_monitor = ?DICTMODULE:new() :: dictionary()}). -type state() :: #state{}. -type cache_lookup_result() :: {true, [binary()]} | false. @@ -257,7 +263,7 @@ handle_down(MonitorRef, State=#state{monitor_to_timer=MonToTimer}) -> NewMonToTimer = remove_timer(MonitorRef, MonToTimer), State#state{monitor_to_timer=NewMonToTimer}. --spec remove_monitor(binary(), ?DICTMODULE()) -> ?DICTMODULE(). +-spec remove_monitor(binary(), dictionary()) -> dictionary(). remove_monitor(ExpiredKey, KeyToMon) -> RefResult = safe_fetch(ExpiredKey, KeyToMon), case RefResult of @@ -268,7 +274,7 @@ remove_monitor(ExpiredKey, KeyToMon) -> end, ?DICTMODULE:erase(ExpiredKey, KeyToMon). --spec remove_timer(reference(), ?DICTMODULE()) -> ?DICTMODULE(). +-spec remove_timer(reference(), dictionary()) -> dictionary(). remove_timer(MonitorRef, MonToTimer) -> RefResult = safe_fetch(MonitorRef, MonToTimer), _ = case RefResult of @@ -281,7 +287,7 @@ remove_timer(MonitorRef, MonToTimer) -> end, ?DICTMODULE:erase(MonitorRef, MonToTimer). --spec safe_fetch(Key :: term(), Dict :: ?DICTMODULE()) -> +-spec safe_fetch(Key :: term(), Dict :: dictionary()) -> {ok, term()} | {error, term()}. safe_fetch(Key, Dict) -> try diff --git a/src/riak_cs_oos_rewrite.erl b/src/riak_cs_oos_rewrite.erl index d0ce41acd..bb71729fa 100644 --- a/src/riak_cs_oos_rewrite.erl +++ b/src/riak_cs_oos_rewrite.erl @@ -34,8 +34,8 @@ -endif. %% @doc Function to rewrite headers prior to processing by webmachine. --spec rewrite(atom(), atom(), {integer(), integer()}, gb_tree(), string()) -> - {gb_tree(), string()}. +-spec rewrite(atom(), atom(), {integer(), integer()}, mochiweb_headers(), string()) -> + {mochiweb_headers(), string()}. rewrite(Method, _Scheme, _Vsn, Headers, RawPath) -> riak_cs_dtrace:dt_wm_entry(?MODULE, <<"rewrite">>), {Path, QueryString, _} = mochiweb_util:urlsplit_path(RawPath), @@ -63,7 +63,7 @@ parse_path(Path) -> {ApiVsn, Account, "/" ++ string:join(RestPath, "/")}. %% @doc Add headers for the raw path, the API version, and the account. --spec rewrite_headers(gb_tree(), string(), string(), string()) -> gb_tree(). +-spec rewrite_headers(mochiweb_headers(), string(), string(), string()) -> mochiweb_headers(). rewrite_headers(Headers, RawPath, ApiVsn, Account) -> UpdHdrs0 = mochiweb_headers:default(?RCS_REWRITE_HEADER, RawPath, Headers), UpdHdrs1 = mochiweb_headers:enter(?OOS_API_VSN_HEADER, ApiVsn, UpdHdrs0), diff --git a/src/riak_cs_s3_rewrite.erl b/src/riak_cs_s3_rewrite.erl index d2f8b443c..d228c0035 100644 --- a/src/riak_cs_s3_rewrite.erl +++ b/src/riak_cs_s3_rewrite.erl @@ -61,8 +61,8 @@ -type subresources() :: [subresource()]. %% @doc Function to rewrite headers prior to processing by webmachine. --spec rewrite(atom(), atom(), {integer(), integer()}, gb_tree(), string()) -> - {gb_tree(), string()}. +-spec rewrite(atom(), atom(), {integer(), integer()}, mochiweb_headers(), string()) -> + {mochiweb_headers(), string()}. rewrite(Method, _Scheme, _Vsn, Headers, Url) -> riak_cs_dtrace:dt_wm_entry(?MODULE, <<"rewrite">>), {Path, QueryString, _} = mochiweb_util:urlsplit_path(Url), @@ -86,8 +86,8 @@ raw_url(RD) -> {Path, mochiweb_util:parse_qs(QS)} end. --spec rewrite_path_and_headers(atom(), gb_tree(), string(), string(), string()) -> - {gb_tree(), string()}. +-spec rewrite_path_and_headers(atom(), mochiweb_headers(), string(), string(), string()) -> + {mochiweb_headers(), string()}. 
rewrite_path_and_headers(Method, Headers, Url, Path, QueryString) -> Host = mochiweb_headers:get_value("host", Headers), HostBucket = bucket_from_host(Host), diff --git a/src/riak_cs_s3_rewrite_legacy.erl b/src/riak_cs_s3_rewrite_legacy.erl index 824e045cf..e239e772c 100644 --- a/src/riak_cs_s3_rewrite_legacy.erl +++ b/src/riak_cs_s3_rewrite_legacy.erl @@ -22,9 +22,11 @@ -export([rewrite/5]). +-include("riak_cs.hrl"). + %% @doc Function to rewrite headers prior to processing by webmachine. --spec rewrite(atom(), atom(), {integer(), integer()}, gb_tree(), string()) -> - {gb_tree(), string()}. +-spec rewrite(atom(), atom(), {integer(), integer()}, mochiweb_headers(), string()) -> + {mochiweb_headers(), string()}. rewrite(Method, _Scheme, _Vsn, Headers, Url) -> riak_cs_dtrace:dt_wm_entry(?MODULE, <<"rewrite">>), %% Unquote the path to accomodate some naughty client libs (looking diff --git a/src/riak_cs_utils.erl b/src/riak_cs_utils.erl index fe10c5af5..206ceff0a 100644 --- a/src/riak_cs_utils.erl +++ b/src/riak_cs_utils.erl @@ -258,7 +258,7 @@ handle_active_manifests({error, no_active_manifest}) -> {error, notfound}. %% @doc Determine if a set of contents of a riak object has a tombstone. --spec has_tombstone({dict(), binary()}) -> boolean(). +-spec has_tombstone({robj_md(), binary()}) -> boolean(). has_tombstone({_, <<>>}) -> true; has_tombstone({MD, _V}) -> diff --git a/src/twop_set.erl b/src/twop_set.erl index 1a6a2ae08..0a7a0daf9 100644 --- a/src/twop_set.erl +++ b/src/twop_set.erl @@ -53,7 +53,13 @@ resolve/1 ]). --type twop_set() :: {set(), set()}. +-ifdef(namespaced_types). +-type stdlib_set() :: sets:set(). +-else. +-type stdlib_set() :: set(). +-endif. + +-type twop_set() :: {stdlib_set(), stdlib_set()}. -export_type([twop_set/0]). %%%=================================================================== @@ -140,11 +146,11 @@ resolution_test() -> %%% Test API %%%=================================================================== --spec adds(twop_set()) -> set(). +-spec adds(twop_set()) -> stdlib_set(). adds({Adds, _}) -> Adds. --spec dels(twop_set()) -> set(). +-spec dels(twop_set()) -> stdlib_set(). dels({_, Dels}) -> Dels. diff --git a/test/twop_set_eqc.erl b/test/twop_set_eqc.erl index fd4d93b36..f86fde5f5 100644 --- a/test/twop_set_eqc.erl +++ b/test/twop_set_eqc.erl @@ -52,8 +52,14 @@ -define(QC_OUT(P), eqc:on_output(fun(Str, Args) -> io:format(user, Str, Args) end, P)). --record(eqc_state, {adds=sets:new() :: set(), - deletes=sets:new() :: set(), +-ifdef(namespaced_types). +-type stdlib_set() :: sets:set(). +-else. +-type stdlib_set() :: set(). +-endif. + +-record(eqc_state, {adds=sets:new() :: stdlib_set(), + deletes=sets:new() :: stdlib_set(), operation_count=0 :: non_neg_integer(), operation_limit=500 :: pos_integer(), set :: twop_set:twop_set(), @@ -66,8 +72,8 @@ eqc_test_() -> {spawn, [ - {timeout, 20, ?_assertEqual(true, quickcheck(numtests(?TEST_ITERATIONS, ?QC_OUT(prop_twop_set_api()))))}, - {timeout, 20, ?_assertEqual(true, quickcheck(numtests(?TEST_ITERATIONS, ?QC_OUT(prop_twop_set_resolution()))))} + {timeout, 60, ?_assertEqual(true, quickcheck(numtests(?TEST_ITERATIONS, ?QC_OUT(prop_twop_set_api()))))}, + {timeout, 60, ?_assertEqual(true, quickcheck(numtests(?TEST_ITERATIONS, ?QC_OUT(prop_twop_set_resolution()))))} ] }. 
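
The {platform_define, "^[0-9]+", namespaced_types} option added to rebar.config above is what drives this patch: OTP releases before 17 are named with a leading "R" (R15, R16), so the regex matches only 17 and later, and the namespaced_types macro then selects between the old built-in types (dict(), set(), queue(), gb_tree()) and their namespaced OTP 17 replacements. A minimal self-contained sketch of the idiom, using an illustrative module and function name that are not part of the patch:

-module(namespaced_types_example).
-export([new_cache/0]).

%% The namespaced_types macro is set by rebar via
%% {platform_define, "^[0-9]+", namespaced_types}.
-ifdef(namespaced_types).
-type dictionary() :: dict:dict().   % OTP 17+: dict() is no longer a built-in type
-else.
-type dictionary() :: dict().        % R16 and earlier
-endif.

-spec new_cache() -> dictionary().
new_cache() ->
    dict:new().
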
From 92c1bbdabcb900171853bdbd4579eab16d779ec7 Mon Sep 17 00:00:00 2001 From: Shunichi Shinohara Date: Tue, 8 Sep 2015 16:09:17 +0900 Subject: [PATCH 2/7] Use erlcloud instead of curl command execution --- riak_test/src/rtcs.erl | 1 - riak_test/src/rtcs_admin.erl | 113 ++++++++++++++-------------------- riak_test/tests/user_test.erl | 83 +++++++++++-------------- 3 files changed, 82 insertions(+), 115 deletions(-) diff --git a/riak_test/src/rtcs.erl b/riak_test/src/rtcs.erl index 5e9438f51..9b5909457 100644 --- a/riak_test/src/rtcs.erl +++ b/riak_test/src/rtcs.erl @@ -243,7 +243,6 @@ set_advanced_conf(DevPath, NameValuePairs) -> [rtcs_dev:update_app_config_file(RiakConf, NameValuePairs) || RiakConf <- AdvancedConfs], ok. - assert_error_log_empty(N) -> assert_error_log_empty(current, N). diff --git a/riak_test/src/rtcs_admin.erl b/riak_test/src/rtcs_admin.erl index 0d4b6b7f8..b35c0ae06 100644 --- a/riak_test/src/rtcs_admin.erl +++ b/riak_test/src/rtcs_admin.erl @@ -26,6 +26,7 @@ create_user/4, create_admin_user/1, update_user/5, + get_user/4, list_users/4, make_authorization/5, make_authorization/6, @@ -49,7 +50,7 @@ storage_stats_json_request(AdminConfig, UserConfig, Begin, End) -> {struct, Slice} = latest(Samples, undefined), by_bucket_list(Slice, []). --spec(create_admin_user(atom()) -> #aws_config{}). +-spec create_admin_user(atom()) -> #aws_config{}. create_admin_user(Node) -> User = "admin", Email = "admin@me.com", @@ -60,7 +61,7 @@ create_admin_user(Node) -> lager:info("Id = ~p",[Id]), UserConfig. --spec(create_user(atom(), non_neg_integer()) -> #aws_config{}). +-spec create_user(atom(), non_neg_integer()) -> #aws_config{}. create_user(Node, UserIndex) -> {A, B, C} = erlang:now(), User = "Test User" ++ integer_to_list(UserIndex), @@ -71,78 +72,58 @@ create_user(Node, UserIndex) -> UserConfig#aws_config.secret_access_key]), UserConfig. --spec(create_user(non_neg_integer(), string(), string()) -> {#aws_config{}, string()}). +-spec create_user(non_neg_integer(), string(), string()) -> {#aws_config{}, string()}. create_user(Port, EmailAddr, Name) -> - create_user(Port, undefined, EmailAddr, Name). + %% create_user(Port, undefined, EmailAddr, Name). + create_user(Port, aws_config("admin-key", "admin-secret", Port), EmailAddr, Name). --spec(create_user(non_neg_integer(), string(), string(), string()) -> {#aws_config{}, string()}). +-spec create_user(non_neg_integer(), string(), string(), string()) -> {#aws_config{}, string()}. 
create_user(Port, UserConfig, EmailAddr, Name) -> lager:debug("Trying to create user ~p", [EmailAddr]), Resource = "/riak-cs/user", - Date = httpd_util:rfc1123_date(), - Cmd="curl -s -H 'Content-Type: application/json' " ++ - "-H 'Date: " ++ Date ++ "' " ++ - case UserConfig of - undefined -> ""; - _ -> - "-H 'Authorization: " ++ - make_authorization("POST", Resource, "application/json", - UserConfig, Date) ++ - "' " - end ++ - "http://localhost:" ++ - integer_to_list(Port) ++ - Resource ++ - " --data '{\"email\":\"" ++ EmailAddr ++ "\", \"name\":\"" ++ Name ++"\"}'", - lager:debug("Cmd: ~p", [Cmd]), + ReqBody = "{\"email\":\"" ++ EmailAddr ++ "\", \"name\":\"" ++ Name ++"\"}", Delay = rt_config:get(rt_retry_delay), Retries = rt_config:get(rt_max_wait_time) div Delay, - OutputFun = fun() -> rt:cmd(Cmd) end, - Condition = fun({Status, Res}) -> - lager:debug("Return (~p), Res: ~p", [Status, Res]), - Status =:= 0 andalso Res /= [] + OutputFun = fun() -> catch erlcloud_s3:s3_request( + UserConfig, post, "", Resource, [], "", + {ReqBody, "application/json"}, []) end, - {_Status, Output} = rtcs:wait_until(OutputFun, Condition, Retries, Delay), - lager:debug("Create user output=~p~n",[Output]), - {struct, JsonData} = mochijson2:decode(Output), - KeyId = binary_to_list(proplists:get_value(<<"key_id">>, JsonData)), - KeySecret = binary_to_list(proplists:get_value(<<"key_secret">>, JsonData)), - Id = binary_to_list(proplists:get_value(<<"id">>, JsonData)), + Condition = fun({'EXIT', Res}) -> + lager:debug("create_user failing, Res: ~p", [Res]), + false; + ({_ResHeader, _ResBody}) -> + true + end, + {_ResHeader, ResBody} = rtcs:wait_until(OutputFun, Condition, Retries, Delay), + lager:debug("ResBody: ~s", [ResBody]), + JsonData = mochijson2:decode(ResBody), + [KeyId, KeySecret, Id] = [binary_to_list(rtcs:json_get([K], JsonData)) || + K <- [<<"key_id">>, <<"key_secret">>, <<"id">>]], {aws_config(KeyId, KeySecret, Port), Id}. --spec(update_user(#aws_config{}, non_neg_integer(), string(), string(), string()) -> string()). -update_user(UserConfig, Port, Resource, ContentType, UpdateDoc) -> - Date = httpd_util:rfc1123_date(), - Cmd="curl -s -X PUT -H 'Date: " ++ Date ++ - "' -H 'Content-Type: " ++ ContentType ++ - "' -H 'Authorization: " ++ - make_authorization("PUT", Resource, ContentType, UserConfig, Date) ++ - "' http://localhost:" ++ integer_to_list(Port) ++ - Resource ++ " --data-binary " ++ UpdateDoc, - Delay = rt_config:get(rt_retry_delay), - Retries = rt_config:get(rt_max_wait_time) div Delay, - OutputFun = fun() -> os:cmd(Cmd) end, - Condition = fun(Res) -> Res /= [] end, - Output = rtcs:wait_until(OutputFun, Condition, Retries, Delay), - lager:debug("Update user output=~p~n",[Output]), - Output. - --spec(list_users(#aws_config{}, non_neg_integer(), string(), string()) -> string()). -list_users(UserConfig, Port, Resource, AcceptContentType) -> - Date = httpd_util:rfc1123_date(), - Cmd="curl -s -H 'Date: " ++ Date ++ - "' -H 'Accept: " ++ AcceptContentType ++ - "' -H 'Authorization: " ++ - make_authorization("GET", Resource, "", UserConfig, Date) ++ - "' http://localhost:" ++ integer_to_list(Port) ++ - Resource, - Delay = rt_config:get(rt_retry_delay), - Retries = rt_config:get(rt_max_wait_time) div Delay, - OutputFun = fun() -> os:cmd(Cmd) end, - Condition = fun(Res) -> Res /= [] end, - Output = rtcs:wait_until(OutputFun, Condition, Retries, Delay), - lager:debug("List users output=~p~n",[Output]), - Output. 
+-spec update_user(#aws_config{}, non_neg_integer(), string(), string(), string()) -> string().
+update_user(UserConfig, _Port, Resource, ContentType, UpdateDoc) ->
+    {_ResHeader, ResBody} = erlcloud_s3:s3_request(
+                              UserConfig, put, "", Resource, [], "",
+                              {UpdateDoc, ContentType}, []),
+    lager:debug("ResBody: ~s", [ResBody]),
+    ResBody.
+
+-spec get_user(#aws_config{}, non_neg_integer(), string(), string()) -> string().
+get_user(UserConfig, _Port, Resource, AcceptContentType) ->
+    lager:debug("Retrieving user record"),
+    Headers = [{"Accept", AcceptContentType}],
+    {_ResHeader, ResBody} = erlcloud_s3:s3_request(
+                              UserConfig, get, "", Resource, [], "", "", Headers),
+    lager:debug("ResBody: ~s", [ResBody]),
+    ResBody.
+
+-spec list_users(#aws_config{}, non_neg_integer(), string(), string()) -> string().
+list_users(UserConfig, _Port, Resource, AcceptContentType) ->
+    Headers = [{"Accept", AcceptContentType}],
+    {_ResHeader, ResBody} = erlcloud_s3:s3_request(
+                              UserConfig, get, "", Resource, [], "", "", Headers),
+    ResBody.

 -spec(make_authorization(string(), string(), string(), #aws_config{}, string()) -> string()).
 make_authorization(Method, Resource, ContentType, Config, Date) ->
@@ -166,7 +147,7 @@ make_authorization(Type, Method, Resource, ContentType, Config, Date, AmzHeaders
         base64:encode_to_string(rtcs:sha_mac(Config#aws_config.secret_access_key, StringToSign)),
     lists:flatten([Prefix, " ", Config#aws_config.access_key_id, $:, Signature]).

--spec(aws_config(string(), string(), non_neg_integer()) -> #aws_config{}).
+-spec aws_config(string(), string(), non_neg_integer()) -> #aws_config{}.
 aws_config(Key, Secret, Port) ->
     erlcloud_s3:new(Key,
                     Secret,
@@ -177,7 +158,7 @@ aws_config(Key, Secret, Port) ->
                     Port,
                     []).

--spec(aws_config(#aws_config{}, [{atom(), term()}]) -> #aws_config{}).
+-spec aws_config(#aws_config{}, [{atom(), term()}]) -> #aws_config{}.
aws_config(UserConfig, []) -> UserConfig; aws_config(UserConfig, [{port, Port}|Props]) -> diff --git a/riak_test/tests/user_test.erl b/riak_test/tests/user_test.erl index 79d98ffad..d34dccd55 100644 --- a/riak_test/tests/user_test.erl +++ b/riak_test/tests/user_test.erl @@ -141,11 +141,11 @@ update_user_test(AdminConfig, Node, ContentType, Users) -> %% Fetch the user record using the user's own credentials UserResult1 = parse_user_record( - get_user_record(UserConfig, Port, UserResource, ContentType), + rtcs_admin:get_user(UserConfig, Port, UserResource, ContentType), ContentType), %% Fetch the user record using the admin credentials UserResult2 = parse_user_record( - get_user_record(AdminConfig, Port, AdminResource, ContentType), + rtcs_admin:get_user(AdminConfig, Port, AdminResource, ContentType), ContentType), ?assertMatch({Email1, User1, _, Secret, "enabled"}, UserResult1), ?assertMatch({Email1, User1, _, Secret, "enabled"}, UserResult2), @@ -156,12 +156,12 @@ update_user_test(AdminConfig, Node, ContentType, Users) -> InvalidUpdateDoc = update_email_and_name_doc(ContentType, "admin@me.com", "admin"), ErrorResult = parse_error_code( - rtcs_admin:update_user(UserConfig, - Port, - Resource, - ContentType, - InvalidUpdateDoc)), - ?assertEqual("UserAlreadyExists", ErrorResult), + catch rtcs_admin:update_user(UserConfig, + Port, + Resource, + ContentType, + InvalidUpdateDoc)), + ?assertEqual({409, "UserAlreadyExists"}, ErrorResult), %% Test updating the user's name and email UpdateDoc = update_email_and_name_doc(ContentType, Email2, User2), @@ -169,11 +169,11 @@ update_user_test(AdminConfig, Node, ContentType, Users) -> %% Fetch the user record using the user's own credentials UserResult3 = parse_user_record( - get_user_record(UserConfig, Port, UserResource, ContentType), + rtcs_admin:get_user(UserConfig, Port, UserResource, ContentType), ContentType), %% Fetch the user record using the admin credentials UserResult4 = parse_user_record( - get_user_record(AdminConfig, Port, AdminResource, ContentType), + rtcs_admin:get_user(AdminConfig, Port, AdminResource, ContentType), ContentType), ?assertMatch({Email2, User2, _, Secret, "enabled"}, UserResult3), ?assertMatch({Email2, User2, _, Secret, "enabled"}, UserResult4), @@ -183,12 +183,12 @@ update_user_test(AdminConfig, Node, ContentType, Users) -> UpdateDoc2 = update_status_doc(ContentType, "disabled"), Resource = "/riak-cs/user/" ++ Key, ErrorResult2 = parse_error_code( - rtcs_admin:update_user(BadUserConfig, - Port, - Resource, - ContentType, - UpdateDoc2)), - ?assertEqual("AccessDenied", ErrorResult2), + catch rtcs_admin:update_user(BadUserConfig, + Port, + Resource, + ContentType, + UpdateDoc2)), + ?assertEqual({403, "AccessDenied"}, ErrorResult2), %% Test updating a user's own status Resource = "/riak-cs/user/" ++ Key, @@ -197,15 +197,16 @@ update_user_test(AdminConfig, Node, ContentType, Users) -> %% Fetch the user record using the user's own credentials. Since %% the user is now disabled this should return an error. UserResult5 = parse_error_code( - get_user_record(UserConfig, Port, UserResource, ContentType)), + catch rtcs_admin:get_user(UserConfig, Port, + UserResource, ContentType)), %% Fetch the user record using the admin credentials. The user is %% not able to retrieve their own account information now that the %% account is disabled. 
UserResult6 = parse_user_record( - get_user_record(AdminConfig, Port, AdminResource, ContentType), + rtcs_admin:get_user(AdminConfig, Port, AdminResource, ContentType), ContentType), - ?assertEqual("AccessDenied", UserResult5), + ?assertEqual({403, "AccessDenied"}, UserResult5), ?assertMatch({Email2, User2, _, Secret, "disabled"}, UserResult6), %% Re-enable the user @@ -228,47 +229,32 @@ update_user_test(AdminConfig, Node, ContentType, Users) -> %% Fetch the user record using the user's own credentials UserResult7 = parse_user_record( - get_user_record(UserConfig2, Port, UserResource, ContentType), + rtcs_admin:get_user(UserConfig2, Port, UserResource, ContentType), ContentType), %% Fetch the user record using the admin credentials UserResult8 = parse_user_record( - get_user_record(AdminConfig, Port, AdminResource, ContentType), + rtcs_admin:get_user(AdminConfig, Port, AdminResource, ContentType), ContentType), ?assertMatch({_, _, _, UpdSecret1, _}, UserResult7), ?assertMatch({_, _, _, UpdSecret1, _}, UserResult8), ?assertMatch({Email2, User2, _, _, "enabled"}, UserResult7), ?assertMatch({Email2, User2, _, _, "enabled"}, UserResult8). -get_user_record(UserConfig, Port, Resource, ContentType) -> - lager:debug("Retreiving user record"), - Date = httpd_util:rfc1123_date(), - Cmd="curl -s -H 'Date: " ++ Date ++ - "' -H 'Accept: " ++ ContentType ++ - "' -H 'Content-Type: " ++ ContentType ++ - "' -H 'Authorization: " ++ - rtcs_admin:make_authorization("GET", Resource, ContentType, UserConfig, Date) ++ - "' http://localhost:" ++ - integer_to_list(Port) ++ Resource, - lager:info("User retrieval cmd: ~p", [Cmd]), - Output = os:cmd(Cmd), - lager:debug("User record=~p~n",[Output]), - Output. - new_key_secret_doc(?JSON) -> - "'{\"new_key_secret\": true}'"; + "{\"new_key_secret\": true}"; new_key_secret_doc(?XML) -> - "'true'". + "true". update_status_doc(?JSON, Status) -> - "'{\"status\":\"" ++ Status ++ "\"}'"; + "{\"status\":\"" ++ Status ++ "\"}"; update_status_doc(?XML, Status) -> - "'" ++ Status ++ "'". + "" ++ Status ++ "". update_email_and_name_doc(?JSON, Email, Name) -> - "'{\"email\":\"" ++ Email ++ "\", \"name\":\"" ++ Name ++"\"}'"; + "{\"email\":\"" ++ Email ++ "\", \"name\":\"" ++ Name ++"\"}"; update_email_and_name_doc(?XML, Email, Name) -> - "'" ++ Email ++ - "" ++ Name ++ "enabled'". + "" ++ Email ++ + "" ++ Name ++ "enabled". parse_user_record(Output, ?JSON) -> {struct, JsonData} = mochijson2:decode(Output), @@ -356,10 +342,11 @@ xml_text_value(XmlText) -> binary_to_list(unicode:characters_to_binary(XmlText#xmlText.value)). parse_error_code(Output) -> - {ParsedData, _Rest} = xmerl_scan:string(Output, []), - lists:foldl(fun error_code_from_xml/2, - undefined, - ParsedData#xmlElement.content). + {'EXIT', {{aws_error, {http_error, Status, _, Body}}, _Backtrace}} = Output, + {ParsedData, _Rest} = xmerl_scan:string(Body, []), + {Status, lists:foldl(fun error_code_from_xml/2, + undefined, + ParsedData#xmlElement.content)}. 
 error_code_from_xml(#xmlText{}, Acc) ->
     Acc;
@@ -384,7 +371,7 @@ parse_user_info(Output) ->
 parse_user_info([_LastToken], _, Users) ->
     ordsets:from_list(Users);
 parse_user_info(["Content-Type: application/xml", RawXml | RestTokens],
-               Boundary, Users) ->
+                Boundary, Users) ->
     UpdUsers = parse_user_records(RawXml, ?XML) ++ Users,
     parse_user_info(RestTokens, Boundary, UpdUsers);
 parse_user_info(["Content-Type: application/json", RawJson | RestTokens],

From 28255e25547b8c173b5c8f06836b2f41823f4089 Mon Sep 17 00:00:00 2001
From: Shunichi Shinohara
Date: Wed, 1 Apr 2015 10:16:16 +0900
Subject: [PATCH 3/7] Reduce concurrency of create-user requests

Because the request rate increased by using erlcloud instead of curl,
the request pool can be exhausted.
---
 riak_test/tests/user_test.erl | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/riak_test/tests/user_test.erl b/riak_test/tests/user_test.erl
index d34dccd55..249718c50 100644
--- a/riak_test/tests/user_test.erl
+++ b/riak_test/tests/user_test.erl
@@ -66,8 +66,8 @@ japanese_aiueo() ->

 create_200_users(Port) ->
     From = self(),
-    Processes = 10,
-    PerProcess = 20,
+    Processes = 5,
+    PerProcess = 40,
     [spawn(fun() ->
                    Users = create_users(
                              Port,

From 952480a84f8b11f1002e768065559df8f0490484 Mon Sep 17 00:00:00 2001
From: Shunichi Shinohara
Date: Tue, 8 Sep 2015 16:38:19 +0900
Subject: [PATCH 4/7] Update lager to 2.2.0

---
 rebar.config | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/rebar.config b/rebar.config
index 3f50231df..bbfe79962 100644
--- a/rebar.config
+++ b/rebar.config
@@ -37,13 +37,13 @@
 ]}.

 {deps, [
+  {lager, ".*", {git, "git://github.com/basho/lager", {tag, "2.2.0"}}},
+  {lager_syslog, ".*", {git, "git://github.com/basho/lager_syslog", {tag, "2.1.1"}}},
   {cuttlefish, ".*", {git, "git://github.com/basho/cuttlefish.git", {tag, "2.0.4"}}},
   {node_package, ".*", {git, "git://github.com/basho/node_package", {tag, "2.0.3"}}},
   {getopt, ".*", {git, "git://github.com/jcomellas/getopt.git", {tag, "v0.8.2"}}},
   {webmachine, ".*", {git, "git://github.com/basho/webmachine", {tag, "1.10.8"}}},
   {riakc, ".*", {git, "git://github.com/basho/riak-erlang-client", {tag, "2.1.1"}}},
-  {lager, ".*", {git, "git://github.com/basho/lager", {tag, "2.1.1"}}},
-  {lager_syslog, ".*", {git, "git://github.com/basho/lager_syslog", {tag, "2.1.1"}}},
   {eper, ".*", {git, "git://github.com/basho/eper.git", "0.92-basho1"}},
   {druuid, ".*", {git, "git://github.com/kellymclaughlin/druuid.git", {tag, "0.2"}}},
   {poolboy, "0.8.*", {git, "git://github.com/basho/poolboy", "0.8.1p3"}},

From e47d334fe934923e8ef6bb91e0cccb063af38711 Mon Sep 17 00:00:00 2001
From: Shunichi Shinohara
Date: Thu, 10 Sep 2015 10:04:25 +0900
Subject: [PATCH 5/7] Update riak_cs_multibag to 2.1.0-pre6

---
 rebar.config | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/rebar.config b/rebar.config
index bbfe79962..1e85040e9 100644
--- a/rebar.config
+++ b/rebar.config
@@ -56,5 +56,5 @@

 {deps_ee, [
   {riak_repl_pb_api,".*",{git,"git@github.com:basho/riak_repl_pb_api.git", {tag, "2.1.1"}}},
-  {riak_cs_multibag,".*",{git,"git@github.com:basho/riak_cs_multibag.git", {tag, "2.1.0-pre5"}}}
+  {riak_cs_multibag,".*",{git,"git@github.com:basho/riak_cs_multibag.git", {tag, "2.1.0-pre6"}}}
 ]}.

From 273e353578ca3a4ed6ce1ce3c3448a0902075ec1 Mon Sep 17 00:00:00 2001
From: Shunichi Shinohara
Date: Thu, 10 Sep 2015 10:07:11 +0900
Subject: [PATCH 6/7] Use riakc_obj:metadata() instead of a custom one

---
 include/riak_cs.hrl   | 2 --
 src/riak_cs_utils.erl | 2 +-
 2 files changed, 1 insertion(+), 3 deletions(-)

diff --git a/include/riak_cs.hrl b/include/riak_cs.hrl
index 65872fa22..b112b7216 100644
--- a/include/riak_cs.hrl
+++ b/include/riak_cs.hrl
@@ -521,8 +521,6 @@

 -ifdef(namespaced_types).
 -type mochiweb_headers() :: gb_trees:tree().
--type robj_md() :: dict:dict().
 -else.
 -type mochiweb_headers() :: gb_tree().
--type robj_md() :: dict().
 -endif.
diff --git a/src/riak_cs_utils.erl b/src/riak_cs_utils.erl
index 206ceff0a..8b37a1fbb 100644
--- a/src/riak_cs_utils.erl
+++ b/src/riak_cs_utils.erl
@@ -258,7 +258,7 @@ handle_active_manifests({error, no_active_manifest}) ->
     {error, notfound}.

 %% @doc Determine if a set of contents of a riak object has a tombstone.
--spec has_tombstone({robj_md(), binary()}) -> boolean().
+-spec has_tombstone({riakc_obj:metadata(), binary()}) -> boolean().
 has_tombstone({_, <<>>}) ->
     true;
 has_tombstone({MD, _V}) ->

From 6f1e4023555fcbb4e3fc78fe2bbb91ab350605c0 Mon Sep 17 00:00:00 2001
From: Shunichi Shinohara
Date: Thu, 10 Sep 2015 11:39:36 +0900
Subject: [PATCH 7/7] Wait until registered name is unregistered to avoid race with next start

---
 test/riak_cs_gc_manager_eqc.erl | 18 ++++++++++++++++--
 1 file changed, 16 insertions(+), 2 deletions(-)

diff --git a/test/riak_cs_gc_manager_eqc.erl b/test/riak_cs_gc_manager_eqc.erl
index 4050a1e13..826f9db5b 100644
--- a/test/riak_cs_gc_manager_eqc.erl
+++ b/test/riak_cs_gc_manager_eqc.erl
@@ -102,7 +102,7 @@ prop_set_interval() ->
                   equals(Interval, State2#gc_manager_state.interval)}
                  ])
             after
-                ok = gen_fsm:sync_send_all_state_event(Pid, stop)
+                stop_and_hold_until_unregistered(Pid, riak_cs_gc_manager)
             end
         end).

@@ -124,7 +124,7 @@ prop_manual_commands() ->
                         end,
                         equals(ok, Res)))
             after
-                ok = gen_fsm:sync_send_all_state_event(Pid, stop)
+                stop_and_hold_until_unregistered(Pid, riak_cs_gc_manager)
             end
         end).

@@ -197,4 +197,18 @@ expected_result(_From, idle, cancel_batch) ->
 %% weight(feeding_workers, waiting_for_workers, _) -> 3;
 %% weight(_, _, _) -> 1.

+stop_and_hold_until_unregistered(Pid, RegName) ->
+    ok = gen_fsm:sync_send_all_state_event(Pid, stop),
+    hold_until_unregistered(RegName, 50).
+
+hold_until_unregistered(_RegName, 0) ->
+    {error, not_unregistered_so_long_time};
+hold_until_unregistered(RegName, N) ->
+    case whereis(RegName) of
+        undefined -> ok;
+        _ ->
+            timer:sleep(1),
+            hold_until_unregistered(RegName, N - 1)
+    end.
+
 -endif.