diff --git a/.gitignore b/.gitignore index 53c705d37..617432719 100644 --- a/.gitignore +++ b/.gitignore @@ -1,11 +1,10 @@ _build/ +rebar.lock ebin/ -deps/ tests/ /basho_bench /rel/basho_bench package -.rebar *~ #*# .DS_Store diff --git a/Makefile b/Makefile index 8beab1f9a..7e9b324d7 100644 --- a/Makefile +++ b/Makefile @@ -6,37 +6,19 @@ PKG_ID = basho-bench-$(PKG_VERSION) PKG_BUILD = 1 BASE_DIR = $(shell pwd) ERLANG_BIN = $(shell dirname $(shell which erl)) -REBAR ?= $(BASE_DIR)/rebar +REBAR ?= $(BASE_DIR)/rebar3 OVERLAY_VARS ?= -all: deps compile - $(REBAR) skip_deps=true escriptize +all: compile + $(REBAR) escriptize -.PHONY: deps compile rel lock locked-all locked-deps +.PHONY: compile rel -rel: deps compile - cd rel && $(REBAR) generate skip_deps=true $(OVERLAY_VARS) +rel: compile + cd rel && $(REBAR) generate $(OVERLAY_VARS) -deps: - $(REBAR) get-deps - -## -## Lock Targets -## -## see https://github.com/seth/rebar_lock_deps_plugin -lock: deps compile - $(REBAR) lock-deps - -locked-all: locked-deps compile - -locked-deps: - @echo "Using rebar.config.lock file to fetch dependencies" - $(REBAR) -C rebar.config.lock get-deps - -compile: deps - # Temp hack to work around https://github.com/basho/riak-erlang-client/issues/151 - (cd deps/riak_pb ; $(REBAR) clean compile deps_dir=..) +compile: @($(REBAR) compile) clean: @@ -74,31 +56,3 @@ JOBS := $(addprefix job,${TARGETS}) all_results: ${JOBS} ; echo "$@ successfully generated." ${JOBS}: job%: ; Rscript --vanilla priv/summary.r -i tests/$* - -## -## Packaging targets -## -.PHONY: package -export PKG_VERSION PKG_ID PKG_BUILD BASE_DIR ERLANG_BIN REBAR OVERLAY_VARS RELEASE - -package.src: deps - mkdir -p package - rm -rf package/$(PKG_ID) - git archive --format=tar --prefix=$(PKG_ID)/ $(PKG_REVISION)| (cd package && tar -xf -) - ${MAKE} -C package/$(PKG_ID) locked-deps - for dep in package/$(PKG_ID)/deps/*; do \ - echo "Processing dep: $${dep}"; \ - mkdir -p $${dep}/priv; \ - git --git-dir=$${dep}/.git describe --always --tags >$${dep}/priv/vsn.git; \ - done - find package/$(PKG_ID) -depth -name ".git" -exec rm -rf {} \; - tar -C package -czf package/$(PKG_ID).tar.gz $(PKG_ID) - -dist: package.src - cp package/$(PKG_ID).tar.gz . - -package: package.src - ${MAKE} -C package -f $(PKG_ID)/deps/node_package/Makefile - -pkgclean: distclean - rm -rf package diff --git a/include/basho_bench.hrl b/include/basho_bench.hrl index a61b65632..af7a056ce 100644 --- a/include/basho_bench.hrl +++ b/include/basho_bench.hrl @@ -2,12 +2,12 @@ -define(FAIL_MSG(Str, Args), ?ERROR(Str, Args), basho_bench_app:stop_or_kill()). -define(STD_ERR(Str, Args), io:format(standard_error, Str, Args)). --define(CONSOLE(Str, Args), lager:info(Str, Args)). +-define(CONSOLE(Str, Args), logger:info(Str, Args)). --define(DEBUG(Str, Args), lager:debug(Str, Args)). --define(INFO(Str, Args), lager:info(Str, Args)). --define(WARN(Str, Args), lager:warning(Str, Args)). --define(ERROR(Str, Args), lager:error(Str, Args)). +-define(DEBUG(Str, Args), logger:debug(Str, Args)). +-define(INFO(Str, Args), logger:info(Str, Args)). +-define(WARN(Str, Args), logger:warning(Str, Args)). +-define(ERROR(Str, Args), logger:error(Str, Args)). -define(FMT(Str, Args), lists:flatten(io_lib:format(Str, Args))). diff --git a/pkg.vars.config b/pkg.vars.config deleted file mode 100644 index 0377c816e..000000000 --- a/pkg.vars.config +++ /dev/null @@ -1,24 +0,0 @@ -%% -*- tab-width: 4;erlang-indent-level: 4;indent-tabs-mode: nil -*- -%% ex: ts=4 sw=4 et - -%% -%% Packaging -%% -{package_name, "basho-bench"}. 
-{package_install_name, "basho_bench"}. -{package_install_user, "basho-bench"}. -{package_install_group, "basho-bench"}. -{package_install_user_desc, "Basho-bench user"}. -{package_shortdesc, "Basho benchmarking tool"}. -{package_desc, "Benchmarking tool"}. -{package_commands, {list, [[{name, "basho_bench"}]]}}. -{package_patch_dir, "basho-patches"}. -{bin_or_sbin, "bin"}. -{license_type, "OSS"}. -{copyright, "2014 Basho Technologies, Inc"}. -{vendor_name, "Basho Technologies, Inc"}. -{vendor_url, "http://basho.com"}. -{vendor_contact_name, "Basho Package Maintainer"}. -{vendor_contact_email, "packaging@basho.com"}. -{license_full_text, "This software is provided under license from Basho Technologies."}. -{solaris_pkgname, "BASHObasho-bench"}. diff --git a/rebar.config b/rebar.config index 69f9b8a33..ffa7d8045 100644 --- a/rebar.config +++ b/rebar.config @@ -1,31 +1,21 @@ -{require_otp_vsn, "20"}. +%% -*- mode: erlang -*- +{require_otp_vsn, "22"}. {deps, [ {folsom, "0.8.7"}, - {lager, "2.*", {git, "git://github.com/erlang-lager/lager", {tag, "3.8.0"}}}, - {ibrowse, ".*", - {git, "git://github.com/basho/ibrowse.git", {branch, "develop-3.0"}}}, - {riakc, ".*", - {git, "git://github.com/basho/riak-erlang-client", {branch, "develop-3.0"}}}, - {mochiweb, "2.9.*", - {git, "git://github.com/basho/mochiweb", {branch, "develop-3.0"}}}, - {getopt, ".*", - {git, "git://github.com/jcomellas/getopt", {tag, "v0.8.2"}}}, - {bitcask, ".*", - {git, "git://github.com/basho/bitcask", {branch, "develop-3.0"}}}, - {eleveldb, ".*", - {git, "git://github.com/basho/eleveldb", {branch, "develop-3.0"}}} + {ibrowse, {git, "https://github.com/basho/ibrowse.git", {branch, "develop-3.0"}}}, + {riakc, {git, "https://github.com/basho/riak-erlang-client", {branch, "develop"}}}, + {mochiweb, {git, "https://github.com/basho/mochiweb", {branch, "develop-3.0"}}}, + {getopt, {git, "https://github.com/jcomellas/getopt", {tag, "v0.8.2"}}}, + {bitcask, {git, "https://github.com/basho/bitcask", {branch, "develop"}}}, + {eleveldb, {git, "https://github.com/basho/eleveldb", {branch, "develop"}}} ]}. -{erl_opts, [{src_dirs, [src]}, - {parse_transform, lager_transform}]}. 
- {escript_incl_apps, [ folsom, getopt, ibrowse, - lager, mochiweb, riakc, bitcask, diff --git a/rebar.lock b/rebar.lock deleted file mode 100644 index dc7cf0f2f..000000000 --- a/rebar.lock +++ /dev/null @@ -1,50 +0,0 @@ -{"1.1.0", -[{<<"bear">>,{pkg,<<"bear">>,<<"0.8.7">>},1}, - {<<"bitcask">>, - {git,"git://github.com/basho/bitcask", - {ref,"dd96f6dd84a1cad68ccc21959ba56b0426434d85"}}, - 0}, - {<<"eleveldb">>, - {git,"git://github.com/basho/eleveldb", - {ref,"3dee1dd7173507a4ad29df1ceb99cb70d6142478"}}, - 0}, - {<<"folsom">>,{pkg,<<"folsom">>,<<"0.8.7">>},0}, - {<<"getopt">>, - {git,"git://github.com/jcomellas/getopt", - {ref,"388dc95caa7fb97ec7db8cfc39246a36aba61bd8"}}, - 0}, - {<<"goldrush">>,{pkg,<<"goldrush">>,<<"0.1.9">>},1}, - {<<"ibrowse">>, - {git,"git://github.com/basho/ibrowse.git", - {ref,"cdde6fe5e80ce71e7b5570e40a45d9f862721499"}}, - 0}, - {<<"lager">>, - {git,"git://github.com/erlang-lager/lager", - {ref,"22e62f28e5afabe90a6f31bcde367a2b5799fc94"}}, - 0}, - {<<"meck">>, - {git,"https://github.com/eproxus/meck.git", - {ref,"4ecc1ae9089edc6977e8c8c4cd41081513cc5590"}}, - 3}, - {<<"mochiweb">>, - {git,"git://github.com/basho/mochiweb", - {ref,"8471d2197afd412fcec239dd9ad5cbe90e64eefa"}}, - 0}, - {<<"protobuffs">>, - {git,"https://github.com/basho/erlang_protobuffs.git", - {ref,"098efad8f85dfe556d64e2cf6ce31f2075808f67"}}, - 2}, - {<<"riak_pb">>, - {git,"https://github.com/basho/riak_pb", - {ref,"3129374435d67086e05f97a92482ef7f280aa7c7"}}, - 1}, - {<<"riakc">>, - {git,"git://github.com/basho/riak-erlang-client", - {ref,"07c9582c9fd97f8252e0a3e488d6ebc08a9a4172"}}, - 0}]}. -[ -{pkg_hash,[ - {<<"bear">>, <<"16264309AE5D005D03718A5C82641FCC259C9E8F09ADEB6FD79CA4271168656F">>}, - {<<"folsom">>, <<"A885F0AEEE4C84270954C88A55A5A473D6B2C7493E32FFDC5765412DD555A951">>}, - {<<"goldrush">>, <<"F06E5D5F1277DA5C413E84D5A2924174182FB108DABB39D5EC548B27424CD106">>}]} -]. diff --git a/rebar3 b/rebar3 index 77e0daeaa..c1597bbc5 100755 Binary files a/rebar3 and b/rebar3 differ diff --git a/rel/reltool.config b/rel/reltool.config deleted file mode 100644 index e60cfc75f..000000000 --- a/rel/reltool.config +++ /dev/null @@ -1,63 +0,0 @@ -%% -*- mode: erlang -*- -%% ex: ft=erlang -{sys, [ - {lib_dirs, ["../deps"]}, - {erts, [{mod_cond, derived}, {app_file, strip}]}, - {app_file, strip}, - {rel, "basho_bench", "0.10.0", - [ - kernel, - stdlib, - bear, - lager, - folsom, - goldrush, - riakc, - ibrowse, - mochiweb - ]}, - {rel, "start_clean", "", - [ - kernel, - stdlib - ]}, - {boot_rel, "basho_bench"}, - {profile, embedded}, - {incl_cond, derived}, - {excl_archive_filters, [".*"]}, %% Do not archive built libs - {excl_sys_filters, ["^bin/(?!start_clean.boot)", - "^erts.*/bin/(dialyzer|typer)", - "^erts.*/(doc|info|include|lib|man|src)"]}, - {excl_app_filters, ["\.gitignore"]}, - {app, basho_bench, [{mod_cond, app}, {incl_cond, include}, {lib_dir, ".."}]}, - {app, hipe, [{incl_cond, exclude}]} - ]}. - -{target_dir, "basho_bench"}. -{overlay_vars, "vars.config"}. 
-
-{overlay, [
-           {template, "../deps/node_package/priv/base/env.sh",
-            "lib/env.sh"},
-           {mkdir, "data/b_b"},
-
-           %% Copy base files for starting and interacting w/ node
-           {copy, "../deps/node_package/priv/base/erl",
-                  "{{erts_vsn}}/bin/erl"},
-           {copy, "../deps/node_package/priv/base/nodetool",
-                  "{{erts_vsn}}/bin/nodetool"},
-           {template, "../deps/node_package/priv/base/env.sh",
-            "lib/env.sh"},
-           {copy, "files/vm.args", "etc/vm.args"},
-
-           {template, "files/basho_bench", "bin/basho_bench"},
-
-           {copy, "../examples/cs.config.sample", "etc/cs.config"},
-           {copy, "../examples/riakc_pb.config", "etc/riakc_pb.config"},
-           {copy, "../examples/httpraw.config", "etc/httpraw.config"},
-           {copy, "../examples/http.config", "etc/http.config"},
-           {copy, "../examples/null_test.config", "etc/null_test.config"}
-
-           %%{copy, "files/install_upgrade.escript", "bin/install_upgrade.escript"},
-
-          ]}.
diff --git a/src/basho_bench.app.src b/src/basho_bench.app.src
index c9a81521f..6c1b7ffa5 100644
--- a/src/basho_bench.app.src
+++ b/src/basho_bench.app.src
@@ -5,14 +5,13 @@
  {registered, [ basho_bench_sup ]},
  {applications, [kernel,
                  stdlib,
-                 sasl,
-                 lager]},
+                 sasl]},
  {mod, {basho_bench_app, []}},
  {env, [
-            %% Run mode: How should basho_bench started as a separate node, or part of an
-            %% other node. The default is standalone, other option is included.
-            {app_run_mode, standalone},
-
+            %% Run mode: should basho_bench be started as a separate node, or as
+            %% part of another node? The default is standalone; the other option
+            %% is included.
+            {app_run_mode, standalone},
+
         %%
         %% Mode of load generation:
         %% max - Generate as many requests as possible per worker
diff --git a/src/basho_bench.erl b/src/basho_bench.erl
index bcda7554a..f7bb37a3d 100644
--- a/src/basho_bench.erl
+++ b/src/basho_bench.erl
@@ -23,6 +23,7 @@
 -export([main/1, md5/1]).
 
 -include("basho_bench.hrl").
+-include_lib("kernel/include/logger.hrl").
 
 %% ====================================================================
 %% API
@@ -56,29 +57,10 @@ main(Args) ->
     {ok, _Pid} = basho_bench_config:start_link(),
     basho_bench_config:set(test_id, BenchName),
 
-    application:load(lager),
-    ConsoleLagerLevel = basho_bench_config:get(log_level, debug),
-    ErrorLog = filename:join([TestDir, "error.log"]),
-    ConsoleLog = filename:join([TestDir, "console.log"]),
-    CrashLog = filename:join([TestDir, "crash.log"]),
-    application:set_env(lager,
-                        handlers,
-                        [{lager_console_backend, ConsoleLagerLevel},
-                         {lager_file_backend, [{file, ErrorLog}, {level, error}, {size, 10485760}, {date, "$D0"}, {count, 5}]},
-                         {lager_file_backend, [{file, ConsoleLog}, {level, debug}, {size, 10485760}, {date, "$D0"}, {count, 5}]}
-                        ]),
-    application:set_env(lager, crash_log, CrashLog),
-    lager:start(),
-
-    %% Make sure this happens after starting lager or failures wont
-    %% show.
+    %% Make sure this happens after logging is up or failures won't
+    %% show.
     basho_bench_config:load(Configs),
 
-    %% Log level can be overriden by the config files
-    CustomLagerLevel = basho_bench_config:get(log_level),
-    lager:set_loglevel(lager_console_backend, CustomLagerLevel),
-    lager:set_loglevel(lager_file_backend, ConsoleLog, CustomLagerLevel),
-
     %% Init code path
     add_code_paths(basho_bench_config:get(code_paths, [])),
 
@@ -94,7 +76,7 @@ main(Args) ->
     %% Copy the config into the test dir for posterity
     [ begin {ok, _} = file:copy(Config, filename:join(TestDir, filename:basename(Config))) end
       || Config <- Configs ],
-    case basho_bench_config:get(distribute_work, false) of
+    case basho_bench_config:get(distribute_work, false) of
         true -> setup_distributed_work();
         false -> ok
     end,
@@ -288,8 +270,8 @@ get_addr_args() ->
     StrAddrs = [inet:ntoa(Addr) || Addr <- Addrs],
     string:join(StrAddrs, " ").
 setup_distributed_work() ->
-    case node() of
-        'nonode@nohost' ->
+    case node() of
+        'nonode@nohost' ->
             ?STD_ERR("Basho bench not started in distributed mode, and distribute_work = true~n", []),
             halt(1);
         _ -> ok
@@ -307,13 +289,13 @@ setup_distributed_work() ->
     [pool:attach(SlaveName) || SlaveName <- SlaveNames],
     CodePaths = code:get_path(),
     rpc:multicall(SlaveNames, code, set_path, [CodePaths]),
-    Apps = [lager, basho_bench, getopt, bear, folsom, ibrowse, riakc, riak_pb, mochiweb, protobuffs, goldrush],
+    Apps = [basho_bench, getopt, bear, folsom, ibrowse, riakc, riak_pb, mochiweb, protobuffs, goldrush],
     [distribute_app(App) || App <- Apps].
 
 deploy_module(Module) ->
-    case basho_bench_config:get(distribute_work, false) of
-        true ->
+    case basho_bench_config:get(distribute_work, false) of
+        true ->
            Nodes = nodes(),
            {Module, Binary, Filename} = code:get_object_code(Module),
            rpc:multicall(Nodes, code, load_binary, [Module, Filename, Binary]);
@@ -330,19 +312,19 @@ distribute_app(App) ->
     EbinsDir = lists:filter(fun(CodePathDir) -> string:substr(CodePathDir, 1, LibDirLen) == LibDir end, code:get_path()),
     StripEndFun = fun(Path) ->
                       PathLen = string:len(Path),
-                      case string:substr(Path, PathLen - string:len(CodeExtension) + 1, string:len(Path)) of
+                      case string:substr(Path, PathLen - string:len(CodeExtension) + 1, string:len(Path)) of
                           CodeExtension -> {true, string:substr(Path, 1, PathLen - string:len(CodeExtension))};
                           _ -> false
                       end
-                  end,
+                  end,
     EbinDirDistributeFun = fun(EbinDir) ->
                                {ok, Beams} = erl_prim_loader:list_dir(EbinDir),
                                Modules = lists:filtermap(StripEndFun, Beams),
                                ModulesLoaded = [code:load_abs(filename:join(EbinDir, ModFileName)) || ModFileName <- Modules],
                                lists:foreach(fun({module, Module}) -> deploy_module(Module) end, ModulesLoaded)
                            end,
-    lists:foreach(EbinDirDistributeFun, EbinsDir),
+    lists:foreach(EbinDirDistributeFun, EbinsDir),
     ok.
 %% just a utility, should be in basho_bench_utils.erl
 %% but 's' is for multiple utilities, and so far this
diff --git a/src/basho_bench_driver_cluster.erl b/src/basho_bench_driver_cluster.erl
index 08a983b00..cbc544748 100644
--- a/src/basho_bench_driver_cluster.erl
+++ b/src/basho_bench_driver_cluster.erl
@@ -1,6 +1,6 @@
 %% -------------------------------------------------------------------
 %%
-%% basho_bench: benchmark service on any within Erlang cluster
+%% basho_bench: benchmark service on any node within an Erlang cluster
 %% using distribution protocol
 %%
 %% Copyright (c) 2015 Dmitry Kolesnikov
@@ -27,6 +27,8 @@
    run/4
 ]).
 
+-include_lib("kernel/include/logger.hrl").
+
 -record(state, {actor}).
 
 %% ====================================================================
 
@@ -39,11 +41,11 @@ new(Id) ->
     {Name, Node} = Actor = lists:nth(Nth, Actors),
     case net_adm:ping(Node) of
         pang ->
-            lager:error("~s is not available", [Node]),
+            logger:error("~s is not available", [Node]),
             {ok, #state{actor = undefined}};
 
         pong ->
-            lager:info("worker ~b is bound to ~s on ~s", [Id, Name, Node]),
+            logger:info("worker ~b is bound to ~s on ~s", [Id, Name, Node]),
             {ok, #state{actor = Actor}}
     end.
diff --git a/src/basho_bench_driver_cs.erl b/src/basho_bench_driver_cs.erl
index 47b600c7b..90ad6bfad 100644
--- a/src/basho_bench_driver_cs.erl
+++ b/src/basho_bench_driver_cs.erl
@@ -26,6 +26,8 @@
 
 -module(basho_bench_driver_cs).
 
+-include_lib("kernel/include/logger.hrl").
+
 -define(BLOCK, (1024*1024)).
 -define(VERYLONG_TIMEOUT, (300*1000)).
 -define(REPORTFUN_APPKEY, report_fun).
@@ -76,7 +78,7 @@ new(ID) ->
                 N = mb_sec -> {N, fun(X) -> X / (1000 * 1000) end};
                 N = mib_sec -> {N, fun(X) -> X / (1024 * 1024) end};
                 _ ->
-                    lager:log(
-                      error, self(),
+                    logger:log(
+                      error,
                       "Unrecognized value for cs_measurement_units.\n", []),
                     exit(unrecognized_value)
@@ -84,7 +86,7 @@ new(ID) ->
     if ID == 1 ->
             application:start(ibrowse),
             application:start(crypto),
-            lager:log(info, self(), "Reporting factor = ~p\n", [RF_name]);
+            logger:log(info, "Reporting factor = ~p\n", [RF_name]);
        true ->
             ok
     end,
@@ -99,7 +101,7 @@ new(ID) ->
              lists:keymember(delete, 1, OpsList) andalso
              length(OpsList) > 1 of
             true ->
-                lager:log(
-                  warning, self(),
+                logger:log(
+                  warning,
                   "Mixing delete and non-delete operations together with "
                   "~p measurements unit can yield nonsense results!\n\n",
@@ -123,9 +125,9 @@ new(ID) ->
     {ProxyH, ProxyP} = lists:nth((ID rem length(ProxyTargets)+1), ProxyTargets),
     ID_max = 30,
     if ID == ID_max ->
-            lager:log(info, self(), "Suppressing additional proxy info", []);
+            logger:log(info, "Suppressing additional proxy info", []);
        ID < ID_max ->
-            lager:log(info, self(), "ID ~p Proxy host ~p TCP port ~p\n",
+            logger:log(info, "ID ~p Proxy host ~p TCP port ~p\n",
                       [ID, ProxyH, ProxyP]);
        true ->
             ok
@@ -253,7 +255,7 @@ perhaps_sleep() ->
 
 bigfile_valgen(Id, Props) ->
     if Id == 1 ->
-            lager:log(info, self(), "~p value gen props: ~p\n", [?MODULE, Props]);
+            logger:log(info, "~p value gen props: ~p\n", [?MODULE, Props]);
        true ->
             ok
     end,
@@ -312,7 +314,7 @@ insert(KeyGen, ValueGen, {Host, Port}, Bucket, State) ->
             %% tell us its content length.
             {ValueGen, ValFunc(get_content_length)};
         _ ->
-            lager:log(
-              error, self(),
+            logger:log(
+              error,
               "This driver cannot use the standard basho_bench "
               "generator functions, please see refer to "
@@ -615,7 +617,7 @@ setup_user_and_bucket(State) ->
             DisplayName = basho_bench_config:get(cs_display_name, "test-user"),
             ok = maybe_create_user(DisplayName, State),
             {ok, {_DisplayName, KeyId, KeySecret}} = fetch_user_info(DisplayName, State),
-            lager:info("Target User: ~p", [{DisplayName, KeyId, KeySecret}]),
+            logger:info("Target User: ~p", [{DisplayName, KeyId, KeySecret}]),
             ok = basho_bench_config:set(cs_access_key, KeyId),
             ok = basho_bench_config:set(cs_secret_key, KeySecret);
         _ ->
@@ -631,10 +633,10 @@ maybe_create_user(DisplayName, #state{hosts=Hosts} = State) ->
     Headers = [{'Content-Type', 'application/json'}],
     case send_request({Host, Port}, Url, Headers, post, Json, proxy_opts(State)) of
         {ok, "201", _Header, Body} ->
-            lager:debug("User created: ~p~n", [Body]),
+            logger:debug("User created: ~p~n", [Body]),
             ok;
         {ok, "409", _Header, Body} ->
-            lager:debug("User already exists: ~p~n", [Body]),
+            logger:debug("User already exists: ~p~n", [Body]),
             ok;
         {ok, Code, Header, Body} ->
             {error, {user_creation, Code, Header, Body}};
@@ -688,12 +690,12 @@ maybe_create_bucket(Bucket, #state{hosts=Hosts} = State) ->
     Url = url(Host, Port, Bucket, undefined),
     case send_request({Host, Port}, Url, [], put, [], proxy_opts(State)) of
         {ok, "200", _Headers, _Body} ->
-            lager:debug("Bucket created (maybe): ~p~n", [Bucket]),
+            logger:debug("Bucket created (maybe): ~p~n", [Bucket]),
             ok;
         {ok, Code, Header, Body} ->
-            lager:error("Create bucket: ~p~n", [{Code, Header, Body}]),
+            logger:error("Create bucket: ~p~n", [{Code, Header, Body}]),
             {error, {bucket_creation, Code, Header, Body}};
         {error, Reason} ->
-            lager:error("Create bucket: ~p~n", [Reason]),
+            logger:error("Create bucket: ~p~n", [Reason]),
             {error, {bucket_creation, Reason}}
     end.
diff --git a/src/basho_bench_driver_nhs.erl b/src/basho_bench_driver_nhs.erl
index efeb09ba4..fa316f6b6 100644
--- a/src/basho_bench_driver_nhs.erl
+++ b/src/basho_bench_driver_nhs.erl
@@ -29,6 +29,7 @@
         run_segmentfold/1]).
 
 -include("basho_bench.hrl").
+-include_lib("kernel/include/logger.hrl").
 
 -record(state, {
           pb_pid,
@@ -64,21 +65,21 @@
 -define(FORCEAAE_FREQ, 10).
% Every 10 seconds -define(POSTCODE_AREAS, - [{1, "AB"}, {2, "AL"}, {3, "B"}, {4, "BA"}, {5, "BB"}, - {6, "BD"}, {7, "BH"}, {8, "BL"}, {9, "BN"}, {10, "BR"}, - {11, "BS"}, {12, "BT"}, {13, "CA"}, {14, "CB"}, {15, "CF"}, - {16, "CH"}, {17, "CM"}, {18, "CO"}, {19, "CR"}, {20, "CT"}, - {21, "CV"}, {22, "CW"}, {23, "DA"}, {24, "DD"}, {25, "DE"}, - {26, "DG"}, {27, "DH"}, {28, "DL"}, {29, "DN"}, {30, "DT"}, - {31, "DU"}, {32, "E"}, {33, "EC"}, {34, "EH"}, {35, "EN"}, - {36, "EX"}, {37, "FK"}, {38, "FY"}, {39, "G"}, {40, "GL"}, - {41, "GU"}, {42, "HA"}, {43, "HD"}, {44, "HG"}, {45, "HP"}, - {46, "HR"}, {47, "HS"}, {48, "HU"}, {49, "HX"}, {50, "IG"}, - {51, "IP"}, {52, "IV"}, {53, "KA"}, {54, "KT"}, {55, "KW"}, - {56, "KY"}, {57, "L"}, {58, "LA"}, {59, "LD"}, {60, "LE"}, - {61, "LL"}, {62, "LS"}, {63, "LU"}, {64, "M"}, {65, "ME"}, - {66, "MK"}, {67, "ML"}, {68, "N"}, {69, "NE"}, {70, "NG"}, - {71, "MM"}, {72, "NP"}, {73, "NR"}, {74, "NW"}, {75, "OL"}, + [{1, "AB"}, {2, "AL"}, {3, "B"}, {4, "BA"}, {5, "BB"}, + {6, "BD"}, {7, "BH"}, {8, "BL"}, {9, "BN"}, {10, "BR"}, + {11, "BS"}, {12, "BT"}, {13, "CA"}, {14, "CB"}, {15, "CF"}, + {16, "CH"}, {17, "CM"}, {18, "CO"}, {19, "CR"}, {20, "CT"}, + {21, "CV"}, {22, "CW"}, {23, "DA"}, {24, "DD"}, {25, "DE"}, + {26, "DG"}, {27, "DH"}, {28, "DL"}, {29, "DN"}, {30, "DT"}, + {31, "DU"}, {32, "E"}, {33, "EC"}, {34, "EH"}, {35, "EN"}, + {36, "EX"}, {37, "FK"}, {38, "FY"}, {39, "G"}, {40, "GL"}, + {41, "GU"}, {42, "HA"}, {43, "HD"}, {44, "HG"}, {45, "HP"}, + {46, "HR"}, {47, "HS"}, {48, "HU"}, {49, "HX"}, {50, "IG"}, + {51, "IP"}, {52, "IV"}, {53, "KA"}, {54, "KT"}, {55, "KW"}, + {56, "KY"}, {57, "L"}, {58, "LA"}, {59, "LD"}, {60, "LE"}, + {61, "LL"}, {62, "LS"}, {63, "LU"}, {64, "M"}, {65, "ME"}, + {66, "MK"}, {67, "ML"}, {68, "N"}, {69, "NE"}, {70, "NG"}, + {71, "MM"}, {72, "NP"}, {73, "NR"}, {74, "NW"}, {75, "OL"}, {76, "OX"}]). -define(DATETIME_FORMAT, "~4..0w-~2..0w-~2..0wT~2..0w:~2..0w:~2..0w"). -define(DATE_FORMAT, "~b~2..0b~2..0b"). 
@@ -127,28 +128,28 @@ new(Id) -> {ReplTargetIp, ReplTargetPort} = lists:nth((Id rem length(ReplTargets) + 1), ReplTargets), - ?INFO("Using repl target ~p:~p for worker ~p\n", + ?INFO("Using repl target ~p:~p for worker ~p\n", [ReplTargetIp, ReplTargetPort, Id]), - {AGMaxKC, AGMinKC, AGKeyOrder} = + {AGMaxKC, AGMinKC, AGKeyOrder} = basho_bench_config:get(alwaysget, {1, 1, key_order}), {DocSize, DocKeyOrder} = basho_bench_config:get(unique, {8000, key_order}), - - NodeID = basho_bench_config:get(node_name, node()), + + NodeID = basho_bench_config:get(node_name, node()), KeyIDint = erlang:phash2(Id) bxor erlang:phash2(NodeID), - ?INFO("Using Node ID ~w to generate ID ~w\n", [node(), KeyIDint]), + ?INFO("Using Node ID ~w to generate ID ~w\n", [node(), KeyIDint]), case riakc_pb_socket:start_link(PBTargetIp, PBTargetPort) of {ok, Pid} -> NominatedID = Id == 7, - ReplPid = + ReplPid = case riakc_pb_socket:start_link(ReplTargetIp, ReplTargetPort) of {ok, RP} -> RP; _ -> - lager:info("Starting with no repl check"), + logger:info("Starting with no repl check"), no_repl_check end, {ok, #state { @@ -196,13 +197,13 @@ run(alwaysget_http, _KeyGen, _ValueGen, State) -> Port = State#state.http_port, Bucket = State#state.recordBucket, AGKC = State#state.alwaysget_key_count, - case AGKC > State#state.alwaysget_perworker_minkeycount of + case AGKC > State#state.alwaysget_perworker_minkeycount of true -> - KeyInt = eightytwenty_keycount(AGKC), - Key = generate_uniquekey(KeyInt, State#state.keyid, + KeyInt = eightytwenty_keycount(AGKC), + Key = generate_uniquekey(KeyInt, State#state.keyid, State#state.alwaysget_keyorder), - URL = - io_lib:format("http://~s:~p/buckets/~s/keys/~s", + URL = + io_lib:format("http://~s:~p/buckets/~s/keys/~s", [Host, Port, Bucket, Key]), case get_existing(URL, State#state.http_timeout) of @@ -221,14 +222,14 @@ run(alwaysget_pb, _KeyGen, _ValueGen, State) -> Pid = State#state.pb_pid, Bucket = State#state.recordBucket, AGKC = State#state.alwaysget_key_count, - case AGKC > State#state.alwaysget_perworker_minkeycount of + case AGKC > State#state.alwaysget_perworker_minkeycount of true -> - KeyInt = eightytwenty_keycount(AGKC), - Key = generate_uniquekey(KeyInt, State#state.keyid, + KeyInt = eightytwenty_keycount(AGKC), + Key = generate_uniquekey(KeyInt, State#state.keyid, State#state.alwaysget_keyorder), - case riakc_pb_socket:get(Pid, - Bucket, Key, + case riakc_pb_socket:get(Pid, + Bucket, Key, State#state.pb_timeout) of {ok, _Obj} -> {ok, State}; @@ -239,7 +240,7 @@ run(alwaysget_pb, _KeyGen, _ValueGen, State) -> false -> {silent, State} - + end; run(alwaysget_updatewith2i, _KeyGen, ValueGen, State) -> @@ -248,20 +249,20 @@ run(alwaysget_updatewith2i, _KeyGen, ValueGen, State) -> AGKC = State#state.alwaysget_key_count, Value = ValueGen(), KeyInt = eightytwenty_keycount(AGKC), - ToExtend = + ToExtend = rand:uniform(State#state.alwaysget_perworker_maxkeycount) > AGKC, - {Robj0, NewAGKC} = - case ToExtend of + {Robj0, NewAGKC} = + case ToExtend of true -> % Expand the key count - ExpansionKey = + ExpansionKey = generate_uniquekey(AGKC + 1, State#state.keyid, State#state.alwaysget_keyorder), case {AGKC rem 1000, State#state.nominated_id} of {0, true} -> - lager:info("Always grow key count passing ~w " - ++ "for nominated worker", + logger:info("Always grow key count passing ~w " + ++ "for nominated worker", [AGKC]); _ -> ok @@ -270,16 +271,16 @@ run(alwaysget_updatewith2i, _KeyGen, ValueGen, State) -> AGKC + 1}; false -> % update an existing key - ExistingKey = + ExistingKey = 
generate_uniquekey(KeyInt, State#state.keyid, State#state.alwaysget_keyorder), {ok, Robj} = - riakc_pb_socket:get(Pid, - Bucket, ExistingKey, + riakc_pb_socket:get(Pid, + Bucket, ExistingKey, State#state.pb_timeout), {Robj, AGKC} end, - + MD0 = riakc_obj:get_update_metadata(Robj0), MD1 = riakc_obj:clear_secondary_indexes(MD0), MD2 = riakc_obj:set_secondary_index(MD1, generate_binary_indexes()), @@ -300,20 +301,20 @@ run(alwaysget_updatewithout2i, _KeyGen, ValueGen, State) -> AGKC = State#state.alwaysget_key_count, Value = ValueGen(), KeyInt = eightytwenty_keycount(AGKC), - ToExtend = + ToExtend = rand:uniform(State#state.alwaysget_perworker_maxkeycount) > AGKC, - {Robj0, NewAGKC} = - case ToExtend of + {Robj0, NewAGKC} = + case ToExtend of true -> % Expand the key count - ExpansionKey = + ExpansionKey = generate_uniquekey(AGKC + 1, State#state.keyid, State#state.alwaysget_keyorder), case {AGKC rem 1000, State#state.nominated_id} of {0, true} -> - lager:info("Always grow key count passing ~w " - ++ "for nominated worker", + logger:info("Always grow key count passing ~w " + ++ "for nominated worker", [AGKC]); _ -> ok @@ -322,16 +323,16 @@ run(alwaysget_updatewithout2i, _KeyGen, ValueGen, State) -> AGKC + 1}; false -> % update an existing key - ExistingKey = + ExistingKey = generate_uniquekey(KeyInt, State#state.keyid, State#state.alwaysget_keyorder), {ok, Robj} = - riakc_pb_socket:get(Pid, - Bucket, ExistingKey, + riakc_pb_socket:get(Pid, + Bucket, ExistingKey, State#state.pb_timeout), {Robj, AGKC} end, - + % MD0 = riakc_obj:get_update_metadata(Robj0), % MD1 = riakc_obj:clear_secondary_indexes(MD0), % MD2 = riakc_obj:set_secondary_index(MD1, generate_binary_indexes()), @@ -353,7 +354,7 @@ run(update_with2i, KeyGen, ValueGen, State) -> Bucket = State#state.recordBucket, Key = to_binary(KeyGen()), Value = ValueGen(), - + Robj0 = case riakc_pb_socket:get(Pid, Bucket, Key, State#state.pb_timeout) of {ok, Robj} -> @@ -361,7 +362,7 @@ run(update_with2i, KeyGen, ValueGen, State) -> {error, notfound} -> riakc_obj:new(Bucket, to_binary(Key)) end, - + MD0 = riakc_obj:get_update_metadata(Robj0), MD1 = riakc_obj:clear_secondary_indexes(MD0), MD2 = riakc_obj:set_secondary_index(MD1, generate_binary_indexes()), @@ -378,23 +379,23 @@ run(update_with2i, KeyGen, ValueGen, State) -> %% Put an object with a unique key and a non-compressable value run(put_unique_bet365, _KeyGen, _ValueGen, State) -> Pid = State#state.pb_pid, - - Bucket = + + Bucket = case erlang:phash2(Pid) rem 2 of 0 -> <<"abcdefghijklmnopqrstuvwxyz_1">>; 1 -> <<"abcdefghijklmnopqrstuvwxyz_2">> end, - + UKC = State#state.unique_key_count, - Key = - generate_uniquekey(UKC, - State#state.keyid, + Key = + generate_uniquekey(UKC, + State#state.keyid, State#state.unique_keyorder), - + Value = non_compressible_value(State#state.unique_size), - + Robj0 = riakc_obj:new(Bucket, to_binary(Key)), MD2 = riakc_obj:get_update_metadata(Robj0), % MD2 = riakc_obj:set_secondary_index(MD1, generate_binary_indexes()), @@ -428,7 +429,7 @@ run(put_unique_checkrepl, _KeyGen, _ValueGen, State) -> {error, Reason} -> {error, Reason, State} end; -run(get_unique, _KeyGen, _ValueGen, State) -> +run(get_unique, _KeyGen, _ValueGen, State) -> % Get one of the objects with unique keys Pid = State#state.pb_pid, Bucket = State#state.documentBucket, @@ -473,13 +474,13 @@ run(postcodequery_http, _KeyGen, _ValueGen, State) -> Host = inet_parse:ntoa(State#state.http_host), Port = State#state.http_port, Bucket = State#state.recordBucket, - + L = length(?POSTCODE_AREAS), {_, Area} = 
lists:keyfind(rand:uniform(L), 1, ?POSTCODE_AREAS), District = Area ++ integer_to_list(rand:uniform(26)), StartKey = District ++ "|" ++ "a", EndKey = District ++ "|" ++ "h", - URL = io_lib:format("http://~s:~p/buckets/~s/index/postcode_bin/~s/~s", + URL = io_lib:format("http://~s:~p/buckets/~s/index/postcode_bin/~s/~s", [Host, Port, Bucket, StartKey, EndKey]), case json_get(URL, State#state.http_timeout) of @@ -487,11 +488,11 @@ run(postcodequery_http, _KeyGen, _ValueGen, State) -> Results = proplists:get_value(<<"keys">>, Proplist), C0 = State#state.postcodeq_count, S0 = State#state.postcodeq_sum, - {C1, S1} = - case {C0, C0 rem State#state.query_logfreq} of + {C1, S1} = + case {C0, C0 rem State#state.query_logfreq} of {C0, 0} when C0 > 0 -> Avg = float_to_list(S0 / C0, [{decimals, 3}]), - lager:info("Average postcode query result size of ~s", + logger:info("Average postcode query result size of ~s", [Avg]), {1, length(Results)}; _ -> @@ -509,15 +510,15 @@ run(dobquery_http, _KeyGen, _ValueGen, State) -> Host = inet_parse:ntoa(State#state.http_host), Port = State#state.http_port, Bucket = State#state.recordBucket, - + RandYear = rand:uniform(70) + 1950, DoBStart = integer_to_list(RandYear) ++ "0101", DoBEnd = integer_to_list(RandYear) ++ "0110", - - URLSrc = + + URLSrc = "http://~s:~p/buckets/~s/index/dateofbirth_bin/~s/~s?term_regex=~s", RE= "[0-9]{8}...[a-d]", - URL = io_lib:format(URLSrc, + URL = io_lib:format(URLSrc, [Host, Port, Bucket, DoBStart, DoBEnd, RE]), case json_get(URL, State#state.http_timeout) of @@ -525,11 +526,11 @@ run(dobquery_http, _KeyGen, _ValueGen, State) -> Results = proplists:get_value(<<"keys">>, Proplist), C0 = State#state.dobq_count, S0 = State#state.dobq_sum, - {C1, S1} = - case {C0, C0 rem State#state.query_logfreq} of + {C1, S1} = + case {C0, C0 rem State#state.query_logfreq} of {C0, 0} when C0 > 0 -> Avg = float_to_list(S0 / C0, [{decimals, 3}]), - lager:info("Average dob query result size of ~s", + logger:info("Average dob query result size of ~s", [Avg]), {1, length(Results)}; _ -> @@ -552,7 +553,7 @@ run(aae_query, _KeyGen, _ValueGen, State) -> end, case {State#state.nominated_id, IsAlive} of {true, true} -> - lager:info("Skipping aae query for overlap"), + logger:info("Skipping aae query for overlap"), {ok, State}; {true, false} -> Pid = spawn(?MODULE, run_aaequery, [State]), @@ -571,7 +572,7 @@ run(list_keys, _KeyGen, _ValueGen, State) -> end, case {State#state.nominated_id, IsAlive} of {true, true} -> - lager:info("Skipping listkeys for overlap"), + logger:info("Skipping listkeys for overlap"), {ok, State}; {true, false} -> Pid = spawn(?MODULE, run_listkeys, [State]), @@ -581,7 +582,7 @@ run(list_keys, _KeyGen, _ValueGen, State) -> end; run(segment_fold, _KeyGen, _ValueGen, State) -> - IsAlive = + IsAlive = case State#state.singleton_pid of undefined -> false; @@ -590,7 +591,7 @@ run(segment_fold, _KeyGen, _ValueGen, State) -> end, case {State#state.nominated_id, IsAlive} of {true, true} -> - lager:info("Skipping segment fold for overlap"), + logger:info("Skipping segment fold for overlap"), {ok, State}; {true, false} -> Pid = spawn(?MODULE, run_segmentfold, [State]), @@ -600,7 +601,7 @@ run(segment_fold, _KeyGen, _ValueGen, State) -> end; run(force_aae, KeyGen, ValueGen, State) -> - SinceLastForceSec = + SinceLastForceSec = timer:now_diff(os:timestamp(), State#state.last_forceaae)/1000000, case {State#state.nominated_id, SinceLastForceSec > ?FORCEAAE_FREQ} of {true, true} -> @@ -637,15 +638,15 @@ run(Other, _, _, _) -> prepare_unique_put(State) -> 
Pid = State#state.pb_pid, Bucket = State#state.documentBucket, - + UKC = State#state.unique_key_count, - Key = - generate_uniquekey(UKC, - State#state.keyid, + Key = + generate_uniquekey(UKC, + State#state.keyid, State#state.unique_keyorder), - + Value = non_compressible_value(State#state.unique_size), - + Robj0 = riakc_obj:new(Bucket, to_binary(Key)), MD1 = riakc_obj:get_update_metadata(Robj0), MD2 = riakc_obj:set_secondary_index(MD1, generate_binary_indexes()), @@ -658,8 +659,8 @@ json_get(Url, Timeout) -> json_get(Url, Timeout, UsePool) -> Target = lists:flatten(Url), - Response = - case UsePool of + Response = + case UsePool of true -> ibrowse:send_req(Target, [], get, [], [], Timeout); false -> @@ -692,8 +693,8 @@ to_binary(L) when is_list(L) -> ensure_module(Module) -> case code:which(Module) of non_existing -> - ?FAIL_MSG("~s requires " ++ atom_to_list(Module) ++ - " module to be available on code path.\n", + ?FAIL_MSG("~s requires " ++ atom_to_list(Module) ++ + " module to be available on code path.\n", [?MODULE]); _ -> ok @@ -716,26 +717,26 @@ check_repl(ReplPid, Bucket, Key, Timeout) -> run_aaequery(State) -> SW = os:timestamp(), - lager:info("Commencing aaequery request"), + logger:info("Commencing aaequery request"), Host = inet_parse:ntoa(State#state.http_host), Port = State#state.http_port, Bucket = State#state.recordBucket, - KeyStart = "0", + KeyStart = "0", KeyEnd = "z", MapFoldMod = "riak_kv_tictac_folder", - URLSrc = + URLSrc = "http://~s:~p/buckets/~s/index/$key/~s/~s?mapfold=true&mapfoldmod=~s", - URL = io_lib:format(URLSrc, + URL = io_lib:format(URLSrc, [Host, Port, Bucket, KeyStart, KeyEnd, MapFoldMod]), - + case json_get(URL, State#state.fold_timeout, false) of {ok, {struct, TreeL}} -> {<<"count">>, Count} = lists:keyfind(<<"count">>, 1, TreeL), - lager:info("AAE query returned in ~w seconds covering ~s keys", + logger:info("AAE query returned in ~w seconds covering ~s keys", [timer:now_diff(os:timestamp(), SW)/1000000, Count]), {ok, State}; @@ -747,21 +748,21 @@ run_aaequery(State) -> run_listkeys(State) -> SW = os:timestamp(), - lager:info("Commencing list keys request"), + logger:info("Commencing list keys request"), Host = inet_parse:ntoa(State#state.http_host), Port = State#state.http_port, Bucket = State#state.recordBucket, - URLSrc = + URLSrc = "http://~s:~p/buckets/~s/keys?keys=true", - URL = io_lib:format(URLSrc, + URL = io_lib:format(URLSrc, [Host, Port, Bucket]), - + case json_get(URL, State#state.fold_timeout, false) of {ok, {struct, [{<<"keys">>, KeyList}]}} -> - lager:info("List keys returned ~w keys in ~w seconds", - [length(KeyList), + logger:info("List keys returned ~w keys in ~w seconds", + [length(KeyList), timer:now_diff(os:timestamp(), SW)/1000000]), {ok, State}; @@ -774,34 +775,34 @@ run_listkeys(State) -> run_segmentfold(State) -> SW = os:timestamp(), - lager:info("Commencing segment fold request"), + logger:info("Commencing segment fold request"), Host = inet_parse:ntoa(State#state.http_host), Port = State#state.http_port, Bucket = State#state.recordBucket, - KeyStart = "0", + KeyStart = "0", KeyEnd = "z", MapFoldMod = "riak_kv_segment_folder", - % '{"check_presence": "false", - % "tree_size": "small", + % '{"check_presence": "false", + % "tree_size": "small", % "segment_list": [1, 10001]}' - MapFoldOpts = + MapFoldOpts = "eyJjaGVja19wcmVzZW5jZSI6ICJmYWxzZSIsICJ0cmVlX3NpemUiOiAic21hbGwiLCA" ++ "ic2VnbWVudF9saXN0IjogWzEsIDEwMDAxXX0=", - URLSrc = + URLSrc = "http://~s:~p/buckets/~s/index/$key/~s/~s?mapfold=true&mapfoldmod=~s" ++ 
"&mapfoldoptions=~s", - URL = io_lib:format(URLSrc, - [Host, Port, Bucket, KeyStart, KeyEnd, + URL = io_lib:format(URLSrc, + [Host, Port, Bucket, KeyStart, KeyEnd, MapFoldMod, MapFoldOpts]), - + case json_get(URL, State#state.fold_timeout, false) of {ok, {struct, [{<<"deltas">>, SegL}]}} -> - lager:info("Segment fold returned in ~w seconds finding ~w keys", + logger:info("Segment fold returned in ~w seconds finding ~w keys", [timer:now_diff(os:timestamp(), SW)/1000000, length(SegL)]), {ok, State}; {error, Reason} -> @@ -821,7 +822,7 @@ generate_binary_indexes() -> postcode_index() -> NotVeryNameLikeThing = base64:encode_to_string(crypto:strong_rand_bytes(4)), - lists:map(fun(_X) -> + lists:map(fun(_X) -> L = length(?POSTCODE_AREAS), {_, Area} = lists:keyfind(rand:uniform(L), 1, ?POSTCODE_AREAS), District = Area ++ integer_to_list(rand:uniform(26)), @@ -833,10 +834,10 @@ dateofbirth_index() -> Delta = rand:uniform(2500000000), {{Y, M, D}, _} = calendar:gregorian_seconds_to_datetime(Delta + 61000000000), - F = + F = lists:flatten( - io_lib:format(?DATE_FORMAT, - [Y, M, D])) ++ "|" ++ + io_lib:format(?DATE_FORMAT, + [Y, M, D])) ++ "|" ++ base64:encode_to_string(crypto:strong_rand_bytes(4)), [list_to_binary(F)]. @@ -846,7 +847,7 @@ lastmodified_index() -> F = lists:flatten(io_lib:format(?DATETIME_FORMAT, [Year, Month, Day, Hr, Min, Sec])), [list_to_binary(F)]. - + generate_uniquekey(C, RandBytes, skew_order) -> H0 = convert_tolist(erlang:phash2(C)), @@ -863,7 +864,7 @@ non_compressible_value(Size) -> eightytwenty_keycount(UKC) -> - % 80% of the time choose a key in the bottom 20% of the + % 80% of the time choose a key in the bottom 20% of the % result range, and 20% of the time in the upper 80% of the range TwentyPoint = rand:uniform(max(1, UKC div 5)), case rand:uniform(max(1, UKC)) < TwentyPoint of diff --git a/src/basho_bench_driver_riakc_pb.erl b/src/basho_bench_driver_riakc_pb.erl index 2ceef6b13..177e4cd72 100644 --- a/src/basho_bench_driver_riakc_pb.erl +++ b/src/basho_bench_driver_riakc_pb.erl @@ -29,6 +29,7 @@ -export([run_listkeys/1]). -include("basho_bench.hrl"). +-include_lib("kernel/include/logger.hrl"). 
-record(state, { pid, bucket, @@ -199,7 +200,7 @@ run({team, write}, KeyGen, _ValueGen, State) -> {ok, _} -> {ok, State}; {error, Reason} -> - lager:info("Team write failed, error: ~p", [Reason]), + logger:info("Team write failed, error: ~p", [Reason]), {error, Reason, State} end; @@ -215,10 +216,10 @@ run({team, read}, KeyGen, ValueGen, State) -> {ok, _} -> {ok, State}; {error, {notfound, _}} -> - lager:info("Team does not exist yet."), + logger:info("Team does not exist yet."), run({team, write}, KeyGen, ValueGen, State); {error, Reason} -> - lager:info("Team read failed, error: ~p", [Reason]), + logger:info("Team read failed, error: ~p", [Reason]), {error, Reason, State} end; @@ -254,17 +255,17 @@ run({team, player, removal}, KeyGen, ValueGen, State) -> {ok, _} -> {ok, State}; {error, Reason} -> - lager:info("Team player removal failed, error: ~p", [Reason]), + logger:info("Team player removal failed, error: ~p", [Reason]), {error, Reason, State} end; false -> {ok, State} end; {error, {notfound, _}} -> - lager:info("Team does not exist yet."), + logger:info("Team does not exist yet."), run({team, write}, KeyGen, ValueGen, State); {error, Reason} -> - lager:info("Team read failed, error: ~p", [Reason]), + logger:info("Team read failed, error: ~p", [Reason]), {error, Reason, State} end; @@ -287,7 +288,7 @@ run({team, player, addition}, KeyGen, ValueGen, State) -> {ok, _} -> {ok, State}; {error, Reason} -> - lager:info("Team player addition failed, error: ~p", [Reason]), + logger:info("Team player addition failed, error: ~p", [Reason]), {error, Reason, State} end; @@ -311,7 +312,7 @@ run({game, completed}, KeyGen, ValueGen, State) -> {ok, _} -> {ok, State}; {error, Reason} -> - lager:info("Score change failed, error: ~p", [Reason]), + logger:info("Score change failed, error: ~p", [Reason]), {error, Reason, State} end; @@ -505,7 +506,7 @@ run(listkeys, _KeyGen, _ValueGen, State) -> end, case {State#state.nominated_id, IsAlive} of {true, true} -> - lager:info("Skipping listkeys for overlap"), + logger:info("Skipping listkeys for overlap"), {ok, State}; {true, false} -> Pid = spawn(?MODULE, run_listkeys, [State]), @@ -563,7 +564,7 @@ run(mr_keylist_js, KeyGen, _ValueGen, State) -> run({counter, value}, KeyGen, _ValueGen, State) -> Key = KeyGen(), - lager:info("Counter value called for key: ~p", [Key]), + logger:info("Counter value called for key: ~p", [Key]), Options = [{r,2}, {notfound_ok, true}, {timeout, 5000}], Result = riakc_pb_socket:fetch_type(State#state.pid, State#state.bucket, @@ -572,19 +573,19 @@ run({counter, value}, KeyGen, _ValueGen, State) -> case Result of {ok, C0} -> C = riakc_counter:value(C0), - lager:info("Counter value is: ~p", [C]), + logger:info("Counter value is: ~p", [C]), {ok, State}; {error, {notfound, _}} -> {ok, State}; {error, Reason} -> - lager:info("Team read failed, error: ~p", [Reason]), + logger:info("Team read failed, error: ~p", [Reason]), {error, Reason, State} end; run({counter, increment}, KeyGen, ValueGen, State) -> Amt = ValueGen(), Key = KeyGen(), - lager:info("Counter value called for key: ~p", [Key]), + logger:info("Counter value called for key: ~p", [Key]), Result = riakc_pb_socket:modify_type(State#state.pid, fun(C) -> riakc_counter:increment(Amt, C) @@ -596,7 +597,7 @@ run({counter, increment}, KeyGen, ValueGen, State) -> {ok, _} -> {ok, State}; {error, Reason} -> - lager:info("Counter increment failed, error: ~p", [Reason]), + logger:info("Counter increment failed, error: ~p", [Reason]), {error, Reason, State} end; @@ -799,7 +800,7 @@ 
record_2i_results(Results, State) -> run_listkeys(State) -> SW = os:timestamp(), - lager:info("Commencing listkeys request"), + logger:info("Commencing listkeys request"), Targets = State#state.singleton_targets, {TargetIp, TargetPort} = lists:nth(rand:uniform(length(Targets)+1), @@ -813,12 +814,12 @@ run_listkeys(State) -> State#state.bucket, State#state.timeout_listkeys) of {ok, Keys} -> - lager:info("listkeys request returned ~w keys" ++ + logger:info("listkeys request returned ~w keys" ++ " in ~w seconds", [length(Keys), timer:now_diff(os:timestamp(), SW)/1000000]), ok; {error, Reason} -> - lager:info("listkeys failed due to reason ~w", [Reason]), + logger:info("listkeys failed due to reason ~w", [Reason]), ok end. diff --git a/src/basho_bench_worker.erl b/src/basho_bench_worker.erl index 0ab3dbf20..77cbee1a0 100644 --- a/src/basho_bench_worker.erl +++ b/src/basho_bench_worker.erl @@ -53,10 +53,10 @@ %% ==================================================================== start_link(SupChild, Id) -> - case basho_bench_config:get(distribute_work, false) of - true -> + case basho_bench_config:get(distribute_work, false) of + true -> start_link_distributed(SupChild, Id); - false -> + false -> start_link_local(SupChild, Id) end.
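
Reviewer note: this patch deletes the lager handler setup from main/1 (console output plus per-test-dir error.log, console.log, and crash.log) without wiring up an OTP logger equivalent, so each test run loses its log files. Below is a minimal sketch of what could be added back in main/1, assuming the logger_std_h handler shipped with the OTP 22 kernel; the handler ids (bb_console_log, bb_error_log) are hypothetical names chosen for illustration, while TestDir and basho_bench_config:get/2 already exist in main/1.

    %% Hypothetical replacement for the removed lager configuration (not
    %% part of this patch): honour the configured log level and recreate
    %% the per-test-dir console.log and error.log via OTP's logger_std_h.
    LogLevel = basho_bench_config:get(log_level, debug),
    ok = logger:set_primary_config(level, LogLevel),
    ok = logger:add_handler(bb_console_log, logger_std_h,
                            #{level => debug,
                              config => #{file => filename:join(TestDir, "console.log")}}),
    ok = logger:add_handler(bb_error_log, logger_std_h,
                            #{level => error,
                              config => #{file => filename:join(TestDir, "error.log")}}),

With handlers like these in place, the logger:info/logger:error calls introduced throughout the drivers would land in the same files the lager backends used to write; without them, everything reaches only the default console handler. Note also that crash reports have no dedicated crash.log under logger; they flow through whatever handlers are installed unless a separate handler is added for them.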