per_node_diag = [{'ns_1@10.13.41.69', [{version, [{os_mon,"2.2.5"}, {mnesia,"4.4.17"}, {inets,"5.5.2"}, {kernel,"2.14.3"}, {sasl,"2.1.9.3"}, {ns_server,"1.7.2r-20-g6604356"}, {stdlib,"1.17.3"}]}, {manifest, ["bucket_engine 1.7.0-0-g721dff0 Linux-x86_64", "ep-engine 1.7.1.1-39-g4bd26a3 Linux-x86_64", "geocouch couchbase_1.1_geo-0-gd09068e Linux-x86_64", "icu4c 2635939 Linux-x86_64", "libconflate 1.7.0-0-gfee8f94 Linux-x86_64", "libmemcached 2cd40dc Linux-x86_64", "libvbucket 1.7.0-0-g4bd0aba Linux-x86_64", "manifest 1.7.0-30-gd83dca9 Linux-x86_64", "membase-cli 1.7.2-0-g1d9c77e Linux-x86_64", "membase-server 1.7.2r-20-g6604356 Linux-x86_64", "membasex 1.7.0-3-g30d5dba Linux-x86_64", "memcached membase-1.7.1-0-gf99c147 Linux-x86_64", "memcachetest 0.8.3-0-g88ae3b3 Linux-x86_64", "moxi 1.7.2-0-gd5076d9 Linux-x86_64", "ns_server 1.7.2-0-g4925ee0 Linux-x86_64", "otp OTP_R14B03-0-g4a5a758 Linux-x86_64", "portsigar 1.7.0-0-ga191e6c Linux-x86_64", "sigar sigar-1.6.4-406-ge1dcf32 Linux-x86_64", "spidermonkey a3c48c1 Linux-x86_64", "tlm 1.7.1-0-g535dadc Linux-x86_64", "vbucketmigrator 1.7.0-0-g0fdc96c Linux-x86_64"]}, {config, [{otp,[{cookie,rizwejgdhowfisdt}]}, {nodes_wanted,['ns_1@10.13.41.69']}, {{node,'ns_1@10.13.41.69',rest}, [{port,8091},{port_meta,global}]}, {rest,[{port,8091}]}, {directory,"/opt/membase/var/lib/membase/config"}, {{node,'ns_1@10.13.41.69',config_version},{1,7,2}}, {auto_failover_cfg, [{enabled,false},{timeout,30},{max_nodes,1},{count,0}]}, {buckets,[{configs,[]}]}, {email_alerts, [{recipients,["root@localhost"]}, {sender,"membase@localhost"}, {enabled,true}, {email_server, [{user,[]}, {pass,'filtered-out'}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down, auto_failover_cluster_too_small]}]}, {memory_quota,16090}, {port_servers, [{moxi,"/opt/membase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{rest,port}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR", {"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD", {"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]}, {memcached,"/opt/membase/bin/memcached", ["-X", "/opt/membase/lib/memcached/stdin_term_handler.so", "-p", {"~B",[port]}, "-E","/opt/membase/lib/memcached/bucket_engine.so", "-B","binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status, port_server_send_eol,stream]}]}, {replication,[{enabled,true}]}, {rest_creds,[{creds,[]}]}, {{node,'ns_1@10.13.41.69',isasl}, [{'_vclock',[{'ns_1@10.13.41.69',{1,63489898414}}]}, {path,"/opt/membase/var/lib/membase/data/isasl.pw"}]}, {{node,'ns_1@10.13.41.69',membership},active}, {{node,'ns_1@10.13.41.69',memcached}, [{'_vclock',[{'ns_1@10.13.41.69',{1,63489898414}}]}, {port,11210}, {dbdir,"/opt/membase/var/lib/membase/data"}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine, 
"/opt/membase/lib/memcached/bucket_engine.so"}, {engines, [{membase, [{engine,"/opt/membase/lib/memcached/ep.so"}, {initfile,"/opt/membase/etc/membase/init.sql"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false;shardpattern=%d/%b-%i.mb;db_strategy=multiMTVBDB"}]}, {memcached, [{engine, "/opt/membase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {verbosity,[]}]}, {{node,'ns_1@10.13.41.69',moxi}, [{port,11211},{verbosity,[]}]}, {{node,'ns_1@10.13.41.69',ns_log}, [{'_vclock',[{'ns_1@10.13.41.69',{1,63489898414}}]}, {filename, "/opt/membase/var/lib/membase/data/ns_log"}]}]}, {basic_info, [{version, [{os_mon,"2.2.5"}, {mnesia,"4.4.17"}, {inets,"5.5.2"}, {kernel,"2.14.3"}, {sasl,"2.1.9.3"}, {ns_server,"1.7.2r-20-g6604356"}, {stdlib,"1.17.3"}]}, {system_arch,"x86_64-unknown-linux-gnu"}, {wall_clock,416}, {memory_data,{17946181632,438226944,{<0.6.0>,1113888}}}, {disk_data, [{"/",8256952,15}, {"/dev",8754940,1}, {"/run",3505116,1}, {"/run/lock",5120,0}, {"/run/shm",8762784,0}, {"/mnt",423135208,1}]}]}, {processes, [{<0.0.0>, [{registered_name,init}, {status,waiting}, {initial_call,{otp_ring0,start,2}}, {backtrace, [<<"Program counter: 0x00007ff64895e1d0 (init:loop/1 + 40)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64afa9468 Return addr 0x000000000089c118 ()">>, <<"(0) {state,[{'-root',[<<23 bytes>>]},{'-progname',[<<3 bytes>>]},{'-home',[<<12 bytes>">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,56}]}, {heap_size,2584}, {total_heap_size,3571}, {links,[<0.6.0>,<0.7.0>,<0.3.0>]}, {memory,29480}, {message_queue_len,0}, {reductions,20994}, {trap_exit,true}]}, {<0.3.0>, [{registered_name,erl_prim_loader}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x00007ff6489a97c0 (erl_prim_loader:loop/3 + 176)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646f582b8 Return addr 0x000000000089c118 ()">>, <<"y(0) []">>, <<"(1) [\"/opt/membase/lib/erlang/lib/kernel-2.14.3/ebin\",\"/opt/membase/lib/erlang/lib/std">>, <<"y(2) <0.2.0>">>, <<"(3) {state,efile,[],none,#Port<0.1>,infinity,undefined,true,{prim_state,false,undefine">>, <<"y(4) infinity">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,111}]}, {heap_size,6765}, {total_heap_size,24476}, {links,[#Port<0.1>,<0.0.0>]}, {memory,196680}, {message_queue_len,0}, {reductions,1251057}, {trap_exit,true}]}, {<0.6.0>, [{registered_name,error_logger}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff64897ffa8 (gen_event:fetch_msg/5 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64633d2f0 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) false">>,<<"y(1) []">>, <<"(2) [{handler,ns_log_mf_h,false,{state,\"/opt/membase/var/lib/membase/logs\",10485760,20">>, <<"y(3) error_logger">>,<<"y(4) <0.2.0>">>, <<>>, <<"0x00007ff64633d320 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,3}]}, {heap_size,17711}, {total_heap_size,139104}, {links,[<0.24.0>,<0.55.0>,#Port<0.869>,<0.0.0>]}, {memory,1113888}, 
{message_queue_len,0}, {reductions,290843}, {trap_exit,true}]}, {<0.7.0>, [{registered_name,application_controller}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff645c69238 Return addr 0x000000000089c118 ()">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) application_controller">>, <<"(3) {state,[],[],[],[{mnesia,<0.1599.0>},{inets,<0.1526.0>},{ns_server,<0.50.0>},{os_m">>, <<"y(4) application_controller">>, <<"y(5) <0.2.0>">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,13}]}, {heap_size,28657}, {total_heap_size,75025}, {links, [<0.40.0>,<0.1526.0>,<0.1599.0>,<0.50.0>,<0.9.0>, <0.31.0>,<0.0.0>]}, {memory,601376}, {message_queue_len,0}, {reductions,46177}, {trap_exit,true}]}, {<0.9.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646ff37e8 (application_master:main_loop/2 + 64)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64afe6930 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"(1) {state,<0.10.0>,{appl_data,kernel,[application_controller,erl_reply,auth,boot_serv">>, <<"y(2) <0.7.0>">>,<<>>, <<"0x00007ff64afe6950 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,377}, {total_heap_size,754}, {links,[<0.7.0>,<0.10.0>]}, {memory,7008}, {message_queue_len,0}, {reductions,44}, {trap_exit,true}]}, {<0.10.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, [<<"Program counter: 0x00007ff646ff5ab8 (application_master:loop_it/4 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64aff0878 Return addr 0x000000000089c118 ()">>, <<"y(0) []">>,<<"y(1) kernel">>, <<"y(2) <0.11.0>">>,<<"y(3) <0.9.0>">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.9.0>,<0.11.0>]}, {memory,2736}, {message_queue_len,0}, {reductions,69}, {trap_exit,true}]}, {<0.11.0>, [{registered_name,kernel_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646f7f968 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,kernel_sup},one_for_all,[{child,<0.1336.0>,net_sup_dynamic,{erl_dist">>, <<"y(4) kernel_sup">>,<<"y(5) <0.10.0>">>, <<>>, <<"0x00007ff646f7f9a0 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,2584}, {total_heap_size,5168}, {links, [<0.21.0>,<0.25.0>,<0.26.0>,<0.1336.0>,<0.23.0>, <0.16.0>,<0.19.0>,<0.20.0>,<0.18.0>,<0.12.0>, <0.13.0>,<0.10.0>]}, {memory,42720}, {message_queue_len,0}, 
{reductions,38555}, {trap_exit,true}]}, {<0.12.0>, [{registered_name,rex}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646ca9ca0 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) rpc">>, <<"y(3) {1,{<0.1897.0>,{<0.1896.0>,{#Ref<0.0.0.31728>,'ns_1@10.13.41.69'}},nil,nil}}">>, <<"y(4) rex">>,<<"y(5) <0.11.0>">>,<<>>, <<"0x00007ff646ca9cd8 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,8}]}, {heap_size,1597}, {total_heap_size,1974}, {links,[<0.11.0>]}, {memory,16872}, {message_queue_len,0}, {reductions,1285}, {trap_exit,true}]}, {<0.13.0>, [{registered_name,global_name_server}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646c94620 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) global">>, <<"y(3) {state,true,[],[],[],[],'ns_1@10.13.41.69',<0.14.0>,<0.15.0>,no_trace,false}">>, <<"y(4) global_name_server">>, <<"y(5) <0.11.0>">>,<<>>, <<"0x00007ff646c94658 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,15}]}, {heap_size,610}, {total_heap_size,987}, {links,[<0.14.0>,<0.15.0>,<0.11.0>]}, {memory,9128}, {message_queue_len,0}, {reductions,4112}, {trap_exit,true}]}, {<0.14.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x00007ff646e3a020 (global:loop_the_locker/1 + 768)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646f8a2b8 Return addr 0x00007ff646e39ce0 (global:init_the_locker/1 + 328)">>, <<"y(0) {multi,[],[],[],'ns_1@10.13.41.69',false,false}">>, <<"y(1) infinity">>,<<>>, <<"0x00007ff646f8a2d0 Return addr 0x000000000089c118 ()">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,3}]}, {heap_size,610}, {total_heap_size,987}, {links,[<0.13.0>]}, {memory,8728}, {message_queue_len,0}, {reductions,395}, {trap_exit,true}]}, {<0.15.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x00007ff646e41118 (global:loop_the_registrar/0 + 24)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646852768 Return addr 0x000000000089c118 ()">>, <<"y(0) []">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,4}]}, {heap_size,1597}, {total_heap_size,1974}, {links,[<0.13.0>]}, {memory,16624}, {message_queue_len,0}, {reductions,718}, {trap_exit,false}]}, {<0.16.0>, [{registered_name,inet_db}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, 
<<"0x00007ff64b006eb0 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) inet_db">>, <<"(3) {state,inet_db,inet_cache,inet_hosts_byname,inet_hosts_byaddr,inet_hosts_file_byna">>, <<"y(4) inet_db">>,<<"y(5) <0.11.0>">>,<<>>, <<"0x00007ff64b006ee8 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,377}, {total_heap_size,754}, {links,[<0.11.0>]}, {memory,6968}, {message_queue_len,0}, {reductions,284}, {trap_exit,true}]}, {<0.18.0>, [{registered_name,global_group}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64afcce98 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) global_group">>, <<"y(3) {state,no_conf,true,[],[],[],[],[],'nonode@nohost',[],normal,normal}">>, <<"y(4) global_group">>,<<"y(5) <0.11.0>">>, <<>>, <<"0x00007ff64afcced0 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.11.0>]}, {memory,2800}, {message_queue_len,0}, {reductions,99}, {trap_exit,true}]}, {<0.19.0>, [{registered_name,file_server_2}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff645f9c7e8 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) file_server">>, <<"y(3) #Port<0.56>">>, <<"y(4) file_server_2">>,<<"y(5) <0.11.0>">>, <<>>, <<"0x00007ff645f9c820 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,1597}, {total_heap_size,2584}, {links,[#Port<0.56>,<0.11.0>]}, {memory,21648}, {message_queue_len,0}, {reductions,1063004}, {trap_exit,true}]}, {<0.20.0>, [{registered_name,code_server}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x00007ff646e89ab0 (code_server:loop/1 + 128)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64648ee30 Return addr 0x000000000089c118 ()">>, <<"(0) {state,<0.11.0>,\"/opt/membase/lib/erlang\",[\".\",\"/opt/membase/lib/erlang/lib/kernel">>, <<"y(1) <0.11.0>">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,137}]}, {heap_size,4181}, {total_heap_size,32838}, {links,[<0.11.0>]}, {memory,263536}, {message_queue_len,0}, {reductions,278544}, {trap_exit,true}]}, {<0.21.0>, [{registered_name,standard_error_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64afec330 Return addr 
0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor_bridge">>, <<"y(3) {state,standard_error,<0.22.0>,<0.22.0>,{local,standard_error_sup}}">>, <<"y(4) standard_error_sup">>, <<"y(5) <0.11.0>">>,<<>>, <<"0x00007ff64afec368 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.11.0>,<0.22.0>]}, {memory,2840}, {message_queue_len,0}, {reductions,41}, {trap_exit,true}]}, {<0.22.0>, [{registered_name,standard_error}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x00007ff646f05238 (standard_error:server_loop/1 + 40)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64b00af90 Return addr 0x000000000089c118 ()">>, <<"y(0) #Port<0.422>">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.21.0>,#Port<0.422>]}, {memory,2840}, {message_queue_len,0}, {reductions,9}, {trap_exit,true}]}, {<0.23.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64b008d98 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor_bridge">>, <<"y(3) {state,user_sup,<0.24.0>,<0.24.0>,{<0.23.0>,user_sup}}">>, <<"y(4) <0.23.0>">>,<<"y(5) <0.11.0>">>,<<>>, <<"0x00007ff64b008dd0 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,987}, {total_heap_size,987}, {links,[<0.11.0>,<0.24.0>]}, {memory,8872}, {message_queue_len,0}, {reductions,118}, {trap_exit,true}]}, {<0.24.0>, [{registered_name,user}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x00007ff6466747e8 (user:server_loop/2 + 56)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646ca2590 Return addr 0x000000000089c118 ()">>, <<"y(0) []">>,<<"y(1) []">>, <<"y(2) {[],[]}">>,<<"y(3) #Port<0.431>">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,14}]}, {heap_size,6765}, {total_heap_size,53133}, {links,[<0.6.0>,<0.23.0>,#Port<0.431>]}, {memory,426080}, {message_queue_len,0}, {reductions,88540}, {trap_exit,true}]}, {<0.25.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64af908c0 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) kernel_config">>,<<"y(3) []">>, <<"y(4) <0.25.0>">>,<<"y(5) <0.11.0>">>,<<>>, <<"0x00007ff64af908f8 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, 
{garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.11.0>]}, {memory,2800}, {message_queue_len,0}, {reductions,268}, {trap_exit,true}]}, {<0.26.0>, [{registered_name,kernel_safe_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff6467c7a08 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,kernel_safe_sup},one_for_one,[{child,<0.219.0>,inet_gethost_native_s">>, <<"y(4) kernel_safe_sup">>, <<"y(5) <0.11.0>">>,<<>>, <<"0x00007ff6467c7a40 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,4}]}, {heap_size,377}, {total_heap_size,754}, {links, [<0.81.0>,<0.86.0>,<0.219.0>,<0.85.0>,<0.53.0>, <0.80.0>,<0.11.0>]}, {memory,7208}, {message_queue_len,0}, {reductions,457}, {trap_exit,true}]}, {<0.31.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646ff37e8 (application_master:main_loop/2 + 64)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64afeffb0 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"(1) {state,<0.32.0>,{appl_data,sasl,[sasl_sup,alarm_handler,overload,release_handler],">>, <<"y(2) <0.7.0>">>,<<>>, <<"0x00007ff64afeffd0 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.7.0>,<0.32.0>]}, {memory,2840}, {message_queue_len,0}, {reductions,23}, {trap_exit,true}]}, {<0.32.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, [<<"Program counter: 0x00007ff646ff5ab8 (application_master:loop_it/4 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64b009510 Return addr 0x000000000089c118 ()">>, <<"y(0) {state,tty,undefined}">>, <<"y(1) sasl">>,<<"y(2) <0.33.0>">>, <<"y(3) <0.31.0>">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.31.0>,<0.33.0>]}, {memory,2736}, {message_queue_len,0}, {reductions,69}, {trap_exit,true}]}, {<0.33.0>, [{registered_name,sasl_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64afcc740 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,sasl_sup},one_for_one,[{child,<0.37.0>,release_handler,{release_hand">>, <<"y(4) sasl_sup">>,<<"y(5) <0.32.0>">>,<<>>, <<"0x00007ff64afcc778 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, 
{error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,233}, {total_heap_size,610}, {links,[<0.34.0>,<0.37.0>,<0.32.0>]}, {memory,5896}, {message_queue_len,0}, {reductions,158}, {trap_exit,true}]}, {<0.34.0>, [{registered_name,sasl_safe_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64af8ca70 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,sasl_safe_sup},one_for_one,[{child,<0.36.0>,overload,{overload,start">>, <<"y(4) sasl_safe_sup">>,<<"y(5) <0.33.0>">>, <<>>, <<"0x00007ff64af8caa8 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,233}, {total_heap_size,610}, {links,[<0.35.0>,<0.36.0>,<0.33.0>]}, {memory,5896}, {message_queue_len,0}, {reductions,170}, {trap_exit,true}]}, {<0.35.0>, [{registered_name,alarm_handler}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff64897ffa8 (gen_event:fetch_msg/5 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64afda9a0 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) false">>,<<"y(1) []">>, <<"y(2) [{handler,alarm_handler,false,[],false}]">>, <<"y(3) alarm_handler">>,<<"y(4) <0.34.0>">>, <<>>, <<"0x00007ff64afda9d0 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.34.0>]}, {memory,2800}, {message_queue_len,0}, {reductions,28}, {trap_exit,true}]}, {<0.36.0>, [{registered_name,overload}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64af8b740 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) overload">>, <<"y(3) {state,0,0,8.000000e-01,143,1.000000e-01,{0,0},clear}">>, <<"y(4) overload">>,<<"y(5) <0.34.0>">>,<<>>, <<"0x00007ff64af8b778 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.34.0>]}, {memory,2800}, {message_queue_len,0}, {reductions,39}, {trap_exit,false}]}, {<0.37.0>, [{registered_name,release_handler}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64afe4a30 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) release_handler">>, <<"(3) 
{state,[],\"/opt/membase/lib/erlang\",\"/opt/membase/lib/erlang/releases\",[{release,\"">>, <<"y(4) release_handler">>, <<"y(5) <0.33.0>">>,<<>>, <<"0x00007ff64afe4a68 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,3}]}, {heap_size,1597}, {total_heap_size,2207}, {links,[<0.33.0>]}, {memory,18592}, {message_queue_len,0}, {reductions,2093}, {trap_exit,false}]}, {<0.40.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646ff37e8 (application_master:main_loop/2 + 64)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64afeec80 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"(1) {state,<0.41.0>,{appl_data,os_mon,[os_mon_sup,os_mon_sysinfo,disksup,memsup,cpu_su">>, <<"y(2) <0.7.0>">>,<<>>, <<"0x00007ff64afeeca0 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.7.0>,<0.41.0>]}, {memory,2840}, {message_queue_len,0}, {reductions,23}, {trap_exit,true}]}, {<0.41.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, [<<"Program counter: 0x00007ff646ff5ab8 (application_master:loop_it/4 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64afef3e0 Return addr 0x000000000089c118 ()">>, <<"y(0) []">>,<<"y(1) os_mon">>, <<"y(2) <0.42.0>">>,<<"y(3) <0.40.0>">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.40.0>,<0.42.0>]}, {memory,2736}, {message_queue_len,0}, {reductions,40}, {trap_exit,true}]}, {<0.42.0>, [{registered_name,os_mon_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64afd6900 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,os_mon_sup},one_for_one,[{child,<0.46.0>,cpu_sup,{cpu_sup,start_link">>, <<"y(4) os_mon_sup">>,<<"y(5) <0.41.0>">>, <<>>, <<"0x00007ff64afd6938 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,377}, {total_heap_size,754}, {links,[<0.43.0>,<0.44.0>,<0.46.0>,<0.41.0>]}, {memory,7088}, {message_queue_len,0}, {reductions,280}, {trap_exit,true}]}, {<0.43.0>, [{registered_name,disksup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff645cf2780 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) disksup">>, <<"(3) 
{state,80,60000,{unix,linux},[{\"/\",8256952,15},{\"/dev\",8754940,1},{\"/run\",3505116,">>, <<"y(4) disksup">>,<<"y(5) <0.42.0>">>,<<>>, <<"0x00007ff645cf27b8 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,9}]}, {heap_size,4181}, {total_heap_size,8362}, {links,[<0.42.0>,#Port<0.743>]}, {memory,67872}, {message_queue_len,0}, {reductions,13583}, {trap_exit,true}]}, {<0.44.0>, [{registered_name,memsup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64afa2908 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) memsup">>, <<"(3) {state,{unix,linux},true,{438226944,17946181632},{<0.6.0>,1113888},false,60000,300">>, <<"y(4) memsup">>,<<"y(5) <0.42.0>">>,<<>>, <<"0x00007ff64afa2940 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,7}]}, {heap_size,1597}, {total_heap_size,2207}, {links,[<0.42.0>,<0.45.0>]}, {memory,18632}, {message_queue_len,0}, {reductions,14134}, {trap_exit,true}]}, {<0.45.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x00007ff646709a88 (memsup:port_idle/1 + 40)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646841678 Return addr 0x000000000089c118 ()">>, <<"y(0) []">>,<<"y(1) #Port<0.774>">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,10}]}, {heap_size,987}, {total_heap_size,1597}, {links,[<0.44.0>,#Port<0.774>]}, {memory,13648}, {message_queue_len,0}, {reductions,3110}, {trap_exit,true}]}, {<0.46.0>, [{registered_name,cpu_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64afdc6f8 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) cpu_sup">>, <<"y(3) {state,<0.47.0>,{unix,linux}}">>, <<"y(4) cpu_sup">>,<<"y(5) <0.42.0>">>,<<>>, <<"0x00007ff64afdc730 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.42.0>]}, {memory,2800}, {message_queue_len,0}, {reductions,33}, {trap_exit,true}]}, {<0.47.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x00007ff646720d50 (cpu_sup:measurement_server_loop/1 + 40)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64afd45b0 Return addr 0x000000000089c118 ()">>, <<"y(0) []">>,<<"y(1) []">>, <<"y(2) []">>,<<"y(3) []">>, <<"y(4) []">>,<<"y(5) []">>, <<"y(6) []">>,<<"y(7) []">>, <<"y(8) {internal,<0.48.0>,[],{unix,linux}}">>, <<>>]}, {error_handler,error_handler}, 
{garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.48.0>]}, {memory,2696}, {message_queue_len,0}, {reductions,11}, {trap_exit,true}]}, {<0.48.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x00007ff646721df8 (cpu_sup:port_server_loop/2 + 64)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64afdbce0 Return addr 0x000000000089c118 ()">>, <<"y(0) []">>,<<"y(1) 6000">>, <<"y(2) #Port<0.805>">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,3}]}, {heap_size,610}, {total_heap_size,987}, {links,[<0.47.0>,#Port<0.805>]}, {memory,8768}, {message_queue_len,0}, {reductions,250}, {trap_exit,false}]}, {<0.50.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646ff37e8 (application_master:main_loop/2 + 64)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64afbf610 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"(1) {state,<0.51.0>,{appl_data,ns_server,[ns_server_sup,ns_config,ns_config_sup,ns_con">>, <<"y(2) <0.7.0>">>,<<>>, <<"0x00007ff64afbf630 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,610}, {total_heap_size,610}, {links,[<0.7.0>,<0.51.0>]}, {memory,5856}, {message_queue_len,0}, {reductions,58}, {trap_exit,true}]}, {<0.51.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, [<<"Program counter: 0x00007ff646ff5ab8 (application_master:loop_it/4 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646f4af48 Return addr 0x000000000089c118 ()">>, <<"y(0) []">>,<<"y(1) ns_server">>, <<"y(2) <0.54.0>">>,<<"y(3) <0.50.0>">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,7}]}, {heap_size,987}, {total_heap_size,1597}, {links,[<0.50.0>,<0.54.0>]}, {memory,13648}, {message_queue_len,0}, {reductions,1741}, {trap_exit,true}]}, {<0.53.0>, [{registered_name,timer_server}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646f6e498 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) 798">>, <<"y(2) timer">>,<<"y(3) []">>, <<"y(4) timer_server">>,<<"y(5) <0.26.0>">>, <<>>, <<"0x00007ff646f6e4d0 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,3630}]}, {heap_size,377}, {total_heap_size,754}, {links, [<0.1655.0>,<0.1662.0>,<0.1670.0>,<0.1682.0>, <0.1669.0>,<0.1661.0>,<0.1635.0>,<0.1650.0>, <0.1653.0>,<0.1639.0>,<0.1607.0>,<0.1611.0>, <0.26.0>]}, {memory,7448}, {message_queue_len,0}, {reductions,2231734}, {trap_exit,true}]}, {<0.54.0>, [{registered_name,ns_server_cluster_sup}, 
{status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64684f538 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,ns_server_cluster_sup},one_for_one,[{child,<0.1634.0>,ns_server_sup,">>, <<"y(4) ns_server_cluster_sup">>, <<"y(5) <0.51.0>">>,<<>>, <<"0x00007ff64684f570 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,6765}, {total_heap_size,10946}, {links, [<0.57.0>,<0.65.0>,<0.157.0>,<0.1634.0>,<0.64.0>, <0.55.0>,<0.56.0>,<0.51.0>]}, {memory,88784}, {message_queue_len,0}, {reductions,4020}, {trap_exit,true}]}, {<0.55.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x00007ff646740718 (misc:'-start_event_link/1-fun-0-'/1 + 40)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64af94250 Return addr 0x000000000089c118 ()">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.54.0>,<0.6.0>]}, {memory,2736}, {message_queue_len,0}, {reductions,1103}, {trap_exit,false}]}, {<0.56.0>, [{registered_name,timeout_diag_logger}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64afd4d10 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) timeout_diag_logger">>, <<"y(3) {state,1322678866396}">>, <<"y(4) timeout_diag_logger">>, <<"y(5) <0.54.0>">>,<<>>, <<"0x00007ff64afd4d48 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.54.0>]}, {memory,2800}, {message_queue_len,0}, {reductions,28}, {trap_exit,false}]}, {<0.57.0>, [{registered_name,dist_manager}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64af960f8 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) dist_manager">>, <<"y(3) {state,true,\"10.13.41.69\"}">>, <<"y(4) dist_manager">>,<<"y(5) <0.54.0>">>, <<>>, <<"0x00007ff64af96130 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,5}]}, {heap_size,987}, {total_heap_size,1364}, {links,[<0.54.0>]}, {memory,11848}, {message_queue_len,0}, {reductions,3541}, {trap_exit,false}]}, {<0.64.0>, [{registered_name,ns_cluster}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 
0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff6467aed80 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ns_cluster">>,<<"y(3) {state}">>, <<"y(4) ns_cluster">>,<<"y(5) <0.54.0>">>, <<>>, <<"0x00007ff6467aedb8 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,8}]}, {heap_size,28657}, {total_heap_size,57314}, {links,[<0.54.0>]}, {memory,459448}, {message_queue_len,0}, {reductions,26686}, {trap_exit,false}]}, {<0.65.0>, [{registered_name,mb_mnesia_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64af92348 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,mb_mnesia_sup},one_for_one,[{child,<0.67.0>,mb_mnesia,{mb_mnesia,sta">>, <<"y(4) mb_mnesia_sup">>,<<"y(5) <0.54.0>">>, <<>>, <<"0x00007ff64af92380 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,233}, {total_heap_size,610}, {links,[<0.66.0>,<0.67.0>,<0.54.0>]}, {memory,5896}, {message_queue_len,0}, {reductions,158}, {trap_exit,true}]}, {<0.66.0>, [{registered_name,mb_mnesia_events}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff64897ffa8 (gen_event:fetch_msg/5 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64afeaa08 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) false">>,<<"y(1) []">>, <<"y(2) []">>,<<"y(3) mb_mnesia_events">>, <<"y(4) <0.65.0>">>,<<>>, <<"0x00007ff64afeaa38 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.65.0>]}, {memory,2800}, {message_queue_len,0}, {reductions,28}, {trap_exit,true}]}, {<0.67.0>, [{registered_name,mb_mnesia}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646ca6340 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) mb_mnesia">>, <<"y(3) {state,['ns_1@10.13.41.69']}">>, <<"y(4) mb_mnesia">>,<<"y(5) <0.65.0>">>, <<>>, <<"0x00007ff646ca6378 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,11}]}, {heap_size,987}, {total_heap_size,3571}, {links,[<0.1605.0>,<0.65.0>]}, {memory,29544}, {message_queue_len,0}, {reductions,10408}, {trap_exit,true}]}, {<0.80.0>, [{registered_name,dets_sup}, 
{status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646f86920 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,dets_sup},simple_one_for_one,[{child,undefined,dets,{dets,istart_lin">>, <<"y(4) dets_sup">>,<<"y(5) <0.26.0>">>,<<>>, <<"0x00007ff646f86958 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,3}]}, {heap_size,987}, {total_heap_size,1364}, {links,[<0.26.0>]}, {memory,11848}, {message_queue_len,0}, {reductions,711}, {trap_exit,true}]}, {<0.81.0>, [{registered_name,dets}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff645cea4c8 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) dets_server">>, <<"y(3) {state,20510,[<0.26.0>],[]}">>, <<"y(4) dets">>,<<"y(5) <0.26.0>">>,<<>>, <<"0x00007ff645cea500 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,10}]}, {heap_size,2584}, {total_heap_size,2961}, {links,[<0.26.0>]}, {memory,24624}, {message_queue_len,0}, {reductions,1906}, {trap_exit,true}]}, {<0.85.0>, [{registered_name,disk_log_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64afb22c0 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,disk_log_sup},simple_one_for_one,[{child,undefined,disk_log,{disk_lo">>, <<"y(4) disk_log_sup">>,<<"y(5) <0.26.0>">>, <<>>, <<"0x00007ff64afb22f8 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,11}]}, {heap_size,2584}, {total_heap_size,2961}, {links,[<0.1616.0>,<0.26.0>]}, {memory,24664}, {message_queue_len,0}, {reductions,4102}, {trap_exit,true}]}, {<0.86.0>, [{registered_name,disk_log_server}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff645be5008 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) disk_log_server">>, <<"y(3) {state,[]}">>, <<"y(4) disk_log_server">>, <<"y(5) <0.26.0>">>,<<>>, <<"0x00007ff645be5040 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,30}]}, {heap_size,1597}, 
{total_heap_size,2584}, {links,[<0.1616.0>,<0.26.0>]}, {memory,21728}, {message_queue_len,0}, {reductions,7637}, {trap_exit,true}]}, {<0.157.0>, [{registered_name,ns_config_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646410868 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,ns_config_sup},rest_for_one,[{child,<0.164.0>,ns_config_log,{ns_conf">>, <<"y(4) ns_config_sup">>,<<"y(5) <0.54.0>">>, <<>>, <<"0x00007ff6464108a0 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,5}]}, {heap_size,6765}, {total_heap_size,7142}, {links, [<0.161.0>,<0.162.0>,<0.164.0>,<0.158.0>,<0.159.0>, <0.54.0>]}, {memory,58272}, {message_queue_len,0}, {reductions,1087}, {trap_exit,true}]}, {<0.158.0>, [{registered_name,ns_config_events}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff64897ffa8 (gen_event:fetch_msg/5 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff6467c43b8 Return addr 0x00007ff646f95e80 (proc_lib:wake_up/3 + 120)">>, <<"y(0) false">>,<<"y(1) []">>, <<"(2) [{handler,ns_pubsub,#Ref<0.0.0.28976>,{state,#Fun,undefine">>, <<"y(3) ns_config_events">>, <<"y(4) <0.157.0>">>,<<>>, <<"0x00007ff6467c43e8 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95ea0 (proc_lib:wake_up/3 + 152)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,4181}, {total_heap_size,10946}, {links, [<0.1643.0>,<0.1668.0>,<0.1679.0>,<0.1687.0>, <0.1672.0>,<0.1650.0>,<0.162.0>,<0.164.0>,<0.157.0>]}, {memory,88896}, {message_queue_len,0}, {reductions,275563}, {trap_exit,true}]}, {<0.159.0>, [{registered_name,ns_config}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64597a870 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ns_config">>, <<"(3) {config,{full,\"/opt/membase/etc/membase/config\",undefined,ns_config_default},[[],[">>, <<"y(4) ns_config">>,<<"y(5) <0.157.0>">>, <<>>, <<"0x00007ff64597a8a8 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,75025}, {total_heap_size,121393}, {links,[<0.157.0>]}, {memory,972080}, {message_queue_len,0}, {reductions,61872}, {trap_exit,true}]}, {<0.161.0>, [{registered_name,ns_config_remote}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff645ca5490 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ns_config_replica">>, <<"y(3) {state}">>, <<"y(4) 
ns_config_remote">>, <<"y(5) <0.157.0>">>,<<>>, <<"0x00007ff645ca54c8 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,17711}, {total_heap_size,17711}, {links,[<0.157.0>]}, {memory,142624}, {message_queue_len,0}, {reductions,1421}, {trap_exit,false}]}, {<0.162.0>, [{registered_name,ns_config_isasl_sync}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff645c0e908 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ns_config_isasl_sync">>, <<"y(3) {state,[],\"/opt/membase/var/lib/membase/data/isasl.pw\",2,\"_admin\",\"_admin\",false}">>, <<"y(4) ns_config_isasl_sync">>, <<"y(5) <0.157.0>">>,<<>>, <<"0x00007ff645c0e940 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,17711}, {total_heap_size,28657}, {links,[<0.157.0>,<0.158.0>]}, {memory,230232}, {message_queue_len,0}, {reductions,3162}, {trap_exit,false}]}, {<0.164.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x00007ff646740718 (misc:'-start_event_link/1-fun-0-'/1 + 40)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64b00b6f0 Return addr 0x000000000089c118 ()">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.157.0>,<0.158.0>]}, {memory,2736}, {message_queue_len,0}, {reductions,11}, {trap_exit,false}]}, {<0.219.0>, [{registered_name,inet_gethost_native_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64afee510 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor_bridge">>, <<"y(3) {state,inet_gethost_native,<0.220.0>,<0.220.0>,{local,inet_gethost_native_sup}}">>, <<"y(4) inet_gethost_native_sup">>, <<"y(5) <0.26.0>">>,<<>>, <<"0x00007ff64afee548 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.26.0>,<0.220.0>]}, {memory,2840}, {message_queue_len,0}, {reductions,41}, {trap_exit,true}]}, {<0.220.0>, [{registered_name,inet_gethost_native}, {status,waiting}, {initial_call,{inet_gethost_native,server_init,2}}, {backtrace, [<<"Program counter: 0x00007ff646e4d500 (inet_gethost_native:main_loop/1 + 40)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff645d02d38 Return addr 0x000000000089c118 ()">>, <<"y(0) {state,#Port<0.2294>,8000,86058,90155,<0.219.0>,4,{statistics,0,0,0,0,0,0,0,0}}">>, <<>>]}, {error_handler,error_handler}, 
{garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,63}]}, {heap_size,1597}, {total_heap_size,1974}, {links,[<0.219.0>,#Port<0.2294>]}, {memory,16768}, {message_queue_len,0}, {reductions,4428}, {trap_exit,true}]}, {<0.1336.0>, [{registered_name,net_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64af9f710 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,net_sup},one_for_all,[{child,<0.1339.0>,net_kernel,{net_kernel,start">>, <<"y(4) net_sup">>,<<"y(5) <0.11.0>">>,<<>>, <<"0x00007ff64af9f748 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,377}, {total_heap_size,987}, {links,[<0.1337.0>,<0.1338.0>,<0.1339.0>,<0.11.0>]}, {memory,8952}, {message_queue_len,0}, {reductions,241}, {trap_exit,true}]}, {<0.1337.0>, [{registered_name,erl_epmd}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff645d1a450 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) erl_epmd">>, <<"y(3) {state,#Port<0.2810>,21100,ns_1}">>, <<"y(4) erl_epmd">>,<<"y(5) <0.1336.0>">>, <<>>, <<"0x00007ff645d1a488 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.1336.0>,#Port<0.2810>]}, {memory,2840}, {message_queue_len,0}, {reductions,127}, {trap_exit,false}]}, {<0.1338.0>, [{registered_name,auth}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646f8a9e8 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) auth">>, <<"y(3) {state,rizwejgdhowfisdt,5468177}">>, <<"y(4) auth">>,<<"y(5) <0.1336.0>">>,<<>>, <<"0x00007ff646f8aa20 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,233}, {total_heap_size,610}, {links,[<0.1336.0>]}, {memory,5816}, {message_queue_len,0}, {reductions,221}, {trap_exit,true}]}, {<0.1339.0>, [{registered_name,net_kernel}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646c8d618 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) net_kernel">>, <<"(3) {state,'ns_1@10.13.41.69','ns_1@10.13.41.69',longnames,{tick,<0.1341.0>,15000},700">>, <<"y(4) 
net_kernel">>,<<"y(5) <0.1336.0>">>, <<>>, <<"0x00007ff646c8d650 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,5}]}, {heap_size,610}, {total_heap_size,987}, {links, [<0.1336.0>,<0.1341.0>,<0.1340.0>,#Port<0.2808>]}, {memory,8992}, {message_queue_len,0}, {reductions,609}, {trap_exit,true}]}, {<0.1340.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{inet_tcp_dist,accept_loop,2}}, {backtrace, [<<"Program counter: 0x00007ff64898fe28 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64afa4360 Return addr 0x00007ff6468d7ec8 (inet_tcp:accept/1 + 40)">>, <<"y(0) 30257">>,<<"y(1) #Port<0.2808>">>, <<>>, <<"0x00007ff64afa4378 Return addr 0x00007ff6468cd438 (inet_tcp_dist:accept_loop/2 + 96)">>, <<"y(0) []">>,<<>>, <<"0x00007ff64afa4388 Return addr 0x000000000089c118 ()">>, <<"y(0) []">>,<<"y(1) #Port<0.2808>">>, <<"y(2) <0.1339.0>">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.1339.0>]}, {memory,2768}, {message_queue_len,0}, {reductions,8}, {trap_exit,false}]}, {<0.1341.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{net_kernel,ticker,2}}, {backtrace, [<<"Program counter: 0x00007ff646ea76a0 (net_kernel:ticker_loop/2 + 56)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff645d1abd8 Return addr 0x000000000089c118 ()">>, <<"y(0) 15000">>,<<"y(1) <0.1339.0>">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.1339.0>]}, {memory,2696}, {message_queue_len,0}, {reductions,13}, {trap_exit,false}]}, {<0.1526.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646ff37e8 (application_master:main_loop/2 + 64)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff6463eaef0 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"(1) {state,<0.1527.0>,{appl_data,inets,[inets_sup,httpc_manager],undefined,{inets_app,">>, <<"y(2) <0.7.0>">>,<<>>, <<"0x00007ff6463eaf10 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,377}, {total_heap_size,377}, {links,[<0.7.0>,<0.1527.0>]}, {memory,3992}, {message_queue_len,0}, {reductions,42}, {trap_exit,true}]}, {<0.1527.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, [<<"Program counter: 0x00007ff646ff5ab8 (application_master:loop_it/4 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64af93698 Return addr 0x000000000089c118 ()">>, <<"y(0) []">>,<<"y(1) inets_app">>, <<"y(2) <0.1547.0>">>,<<"y(3) <0.1526.0>">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.1526.0>,<0.1547.0>]}, 
{memory,2736}, {message_queue_len,0}, {reductions,42}, {trap_exit,true}]}, {<0.1547.0>, [{registered_name,inets_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff6464d5ad8 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,inets_sup},one_for_one,[{child,<0.1555.0>,tftp_sup,{tftp_sup,start_l">>, <<"y(4) inets_sup">>,<<"y(5) <0.1527.0>">>, <<>>, <<"0x00007ff6464d5b10 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,4}]}, {heap_size,377}, {total_heap_size,754}, {links, [<0.1548.0>,<0.1554.0>,<0.1555.0>,<0.1549.0>, <0.1527.0>]}, {memory,7128}, {message_queue_len,0}, {reductions,326}, {trap_exit,true}]}, {<0.1548.0>, [{registered_name,ftp_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64afea2a8 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,ftp_sup},simple_one_for_one,[{child,undefined,undefined,{ftp,start_l">>, <<"y(4) ftp_sup">>,<<"y(5) <0.1547.0>">>, <<>>, <<"0x00007ff64afea2e0 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.1547.0>]}, {memory,2800}, {message_queue_len,0}, {reductions,52}, {trap_exit,true}]}, {<0.1549.0>, [{registered_name,httpc_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64afe93f8 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,httpc_sup},one_for_one,[{child,<0.1552.0>,httpc_handler_sup,{httpc_h">>, <<"y(4) httpc_sup">>,<<"y(5) <0.1547.0>">>, <<>>, <<"0x00007ff64afe9430 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,233}, {total_heap_size,610}, {links,[<0.1550.0>,<0.1552.0>,<0.1547.0>]}, {memory,5896}, {message_queue_len,0}, {reductions,171}, {trap_exit,true}]}, {<0.1550.0>, [{registered_name,httpc_profile_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64afd3a10 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,httpc_profile_sup},one_for_one,[{child,<0.1551.0>,httpc_manager,{htt">>, <<"y(4) httpc_profile_sup">>, <<"y(5) <0.1549.0>">>,<<>>, 
<<"0x00007ff64afd3a48 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.1549.0>,<0.1551.0>]}, {memory,2840}, {message_queue_len,0}, {reductions,122}, {trap_exit,true}]}, {<0.1551.0>, [{registered_name,httpc_manager}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff645bc95f8 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) httpc_manager">>, <<"(3) {state,[],httpc_manager__handler_db,{cookie_db,undefined,5500994},httpc_manager__s">>, <<"y(4) httpc_manager">>, <<"y(5) <0.1550.0>">>,<<>>, <<"0x00007ff645bc9630 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,10946}, {total_heap_size,11323}, {links,[<0.1550.0>]}, {memory,91520}, {message_queue_len,0}, {reductions,897}, {trap_exit,true}]}, {<0.1552.0>, [{registered_name,httpc_handler_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff6463f36e8 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,httpc_handler_sup},simple_one_for_one,[{child,undefined,undefined,{h">>, <<"y(4) httpc_handler_sup">>, <<"y(5) <0.1549.0>">>,<<>>, <<"0x00007ff6463f3720 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.1549.0>]}, {memory,2800}, {message_queue_len,0}, {reductions,116}, {trap_exit,true}]}, {<0.1554.0>, [{registered_name,httpd_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646776de8 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"y(3) {state,{local,httpd_sup},one_for_one,[],undefined,10,3600,[],httpd_sup,[[]]}">>, <<"y(4) httpd_sup">>,<<"y(5) <0.1547.0>">>, <<>>, <<"0x00007ff646776e20 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.1547.0>]}, {memory,2800}, {message_queue_len,0}, {reductions,43}, {trap_exit,true}]}, {<0.1555.0>, [{registered_name,tftp_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 
0">>,<<>>, <<"0x00007ff6463f3e40 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"y(3) {state,{local,tftp_sup},one_for_one,[],undefined,10,3600,[],tftp_sup,[[]]}">>, <<"y(4) tftp_sup">>,<<"y(5) <0.1547.0>">>, <<>>, <<"0x00007ff6463f3e78 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.1547.0>]}, {memory,2800}, {message_queue_len,0}, {reductions,44}, {trap_exit,true}]}, {<0.1599.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646ff37e8 (application_master:main_loop/2 + 64)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646cabba0 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>, <<"(1) {state,<0.1600.0>,{appl_data,mnesia,[mnesia_dumper_load_regulator,mnesia_event,mne">>, <<"y(2) <0.7.0>">>,<<>>, <<"0x00007ff646cabbc0 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,987}, {total_heap_size,1597}, {links,[<0.7.0>,<0.1600.0>]}, {memory,13752}, {message_queue_len,0}, {reductions,81}, {trap_exit,true}]}, {<0.1600.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{application_master,start_it,4}}, {backtrace, [<<"Program counter: 0x00007ff646ff5ab8 (application_master:loop_it/4 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64b00be28 Return addr 0x000000000089c118 ()">>, <<"y(0) {normal,[]}">>,<<"y(1) mnesia_sup">>, <<"y(2) <0.1601.0>">>,<<"y(3) <0.1599.0>">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.1599.0>,<0.1601.0>]}, {memory,2736}, {message_queue_len,0}, {reductions,32}, {trap_exit,true}]}, {<0.1601.0>, [{registered_name,mnesia_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64afe9b50 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,mnesia_sup},one_for_all,[{child,<0.1603.0>,mnesia_kernel_sup,{mnesia">>, <<"y(4) mnesia_sup">>,<<"y(5) <0.1600.0>">>, <<>>, <<"0x00007ff64afe9b88 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,233}, {total_heap_size,610}, {links,[<0.1602.0>,<0.1603.0>,<0.1600.0>]}, {memory,5896}, {message_queue_len,0}, {reductions,195}, {trap_exit,true}]}, {<0.1602.0>, [{registered_name,mnesia_event}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff64897ffa8 (gen_event:fetch_msg/5 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, 
<<"0x00007ff64afc9068 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) false">>,<<"y(1) []">>, <<"y(2) [{handler,mnesia_event,false,{state,[],false,[]},false}]">>, <<"y(3) mnesia_event">>, <<"y(4) <0.1601.0>">>,<<>>, <<"0x00007ff64afc9098 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,3}]}, {heap_size,987}, {total_heap_size,1597}, {links,[<0.1601.0>,<0.1605.0>]}, {memory,13752}, {message_queue_len,0}, {reductions,418}, {trap_exit,true}]}, {<0.1603.0>, [{registered_name,mnesia_kernel_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff6463efda8 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,mnesia_kernel_sup},one_for_all,[{child,<0.1612.0>,mnesia_late_loader">>, <<"y(4) mnesia_kernel_sup">>, <<"y(5) <0.1601.0>">>,<<>>, <<"0x00007ff6463efde0 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,7}]}, {heap_size,377}, {total_heap_size,754}, {links, [<0.1606.0>,<0.1610.0>,<0.1611.0>,<0.1612.0>, <0.1608.0>,<0.1609.0>,<0.1607.0>,<0.1604.0>, <0.1605.0>,<0.1601.0>]}, {memory,7328}, {message_queue_len,0}, {reductions,551}, {trap_exit,true}]}, {<0.1604.0>, [{registered_name,mnesia_monitor}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff645bea0e8 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) mnesia_monitor">>, <<"y(3) {state,<0.1603.0>,[],[],true,[],undefined,[]}">>, <<"y(4) mnesia_monitor">>, <<"y(5) <0.1603.0>">>,<<>>, <<"0x00007ff645bea120 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,16}]}, {heap_size,1597}, {total_heap_size,2207}, {links,[<0.1603.0>,<0.1616.0>]}, {memory,18632}, {message_queue_len,0}, {reductions,3151}, {trap_exit,true}]}, {<0.1605.0>, [{registered_name,mnesia_subscr}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646854648 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) mnesia_subscr">>, <<"y(3) {state,<0.1603.0>,5578791}">>, <<"y(4) mnesia_subscr">>, <<"y(5) <0.1603.0>">>,<<>>, <<"0x00007ff646854680 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, 
{links,[<0.1602.0>,<0.1603.0>,<0.67.0>]}, {memory,2880}, {message_queue_len,0}, {reductions,111}, {trap_exit,true}]}, {<0.1606.0>, [{registered_name,mnesia_locker}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff6465540a0 (mnesia_locker:loop/1 + 40)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff6467d9e20 Return addr 0x00007ff6469b9050 (mnesia_sp:init_proc/4 + 240)">>, <<"y(0) []">>,<<"y(1) []">>, <<"y(2) []">>,<<"y(3) []">>, <<"y(4) []">>,<<"y(5) {state,<0.1603.0>}">>, <<>>, <<"0x00007ff6467d9e58 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) Catch 0x00007ff6469b9050 (mnesia_sp:init_proc/4 + 240)">>, <<"y(1) mnesia_locker">>,<<"y(2) []">>, <<"y(3) []">>,<<"y(4) [<0.1603.0>]">>,<<>>, <<"0x00007ff6467d9e88 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,16}]}, {heap_size,2584}, {total_heap_size,2961}, {links,[<0.1603.0>]}, {memory,24624}, {message_queue_len,0}, {reductions,10214}, {trap_exit,true}]}, {<0.1607.0>, [{registered_name,mnesia_recover}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff645be6ef0 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) mnesia_recover">>, <<"y(3) {state,<0.1603.0>,undefined,undefined,undefined,0,false,true,[]}">>, <<"y(4) mnesia_recover">>, <<"y(5) <0.1603.0>">>,<<>>, <<"0x00007ff645be6f28 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,3}]}, {heap_size,987}, {total_heap_size,1364}, {links,[<0.1603.0>,<0.53.0>]}, {memory,11888}, {message_queue_len,0}, {reductions,758}, {trap_exit,true}]}, {<0.1608.0>, [{registered_name,mnesia_tm}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff6465dbc98 (mnesia_tm:doit_loop/1 + 200)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646839ed8 Return addr 0x00007ff6469b9050 (mnesia_sp:init_proc/4 + 240)">>, <<"y(0) []">>,<<"y(1) []">>, <<"y(2) {state,{0,nil},{0,nil},<0.1603.0>,[],[],[]}">>, <<"y(3) []">>,<<"y(4) []">>, <<"y(5) <0.1603.0>">>,<<"y(6) {0,nil}">>, <<"y(7) {0,nil}">>,<<>>, <<"0x00007ff646839f20 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) Catch 0x00007ff6469b9050 (mnesia_sp:init_proc/4 + 240)">>, <<"y(1) mnesia_tm">>,<<"y(2) []">>, <<"y(3) []">>,<<"y(4) [<0.1603.0>]">>,<<>>, <<"0x00007ff646839f50 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,10}]}, {heap_size,2584}, {total_heap_size,3194}, {links,[<0.1603.0>]}, {memory,26488}, {message_queue_len,0}, {reductions,26329}, {trap_exit,true}]}, {<0.1609.0>, [{registered_name,mnesia_checkpoint_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 
256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646f45e78 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,mnesia_checkpoint_sup},simple_one_for_one,[{child,undefined,mnesia_c">>, <<"y(4) mnesia_checkpoint_sup">>, <<"y(5) <0.1603.0>">>,<<>>, <<"0x00007ff646f45eb0 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.1603.0>]}, {memory,2800}, {message_queue_len,0}, {reductions,59}, {trap_exit,true}]}, {<0.1610.0>, [{registered_name,mnesia_snmp_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646075f68 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,mnesia_snmp_sup},simple_one_for_one,[{child,undefined,mnesia_snmp_su">>, <<"y(4) mnesia_snmp_sup">>, <<"y(5) <0.1603.0>">>,<<>>, <<"0x00007ff646075fa0 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.1603.0>]}, {memory,2800}, {message_queue_len,0}, {reductions,59}, {trap_exit,true}]}, {<0.1611.0>, [{registered_name,mnesia_controller}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64afa34e0 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) mnesia_controller">>, <<"(3) {state,<0.1603.0>,true,[],[],{0,nil},[],[],{0,nil},undefined,[],[],{interval,#Ref<">>, <<"y(4) mnesia_controller">>, <<"y(5) <0.1603.0>">>,<<>>, <<"0x00007ff64afa3518 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,3}]}, {heap_size,377}, {total_heap_size,754}, {links,[<0.1603.0>,<0.53.0>]}, {memory,7008}, {message_queue_len,0}, {reductions,457}, {trap_exit,true}]}, {<0.1612.0>, [{registered_name,mnesia_late_loader}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646215598 (mnesia_late_loader:loop/1 + 40)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646854d88 Return addr 0x00007ff6469b9050 (mnesia_sp:init_proc/4 + 240)">>, <<"y(0) []">>,<<"y(1) []">>, <<"y(2) {state,<0.1603.0>}">>,<<>>, <<"0x00007ff646854da8 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) Catch 0x00007ff6469b9050 (mnesia_sp:init_proc/4 + 240)">>, <<"y(1) mnesia_late_loader">>,<<"y(2) []">>, <<"y(3) []">>,<<"y(4) [<0.1603.0>]">>,<<>>, <<"0x00007ff646854dd8 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, 
{error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,233}, {total_heap_size,610}, {links,[<0.1603.0>]}, {memory,5816}, {message_queue_len,0}, {reductions,179}, {trap_exit,false}]}, {<0.1616.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff64621edc0 (disk_log:loop/1 + 168)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64afc4700 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"(0) {state,[],[],<0.85.0>,<0.86.0>,186,{arg,latest_log,undefined,\"/opt/membase/var/lib">>, <<>>, <<"0x00007ff64afc4710 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,19}]}, {heap_size,1597}, {total_heap_size,2584}, {links,[<0.86.0>,<0.1604.0>,<0.85.0>,#Port<0.3885>]}, {memory,21728}, {message_queue_len,0}, {reductions,9888}, {trap_exit,true}]}, {<0.1634.0>, [{registered_name,ns_server_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64afab318 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,ns_server_sup},one_for_one,[{child,<0.1687.0>,ns_moxi_sup,{ns_moxi_s">>, <<"y(4) ns_server_sup">>,<<"y(5) <0.54.0>">>, <<>>, <<"0x00007ff64afab350 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,987}, {total_heap_size,2584}, {links, [<0.1658.0>,<0.1680.0>,<0.1685.0>,<0.1686.0>, <0.1687.0>,<0.1682.0>,<0.1671.0>,<0.1678.0>, <0.1679.0>,<0.1677.0>,<0.1662.0>,<0.1664.0>, <0.1661.0>,<0.1637.0>,<0.1650.0>,<0.1657.0>, <0.1649.0>,<0.1635.0>,<0.1636.0>,<0.54.0>]}, {memory,22368}, {message_queue_len,0}, {reductions,3697}, {trap_exit,true}]}, {<0.1635.0>, [{registered_name,ns_log}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff6467bc0f8 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ns_log">>, <<"(3) {state,[{log_entry,{1321,984081,339889},'ns_1@127.0.0.1',ns_node_disco,3,\"Initial ">>, <<"y(4) ns_log">>,<<"y(5) <0.1634.0>">>,<<>>, <<"0x00007ff6467bc130 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,6765}, {total_heap_size,17711}, {links,[<0.53.0>,<0.1634.0>]}, {memory,142664}, {message_queue_len,0}, {reductions,1190}, {trap_exit,true}]}, {<0.1636.0>, [{registered_name,ns_log_events}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x000000000089c110 (unknown function)">>, <<"CP: 0x000000000089c118 ()">>, <<"arity = 3">>,<<" proc_lib">>,<<" 
wake_up">>, <<" [gen_event,wake_hib,[<0.1634.0>,ns_log_events,[{handler,ns_mail_log,false,{state},<0.166">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,34}, {total_heap_size,34}, {links,[<0.1634.0>,<0.1660.0>]}, {memory,1248}, {message_queue_len,0}, {reductions,653}, {trap_exit,true}]}, {<0.1637.0>, [{registered_name,ns_node_disco_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646c8e1f0 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,ns_node_disco_sup},rest_for_one,[{child,<0.1644.0>,ns_config_rep,{ns">>, <<"y(4) ns_node_disco_sup">>, <<"y(5) <0.1634.0>">>,<<>>, <<"0x00007ff646c8e228 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,3}]}, {heap_size,377}, {total_heap_size,754}, {links, [<0.1642.0>,<0.1643.0>,<0.1644.0>,<0.1638.0>, <0.1639.0>,<0.1634.0>]}, {memory,7168}, {message_queue_len,0}, {reductions,777}, {trap_exit,true}]}, {<0.1638.0>, [{registered_name,ns_node_disco_events}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff64897ffa8 (gen_event:fetch_msg/5 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64af8e500 Return addr 0x00007ff646f95e80 (proc_lib:wake_up/3 + 120)">>, <<"y(0) false">>,<<"y(1) []">>, <<"(2) [{handler,menelaus_event,ns_node_disco_events,{state,undefined,[{<0.1667.0>,#Ref<0">>, <<"y(3) ns_node_disco_events">>, <<"y(4) <0.1637.0>">>,<<>>, <<"0x00007ff64af8e530 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95ea0 (proc_lib:wake_up/3 + 152)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,233}, {total_heap_size,322}, {links,[<0.1642.0>,<0.1644.0>,<0.1668.0>,<0.1637.0>]}, {memory,3704}, {message_queue_len,0}, {reductions,80}, {trap_exit,true}]}, {<0.1639.0>, [{registered_name,ns_node_disco}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff6463825f8 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ns_node_disco">>, <<"y(3) {state,['ns_1@10.13.41.69']}">>, <<"y(4) ns_node_disco">>, <<"y(5) <0.1637.0>">>,<<>>, <<"0x00007ff646382630 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,20}]}, {heap_size,17711}, {total_heap_size,28657}, {links,[<0.1637.0>,<0.53.0>]}, {memory,230232}, {message_queue_len,0}, {reductions,13625}, {trap_exit,false}]}, {<0.1642.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x00007ff6462d0dc0 (ns_node_disco_log:'-start_link/0-fun-0-'/0 + 80)">>, <<"CP: 
0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64af8d208 Return addr 0x000000000089c118 ()">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.1637.0>,<0.1638.0>]}, {memory,2736}, {message_queue_len,0}, {reductions,10}, {trap_exit,false}]}, {<0.1643.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x00007ff646740718 (misc:'-start_event_link/1-fun-0-'/1 + 40)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64af90170 Return addr 0x000000000089c118 ()">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.1637.0>,<0.158.0>]}, {memory,2736}, {message_queue_len,0}, {reductions,11}, {trap_exit,false}]}, {<0.1644.0>, [{registered_name,ns_config_rep}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff645c7e848 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ns_config_rep">>, <<"y(3) {state,<0.1647.0>}">>, <<"y(4) ns_config_rep">>, <<"y(5) <0.1637.0>">>,<<>>, <<"0x00007ff645c7e880 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,10946}, {total_heap_size,21892}, {links,[<0.1638.0>,<0.1647.0>,<0.1637.0>]}, {memory,176152}, {message_queue_len,0}, {reductions,1217}, {trap_exit,false}]}, {<0.1647.0>, [{registered_name,ns_config_rep_merger}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x00007ff6462e00a8 (ns_config_rep:merger_loop/0 + 24)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64afd0100 Return addr 0x000000000089c118 ()">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.1644.0>]}, {memory,2696}, {message_queue_len,0}, {reductions,3}, {trap_exit,false}]}, {<0.1649.0>, [{registered_name,ns_tick_event}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff64897ffa8 (gen_event:fetch_msg/5 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff6463eed98 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) false">>,<<"y(1) []">>, <<"(2) [{handler,ns_pubsub,#Ref<0.0.0.28941>,{state,#Fun,ignored},<">>, <<"y(3) ns_tick_event">>, <<"y(4) <0.1634.0>">>,<<>>, <<"0x00007ff6463eedc8 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,6}]}, {heap_size,377}, {total_heap_size,754}, {links,[<0.1634.0>,<0.1680.0>]}, {memory,7008}, {message_queue_len,0}, {reductions,830}, {trap_exit,true}]}, {<0.1650.0>, [{registered_name,mb_master}, {status,waiting}, 
{initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff6462ec4d0 (gen_fsm:loop/7 + 272)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64af9b1c0 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) mb_master">>, <<"(3) {state,<0.1652.0>,'ns_1@10.13.41.69',['ns_1@10.13.41.69'],{1322,679215,116921},['n">>, <<"y(4) master">>,<<"y(5) mb_master">>, <<"y(6) <0.1634.0>">>,<<>>, <<"0x00007ff64af9b200 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,66}]}, {heap_size,2584}, {total_heap_size,4181}, {links,[<0.158.0>,<0.1634.0>,<0.1652.0>,<0.53.0>]}, {memory,34504}, {message_queue_len,0}, {reductions,14213}, {trap_exit,true}]}, {<0.1652.0>, [{registered_name,mb_master_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646402968 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,mb_master_sup},one_for_one,[{child,<0.1656.0>,auto_failover,{auto_fa">>, <<"y(4) mb_master_sup">>, <<"y(5) <0.1650.0>">>,<<>>, <<"0x00007ff6464029a0 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,233}, {total_heap_size,610}, {links,[<0.1653.0>,<0.1655.0>,<0.1656.0>,<0.1650.0>]}, {memory,5936}, {message_queue_len,0}, {reductions,246}, {trap_exit,true}]}, {<0.1653.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff6462ec4d0 (gen_fsm:loop/7 + 272)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff6465303f0 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ns_orchestrator">>, <<"y(3) {idle_state,[]}">>,<<"y(4) idle">>, <<"y(5) ns_orchestrator">>, <<"y(6) <0.1652.0>">>,<<>>, <<"0x00007ff646530430 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,17711}, {total_heap_size,28657}, {links,[<0.1652.0>,<0.53.0>]}, {memory,230304}, {message_queue_len,0}, {reductions,1944}, {trap_exit,true}]}, {<0.1655.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646ca4458 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ns_tick">>, <<"y(3) {state,1322679287117}">>, <<"y(4) ns_tick">>,<<"y(5) <0.1652.0>">>, <<>>, <<"0x00007ff646ca4490 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, 
{min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,6}]}, {heap_size,987}, {total_heap_size,1364}, {links,[<0.1652.0>,<0.53.0>]}, {memory,11960}, {message_queue_len,0}, {reductions,1760}, {trap_exit,false}]}, {<0.1656.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff645f76c68 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) auto_failover">>, <<"y(3) {state,undefined,nil,30,0}">>, <<"y(4) auto_failover">>, <<"y(5) <0.1652.0>">>,<<>>, <<"0x00007ff645f76ca0 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,6765}, {total_heap_size,6765}, {links,[<0.1652.0>]}, {memory,55128}, {message_queue_len,0}, {reductions,636}, {trap_exit,false}]}, {<0.1657.0>, [{registered_name,buckets_events}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff64897ffa8 (gen_event:fetch_msg/5 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff6464030c8 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) false">>,<<"y(1) []">>, <<"(2) [{handler,menelaus_event,buckets_events,{state,undefined,[{<0.1667.0>,#Ref<0.0.0.2">>, <<"y(3) buckets_events">>, <<"y(4) <0.1634.0>">>,<<>>, <<"0x00007ff6464030f8 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.1661.0>,<0.1668.0>,<0.1634.0>]}, {memory,2952}, {message_queue_len,0}, {reductions,51}, {trap_exit,true}]}, {<0.1658.0>, [{registered_name,ns_mail_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff6463ee1b8 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,ns_mail_sup},one_for_all,[{child,<0.1660.0>,ns_mail_log,{ns_mail_log">>, <<"y(4) ns_mail_sup">>,<<"y(5) <0.1634.0>">>, <<>>, <<"0x00007ff6463ee1f0 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,233}, {total_heap_size,610}, {links,[<0.1659.0>,<0.1660.0>,<0.1634.0>]}, {memory,5896}, {message_queue_len,0}, {reductions,650}, {trap_exit,true}]}, {<0.1659.0>, [{registered_name,ns_mail}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646f79c00 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ns_mail">>,<<"y(3) empty_state">>, <<"y(4) ns_mail">>,<<"y(5) <0.1658.0>">>, <<>>, <<"0x00007ff646f79c38 Return addr 
0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.1658.0>]}, {memory,2800}, {message_queue_len,0}, {reductions,27}, {trap_exit,true}]}, {<0.1660.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x00007ff646740718 (misc:'-start_event_link/1-fun-0-'/1 + 40)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff6463ecec8 Return addr 0x000000000089c118 ()">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.1658.0>,<0.1636.0>]}, {memory,2736}, {message_queue_len,0}, {reductions,11}, {trap_exit,false}]}, {<0.1661.0>, [{registered_name,ns_heart}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff645fc24e8 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ns_heart">>, <<"(3) {state,undefined,[{meminfo,<<1170 bytes>>},{system_memory_data,[{system_total_memo">>, <<"y(4) ns_heart">>,<<"y(5) <0.1634.0>">>, <<>>, <<"0x00007ff645fc2520 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,39}]}, {heap_size,10946}, {total_heap_size,28657}, {links,[<0.1634.0>,<0.1657.0>,<0.53.0>]}, {memory,230272}, {message_queue_len,0}, {reductions,33978}, {trap_exit,false}]}, {<0.1662.0>, [{registered_name,ns_doctor}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff645facec8 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ns_doctor">>, <<"(3) {state,{dict,1,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[]},{{[">>, <<"y(4) ns_doctor">>,<<"y(5) <0.1634.0>">>, <<>>, <<"0x00007ff645facf00 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,5}]}, {heap_size,4181}, {total_heap_size,5778}, {links,[<0.1634.0>,<0.53.0>]}, {memory,47200}, {message_queue_len,0}, {reductions,1959}, {trap_exit,false}]}, {<0.1664.0>, [{registered_name,menelaus_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64676f6c8 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,menelaus_sup},one_for_one,[{child,<0.1670.0>,menelaus_web_alerts_srv">>, <<"y(4) menelaus_sup">>, <<"y(5) <0.1634.0>">>,<<>>, <<"0x00007ff64676f700 Return addr 0x000000000089c118 ()">>, <<"y(0) 
Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,6765}, {total_heap_size,17711}, {links, [<0.1666.0>,<0.1669.0>,<0.1670.0>,<0.1668.0>, <0.1634.0>]}, {memory,142784}, {message_queue_len,0}, {reductions,1455}, {trap_exit,true}]}, {<0.1666.0>, [{registered_name,menelaus_web}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64afecf08 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) mochiweb_socket_server">>, <<"(3) {mochiweb_socket_server,8091,#Fun,{local,menelaus_web},">>, <<"y(4) menelaus_web">>, <<"y(5) <0.1664.0>">>,<<>>, <<"0x00007ff64afecf40 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,6}]}, {heap_size,377}, {total_heap_size,754}, {links, [<0.1667.0>,<0.1893.0>,<0.1894.0>,<0.1895.0>, <0.1891.0>,<0.1892.0>,<0.1745.0>,<0.1664.0>, #Port<0.3905>]}, {memory,7288}, {message_queue_len,0}, {reductions,551}, {trap_exit,true}]}, {<0.1667.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff6460bb1d0 (menelaus_web:handle_streaming/4 + 464)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff6456ae898 Return addr 0x00007ff6460b7578 (menelaus_web:loop/3 + 25992)">>, <<"(0) {mochiweb_response,{mochiweb_request,#Port<0.3909>,'GET',\"/pools/default/saslBucke">>, <<"(1) {mochiweb_request,#Port<0.3909>,'GET',\"/pools/default/saslBucketsStreaming\",{1,1},">>, <<"y(2) #Fun">>, <<"y(3) {struct,[{buckets,[]}]}">>,<<>>, <<"0x00007ff6456ae8c0 Return addr 0x00007ff6460f85c0 (mochiweb_http:headers/5 + 1224)">>, <<"y(0) []">>,<<"y(1) []">>, <<"y(2) []">>,<<"y(3) []">>, <<"(4) {mochiweb_request,#Port<0.3909>,'GET',\"/pools/default/saslBucketsStreaming\",{1,1},">>, <<"y(5) Catch 0x00007ff6460b7598 (menelaus_web:loop/3 + 26024)">>, <<>>, <<"0x00007ff6456ae8f8 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) #Fun">>, <<"y(1) []">>,<<"y(2) []">>, <<"(3) {mochiweb_request,#Port<0.3909>,'GET',\"/pools/default/saslBucketsStreaming\",{1,1},">>, <<>>, <<"0x00007ff6456ae920 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,8}]}, {heap_size,46368}, {total_heap_size,57314}, {links,[<0.1666.0>,#Port<0.3909>]}, {memory,459704}, {message_queue_len,0}, {reductions,6885}, {trap_exit,false}]}, {<0.1668.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x00007ff646740718 (misc:'-start_event_link/1-fun-0-'/1 + 40)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646401688 Return addr 0x000000000089c118 ()">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, 
{links,[<0.1638.0>,<0.1664.0>,<0.1657.0>,<0.158.0>]}, {memory,2816}, {message_queue_len,0}, {reductions,27}, {trap_exit,false}]}, {<0.1669.0>, [{registered_name,hot_keys_keeper}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646c8fc68 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) hot_keys_keeper">>, <<"y(3) {state,[],[],<0.1884.0>}">>, <<"y(4) hot_keys_keeper">>, <<"y(5) <0.1664.0>">>,<<>>, <<"0x00007ff646c8fca0 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,233}, {total_heap_size,610}, {links,[<0.1664.0>,<0.53.0>]}, {memory,5856}, {message_queue_len,0}, {reductions,267}, {trap_exit,false}]}, {<0.1670.0>, [{registered_name,menelaus_web_alerts_srv}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff6463ffbf0 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) menelaus_web_alerts_srv">>, <<"(3) {state,[],[],{dict,2,16,16,8,80,48,{[],[],[],[],[],[],[],[],[],[],[],[],[],[],[],[">>, <<"y(4) menelaus_web_alerts_srv">>, <<"y(5) <0.1664.0>">>,<<>>, <<"0x00007ff6463ffc28 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,3}]}, {heap_size,2584}, {total_heap_size,3571}, {links,[<0.1664.0>,<0.53.0>]}, {memory,29544}, {message_queue_len,0}, {reductions,584}, {trap_exit,false}]}, {<0.1671.0>, [{registered_name,ns_port_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64650da70 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"(3) {state,{local,ns_port_sup},one_for_one,[{child,<0.1675.0>,{memcached,\"/opt/membase">>, <<"y(4) ns_port_sup">>,<<"y(5) <0.1634.0>">>, <<>>, <<"0x00007ff64650daa8 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,28657}, {total_heap_size,39603}, {links,[<0.1672.0>,<0.1673.0>,<0.1675.0>,<0.1634.0>]}, {memory,317880}, {message_queue_len,0}, {reductions,5388}, {trap_exit,true}]}, {<0.1672.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x00007ff646740718 (misc:'-start_event_link/1-fun-0-'/1 + 40)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646f88fb0 Return addr 0x000000000089c118 ()">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, 
{total_heap_size,233}, {links,[<0.1671.0>,<0.158.0>]}, {memory,2736}, {message_queue_len,0}, {reductions,11}, {trap_exit,false}]}, {<0.1673.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646d0a168 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor_cushion">>, <<"y(3) {state,moxi,5000,{1322,679215,119820},<0.1674.0>}">>, <<"y(4) <0.1673.0>">>,<<"y(5) <0.1671.0>">>, <<>>, <<"0x00007ff646d0a1a0 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,987}, {total_heap_size,987}, {links,[<0.1671.0>,<0.1674.0>]}, {memory,8872}, {message_queue_len,0}, {reductions,145}, {trap_exit,true}]}, {<0.1674.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff645c81a40 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ns_port_server">>, <<"y(3) {state,#Port<0.3906>,moxi,{[empty],[empty,empty]},undefined,[],0,true}">>, <<"y(4) <0.1674.0>">>,<<"y(5) <0.1673.0>">>, <<>>, <<"0x00007ff645c81a78 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,1597}, {total_heap_size,1597}, {links,[<0.1673.0>,#Port<0.3906>]}, {memory,13752}, {message_queue_len,0}, {reductions,163}, {trap_exit,true}]}, {<0.1675.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64af9d0b0 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor_cushion">>, <<"y(3) {state,memcached,5000,{1322,679215,120164},<0.1676.0>}">>, <<"y(4) <0.1675.0>">>,<<"y(5) <0.1671.0>">>, <<>>, <<"0x00007ff64af9d0e8 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,987}, {total_heap_size,987}, {links,[<0.1671.0>,<0.1676.0>]}, {memory,8872}, {message_queue_len,0}, {reductions,55}, {trap_exit,true}]}, {<0.1676.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff6463f8060 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) ns_port_server">>, <<"y(3) {state,#Port<0.3907>,memcached,{[empty],[empty,empty]},undefined,[],0,true}">>, <<"y(4) <0.1676.0>">>,<<"y(5) <0.1675.0>">>, <<>>, <<"0x00007ff6463f8098 Return addr 0x000000000089c118 ()">>, 
<<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,987}, {total_heap_size,987}, {links,[<0.1675.0>,#Port<0.3907>]}, {memory,8872}, {message_queue_len,0}, {reductions,74}, {trap_exit,true}]}, {<0.1677.0>, [{registered_name,ns_stats_event}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff64897ffa8 (gen_event:fetch_msg/5 + 72)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646f7a7e0 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) false">>,<<"y(1) []">>, <<"(2) [{handler,ns_pubsub,#Ref<0.0.0.28967>,{state,#Fun,ignored},<">>, <<"y(3) ns_stats_event">>, <<"y(4) <0.1634.0>">>,<<>>, <<"0x00007ff646f7a810 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,12}]}, {heap_size,377}, {total_heap_size,987}, {links,[<0.1634.0>,<0.1682.0>]}, {memory,8872}, {message_queue_len,0}, {reductions,1698}, {trap_exit,true}]}, {<0.1678.0>, [{registered_name,ns_bucket_worker}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646841da0 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) work_queue">>,<<"y(3) []">>, <<"y(4) ns_bucket_worker">>, <<"y(5) <0.1634.0>">>,<<>>, <<"0x00007ff646841dd8 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.1634.0>]}, {memory,2800}, {message_queue_len,0}, {reductions,26}, {trap_exit,false}]}, {<0.1679.0>, [{registered_name,ns_bucket_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff6467d4d80 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"y(3) {state,{local,ns_bucket_sup},one_for_one,[],undefined,3,10,[],ns_bucket_sup,[]}">>, <<"y(4) ns_bucket_sup">>, <<"y(5) <0.1634.0>">>,<<>>, <<"0x00007ff6467d4db8 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,6765}, {total_heap_size,6765}, {links,[<0.1634.0>,<0.158.0>]}, {memory,55096}, {message_queue_len,0}, {reductions,645}, {trap_exit,true}]}, {<0.1680.0>, [{registered_name,system_stats_collector}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646f88818 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, 
<<"y(2) system_stats_collector">>, <<"(3) {state,#Port<0.3908>,[{cpu_local_ms,1767360},{cpu_idle_ms,1744160},{swap_total,0},">>, <<"y(4) system_stats_collector">>, <<"y(5) <0.1634.0>">>,<<>>, <<"0x00007ff646f88850 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,25}]}, {heap_size,987}, {total_heap_size,1597}, {links,[<0.1634.0>,<0.1649.0>,#Port<0.3908>]}, {memory,13792}, {message_queue_len,0}, {reductions,4486}, {trap_exit,false}]}, {<0.1682.0>, [{registered_name,'stats_archiver-@system'}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646776680 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) stats_archiver">>, <<"y(3) {state,\"@system\"}">>, <<"y(4) 'stats_archiver-@system'">>, <<"y(5) <0.1634.0>">>,<<>>, <<"0x00007ff6467766b8 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,48}]}, {heap_size,2584}, {total_heap_size,3571}, {links,[<0.1634.0>,<0.1677.0>,<0.53.0>]}, {memory,29584}, {message_queue_len,0}, {reductions,277900}, {trap_exit,false}]}, {<0.1685.0>, [{registered_name,'stats_reader-@system'}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff6463f9f48 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) stats_reader">>, <<"y(3) {state,\"@system\"}">>, <<"y(4) 'stats_reader-@system'">>, <<"y(5) <0.1634.0>">>,<<>>, <<"0x00007ff6463f9f80 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,14}]}, {heap_size,987}, {total_heap_size,1364}, {links,[<0.1634.0>]}, {memory,11848}, {message_queue_len,0}, {reductions,2864}, {trap_exit,false}]}, {<0.1686.0>, [{registered_name,ns_moxi_sup_work_queue}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff645d19b00 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) work_queue">>,<<"y(3) []">>, <<"y(4) ns_moxi_sup_work_queue">>, <<"y(5) <0.1634.0>">>,<<>>, <<"0x00007ff645d19b38 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.1634.0>]}, {memory,2800}, {message_queue_len,0}, {reductions,26}, {trap_exit,false}]}, {<0.1687.0>, [{registered_name,ns_moxi_sup}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program 
counter: 0x00007ff646fe4d88 (gen_server:loop/6 + 256)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff645ce0328 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) []">>,<<"y(1) infinity">>, <<"y(2) supervisor">>, <<"y(3) {state,{local,ns_moxi_sup},one_for_one,[],undefined,20,10,[],ns_moxi_sup,[]}">>, <<"y(4) ns_moxi_sup">>,<<"y(5) <0.1634.0>">>, <<>>, <<"0x00007ff645ce0360 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,6765}, {total_heap_size,6765}, {links,[<0.1634.0>,<0.158.0>]}, {memory,55096}, {message_queue_len,0}, {reductions,1289}, {trap_exit,true}]}, {<0.1745.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff646fe56c0 (gen_server:do_multi_call/4 + 736)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff645f99540 Return addr 0x00007ff647018d08 (rpc:do_multicall/5 + 256)">>, <<"y(0) []">>,<<"y(1) #Ref<0.0.0.31729>">>, <<"y(2) <0.1896.0>">>, <<"y(3) #Ref<0.0.0.31728>">>,<<>>, <<"0x00007ff645f99568 Return addr 0x00007ff6467486e8 (diag_handler:diag_multicall/3 + 208)">>, <<"y(0) []">>,<<"y(1) []">>,<<>>, <<"0x00007ff645f99580 Return addr 0x00007ff6467490c8 (diag_handler:handle_diag/1 + 232)">>, <<"y(0) ['ns_1@10.13.41.69']">>, <<"y(1) []">>,<<"y(2) []">>, <<"y(3) []">>,<<>>, <<"0x00007ff645f995a8 Return addr 0x00007ff6460b7098 (menelaus_web:loop/3 + 24744)">>, <<"y(0) []">>,<<"y(1) []">>, <<"(2) {mochiweb_request,#Port<0.3964>,'GET',\"/diag\",{1,1},{8,{\"host\",{'Host',\"ec2-107-20">>, <<>>, <<"0x00007ff645f995c8 Return addr 0x00007ff6460f85c0 (mochiweb_http:headers/5 + 1224)">>, <<"y(0) []">>,<<"y(1) []">>, <<"y(2) []">>,<<"y(3) []">>, <<"(4) {mochiweb_request,#Port<0.3964>,'GET',\"/diag\",{1,1},{8,{\"host\",{'Host',\"ec2-107-20">>, <<"y(5) Catch 0x00007ff6460b7598 (menelaus_web:loop/3 + 26024)">>, <<>>, <<"0x00007ff645f99600 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) #Fun">>, <<"y(1) []">>,<<"y(2) []">>, <<"(3) {mochiweb_request,#Port<0.3964>,'GET',\"/diag\",{1,1},{8,{\"host\",{'Host',\"ec2-107-20">>, <<>>, <<"0x00007ff645f99628 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,2}]}, {heap_size,17711}, {total_heap_size,20295}, {links,[<0.1666.0>,#Port<0.3964>]}, {memory,163480}, {message_queue_len,0}, {reductions,2444}, {trap_exit,false}]}, {<0.1891.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff648991188 (prim_inet:recv0/3 + 224)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646073dc0 Return addr 0x00007ff6460f7db0 (mochiweb_http:request/2 + 104)">>, <<"y(0) 37211">>,<<"y(1) #Port<0.3971>">>, <<>>, <<"0x00007ff646073dd8 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) #Fun">>, <<"y(1) #Port<0.3971>">>,<<>>, <<"0x00007ff646073df0 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, 
{fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,377}, {total_heap_size,754}, {links,[<0.1666.0>,#Port<0.3971>]}, {memory,7008}, {message_queue_len,0}, {reductions,276}, {trap_exit,false}]}, {<0.1892.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff648991188 (prim_inet:recv0/3 + 224)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646f83e98 Return addr 0x00007ff6460f7db0 (mochiweb_http:request/2 + 104)">>, <<"y(0) 37213">>,<<"y(1) #Port<0.3972>">>, <<>>, <<"0x00007ff646f83eb0 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) #Fun">>, <<"y(1) #Port<0.3972>">>,<<>>, <<"0x00007ff646f83ec8 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,377}, {total_heap_size,754}, {links,[<0.1666.0>,#Port<0.3972>]}, {memory,7008}, {message_queue_len,0}, {reductions,276}, {trap_exit,false}]}, {<0.1893.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff648991188 (prim_inet:recv0/3 + 224)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff64648f9e0 Return addr 0x00007ff6460f7db0 (mochiweb_http:request/2 + 104)">>, <<"y(0) 37215">>,<<"y(1) #Port<0.3973>">>, <<>>, <<"0x00007ff64648f9f8 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) #Fun">>, <<"y(1) #Port<0.3973>">>,<<>>, <<"0x00007ff64648fa10 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,377}, {total_heap_size,754}, {links,[<0.1666.0>,#Port<0.3973>]}, {memory,7008}, {message_queue_len,0}, {reductions,276}, {trap_exit,false}]}, {<0.1894.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff648991188 (prim_inet:recv0/3 + 224)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff646491190 Return addr 0x00007ff6460f7db0 (mochiweb_http:request/2 + 104)">>, <<"y(0) 37217">>,<<"y(1) #Port<0.3974>">>, <<>>, <<"0x00007ff6464911a8 Return addr 0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) #Fun">>, <<"y(1) #Port<0.3974>">>,<<>>, <<"0x00007ff6464911c0 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,377}, {total_heap_size,754}, {links,[<0.1666.0>,#Port<0.3974>]}, {memory,7008}, {message_queue_len,0}, {reductions,276}, {trap_exit,false}]}, {<0.1895.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{proc_lib,init_p,5}}, {backtrace, [<<"Program counter: 0x00007ff64898fe28 (prim_inet:accept0/2 + 184)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"0x00007ff6463ec720 Return addr 0x00007ff6468d7ec8 (inet_tcp:accept/1 + 40)">>, <<"y(0) 37218">>,<<"y(1) #Port<0.3905>">>, <<>>, <<"x00007ff6463ec738 Return addr 0x00007ff6461087b0 (mochiweb_socket_server:acceptor_loop/1 +">>, <<"y(0) []">>,<<>>, <<"0x00007ff6463ec748 Return addr 
0x00007ff646f95da0 (proc_lib:init_p_do_apply/3 + 56)">>, <<"y(0) #Fun">>, <<"y(1) <0.1666.0>">>, <<"y(2) Catch 0x00007ff6461087b0 (mochiweb_socket_server:acceptor_loop/1 + 160)">>, <<>>, <<"0x00007ff6463ec768 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff646f95dc0 (proc_lib:init_p_do_apply/3 + 88)">>, <<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[<0.1666.0>]}, {memory,2872}, {message_queue_len,0}, {reductions,18}, {trap_exit,false}]}, {<0.1896.0>, [{registered_name,[]}, {status,waiting}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x00007ff646fe5c68 (gen_server:rec_nodes/7 + 224)">>, <<"CP: 0x0000000000000000 (invalid)">>, <<"arity = 0">>,<<>>, <<"x00007ff646f80088 Return addr 0x00007ff646feb7d8 (gen_server:'-do_multi_call/4-fun-0-'/6 +">>, <<"y(0) #Ref<0.0.0.31732>">>,<<"y(1) 2000">>, <<"y(2) []">>,<<"y(3) []">>, <<"y(4) rex">>,<<"y(5) #Ref<0.0.0.31728>">>, <<"y(6) []">>,<<"y(7) #Ref<0.0.0.31731>">>, <<"y(8) 'ns_1@10.13.41.69'">>,<<>>, <<"0x00007ff646f800d8 Return addr 0x000000000089c118 ()">>, <<"y(0) #Ref<0.0.0.31728>">>,<<"y(1) []">>, <<"y(2) []">>,<<"y(3) []">>, <<"y(4) []">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,0}]}, {heap_size,233}, {total_heap_size,233}, {links,[]}, {memory,2872}, {message_queue_len,0}, {reductions,13}, {trap_exit,true}]}, {<0.1897.0>, [{registered_name,[]}, {status,running}, {initial_call,{erlang,apply,2}}, {backtrace, [<<"Program counter: 0x00007ff649b0cf88 (unknown function)">>, <<"CP: 0x00007ff646748258 (diag_handler:grab_process_info/1 + 56)">>, <<>>, <<"x00007ff645ed3200 Return addr 0x00007ff64674a468 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) []">>,<<>>, <<"x00007ff645ed3210 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) []">>,<<"y(1) <0.1897.0>">>, <<"y(2) Catch 0x00007ff64674a468 (diag_handler:'-do_diag_per_node/0-lc$^0/1-0-'/1 + 104)">>, <<>>, <<"x00007ff645ed3230 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1896.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,">>, <<>>, <<"x00007ff645ed3248 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1895.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtra">>, <<>>, <<"x00007ff645ed3260 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1894.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtra">>, <<>>, <<"x00007ff645ed3278 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1893.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtra">>, <<>>, <<"x00007ff645ed3290 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1892.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtra">>, <<>>, <<"x00007ff645ed32a8 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1891.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtra">>, <<>>, <<"x00007ff645ed32c0 Return addr 0x00007ff64674a4b8 
(diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1745.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtra">>, <<>>, <<"x00007ff645ed32d8 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1687.0>">>, <<"(1) [{registered_name,ns_moxi_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}}">>, <<>>, <<"x00007ff645ed32f0 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1686.0>">>, <<"(1) [{registered_name,ns_moxi_sup_work_queue},{status,waiting},{initial_call,{proc_lib">>, <<>>, <<"x00007ff645ed3308 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1685.0>">>, <<"(1) [{registered_name,'stats_reader-@system'},{status,waiting},{initial_call,{proc_lib">>, <<>>, <<"x00007ff645ed3320 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1682.0>">>, <<"(1) [{registered_name,'stats_archiver-@system'},{status,waiting},{initial_call,{proc_l">>, <<>>, <<"x00007ff645ed3338 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1680.0>">>, <<"(1) [{registered_name,system_stats_collector},{status,waiting},{initial_call,{proc_lib">>, <<>>, <<"x00007ff645ed3350 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1679.0>">>, <<"(1) [{registered_name,ns_bucket_sup},{status,waiting},{initial_call,{proc_lib,init_p,5">>, <<>>, <<"x00007ff645ed3368 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1678.0>">>, <<"(1) [{registered_name,ns_bucket_worker},{status,waiting},{initial_call,{proc_lib,init_">>, <<>>, <<"x00007ff645ed3380 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1677.0>">>, <<"(1) [{registered_name,ns_stats_event},{status,waiting},{initial_call,{proc_lib,init_p,">>, <<>>, <<"x00007ff645ed3398 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1676.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtra">>, <<>>, <<"x00007ff645ed33b0 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1675.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtra">>, <<>>, <<"x00007ff645ed33c8 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1674.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtra">>, <<>>, <<"x00007ff645ed33e0 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1673.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtra">>, <<>>, <<"x00007ff645ed33f8 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1672.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,">>, <<>>, <<"x00007ff645ed3410 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1671.0>">>, <<"(1) [{registered_name,ns_port_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}}">>, <<>>, <<"x00007ff645ed3428 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1670.0>">>, <<"(1) [{registered_name,menelaus_web_alerts_srv},{status,waiting},{initial_call,{proc_li">>, <<>>, <<"x00007ff645ed3440 Return addr 0x00007ff64674a4b8 
(diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1669.0>">>, <<"(1) [{registered_name,hot_keys_keeper},{status,waiting},{initial_call,{proc_lib,init_p">>, <<>>, <<"x00007ff645ed3458 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1668.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,">>, <<>>, <<"x00007ff645ed3470 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1667.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtra">>, <<>>, <<"x00007ff645ed3488 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1666.0>">>, <<"(1) [{registered_name,menelaus_web},{status,waiting},{initial_call,{proc_lib,init_p,5}">>, <<>>, <<"x00007ff645ed34a0 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1664.0>">>, <<"(1) [{registered_name,menelaus_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}">>, <<>>, <<"x00007ff645ed34b8 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1662.0>">>, <<"(1) [{registered_name,ns_doctor},{status,waiting},{initial_call,{proc_lib,init_p,5}},{">>, <<>>, <<"x00007ff645ed34d0 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1661.0>">>, <<"(1) [{registered_name,ns_heart},{status,waiting},{initial_call,{proc_lib,init_p,5}},{b">>, <<>>, <<"x00007ff645ed34e8 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1660.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,">>, <<>>, <<"x00007ff645ed3500 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1659.0>">>, <<"(1) [{registered_name,ns_mail},{status,waiting},{initial_call,{proc_lib,init_p,5}},{ba">>, <<>>, <<"x00007ff645ed3518 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1658.0>">>, <<"(1) [{registered_name,ns_mail_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}}">>, <<>>, <<"x00007ff645ed3530 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1657.0>">>, <<"(1) [{registered_name,buckets_events},{status,waiting},{initial_call,{proc_lib,init_p,">>, <<>>, <<"x00007ff645ed3548 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1656.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtra">>, <<>>, <<"x00007ff645ed3560 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1655.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtra">>, <<>>, <<"x00007ff645ed3578 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1653.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtra">>, <<>>, <<"x00007ff645ed3590 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1652.0>">>, <<"(1) [{registered_name,mb_master_sup},{status,waiting},{initial_call,{proc_lib,init_p,5">>, <<>>, <<"x00007ff645ed35a8 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1650.0>">>, <<"(1) [{registered_name,mb_master},{status,waiting},{initial_call,{proc_lib,init_p,5}},{">>, <<>>, <<"x00007ff645ed35c0 Return addr 0x00007ff64674a4b8 
(diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1649.0>">>, <<"(1) [{registered_name,ns_tick_event},{status,waiting},{initial_call,{proc_lib,init_p,5">>, <<>>, <<"x00007ff645ed35d8 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1647.0>">>, <<"(1) [{registered_name,ns_config_rep_merger},{status,waiting},{initial_call,{erlang,app">>, <<>>, <<"x00007ff645ed35f0 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1644.0>">>, <<"(1) [{registered_name,ns_config_rep},{status,waiting},{initial_call,{proc_lib,init_p,5">>, <<>>, <<"x00007ff645ed3608 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1643.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,">>, <<>>, <<"x00007ff645ed3620 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1642.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,">>, <<>>, <<"x00007ff645ed3638 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1639.0>">>, <<"(1) [{registered_name,ns_node_disco},{status,waiting},{initial_call,{proc_lib,init_p,5">>, <<>>, <<"x00007ff645ed3650 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1638.0>">>, <<"(1) [{registered_name,ns_node_disco_events},{status,waiting},{initial_call,{proc_lib,i">>, <<>>, <<"x00007ff645ed3668 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1637.0>">>, <<"(1) [{registered_name,ns_node_disco_sup},{status,waiting},{initial_call,{proc_lib,init">>, <<>>, <<"x00007ff645ed3680 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1636.0>">>, <<"(1) [{registered_name,ns_log_events},{status,waiting},{initial_call,{proc_lib,init_p,5">>, <<>>, <<"x00007ff645ed3698 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1635.0>">>, <<"(1) [{registered_name,ns_log},{status,waiting},{initial_call,{proc_lib,init_p,5}},{bac">>, <<>>, <<"x00007ff645ed36b0 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1634.0>">>, <<"(1) [{registered_name,ns_server_sup},{status,waiting},{initial_call,{proc_lib,init_p,5">>, <<>>, <<"x00007ff645ed36c8 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1616.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtra">>, <<>>, <<"x00007ff645ed36e0 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1612.0>">>, <<"(1) [{registered_name,mnesia_late_loader},{status,waiting},{initial_call,{proc_lib,ini">>, <<>>, <<"x00007ff645ed36f8 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1611.0>">>, <<"(1) [{registered_name,mnesia_controller},{status,waiting},{initial_call,{proc_lib,init">>, <<>>, <<"x00007ff645ed3710 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1610.0>">>, <<"(1) [{registered_name,mnesia_snmp_sup},{status,waiting},{initial_call,{proc_lib,init_p">>, <<>>, <<"x00007ff645ed3728 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1609.0>">>, <<"(1) [{registered_name,mnesia_checkpoint_sup},{status,waiting},{initial_call,{proc_lib,">>, <<>>, <<"x00007ff645ed3740 Return addr 0x00007ff64674a4b8 
(diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1608.0>">>, <<"(1) [{registered_name,mnesia_tm},{status,waiting},{initial_call,{proc_lib,init_p,5}},{">>, <<>>, <<"x00007ff645ed3758 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1607.0>">>, <<"(1) [{registered_name,mnesia_recover},{status,waiting},{initial_call,{proc_lib,init_p,">>, <<>>, <<"x00007ff645ed3770 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1606.0>">>, <<"(1) [{registered_name,mnesia_locker},{status,waiting},{initial_call,{proc_lib,init_p,5">>, <<>>, <<"x00007ff645ed3788 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1605.0>">>, <<"(1) [{registered_name,mnesia_subscr},{status,waiting},{initial_call,{proc_lib,init_p,5">>, <<>>, <<"x00007ff645ed37a0 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1604.0>">>, <<"(1) [{registered_name,mnesia_monitor},{status,waiting},{initial_call,{proc_lib,init_p,">>, <<>>, <<"x00007ff645ed37b8 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1603.0>">>, <<"(1) [{registered_name,mnesia_kernel_sup},{status,waiting},{initial_call,{proc_lib,init">>, <<>>, <<"x00007ff645ed37d0 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1602.0>">>, <<"(1) [{registered_name,mnesia_event},{status,waiting},{initial_call,{proc_lib,init_p,5}">>, <<>>, <<"x00007ff645ed37e8 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1601.0>">>, <<"(1) [{registered_name,mnesia_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},">>, <<>>, <<"x00007ff645ed3800 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1600.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{application_master,start_it,">>, <<>>, <<"x00007ff645ed3818 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1599.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtra">>, <<>>, <<"x00007ff645ed3830 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1555.0>">>, <<"(1) [{registered_name,tftp_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{b">>, <<>>, <<"x00007ff645ed3848 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1554.0>">>, <<"(1) [{registered_name,httpd_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{">>, <<>>, <<"x00007ff645ed3860 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1552.0>">>, <<"(1) [{registered_name,httpc_handler_sup},{status,waiting},{initial_call,{proc_lib,init">>, <<>>, <<"x00007ff645ed3878 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1551.0>">>, <<"(1) [{registered_name,httpc_manager},{status,waiting},{initial_call,{proc_lib,init_p,5">>, <<>>, <<"x00007ff645ed3890 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1550.0>">>, <<"(1) [{registered_name,httpc_profile_sup},{status,waiting},{initial_call,{proc_lib,init">>, <<>>, <<"x00007ff645ed38a8 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1549.0>">>, <<"(1) [{registered_name,httpc_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{">>, <<>>, <<"x00007ff645ed38c0 Return addr 0x00007ff64674a4b8 
(diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1548.0>">>, <<"(1) [{registered_name,ftp_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{ba">>, <<>>, <<"x00007ff645ed38d8 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1547.0>">>, <<"(1) [{registered_name,inets_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{">>, <<>>, <<"x00007ff645ed38f0 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1527.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{application_master,start_it,">>, <<>>, <<"x00007ff645ed3908 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1526.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtra">>, <<>>, <<"x00007ff645ed3920 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1341.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{net_kernel,ticker,2}},{backt">>, <<>>, <<"x00007ff645ed3938 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1340.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{inet_tcp_dist,accept_loop,2}">>, <<>>, <<"x00007ff645ed3950 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1339.0>">>, <<"(1) [{registered_name,net_kernel},{status,waiting},{initial_call,{proc_lib,init_p,5}},">>, <<>>, <<"x00007ff645ed3968 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1338.0>">>, <<"(1) [{registered_name,auth},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backt">>, <<>>, <<"x00007ff645ed3980 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1337.0>">>, <<"(1) [{registered_name,erl_epmd},{status,waiting},{initial_call,{proc_lib,init_p,5}},{b">>, <<>>, <<"x00007ff645ed3998 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.1336.0>">>, <<"(1) [{registered_name,net_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{ba">>, <<>>, <<"x00007ff645ed39b0 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.220.0>">>, <<"(1) [{registered_name,inet_gethost_native},{status,waiting},{initial_call,{inet_gethos">>, <<>>, <<"x00007ff645ed39c8 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.219.0>">>, <<"(1) [{registered_name,inet_gethost_native_sup},{status,waiting},{initial_call,{proc_li">>, <<>>, <<"x00007ff645ed39e0 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.164.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,">>, <<>>, <<"x00007ff645ed39f8 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.162.0>">>, <<"(1) [{registered_name,ns_config_isasl_sync},{status,waiting},{initial_call,{proc_lib,i">>, <<>>, <<"x00007ff645ed3a10 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.161.0>">>, <<"(1) [{registered_name,ns_config_remote},{status,waiting},{initial_call,{proc_lib,init_">>, <<>>, <<"x00007ff645ed3a28 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.159.0>">>, <<"(1) [{registered_name,ns_config},{status,waiting},{initial_call,{proc_lib,init_p,5}},{">>, <<>>, <<"x00007ff645ed3a40 Return addr 0x00007ff64674a4b8 
(diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.158.0>">>, <<"(1) [{registered_name,ns_config_events},{status,waiting},{initial_call,{proc_lib,init_">>, <<>>, <<"x00007ff645ed3a58 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.157.0>">>, <<"(1) [{registered_name,ns_config_sup},{status,waiting},{initial_call,{proc_lib,init_p,5">>, <<>>, <<"x00007ff645ed3a70 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.86.0>">>, <<"(1) [{registered_name,disk_log_server},{status,waiting},{initial_call,{proc_lib,init_p">>, <<>>, <<"x00007ff645ed3a88 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.85.0>">>, <<"(1) [{registered_name,disk_log_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}">>, <<>>, <<"x00007ff645ed3aa0 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.81.0>">>, <<"(1) [{registered_name,dets},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backt">>, <<>>, <<"x00007ff645ed3ab8 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.80.0>">>, <<"(1) [{registered_name,dets_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{b">>, <<>>, <<"x00007ff645ed3ad0 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.67.0>">>, <<"(1) [{registered_name,mb_mnesia},{status,waiting},{initial_call,{proc_lib,init_p,5}},{">>, <<>>, <<"x00007ff645ed3ae8 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.66.0>">>, <<"(1) [{registered_name,mb_mnesia_events},{status,waiting},{initial_call,{proc_lib,init_">>, <<>>, <<"x00007ff645ed3b00 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.65.0>">>, <<"(1) [{registered_name,mb_mnesia_sup},{status,waiting},{initial_call,{proc_lib,init_p,5">>, <<>>, <<"x00007ff645ed3b18 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.64.0>">>, <<"(1) [{registered_name,ns_cluster},{status,waiting},{initial_call,{proc_lib,init_p,5}},">>, <<>>, <<"x00007ff645ed3b30 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.57.0>">>, <<"(1) [{registered_name,dist_manager},{status,waiting},{initial_call,{proc_lib,init_p,5}">>, <<>>, <<"x00007ff645ed3b48 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.56.0>">>, <<"(1) [{registered_name,timeout_diag_logger},{status,waiting},{initial_call,{proc_lib,in">>, <<>>, <<"x00007ff645ed3b60 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.55.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,">>, <<>>, <<"x00007ff645ed3b78 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.54.0>">>, <<"(1) [{registered_name,ns_server_cluster_sup},{status,waiting},{initial_call,{proc_lib,">>, <<>>, <<"x00007ff645ed3b90 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.53.0>">>, <<"(1) [{registered_name,timer_server},{status,waiting},{initial_call,{proc_lib,init_p,5}">>, <<>>, <<"x00007ff645ed3ba8 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.51.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{application_master,start_it,">>, <<>>, <<"x00007ff645ed3bc0 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, 
<<"y(0) <0.50.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtra">>, <<>>, <<"x00007ff645ed3bd8 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.48.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,">>, <<>>, <<"x00007ff645ed3bf0 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.47.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,">>, <<>>, <<"x00007ff645ed3c08 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.46.0>">>, <<"(1) [{registered_name,cpu_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{ba">>, <<>>, <<"x00007ff645ed3c20 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.45.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,">>, <<>>, <<"x00007ff645ed3c38 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.44.0>">>, <<"(1) [{registered_name,memsup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{bac">>, <<>>, <<"x00007ff645ed3c50 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.43.0>">>, <<"(1) [{registered_name,disksup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{ba">>, <<>>, <<"x00007ff645ed3c68 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.42.0>">>, <<"(1) [{registered_name,os_mon_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},">>, <<>>, <<"x00007ff645ed3c80 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.41.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{application_master,start_it,">>, <<>>, <<"x00007ff645ed3c98 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.40.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtra">>, <<>>, <<"x00007ff645ed3cb0 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.37.0>">>, <<"(1) [{registered_name,release_handler},{status,waiting},{initial_call,{proc_lib,init_p">>, <<>>, <<"x00007ff645ed3cc8 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.36.0>">>, <<"(1) [{registered_name,overload},{status,waiting},{initial_call,{proc_lib,init_p,5}},{b">>, <<>>, <<"x00007ff645ed3ce0 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.35.0>">>, <<"(1) [{registered_name,alarm_handler},{status,waiting},{initial_call,{proc_lib,init_p,5">>, <<>>, <<"x00007ff645ed3cf8 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.34.0>">>, <<"(1) [{registered_name,sasl_safe_sup},{status,waiting},{initial_call,{proc_lib,init_p,5">>, <<>>, <<"x00007ff645ed3d10 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.33.0>">>, <<"(1) [{registered_name,sasl_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},{b">>, <<>>, <<"x00007ff645ed3d28 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.32.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{application_master,start_it,">>, <<>>, <<"x00007ff645ed3d40 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.31.0>">>, <<"(1) 
[{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtra">>, <<>>, <<"x00007ff645ed3d58 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.26.0>">>, <<"(1) [{registered_name,kernel_safe_sup},{status,waiting},{initial_call,{proc_lib,init_p">>, <<>>, <<"x00007ff645ed3d70 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.25.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtra">>, <<>>, <<"x00007ff645ed3d88 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.24.0>">>, <<"(1) [{registered_name,user},{status,waiting},{initial_call,{erlang,apply,2}},{backtrac">>, <<>>, <<"x00007ff645ed3da0 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.23.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtra">>, <<>>, <<"x00007ff645ed3db8 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.22.0>">>, <<"(1) [{registered_name,standard_error},{status,waiting},{initial_call,{erlang,apply,2}}">>, <<>>, <<"x00007ff645ed3dd0 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.21.0>">>, <<"(1) [{registered_name,standard_error_sup},{status,waiting},{initial_call,{proc_lib,ini">>, <<>>, <<"x00007ff645ed3de8 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.20.0>">>, <<"(1) [{registered_name,code_server},{status,waiting},{initial_call,{erlang,apply,2}},{b">>, <<>>, <<"x00007ff645ed3e00 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.19.0>">>, <<"(1) [{registered_name,file_server_2},{status,waiting},{initial_call,{proc_lib,init_p,5">>, <<>>, <<"x00007ff645ed3e18 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.18.0>">>, <<"(1) [{registered_name,global_group},{status,waiting},{initial_call,{proc_lib,init_p,5}">>, <<>>, <<"x00007ff645ed3e30 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.16.0>">>, <<"(1) [{registered_name,inet_db},{status,waiting},{initial_call,{proc_lib,init_p,5}},{ba">>, <<>>, <<"x00007ff645ed3e48 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.15.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,">>, <<>>, <<"x00007ff645ed3e60 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.14.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{erlang,apply,2}},{backtrace,">>, <<>>, <<"x00007ff645ed3e78 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.13.0>">>, <<"(1) [{registered_name,global_name_server},{status,waiting},{initial_call,{proc_lib,ini">>, <<>>, <<"x00007ff645ed3e90 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.12.0>">>, <<"(1) [{registered_name,rex},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtr">>, <<>>, <<"x00007ff645ed3ea8 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.11.0>">>, <<"(1) [{registered_name,kernel_sup},{status,waiting},{initial_call,{proc_lib,init_p,5}},">>, <<>>, <<"x00007ff645ed3ec0 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.10.0>">>, <<"(1) 
[{registered_name,[]},{status,waiting},{initial_call,{application_master,start_it,">>, <<>>, <<"x00007ff645ed3ed8 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.9.0>">>, <<"(1) [{registered_name,[]},{status,waiting},{initial_call,{proc_lib,init_p,5}},{backtra">>, <<>>, <<"x00007ff645ed3ef0 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.7.0>">>, <<"(1) [{registered_name,application_controller},{status,waiting},{initial_call,{erlang,a">>, <<>>, <<"x00007ff645ed3f08 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.6.0>">>, <<"(1) [{registered_name,error_logger},{status,waiting},{initial_call,{proc_lib,init_p,5}">>, <<>>, <<"x00007ff645ed3f20 Return addr 0x00007ff64674a4b8 (diag_handler:'-do_diag_per_node/0-lc$^0/">>, <<"y(0) <0.3.0>">>, <<"(1) [{registered_name,erl_prim_loader},{status,waiting},{initial_call,{erlang,apply,2}">>, <<>>, <<"0x00007ff645ed3f38 Return addr 0x00007ff646748440 (diag_handler:do_diag_per_node/0 + 216)">>, <<"y(0) <0.0.0>">>, <<"(1) [{registered_name,init},{status,waiting},{initial_call,{otp_ring0,start,2}},{backt">>, <<>>, <<"x00007ff645ed3f50 Return addr 0x00007ff64701a628 (rpc:'-handle_call_call/6-fun-0-'/5 + 192">>, <<"y(0) []">>,<<"y(1) []">>, <<"(2) [{version,[{os_mon,\"2.2.5\"},{mnesia,\"4.4.17\"},{inets,\"5.5.2\"},{kernel,\"2.14.3\"},{s">>, <<"(3) [{otp,[{cookie,rizwejgdhowfisdt}]},{nodes_wanted,['ns_1@10.13.41.69']},{{node,'ns_">>, <<"(4) [\"bucket_engine 1.7.0-0-g721dff0 Linux-x86_64\",\"ep-engine 1.7.1.1-39-g4bd26a3 Linu">>, <<"(5) [{os_mon,\"2.2.5\"},{mnesia,\"4.4.17\"},{inets,\"5.5.2\"},{kernel,\"2.14.3\"},{sasl,\"2.1.9">>, <<>>, <<"0x00007ff645ed3f88 Return addr 0x000000000089c118 ()">>, <<"y(0) Catch 0x00007ff64701a628 (rpc:'-handle_call_call/6-fun-0-'/5 + 192)">>, <<"y(1) []">>,<<"y(2) []">>, <<"y(3) []">>,<<"y(4) <0.12.0>">>,<<>>]}, {error_handler,error_handler}, {garbage_collection, [{min_bin_vheap_size,46368}, {min_heap_size,233}, {fullsweep_after,65535}, {minor_gcs,1}]}, {heap_size,28657}, {total_heap_size,75025}, {links,[]}, {memory,601064}, {message_queue_len,0}, {reductions,30977}, {trap_exit,false}]}]}, {memory,{17946181632,438226944,{<0.6.0>,1113888}}}, {disk, [{"/",8256952,15}, {"/dev",8754940,1}, {"/run",3505116,1}, {"/run/lock",5120,0}, {"/run/shm",8762784,0}, {"/mnt",423135208,1}]}]}] nodes_info = [{struct, [{systemStats, {struct, [{cpu_utilization_rate,0.23696682464454977}, {swap_total,0}, {swap_used,0}]}}, {interestingStats,{struct,[]}}, {uptime,<<"414">>}, {memoryTotal,17946181632}, {memoryFree,17507954688}, {mcdMemoryReserved,13691}, {mcdMemoryAllocated,13691}, {otpNode,<<"ns_1@10.13.41.69">>}, {otpCookie,<<"rizwejgdhowfisdt">>}, {clusterMembership,<<"active">>}, {status,<<"healthy">>}, {hostname,<<"10.13.41.69:8091">>}, {clusterCompatibility,1}, {version,<<"1.7.2r-20-g6604356">>}, {os,<<"x86_64-unknown-linux-gnu">>}, {ports,{struct,[{proxy,11211},{direct,11210}]}}]}] buckets = [] logs: ------------------------------- 2011-11-22 17:48:01.339 ns_node_disco:3:info:cookie update(ns_1@127.0.0.1) - Initial otp cookie generated: sgzcoaesvptwzitb 2011-11-22 17:48:01.378 menelaus_sup:1:info:web start ok(ns_1@127.0.0.1) - Membase Server has started on web port 8091 on node 'ns_1@127.0.0.1'. 
2011-11-30 18:47:52.249 ns_node_disco:2:info:cookie update(ns_1@127.0.0.1) - Node 'ns_1@127.0.0.1' synchronized otp cookie sgzcoaesvptwzitb from cluster
2011-11-30 18:47:52.478 menelaus_sup:1:info:web start ok(ns_1@127.0.0.1) - Membase Server has started on web port 8091 on node 'ns_1@127.0.0.1'.
2011-11-30 18:50:37.483 menelaus_web:12:info:message(ns_1@127.0.0.1) - Created bucket "default" of type: membase
2011-11-30 18:50:38.993 ns_memcached:1:info:message(ns_1@127.0.0.1) - Bucket "default" loaded on node 'ns_1@127.0.0.1' in 0 seconds.
2011-11-30 18:53:28.160 ns_node_disco:5:warning:node down(nonode@nohost) - Node nonode@nohost saw that node 'ns_1@127.0.0.1' went down.
2011-11-30 18:53:28.161 ns_node_disco:4:info:node up(ns_1@10.13.41.69) - Node 'ns_1@10.13.41.69' saw that node 'ns_1@10.13.41.69' came up.
2011-11-30 18:53:31.959 ns_node_disco:4:info:node up(ns_1@10.13.41.69) - Node 'ns_1@10.13.41.69' saw that node 'ns_1@10.13.41.47' came up.
2011-11-30 18:53:31.961 ns_config:3:info:message(ns_1@10.13.41.69) - Conflicting configuration changes to field nodes_wanted: [{'_vclock',[{'ns_1@10.13.41.47',{1,63489898411}}]},'ns_1@10.13.41.47'] and [{'_vclock',[{'ns_1@10.13.41.69',{2,63489898411}}]}, 'ns_1@10.13.41.47','ns_1@10.13.41.69'], choosing the former.
2011-11-30 18:53:31.966 ns_node_disco:4:info:node up(ns_1@10.13.41.47) - Node 'ns_1@10.13.41.47' saw that node 'ns_1@10.13.41.69' came up.
2011-11-30 18:53:31.969 ns_cluster:2:info:message(ns_1@10.13.41.47) - Node 'ns_1@10.13.41.47' is joining cluster via node 'ns_1@10.13.41.69'.
2011-11-30 18:53:32.924 menelaus_sup:1:info:web start ok(ns_1@10.13.41.47) - Membase Server has started on web port 8091 on node 'ns_1@10.13.41.47'.
2011-11-30 18:53:32.937 ns_cluster:1:info:message(ns_1@10.13.41.69) - Node 'ns_1@10.13.41.69' is leaving cluster.
2011-11-30 18:53:32.938 ns_memcached:2:info:message(ns_1@10.13.41.69) - Shutting down bucket "default" on 'ns_1@10.13.41.69' for server shutdown
2011-11-30 18:53:32.943 ns_cluster:3:info:message(ns_1@10.13.41.47) - Node ns_1@10.13.41.47 joined cluster
2011-11-30 18:53:35.118 menelaus_sup:1:info:web start ok(ns_1@10.13.41.69) - Membase Server has started on web port 8091 on node 'ns_1@10.13.41.69'.
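The per-process entries earlier in this dump (registered_name through trap_exit) are collected by diag_handler:grab_process_info/1, the function visible at the top of the running process's own backtrace. A minimal sketch of that collection step, assuming it is a thin wrapper over erlang:process_info/2 (the module name is hypothetical and the item list is copied verbatim from the keys of each process entry above, not from the ns_server source):

    %% diag_sketch.erl -- hypothetical helper, not part of ns_server.
    %% Assumption: diag_handler:grab_process_info/1 (named in the backtraces
    %% above) reduces to one erlang:process_info/2 call; passing a list of
    %% items returns [{Item, Value}] in the same order, which matches the
    %% shape of every process entry rendered in this dump.
    -module(diag_sketch).
    -export([grab_process_info/1]).

    grab_process_info(Pid) ->
        erlang:process_info(Pid,
                            [registered_name, status, initial_call, backtrace,
                             error_handler, garbage_collection, heap_size,
                             total_heap_size, links, memory, message_queue_len,
                             reductions, trap_exit]).

Mapped over erlang:processes(), this would yield the [{Pid, Info}] list serialized in the processes section above; the backtrace item arrives as one binary, which explains the chunked <<"...">> lines in each entry.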
logs_node: ------------------------------- INFO REPORT <3.54.0> 2011-11-22 17:48:00 =============================================================================== nonode@nohost:<3.54.0>:log_os_info:25: OS type: {unix,linux} Version: {3,0,0} Runtime info: [{otp_release,"R14B02"}, {erl_version,"5.8.3"}, {erl_version_long, "Erlang R14B02 (erts-5.8.3) [source] [64-bit] [smp:2:2] [rq:2] [async-threads:16] [hipe] [kernel-poll:false]\n"}, {system_arch_raw,"x86_64-unknown-linux-gnu"}, {system_arch,"x86_64-unknown-linux-gnu"}, {localtime,{{2011,11,22},{17,48,0}}}, {memory, [{total,15839088}, {processes,1671912}, {processes_used,1656824}, {system,14167176}, {atom,452097}, {atom_used,423209}, {binary,698584}, {code,3363556}, {ets,279472}]}, {loaded, [ns_info,log_os_info,misc,ns_log_mf_h, ns_server_cluster_sup,ns_server,timer,io_lib_fread, cpu_sup,memsup,disksup,os_mon,io_lib_pretty,io_lib_format, io_lib,io,sasl_report,release_handler,calendar,overload, alarm_handler,sasl_report_tty_h,sasl,ns_bootstrap, file_io_server,orddict,erl_eval,c,error_logger_tty_h, queue,kernel_config,user,user_sup,supervisor_bridge, standard_error,ram_file,file,beam_lib,unicode,binary,ets, gb_sets,hipe_unified_loader,packages,code_server,code, file_server,net_kernel,global_group,erl_distribution, filename,inet_gethost_native,inet_parse,inet,inet_udp,os, inet_config,inet_db,global,gb_trees,rpc,supervisor,kernel, application_master,sys,application,gen_server,erl_parse, proplists,erl_scan,lists,application_controller,proc_lib, gen,gen_event,error_logger,heart,error_handler,erlang, erl_prim_loader,prim_zip,zlib,prim_file,prim_inet,init, otp_ring0]}, {applications, [{os_mon,"CPO CXC 138 46","2.2.5"}, {kernel,"ERTS CXC 138 10","2.14.3"}, {sasl,"SASL CXC 138 11","2.1.9.3"}, {ns_server,"Membase server","1.7.2r-20-g6604356"}, {stdlib,"ERTS CXC 138 10","1.17.3"}]}, {pre_loaded, [erlang,erl_prim_loader,prim_zip,zlib,prim_file,prim_inet, init,otp_ring0]}, {process_count,44}, {node,nonode@nohost}, {nodes,[]}, {registered, [rex,release_handler,inet_db,kernel_sup,code_server, global_name_server,overload,file_server_2, application_controller,alarm_handler,os_mon_sup, standard_error,init,cpu_sup,global_group,erl_prim_loader, sasl_sup,memsup,disksup,timer_server,standard_error_sup, user,sasl_safe_sup,error_logger,kernel_safe_sup, ns_server_cluster_sup]}, {cookie,nocookie}, {wordsize,8}, {wall_clock,0}] INFO REPORT <3.54.0> 2011-11-22 17:48:00 =============================================================================== nonode@nohost:<3.54.0>:log_os_info:27: Manifest: ["bucket_engine 1.7.0-0-g721dff0 Linux-x86_64", "ep-engine 1.7.1.1-39-g4bd26a3 Linux-x86_64", "geocouch couchbase_1.1_geo-0-gd09068e Linux-x86_64", "icu4c 2635939 Linux-x86_64","libconflate 1.7.0-0-gfee8f94 Linux-x86_64", "libmemcached 2cd40dc Linux-x86_64", "libvbucket 1.7.0-0-g4bd0aba Linux-x86_64", "manifest 1.7.0-30-gd83dca9 Linux-x86_64", "membase-cli 1.7.2-0-g1d9c77e Linux-x86_64", "membase-server 1.7.2r-20-g6604356 Linux-x86_64", "membasex 1.7.0-3-g30d5dba Linux-x86_64", "memcached membase-1.7.1-0-gf99c147 Linux-x86_64", "memcachetest 0.8.3-0-g88ae3b3 Linux-x86_64", "moxi 1.7.2-0-gd5076d9 Linux-x86_64", "ns_server 1.7.2-0-g4925ee0 Linux-x86_64", "otp OTP_R14B03-0-g4a5a758 Linux-x86_64", "portsigar 1.7.0-0-ga191e6c Linux-x86_64", "sigar sigar-1.6.4-406-ge1dcf32 Linux-x86_64", "spidermonkey a3c48c1 Linux-x86_64","tlm 1.7.1-0-g535dadc Linux-x86_64", "vbucketmigrator 1.7.0-0-g0fdc96c Linux-x86_64"] PROGRESS REPORT <3.54.0> 2011-11-22 17:48:00 
=============================================================================== supervisor {local,ns_server_cluster_sup} started [{pid,<3.56.0>}, {name,timeout_diag_logger}, {mfargs,{timeout_diag_logger,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] INFO REPORT <3.57.0> 2011-11-22 17:48:00 =============================================================================== reading ip config from "/opt/membase/var/lib/membase/ip" ERROR REPORT <3.57.0> 2011-11-22 17:48:00 =============================================================================== Got error:einval. Ignoring bad address:[] INFO REPORT <3.57.0> 2011-11-22 17:48:00 =============================================================================== nonode@nohost:<3.57.0>:dist_manager:105: Attempting to bring up net_kernel with name 'ns_1@127.0.0.1' PROGRESS REPORT <3.58.0> 2011-11-22 17:48:00 =============================================================================== supervisor {local,net_sup} started [{pid,<3.59.0>}, {name,erl_epmd}, {mfargs,{erl_epmd,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] PROGRESS REPORT <3.58.0> 2011-11-22 17:48:00 =============================================================================== supervisor {local,net_sup} started [{pid,<3.60.0>}, {name,auth}, {mfargs,{auth,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] PROGRESS REPORT <6524.58.0> 2011-11-22 17:48:00 =============================================================================== supervisor {local,net_sup} started [{pid,<6524.61.0>}, {name,net_kernel}, {mfargs,{net_kernel,start_link,[['ns_1@127.0.0.1',longnames]]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] PROGRESS REPORT <6524.11.0> 2011-11-22 17:48:00 =============================================================================== supervisor {local,kernel_sup} started [{pid,<6524.58.0>}, {name,net_sup_dynamic}, {mfargs,{erl_distribution,start_link, [['ns_1@127.0.0.1',longnames]]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,supervisor}] PROGRESS REPORT <6524.54.0> 2011-11-22 17:48:00 =============================================================================== supervisor {local,ns_server_cluster_sup} started [{pid,<6524.57.0>}, {name,dist_manager}, {mfargs,{dist_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <6524.54.0> 2011-11-22 17:48:00 =============================================================================== supervisor {local,ns_server_cluster_sup} started [{pid,<6524.64.0>}, {name,ns_cluster}, {mfargs,{ns_cluster,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] PROGRESS REPORT <6524.65.0> 2011-11-22 17:48:00 =============================================================================== supervisor {local,mb_mnesia_sup} started [{pid,<6524.66.0>}, {name,mb_mnesia_events}, {mfargs,{gen_event,start_link,[{local,mb_mnesia_events}]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] PROGRESS REPORT <6524.72.0> 2011-11-22 17:48:00 =============================================================================== supervisor {local,mnesia_sup} started [{pid,<6524.73.0>}, {name,mnesia_event}, {mfargs,{mnesia_sup,start_event,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}] PROGRESS REPORT <6524.74.0> 2011-11-22 17:48:00 =============================================================================== supervisor 
{local,mnesia_kernel_sup} started [{pid,<6524.75.0>}, {name,mnesia_monitor}, {mfargs,{mnesia_monitor,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] PROGRESS REPORT <6524.74.0> 2011-11-22 17:48:00 =============================================================================== supervisor {local,mnesia_kernel_sup} started [{pid,<6524.76.0>}, {name,mnesia_subscr}, {mfargs,{mnesia_subscr,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] PROGRESS REPORT <6524.74.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,mnesia_kernel_sup} started [{pid,<6524.77.0>}, {name,mnesia_locker}, {mfargs,{mnesia_locker,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] PROGRESS REPORT <6524.74.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,mnesia_kernel_sup} started [{pid,<6524.78.0>}, {name,mnesia_recover}, {mfargs,{mnesia_recover,start,[]}}, {restart_type,permanent}, {shutdown,180000}, {child_type,worker}] PROGRESS REPORT <6524.74.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,mnesia_kernel_sup} started [{pid,<6524.79.0>}, {name,mnesia_tm}, {mfargs,{mnesia_tm,start,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}] PROGRESS REPORT <6524.74.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,mnesia_kernel_sup} started [{pid,<6524.80.0>}, {name,mnesia_checkpoint_sup}, {mfargs,{mnesia_checkpoint_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] PROGRESS REPORT <6524.74.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,mnesia_kernel_sup} started [{pid,<6524.81.0>}, {name,mnesia_snmp_sup}, {mfargs,{mnesia_snmp_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] PROGRESS REPORT <6524.74.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,mnesia_kernel_sup} started [{pid,<6524.82.0>}, {name,mnesia_controller}, {mfargs,{mnesia_controller,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] PROGRESS REPORT <6524.74.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,mnesia_kernel_sup} started [{pid,<6524.83.0>}, {name,mnesia_late_loader}, {mfargs,{mnesia_late_loader,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] PROGRESS REPORT <6524.72.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,mnesia_sup} started [{pid,<6524.74.0>}, {name,mnesia_kernel_sup}, {mfargs,{mnesia_kernel_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] PROGRESS REPORT <6524.7.0> 2011-11-22 17:48:01 =============================================================================== application mnesia started_at 'ns_1@127.0.0.1' PROGRESS REPORT <6524.26.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,kernel_safe_sup} started [{pid,<6524.87.0>}, {name,disk_log_sup}, {mfargs,{disk_log_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, 
{child_type,supervisor}] PROGRESS REPORT <6524.26.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,kernel_safe_sup} started [{pid,<6524.88.0>}, {name,disk_log_server}, {mfargs,{disk_log_server,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] PROGRESS REPORT <6524.26.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,kernel_safe_sup} started [{pid,<6524.91.0>}, {name,dets_sup}, {mfargs,{dets_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,supervisor}] PROGRESS REPORT <6524.26.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,kernel_safe_sup} started [{pid,<6524.92.0>}, {name,dets}, {mfargs,{dets_server,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] INFO REPORT <6524.67.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:409: Committed schema to disk. INFO REPORT <6524.67.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:245: Current config: [{access_module,mnesia}, {auto_repair,true}, {backup_module,mnesia_backup}, {checkpoints,[]}, {db_nodes,['ns_1@127.0.0.1']}, {debug,verbose}, {directory,"/opt/membase/var/lib/membase/mnesia"}, {dump_log_load_regulation,false}, {dump_log_time_threshold,180000}, {dump_log_update_in_place,true}, {dump_log_write_threshold,1000}, {embedded_mnemosyne,false}, {event_module,mnesia_event}, {extra_db_nodes,[]}, {fallback_activated,false}, {held_locks,[]}, {ignore_fallback_at_startup,false}, {fallback_error_function,{mnesia,lkill}}, {is_running,yes}, {local_tables,[local_config,cluster,schema]}, {lock_queue,[]}, {log_version,"4.3"}, {master_node_tables,[]}, {max_wait_for_decision,10000}, {protocol_version,{7,6}}, {running_db_nodes,['ns_1@127.0.0.1']}, {schema_location,opt_disc}, {schema_version,{3,0}}, {subscribers,[<6524.73.0>,<6524.67.0>]}, {tables,[local_config,cluster,schema]}, {transaction_commits,5}, {transaction_failures,0}, {transaction_log_writes,3}, {transaction_restarts,0}, {transactions,[]}, {use_dir,true}, {core_dir,false}, {no_table_loaders,2}, {dc_dump_limit,4}, {send_compressed,0}, {version,"4.4.17"}] Peers: ['ns_1@127.0.0.1'] INFO REPORT <6524.67.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:199: Mnesia table event: {write,{schema,schema, [{name,schema}, {type,set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,false}, {record_name,schema}, {attributes,[table,cstruct]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1321,984081,13188},'ns_1@127.0.0.1'}}, {version,{{3,0},{'ns_1@127.0.0.1',{1321,984081,34779}}}}]}, {tid,3,<6524.86.0>}} INFO REPORT <6524.67.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:194: Peers: ['ns_1@127.0.0.1'] INFO REPORT <6524.67.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:194: Peers: ['ns_1@127.0.0.1'] PROGRESS REPORT <6524.65.0> 2011-11-22 17:48:01 
=============================================================================== supervisor {local,mb_mnesia_sup} started [{pid,<6524.67.0>}, {name,mb_mnesia}, {mfargs,{mb_mnesia,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] INFO REPORT <6524.67.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:199: Mnesia table event: {write,{schema,local_config, [{name,local_config}, {type,set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,local_config}, {attributes,[key,val]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1321,984081,299640},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,5,<6524.111.0>}} INFO REPORT <6524.67.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:199: Mnesia table event: {write,{schema,local_config, [{name,local_config}, {type,set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,local_config}, {attributes,[key,val]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1321,984081,299640},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,5,<6524.111.0>}} PROGRESS REPORT <6524.54.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_server_cluster_sup} started [{pid,<6524.65.0>}, {name,mb_mnesia_sup}, {mfargs,{mb_mnesia_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] INFO REPORT <6524.117.0> 2011-11-22 17:48:01 =============================================================================== loading static ns_config from "/opt/membase/etc/membase/config" PROGRESS REPORT <6524.117.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_config_sup} started [{pid,<6524.118.0>}, {name,ns_config_events}, {mfargs,{gen_event,start_link,[{local,ns_config_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] INFO REPORT <6524.119.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.119.0>:ns_config_default:226: Upgrading config from 1.6 to 1.7 INFO REPORT <6524.119.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.119.0>:ns_config:413: Upgrading config by changes: [{set,{node,'ns_1@127.0.0.1',config_version},{1,7}}, {set, {node,'ns_1@127.0.0.1',memcached}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63489203281}}]}, {port,11210}, {dbdir,"/opt/membase/var/lib/membase/data"}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine,"/opt/membase/lib/memcached/bucket_engine.so"}, {engines, [{membase, [{engine,"/opt/membase/lib/memcached/ep.so"}, {initfile,"/opt/membase/etc/membase/init.sql"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false;shardpattern=%d/%b-%i.mb;db_strategy=multiMTVBDB"}]}, {memcached, [{engine,"/opt/membase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {verbosity,[]}]}, {set, {node,'ns_1@127.0.0.1',ns_log}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63489203281}}]}, 
{filename,"/opt/membase/var/lib/membase/data/ns_log"}]}, {set,port_servers, [{moxi,"/opt/membase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{rest,port}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout, stream]}, {memcached,"/opt/membase/bin/memcached", ["-X","/opt/membase/lib/memcached/stdin_term_handler.so","-p", {"~B",[port]}, "-E","/opt/membase/lib/memcached/bucket_engine.so","-B","binary", "-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}, {set, {node,'ns_1@127.0.0.1',isasl}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63489203281}}]}, {path,"/opt/membase/var/lib/membase/data/isasl.pw"}]}, {set,directory,"/opt/membase/var/lib/membase/config"}] INFO REPORT <6524.119.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.119.0>:ns_config_default:262: Upgrading config from 1.7 to 1.7.1 INFO REPORT <6524.119.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.119.0>:ns_config:413: Upgrading config by changes: [{set,{node,'ns_1@127.0.0.1',config_version},{1,7,1}}, {set,email_alerts, [{recipients,["root@localhost"]}, {sender,"membase@localhost"}, {enabled,true}, {email_server,[{user,[]}, {pass,[]}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts,[auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down, auto_failover_cluster_too_small]}]}, {set,auto_failover_cfg, [{enabled,false},{timeout,30},{max_nodes,1},{count,0}]}] INFO REPORT <6524.119.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.119.0>:ns_config_default:273: Upgrading config from 1.7.1 to 1.7.2 INFO REPORT <6524.119.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.119.0>:ns_config_default:300: Setting global and per-node rest port to 8091 INFO REPORT <6524.119.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.119.0>:ns_config:413: Upgrading config by changes: [{set,{node,'ns_1@127.0.0.1',config_version},{1,7,2}}, {set,rest,[{port,8091}]}, {set,{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}] INFO REPORT <6524.119.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.119.0>:ns_config:447: Upgraded initial config: {config, {full,"/opt/membase/etc/membase/config",undefined,ns_config_default}, [[], [{directory,"/opt/membase/var/lib/membase/config"}, {nodes_wanted,['ns_1@127.0.0.1']}, {{node,'ns_1@127.0.0.1',membership},active}, {rest,[{port,8091}]}, 
{{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}, {rest_creds,[{creds,[]}]}, {{node,'ns_1@127.0.0.1',isasl}, [{path,"/opt/membase/var/lib/membase/data/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',memcached}, [{port,11210}, {dbdir,"/opt/membase/var/lib/membase/data"}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine,"/opt/membase/lib/memcached/bucket_engine.so"}, {engines, [{membase, [{engine,"/opt/membase/lib/memcached/ep.so"}, {initfile,"/opt/membase/etc/membase/init.sql"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false;shardpattern=%d/%b-%i.mb;db_strategy=multiMTVBDB"}]}, {memcached, [{engine,"/opt/membase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {verbosity,[]}]}, {memory_quota,16090}, {buckets,[{configs,[]}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {port_servers, [{moxi,"/opt/membase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{rest,port}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR", {"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD", {"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout, stream]}, {memcached,"/opt/membase/bin/memcached", ["-X","/opt/membase/lib/memcached/stdin_term_handler.so","-p", {"~B",[port]}, "-E","/opt/membase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{filename,"/opt/membase/var/lib/membase/data/ns_log"}]}, {email_alerts, [{recipients,["root@localhost"]}, {sender,"membase@localhost"}, {enabled,true}, {email_server, [{user,[]}, {pass,[]}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down, auto_failover_cluster_too_small]}]}, {replication,[{enabled,true}]}, {auto_failover_cfg, [{enabled,false},{timeout,30},{max_nodes,1},{count,0}]}]], [[{directory,"/opt/membase/var/lib/membase/config"}, {{node,'ns_1@127.0.0.1',config_version},{1,7,2}}, {auto_failover_cfg, [{enabled,false},{timeout,30},{max_nodes,1},{count,0}]}, {buckets,[{configs,[]}]}, {email_alerts, [{recipients,["root@localhost"]}, {sender,"membase@localhost"}, {enabled,true}, {email_server, [{user,[]}, {pass,[]}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down, auto_failover_cluster_too_small]}]}, {memory_quota,16090}, {nodes_wanted,['ns_1@127.0.0.1']}, {port_servers, [{moxi,"/opt/membase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", 
{"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{rest,port}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR", {"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD", {"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout, stream]}, {memcached,"/opt/membase/bin/memcached", ["-X","/opt/membase/lib/memcached/stdin_term_handler.so","-p", {"~B",[port]}, "-E","/opt/membase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}, {replication,[{enabled,true}]}, {rest,[{port,8091}]}, {rest_creds,[{creds,[]}]}, {{node,'ns_1@127.0.0.1',isasl}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63489203281}}]}, {path,"/opt/membase/var/lib/membase/data/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',membership},active}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63489203281}}]}, {port,11210}, {dbdir,"/opt/membase/var/lib/membase/data"}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine,"/opt/membase/lib/memcached/bucket_engine.so"}, {engines, [{membase, [{engine,"/opt/membase/lib/memcached/ep.so"}, {initfile,"/opt/membase/etc/membase/init.sql"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false;shardpattern=%d/%b-%i.mb;db_strategy=multiMTVBDB"}]}, {memcached, [{engine,"/opt/membase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {verbosity,[]}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{'_vclock',[{'ns_1@127.0.0.1',{1,63489203281}}]}, {filename,"/opt/membase/var/lib/membase/data/ns_log"}]}, {{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}]], ns_config_default, {ns_config,save_config_sync,[]}, undefined,false} PROGRESS REPORT <6524.117.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_config_sup} started [{pid,<6524.119.0>}, {name,ns_config}, {mfargs,{ns_config,start_link, ["/opt/membase/etc/membase/config", ns_config_default]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <6524.117.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_config_sup} started [{pid,<6524.122.0>}, {name,ns_config_remote}, {mfargs,{ns_config_replica,start_link,[{local,ns_config_remote}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] INFO REPORT <6524.123.0> 2011-11-22 17:48:01 =============================================================================== isasl_sync init: ["/opt/membase/var/lib/membase/data/isasl.pw","_admin", "_admin"] INFO REPORT <6524.123.0> 2011-11-22 17:48:01 =============================================================================== isasl_sync init buckets: [] INFO REPORT <6524.123.0> 2011-11-22 17:48:01 =============================================================================== Writing isasl passwd file: "/opt/membase/var/lib/membase/data/isasl.pw" PROGRESS REPORT <6524.117.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_config_sup} started 
[{pid,<6524.123.0>}, {name,ns_config_isasl_sync}, {mfargs,{ns_config_isasl_sync,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <6524.117.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_config_sup} started [{pid,<6524.125.0>}, {name,ns_config_log}, {mfargs,{ns_config_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <6524.54.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_server_cluster_sup} started [{pid,<6524.117.0>}, {name,ns_config_sup}, {mfargs,{ns_config_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] INFO REPORT <6524.127.0> 2011-11-22 17:48:01 =============================================================================== ns_log:init(): Couldn't load logs from "/opt/membase/var/lib/membase/data/ns_log": {error, enoent} PROGRESS REPORT <6524.126.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<6524.127.0>}, {name,ns_log}, {mfargs,{ns_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <6524.126.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<6524.128.0>}, {name,ns_log_events}, {mfargs,{gen_event,start_link,[{local,ns_log_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <6524.129.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_node_disco_sup} started [{pid,<6524.130.0>}, {name,ns_node_disco_events}, {mfargs,{gen_event,start_link,[{local,ns_node_disco_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] INFO REPORT <6524.131.0> 2011-11-22 17:48:01 =============================================================================== Initting ns_node_disco with [] INFO REPORT <6524.132.0> 2011-11-22 17:48:01 =============================================================================== ns_node_disco cookie_sync INFO REPORT <6524.132.0> 2011-11-22 17:48:01 =============================================================================== ns_log: logging ns_node_disco:3:Initial otp cookie generated: sgzcoaesvptwzitb INFO REPORT <6524.118.0> 2011-11-22 17:48:01 =============================================================================== config change: otp -> [{cookie,sgzcoaesvptwzitb}] INFO REPORT <6524.132.0> 2011-11-22 17:48:01 =============================================================================== ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: sgzcoaesvptwzitb INFO REPORT <6524.132.0> 2011-11-22 17:48:01 =============================================================================== ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: sgzcoaesvptwzitb PROGRESS REPORT <6524.129.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_node_disco_sup} started [{pid,<6524.131.0>}, {name,ns_node_disco}, {mfargs,{ns_node_disco,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <6524.129.0> 2011-11-22 17:48:01 
=============================================================================== supervisor {local,ns_node_disco_sup} started [{pid,<6524.135.0>}, {name,ns_node_disco_log}, {mfargs,{ns_node_disco_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <6524.129.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_node_disco_sup} started [{pid,<6524.136.0>}, {name,ns_node_disco_conf_events}, {mfargs,{ns_node_disco_conf_events,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] INFO REPORT <6524.137.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.137.0>:ns_config_rep:56: init pulling INFO REPORT <6524.137.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.137.0>:ns_config_rep:58: init pushing INFO REPORT <6524.137.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.137.0>:ns_config_rep:62: init reannouncing INFO REPORT <6524.118.0> 2011-11-22 17:48:01 =============================================================================== ns_node_disco_conf_events config on otp INFO REPORT <6524.118.0> 2011-11-22 17:48:01 =============================================================================== config change: otp -> [{cookie,sgzcoaesvptwzitb}] INFO REPORT <6524.118.0> 2011-11-22 17:48:01 =============================================================================== config change: directory -> "/opt/membase/var/lib/membase/config" INFO REPORT <6524.118.0> 2011-11-22 17:48:01 =============================================================================== config change: {node,'ns_1@127.0.0.1',config_version} -> {1,7,2} INFO REPORT <6524.118.0> 2011-11-22 17:48:01 =============================================================================== config change: auto_failover_cfg -> [{enabled,false},{timeout,30},{max_nodes,1},{count,0}] INFO REPORT <6524.139.0> 2011-11-22 17:48:01 =============================================================================== ns_node_disco cookie_sync INFO REPORT <6524.118.0> 2011-11-22 17:48:01 =============================================================================== config change: buckets -> [{configs,[]}] INFO REPORT <6524.139.0> 2011-11-22 17:48:01 =============================================================================== ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: sgzcoaesvptwzitb INFO REPORT <6524.118.0> 2011-11-22 17:48:01 =============================================================================== config change: email_alerts -> [{recipients,["root@localhost"]}, {sender,"membase@localhost"}, {enabled,true}, {email_server,[{user,[]}, {pass,[]}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts,[auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small]}] INFO REPORT <6524.118.0> 2011-11-22 17:48:01 =============================================================================== config change: memory_quota -> 16090 INFO REPORT <6524.118.0> 2011-11-22 17:48:01 =============================================================================== ns_node_disco_conf_events config on nodes_wanted INFO REPORT <6524.118.0> 2011-11-22 17:48:01 
=============================================================================== config change: nodes_wanted -> ['ns_1@127.0.0.1'] INFO REPORT <6524.139.0> 2011-11-22 17:48:01 =============================================================================== ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: sgzcoaesvptwzitb INFO REPORT <6524.140.0> 2011-11-22 17:48:01 =============================================================================== ns_node_disco cookie_sync INFO REPORT <6524.118.0> 2011-11-22 17:48:01 =============================================================================== config change: port_servers -> [{moxi,"/opt/membase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{rest,port}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/membase/bin/memcached", ["-X","/opt/membase/lib/memcached/stdin_term_handler.so","-p", {"~B",[port]}, "-E","/opt/membase/lib/memcached/bucket_engine.so","-B","binary", "-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] INFO REPORT <6524.118.0> 2011-11-22 17:48:01 =============================================================================== config change: replication -> [{enabled,true}] INFO REPORT <6524.118.0> 2011-11-22 17:48:01 =============================================================================== config change: rest -> [{port,8091}] INFO REPORT <6524.118.0> 2011-11-22 17:48:01 =============================================================================== config change: rest_creds -> ******** INFO REPORT <6524.118.0> 2011-11-22 17:48:01 =============================================================================== config change: {node,'ns_1@127.0.0.1',isasl} -> [{path,"/opt/membase/var/lib/membase/data/isasl.pw"}] INFO REPORT <6524.118.0> 2011-11-22 17:48:01 =============================================================================== config change: {node,'ns_1@127.0.0.1',membership} -> active INFO REPORT <6524.118.0> 2011-11-22 17:48:01 =============================================================================== config change: {node,'ns_1@127.0.0.1',memcached} -> [{port,11210}, {dbdir,"/opt/membase/var/lib/membase/data"}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine,"/opt/membase/lib/memcached/bucket_engine.so"}, {engines, [{membase, [{engine,"/opt/membase/lib/memcached/ep.so"}, {initfile,"/opt/membase/etc/membase/init.sql"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false;shardpattern=%d/%b-%i.mb;db_strategy=multiMTVBDB"}]}, {memcached, [{engine,"/opt/membase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {verbosity,[]}] INFO REPORT <6524.118.0> 2011-11-22 17:48:01 
=============================================================================== config change: {node,'ns_1@127.0.0.1',moxi} -> [{port,11211},{verbosity,[]}] INFO REPORT <6524.140.0> 2011-11-22 17:48:01 =============================================================================== ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: sgzcoaesvptwzitb INFO REPORT <6524.140.0> 2011-11-22 17:48:01 =============================================================================== ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: sgzcoaesvptwzitb INFO REPORT <6524.118.0> 2011-11-22 17:48:01 =============================================================================== config change: {node,'ns_1@127.0.0.1',ns_log} -> [{filename,"/opt/membase/var/lib/membase/data/ns_log"}] INFO REPORT <6524.118.0> 2011-11-22 17:48:01 =============================================================================== config change: {node,'ns_1@127.0.0.1',rest} -> [{port,8091},{port_meta,global}] INFO REPORT <6524.118.0> 2011-11-22 17:48:01 =============================================================================== ns_node_disco_conf_events config all INFO REPORT <6524.137.0> 2011-11-22 17:48:01 =============================================================================== Pushing config PROGRESS REPORT <6524.129.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_node_disco_sup} started [{pid,<6524.137.0>}, {name,ns_config_rep}, {mfargs,{ns_config_rep,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <6524.126.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<6524.129.0>}, {name,ns_node_disco_sup}, {mfargs,{ns_node_disco_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] PROGRESS REPORT <6524.126.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<6524.142.0>}, {name,ns_tick_event}, {mfargs,{gen_event,start_link,[{local,ns_tick_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] INFO REPORT <6524.137.0> 2011-11-22 17:48:01 =============================================================================== Pushing config done INFO REPORT <6524.144.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.144.0>:mb_master:97: I'm the only node, so I'm the master. 
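
At this point mb_master has concluded the (single-node) master election and begins bringing up the cluster-wide singleton processes: the records that follow show ns_orchestrator, ns_tick and auto_failover each "started as <pid> on 'ns_1@127.0.0.1'" via misc:start_singleton and then registered as children of mb_master_sup. As a rough sketch of the mechanism, assuming registration through the standard global module (the actual misc implementation is not shown in this log and may differ; the log also shows both gen_server and gen_fsm variants, while this sketch uses gen_server only):

    %% Hedged, illustrative sketch only; start_singleton/2 here is not
    %% the real misc API. Start the process locally, then claim a
    %% cluster-wide name via global: a second node attempting the same
    %% start would lose the name registration instead of running a
    %% duplicate singleton.
    -module(singleton_sketch).
    -export([start_singleton/2]).

    start_singleton(Module, Args) ->
        {ok, Pid} = gen_server:start_link(Module, Args, []),
        case global:register_name(Module, Pid) of
            yes -> {ok, Pid};                               % we own the singleton
            no  -> exit(Pid, shutdown),                     % someone else won
                   {error, already_running}
        end.

On this single-node cluster the registration trivially succeeds, which is why each start_singleton line below reports an immediate successful start.
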
INFO REPORT <6524.145.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.145.0>:misc:785: start_singleton(gen_fsm, ns_orchestrator, [], []): started as <6524.146.0> on 'ns_1@127.0.0.1' PROGRESS REPORT <6524.145.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,mb_master_sup} started [{pid,<6524.146.0>}, {name,ns_orchestrator}, {mfargs,{ns_orchestrator,start_link,[]}}, {restart_type,permanent}, {shutdown,20}, {child_type,worker}] INFO REPORT <6524.145.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.145.0>:misc:785: start_singleton(gen_server, ns_tick, [], []): started as <6524.147.0> on 'ns_1@127.0.0.1' PROGRESS REPORT <6524.145.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,mb_master_sup} started [{pid,<6524.147.0>}, {name,ns_tick}, {mfargs,{ns_tick,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] INFO REPORT <6524.148.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.148.0>:auto_failover:120: init auto_failover. INFO REPORT <6524.145.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.145.0>:misc:785: start_singleton(gen_server, auto_failover, [], []): started as <6524.148.0> on 'ns_1@127.0.0.1' PROGRESS REPORT <6524.145.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,mb_master_sup} started [{pid,<6524.148.0>}, {name,auto_failover}, {mfargs,{auto_failover,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] PROGRESS REPORT <6524.126.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<6524.144.0>}, {name,mb_master}, {mfargs,{mb_master,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] PROGRESS REPORT <6524.126.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<6524.149.0>}, {name,buckets_events}, {mfargs,{gen_event,start_link,[{local,buckets_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <6524.150.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_mail_sup} started [{pid,<6524.151.0>}, {name,ns_mail}, {mfargs,{ns_mail,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <6524.150.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_mail_sup} started [{pid,<6524.152.0>}, {name,ns_mail_log}, {mfargs,{ns_mail_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <6524.126.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<6524.150.0>}, {name,ns_mail_sup}, {mfargs,{ns_mail_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] INFO REPORT <6524.128.0> 2011-11-22 17:48:01 
=============================================================================== ns_1@127.0.0.1:<6524.128.0>:ns_mail_log:45: ns_mail_log started up PROGRESS REPORT <6524.126.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<6524.153.0>}, {name,ns_heart}, {mfargs,{ns_heart,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <6524.126.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<6524.155.0>}, {name,ns_doctor}, {mfargs,{ns_doctor,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] ERROR REPORT <6524.153.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.153.0>:ns_heart:137: Failed to grab system stats: {'EXIT',{noproc,{gen_server,call, [{'stats_reader-@system','ns_1@127.0.0.1'}, {latest,"minute"}]}}} INFO REPORT <6524.155.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.155.0>:ns_doctor:82: Got initial status [{'ns_1@127.0.0.1', [{last_heard, {1321,984081, 374108}}, {active_buckets, []}, {ready_buckets, []}, {replication, []}, {memory, [{total, 21796320}, {processes, 3523144}, {processes_used, 3510496}, {system, 18273176}, {atom, 761969}, {atom_used, 733937}, {binary, 695168}, {code, 6891478}, {ets, 470280}]}, {system_stats, [{cpu_utilization_rate, 0}, {swap_total, 0}, {swap_used, 0}]}, {interesting_stats, []}, {cluster_compatibility_version, 1}, {version, [{os_mon, "2.2.5"}, {mnesia, "4.4.17"}, {kernel, "2.14.3"}, {sasl, "2.1.9.3"}, {ns_server, "1.7.2r-20-g6604356"}, {stdlib, "1.17.3"}]}, {system_arch, "x86_64-unknown-linux-gnu"}, {wall_clock,0}, {memory_data, {17946165248, 959442944, {<6524.7.0>, 513760}}}, {disk_data, [{"/", 8256952,15}, {"/dev", 8754936,1}, {"/run", 3505112,1}, {"/run/lock", 5120,0}, {"/run/shm", 8762776,0}, {"/mnt", 423135208, 1}]}, {meminfo, <<"MemTotal: 17525552 kB\nMemFree: 16583488 kB\nBuffers: 27972 kB\nCached: 600604 kB\nSwapCached: 0 kB\nActive: 362920 kB\nInactive: 313352 kB\nActive(anon): 47544 kB\nInactive(anon): 208 kB\nActive(file): 315376 kB\nInactive(file): 313144 kB\nUnevictable: 0 kB\nMlocked: 0 kB\nSwapTotal: 0 kB\nSwapFree: 0 kB\nDirty: 5412 kB\nWriteback: 0 kB\nAnonPages: 47508 kB\nMapped: 7908 kB\nShmem: 324 kB\nSlab: 53908 kB\nSReclaimable: 47584 kB\nSUnreclaim: 6324 kB\nKernelStack: 648 kB\nPageTables: 1848 kB\nNFS_Unstable: 0 kB\nBounce: 0 kB\nWritebackTmp: 0 kB\nCommitLimit: 8762776 kB\nCommitted_AS: 126356 kB\nVmallocTotal: 34359738367 kB\nVmallocUsed: 64000 kB\nVmallocChunk: 34359674252 kB\nHardwareCorrupted: 0 kB\nAnonHugePages: 0 kB\nHugePages_Total: 0\nHugePages_Free: 0\nHugePages_Rsvd: 0\nHugePages_Surp: 0\nHugepagesize: 2048 kB\nDirectMap4k: 17928192 kB\nDirectMap2M: 0 kB\n">>}, {system_memory_data, [{system_total_memory, 17946165248}, {free_swap, 0}, {total_swap, 0}, {cached_memory, 615018496}, {buffered_memory, 28643328}, {free_memory, 16981491712}, {total_memory, 17946165248}]}, {statistics, [{wall_clock, {630,433}}, {context_switches, {20770,0}}, {garbage_collection, {1251, 2596016, 0}}, {io, {{input, 6326854}, {output, 1462748}}}, {reductions, {1345982, 1345982}}, {run_queue, 0}, {runtime, {330, 330}}]}]}] PROGRESS REPORT <6524.157.0> 2011-11-22 17:48:01 
=============================================================================== supervisor {local,menelaus_sup} started [{pid,<6524.158.0>}, {name,menelaus_web}, {mfargs,{menelaus_web,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] PROGRESS REPORT <6524.157.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,menelaus_sup} started [{pid,<6524.160.0>}, {name,menelaus_event}, {mfargs,{menelaus_event,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] PROGRESS REPORT <6524.157.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,menelaus_sup} started [{pid,<6524.161.0>}, {name,hot_keys_keeper}, {mfargs,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] PROGRESS REPORT <6524.157.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,menelaus_sup} started [{pid,<6524.162.0>}, {name,menelaus_web_alerts_srv}, {mfargs,{menelaus_web_alerts_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] INFO REPORT <6524.126.0> 2011-11-22 17:48:01 =============================================================================== ns_log: logging menelaus_sup:1:Membase Server has started on web port 8091 on node 'ns_1@127.0.0.1'. PROGRESS REPORT <6524.126.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<6524.157.0>}, {name,menelaus}, {mfargs,{menelaus_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] PROGRESS REPORT <6524.163.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_port_sup} started [{pid,<6524.164.0>}, {name,ns_port_init}, {mfargs,{ns_port_init,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] INFO REPORT <6524.165.0> 2011-11-22 17:48:01 =============================================================================== starting ns_port_server with delay of 5000 PROGRESS REPORT <6524.163.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_port_sup} started [{pid,<6524.165.0>}, {name, {moxi,"/opt/membase/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",[]}, {"MOXI_SASL_PLAIN_PWD",[]}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]}}, {mfargs, {supervisor_cushion,start_link, [moxi,5000,ns_port_server,start_link, [moxi,"/opt/membase/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, 
[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",[]}, {"MOXI_SASL_PLAIN_PWD",[]}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]]]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}] INFO REPORT <6524.167.0> 2011-11-22 17:48:01 =============================================================================== starting ns_port_server with delay of 5000 PROGRESS REPORT <6524.163.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_port_sup} started [{pid,<6524.167.0>}, {name, {memcached,"/opt/membase/bin/memcached", ["-X","/opt/membase/lib/memcached/stdin_term_handler.so", "-p","11210","-E", "/opt/membase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", "admin=_admin;default_bucket_name=default;auto_create=false", []], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE", "/opt/membase/var/lib/membase/data/isasl.pw"}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status, port_server_send_eol,stream]}}, {mfargs, {supervisor_cushion,start_link, [memcached,5000,ns_port_server,start_link, [memcached,"/opt/membase/bin/memcached", ["-X","/opt/membase/lib/memcached/stdin_term_handler.so", "-p","11210","-E", "/opt/membase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", "admin=_admin;default_bucket_name=default;auto_create=false", []], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE", "/opt/membase/var/lib/membase/data/isasl.pw"}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status, port_server_send_eol,stream]]]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}] PROGRESS REPORT <6524.126.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<6524.163.0>}, {name,ns_port_sup}, {mfargs,{ns_port_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,60000}, {child_type,worker}] PROGRESS REPORT <6524.126.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<6524.169.0>}, {name,ns_stats_event}, {mfargs,{gen_event,start_link,[{local,ns_stats_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <6524.126.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<6524.170.0>}, {name,ns_bucket_worker}, {mfargs,{work_queue,start_link,[ns_bucket_worker]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <6524.126.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<6524.171.0>}, {name,ns_bucket_sup}, {mfargs,{ns_bucket_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] PROGRESS REPORT <6524.126.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<6524.172.0>}, {name,system_stats_collector}, {mfargs,{system_stats_collector,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <6524.126.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_server_sup} started 
[{pid,<6524.173.0>}, {name,{stats_archiver,"@system"}}, {mfargs,{stats_archiver,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] INFO REPORT <6524.67.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:109: Creating table 'stats_archiver-@system-minute' PROGRESS REPORT <6524.126.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<6524.175.0>}, {name,{stats_reader,"@system"}}, {mfargs,{stats_reader,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <6524.126.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<6524.178.0>}, {name,ns_moxi_sup_work_queue}, {mfargs,{work_queue,start_link,[ns_moxi_sup_work_queue]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <6524.126.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<6524.179.0>}, {name,ns_moxi_sup}, {mfargs,{ns_moxi_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] PROGRESS REPORT <6524.54.0> 2011-11-22 17:48:01 =============================================================================== supervisor {local,ns_server_cluster_sup} started [{pid,<6524.126.0>}, {name,ns_server_sup}, {mfargs,{ns_server_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] PROGRESS REPORT <6524.7.0> 2011-11-22 17:48:01 =============================================================================== application ns_server started_at 'ns_1@127.0.0.1' INFO REPORT <6524.67.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:199: Mnesia table event: {write,{schema,'stats_archiver-@system-minute', [{name,'stats_archiver-@system-minute'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1321,984081,398506},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,6,<6524.177.0>}} INFO REPORT <6524.67.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:199: Mnesia table event: {write,{schema,'stats_archiver-@system-minute', [{name,'stats_archiver-@system-minute'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1321,984081,398506},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,6,<6524.177.0>}} INFO REPORT <6524.67.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:109: Creating table 'stats_archiver-@system-hour' INFO REPORT <6524.67.0> 2011-11-22 17:48:01 =============================================================================== 
ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:199: Mnesia table event: {write,{schema,'stats_archiver-@system-hour', [{name,'stats_archiver-@system-hour'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1321,984081,412208},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,7,<6524.186.0>}} INFO REPORT <6524.67.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:199: Mnesia table event: {write,{schema,'stats_archiver-@system-hour', [{name,'stats_archiver-@system-hour'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1321,984081,412208},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,7,<6524.186.0>}} INFO REPORT <6524.67.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:109: Creating table 'stats_archiver-@system-day' INFO REPORT <6524.67.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:199: Mnesia table event: {write,{schema,'stats_archiver-@system-day', [{name,'stats_archiver-@system-day'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1321,984081,451634},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,8,<6524.193.0>}} INFO REPORT <6524.67.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:199: Mnesia table event: {write,{schema,'stats_archiver-@system-day', [{name,'stats_archiver-@system-day'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1321,984081,451634},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,8,<6524.193.0>}} INFO REPORT <6524.67.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:109: Creating table 'stats_archiver-@system-week' INFO REPORT <6524.67.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:199: Mnesia table event: {write,{schema,'stats_archiver-@system-week', [{name,'stats_archiver-@system-week'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1321,984081,463122},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,9,<6524.200.0>}} INFO 
REPORT <6524.67.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:199: Mnesia table event: {write,{schema,'stats_archiver-@system-week', [{name,'stats_archiver-@system-week'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1321,984081,463122},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,9,<6524.200.0>}} INFO REPORT <6524.67.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:109: Creating table 'stats_archiver-@system-month' INFO REPORT <6524.67.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:199: Mnesia table event: {write,{schema,'stats_archiver-@system-month', [{name,'stats_archiver-@system-month'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1321,984081,501780},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,10,<6524.207.0>}} INFO REPORT <6524.67.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:199: Mnesia table event: {write,{schema,'stats_archiver-@system-month', [{name,'stats_archiver-@system-month'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1321,984081,501780},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,10,<6524.207.0>}} INFO REPORT <6524.67.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:109: Creating table 'stats_archiver-@system-year' INFO REPORT <6524.67.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:199: Mnesia table event: {write,{schema,'stats_archiver-@system-year', [{name,'stats_archiver-@system-year'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1321,984081,511107},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,11,<6524.214.0>}} INFO REPORT <6524.67.0> 2011-11-22 17:48:01 =============================================================================== ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:199: Mnesia table event: {write,{schema,'stats_archiver-@system-year', [{name,'stats_archiver-@system-year'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@127.0.0.1']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, 
{user_properties,[]}, {frag_properties,[]}, {cookie,{{1321,984081,511107},'ns_1@127.0.0.1'}}, {version,{{2,0},[]}}]}, {tid,11,<6524.214.0>}} PROGRESS REPORT <6524.223.0> 2011-11-22 17:48:04 =============================================================================== supervisor {local,inet_gethost_native_sup} started [{pid,<6524.224.0>},{mfa,{inet_gethost_native,init,[[]]}}] PROGRESS REPORT <6524.26.0> 2011-11-22 17:48:04 =============================================================================== supervisor {local,kernel_safe_sup} started [{pid,<6524.223.0>}, {name,inet_gethost_native_sup}, {mfargs,{inet_gethost_native,start_link,[]}}, {restart_type,temporary}, {shutdown,1000}, {child_type,worker}] INFO REPORT <6524.155.0> 2011-11-22 17:49:01 =============================================================================== ns_1@127.0.0.1:<6524.155.0>:ns_doctor:86: Current node statuses: [{'ns_1@127.0.0.1', [{last_heard,{1321,984136,360322}}, {active_buckets,[]}, {ready_buckets,[]}, {replication,[]}, {memory, [{total,23946024}, {processes,4910232}, {processes_used,4894792}, {system,19035792}, {atom,785401}, {atom_used,766564}, {binary,746376}, {code,7415109}, {ets,610232}]}, {system_stats, [{cpu_utilization_rate,0.25906735751295334}, {swap_total,0}, {swap_used,0}]}, {interesting_stats,[]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.5"}, {mnesia,"4.4.17"}, {kernel,"2.14.3"}, {sasl,"2.1.9.3"}, {ns_server,"1.7.2r-20-g6604356"}, {stdlib,"1.17.3"}]}, {system_arch,"x86_64-unknown-linux-gnu"}, {wall_clock,55}, {memory_data,{17946165248,959442944,{<6524.7.0>,513760}}}, {disk_data, [{"/",8256952,15}, {"/dev",8754936,1}, {"/run",3505112,1}, {"/run/lock",5120,0}, {"/run/shm",8762776,0}, {"/mnt",423135208,1}]}, {meminfo, <<"MemTotal: 17525552 kB\nMemFree: 16591000 kB\nBuffers: 28160 kB\nCached: 600836 kB\nSwapCached: 0 kB\nActive: 354772 kB\nInactive: 313548 kB\nActive(anon): 39460 kB\nInactive(anon): 208 kB\nActive(file): 315312 kB\nInactive(file): 313340 kB\nUnevictable: 0 kB\nMlocked: 0 kB\nSwapTotal: 0 kB\nSwapFree: 0 kB\nDirty: 56 kB\nWriteback: 0 kB\nAnonPages: 39396 kB\nMapped: 8836 kB\nShmem: 324 kB\nSlab: 54144 kB\nSReclaimable: 47396 kB\nSUnreclaim: 6748 kB\nKernelStack: 896 kB\nPageTables: 1928 kB\nNFS_Unstable: 0 kB\nBounce: 0 kB\nWritebackTmp: 0 kB\nCommitLimit: 8762776 kB\nCommitted_AS: 235440 kB\nVmallocTotal: 34359738367 kB\nVmallocUsed: 64000 kB\nVmallocChunk: 34359674252 kB\nHardwareCorrupted: 0 kB\nAnonHugePages: 0 kB\nHugePages_Total: 0\nHugePages_Free: 0\nHugePages_Rsvd: 0\nHugePages_Surp: 0\nHugepagesize: 2048 kB\nDirectMap4k: 17928192 kB\nDirectMap2M: 0 kB\n">>}, {system_memory_data, [{system_total_memory,17946165248}, {free_swap,0}, {total_swap,0}, {cached_memory,615256064}, {buffered_memory,28835840}, {free_memory,16989184000}, {total_memory,17946165248}]}, {statistics, [{wall_clock,{45631,1}}, {context_switches,{26635,0}}, {garbage_collection,{2353,4344909,0}}, {io,{{input,6839903},{output,1573639}}}, {reductions,{1997128,173692}}, {run_queue,0}, {runtime,{480,40}}]}]}] INFO REPORT <3.54.0> 2011-11-30 18:47:51 =============================================================================== nonode@nohost:<3.54.0>:log_os_info:25: OS type: {unix,linux} Version: {3,0,0} Runtime info: [{otp_release,"R14B02"}, {erl_version,"5.8.3"}, {erl_version_long, "Erlang R14B02 (erts-5.8.3) [source] [64-bit] [smp:2:2] [rq:2] [async-threads:16] [hipe] [kernel-poll:false]\n"}, {system_arch_raw,"x86_64-unknown-linux-gnu"}, {system_arch,"x86_64-unknown-linux-gnu"}, 
{localtime,{{2011,11,30},{18,47,51}}}, {memory, [{total,15883864}, {processes,1716256}, {processes_used,1701312}, {system,14167608}, {atom,452097}, {atom_used,423209}, {binary,698384}, {code,3363956}, {ets,279424}]}, {loaded, [ns_info,log_os_info,misc,ns_log_mf_h, ns_server_cluster_sup,timer,io_lib_fread,ns_server, cpu_sup,memsup,disksup,os_mon,io_lib_pretty,io_lib_format, io_lib,io,sasl_report,release_handler,calendar,overload, alarm_handler,sasl_report_tty_h,sasl,ns_bootstrap, file_io_server,orddict,erl_eval,c,error_logger_tty_h, queue,kernel_config,user,user_sup,supervisor_bridge, standard_error,ram_file,file,beam_lib,unicode,binary,ets, gb_sets,hipe_unified_loader,packages,code_server,code, file_server,net_kernel,global_group,erl_distribution, filename,inet_gethost_native,inet_parse,inet,inet_udp,os, inet_config,inet_db,global,gb_trees,rpc,supervisor,kernel, application_master,sys,application,gen_server,erl_parse, proplists,erl_scan,lists,application_controller,proc_lib, gen,gen_event,error_logger,heart,error_handler,erlang, erl_prim_loader,prim_zip,zlib,prim_file,prim_inet,init, otp_ring0]}, {applications, [{os_mon,"CPO CXC 138 46","2.2.5"}, {kernel,"ERTS CXC 138 10","2.14.3"}, {sasl,"SASL CXC 138 11","2.1.9.3"}, {ns_server,"Membase server","1.7.2r-20-g6604356"}, {stdlib,"ERTS CXC 138 10","1.17.3"}]}, {pre_loaded, [erlang,erl_prim_loader,prim_zip,zlib,prim_file,prim_inet, init,otp_ring0]}, {process_count,44}, {node,nonode@nohost}, {nodes,[]}, {registered, [rex,release_handler,inet_db,kernel_sup, ns_server_cluster_sup,code_server,global_name_server, overload,file_server_2,application_controller, alarm_handler,os_mon_sup,standard_error,init,cpu_sup, global_group,erl_prim_loader,sasl_sup,memsup,disksup, timer_server,standard_error_sup,user,sasl_safe_sup, error_logger,kernel_safe_sup]}, {cookie,nocookie}, {wordsize,8}, {wall_clock,0}]

INFO REPORT <3.54.0> 2011-11-30 18:47:51
===============================================================================
nonode@nohost:<3.54.0>:log_os_info:27: Manifest:
["bucket_engine 1.7.0-0-g721dff0 Linux-x86_64", "ep-engine 1.7.1.1-39-g4bd26a3 Linux-x86_64", "geocouch couchbase_1.1_geo-0-gd09068e Linux-x86_64", "icu4c 2635939 Linux-x86_64", "libconflate 1.7.0-0-gfee8f94 Linux-x86_64", "libmemcached 2cd40dc Linux-x86_64", "libvbucket 1.7.0-0-g4bd0aba Linux-x86_64", "manifest 1.7.0-30-gd83dca9 Linux-x86_64", "membase-cli 1.7.2-0-g1d9c77e Linux-x86_64", "membase-server 1.7.2r-20-g6604356 Linux-x86_64", "membasex 1.7.0-3-g30d5dba Linux-x86_64", "memcached membase-1.7.1-0-gf99c147 Linux-x86_64", "memcachetest 0.8.3-0-g88ae3b3 Linux-x86_64", "moxi 1.7.2-0-gd5076d9 Linux-x86_64", "ns_server 1.7.2-0-g4925ee0 Linux-x86_64", "otp OTP_R14B03-0-g4a5a758 Linux-x86_64", "portsigar 1.7.0-0-ga191e6c Linux-x86_64", "sigar sigar-1.6.4-406-ge1dcf32 Linux-x86_64", "spidermonkey a3c48c1 Linux-x86_64", "tlm 1.7.1-0-g535dadc Linux-x86_64", "vbucketmigrator 1.7.0-0-g0fdc96c Linux-x86_64"]

PROGRESS REPORT <3.54.0> 2011-11-30 18:47:51
===============================================================================
supervisor {local,ns_server_cluster_sup} started [{pid,<3.56.0>}, {name,timeout_diag_logger}, {mfargs,{timeout_diag_logger,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

INFO REPORT <3.57.0> 2011-11-30 18:47:51
===============================================================================
reading ip config from "/opt/membase/var/lib/membase/ip"

ERROR REPORT <3.57.0> 2011-11-30 18:47:51
===============================================================================
Got error:einval. Ignoring bad address:[]

INFO REPORT <3.57.0> 2011-11-30 18:47:51
===============================================================================
nonode@nohost:<3.57.0>:dist_manager:105: Attempting to bring up net_kernel with name 'ns_1@127.0.0.1'

PROGRESS REPORT <3.58.0> 2011-11-30 18:47:51
===============================================================================
supervisor {local,net_sup} started [{pid,<3.59.0>}, {name,erl_epmd}, {mfargs,{erl_epmd,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}]

PROGRESS REPORT <3.58.0> 2011-11-30 18:47:51
===============================================================================
supervisor {local,net_sup} started [{pid,<3.60.0>}, {name,auth}, {mfargs,{auth,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}]

PROGRESS REPORT <6524.58.0> 2011-11-30 18:47:51
===============================================================================
supervisor {local,net_sup} started [{pid,<6524.61.0>}, {name,net_kernel}, {mfargs,{net_kernel,start_link,[['ns_1@127.0.0.1',longnames]]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}]

PROGRESS REPORT <6524.11.0> 2011-11-30 18:47:51
===============================================================================
supervisor {local,kernel_sup} started [{pid,<6524.58.0>}, {name,net_sup_dynamic}, {mfargs,{erl_distribution,start_link, [['ns_1@127.0.0.1',longnames]]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,supervisor}]

PROGRESS REPORT <6524.54.0> 2011-11-30 18:47:51
===============================================================================
supervisor {local,ns_server_cluster_sup} started [{pid,<6524.57.0>}, {name,dist_manager}, {mfargs,{dist_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

PROGRESS REPORT <6524.54.0> 2011-11-30 18:47:51
===============================================================================
supervisor {local,ns_server_cluster_sup} started [{pid,<6524.64.0>}, {name,ns_cluster}, {mfargs,{ns_cluster,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}]

PROGRESS REPORT <6524.65.0> 2011-11-30 18:47:51
===============================================================================
supervisor {local,mb_mnesia_sup} started [{pid,<6524.66.0>}, {name,mb_mnesia_events}, {mfargs,{gen_event,start_link,[{local,mb_mnesia_events}]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}]

PROGRESS REPORT <6524.72.0> 2011-11-30 18:47:51
===============================================================================
supervisor {local,mnesia_sup} started [{pid,<6524.73.0>}, {name,mnesia_event}, {mfargs,{mnesia_sup,start_event,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}]

PROGRESS REPORT <6524.74.0> 2011-11-30 18:47:51
===============================================================================
supervisor {local,mnesia_kernel_sup} started [{pid,<6524.75.0>}, {name,mnesia_monitor}, {mfargs,{mnesia_monitor,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}]

PROGRESS REPORT <6524.74.0> 2011-11-30 18:47:51
===============================================================================
supervisor {local,mnesia_kernel_sup} started [{pid,<6524.76.0>}, {name,mnesia_subscr}, {mfargs,{mnesia_subscr,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}]

PROGRESS REPORT <6524.74.0>
2011-11-30 18:47:51
===============================================================================
supervisor {local,mnesia_kernel_sup} started [{pid,<6524.77.0>}, {name,mnesia_locker}, {mfargs,{mnesia_locker,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}]

PROGRESS REPORT <6524.74.0> 2011-11-30 18:47:51
===============================================================================
supervisor {local,mnesia_kernel_sup} started [{pid,<6524.78.0>}, {name,mnesia_recover}, {mfargs,{mnesia_recover,start,[]}}, {restart_type,permanent}, {shutdown,180000}, {child_type,worker}]

PROGRESS REPORT <6524.26.0> 2011-11-30 18:47:51
===============================================================================
supervisor {local,kernel_safe_sup} started [{pid,<6524.80.0>}, {name,dets_sup}, {mfargs,{dets_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,supervisor}]

PROGRESS REPORT <6524.26.0> 2011-11-30 18:47:51
===============================================================================
supervisor {local,kernel_safe_sup} started [{pid,<6524.81.0>}, {name,dets}, {mfargs,{dets_server,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}]

PROGRESS REPORT <6524.26.0> 2011-11-30 18:47:51
===============================================================================
supervisor {local,kernel_safe_sup} started [{pid,<6524.85.0>}, {name,disk_log_sup}, {mfargs,{disk_log_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,supervisor}]

PROGRESS REPORT <6524.26.0> 2011-11-30 18:47:51
===============================================================================
supervisor {local,kernel_safe_sup} started [{pid,<6524.86.0>}, {name,disk_log_server}, {mfargs,{disk_log_server,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}]

PROGRESS REPORT <6524.74.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,mnesia_kernel_sup} started [{pid,<6524.79.0>}, {name,mnesia_tm}, {mfargs,{mnesia_tm,start,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}]

PROGRESS REPORT <6524.74.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,mnesia_kernel_sup} started [{pid,<6524.106.0>}, {name,mnesia_checkpoint_sup}, {mfargs,{mnesia_checkpoint_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]

PROGRESS REPORT <6524.74.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,mnesia_kernel_sup} started [{pid,<6524.107.0>}, {name,mnesia_snmp_sup}, {mfargs,{mnesia_snmp_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]

PROGRESS REPORT <6524.74.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,mnesia_kernel_sup} started [{pid,<6524.108.0>}, {name,mnesia_controller}, {mfargs,{mnesia_controller,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}]

PROGRESS REPORT <6524.74.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,mnesia_kernel_sup} started [{pid,<6524.109.0>}, {name,mnesia_late_loader}, {mfargs,{mnesia_late_loader,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}]

PROGRESS REPORT <6524.72.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,mnesia_sup} started [{pid,<6524.74.0>}, {name,mnesia_kernel_sup}, {mfargs,{mnesia_kernel_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]

PROGRESS REPORT <6524.7.0> 2011-11-30 18:47:52
===============================================================================
application mnesia started_at 'ns_1@127.0.0.1'

INFO REPORT <6524.67.0> 2011-11-30 18:47:52
===============================================================================
ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:417: Using existing disk schema on ['ns_1@127.0.0.1'].

INFO REPORT <6524.67.0> 2011-11-30 18:47:52
===============================================================================
ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:427: Have local copy of 'stats_archiver-@system-year'

INFO REPORT <6524.67.0> 2011-11-30 18:47:52
===============================================================================
ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:427: Have local copy of 'stats_archiver-@system-month'

INFO REPORT <6524.67.0> 2011-11-30 18:47:52
===============================================================================
ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:427: Have local copy of 'stats_archiver-@system-week'

INFO REPORT <6524.67.0> 2011-11-30 18:47:52
===============================================================================
ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:427: Have local copy of 'stats_archiver-@system-day'

INFO REPORT <6524.67.0> 2011-11-30 18:47:52
===============================================================================
ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:427: Have local copy of 'stats_archiver-@system-hour'

INFO REPORT <6524.67.0> 2011-11-30 18:47:52
===============================================================================
ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:427: Have local copy of 'stats_archiver-@system-minute'

INFO REPORT <6524.67.0> 2011-11-30 18:47:52
===============================================================================
ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:427: Have local copy of local_config

INFO REPORT <6524.67.0> 2011-11-30 18:47:52
===============================================================================
ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:427: Have local copy of cluster

INFO REPORT <6524.67.0> 2011-11-30 18:47:52
===============================================================================
ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:245: Current config:
[{access_module,mnesia}, {auto_repair,true}, {backup_module,mnesia_backup}, {checkpoints,[]}, {db_nodes,['ns_1@127.0.0.1']}, {debug,verbose}, {directory,"/opt/membase/var/lib/membase/mnesia"}, {dump_log_load_regulation,false}, {dump_log_time_threshold,180000}, {dump_log_update_in_place,true}, {dump_log_write_threshold,1000}, {embedded_mnemosyne,false}, {event_module,mnesia_event}, {extra_db_nodes,[]}, {fallback_activated,false}, {held_locks,[]}, {ignore_fallback_at_startup,false}, {fallback_error_function,{mnesia,lkill}}, {is_running,yes}, {local_tables,['stats_archiver-@system-minute','stats_archiver-@system-year', 'stats_archiver-@system-week','stats_archiver-@system-month', 'stats_archiver-@system-hour','stats_archiver-@system-day', local_config,cluster,schema]}, {lock_queue,[]}, {log_version,"4.3"}, {master_node_tables,[]}, {max_wait_for_decision,10000}, {protocol_version,{7,6}}, {running_db_nodes,['ns_1@127.0.0.1']}, {schema_location,opt_disc}, {schema_version,{3,0}},
{subscribers,[<6524.73.0>,<6524.67.0>]}, {tables,['stats_archiver-@system-year','stats_archiver-@system-month', 'stats_archiver-@system-week','stats_archiver-@system-day', 'stats_archiver-@system-hour','stats_archiver-@system-minute', local_config,cluster,schema]}, {transaction_commits,2}, {transaction_failures,8}, {transaction_log_writes,0}, {transaction_restarts,0}, {transactions,[]}, {use_dir,true}, {core_dir,false}, {no_table_loaders,2}, {dc_dump_limit,4}, {send_compressed,0}, {version,"4.4.17"}]
Peers: ['ns_1@127.0.0.1']

PROGRESS REPORT <6524.65.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,mb_mnesia_sup} started [{pid,<6524.67.0>}, {name,mb_mnesia}, {mfargs,{mb_mnesia,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}]

PROGRESS REPORT <6524.54.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,ns_server_cluster_sup} started [{pid,<6524.65.0>}, {name,mb_mnesia_sup}, {mfargs,{mb_mnesia_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]

INFO REPORT <6524.157.0> 2011-11-30 18:47:52
===============================================================================
loading static ns_config from "/opt/membase/etc/membase/config"

PROGRESS REPORT <6524.157.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,ns_config_sup} started [{pid,<6524.158.0>}, {name,ns_config_events}, {mfargs,{gen_event,start_link,[{local,ns_config_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

PROGRESS REPORT <6524.157.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,ns_config_sup} started [{pid,<6524.159.0>}, {name,ns_config}, {mfargs,{ns_config,start_link, ["/opt/membase/etc/membase/config", ns_config_default]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

PROGRESS REPORT <6524.157.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,ns_config_sup} started [{pid,<6524.161.0>}, {name,ns_config_remote}, {mfargs,{ns_config_replica,start_link,[{local,ns_config_remote}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

INFO REPORT <6524.162.0> 2011-11-30 18:47:52
===============================================================================
isasl_sync init: ["/opt/membase/var/lib/membase/data/isasl.pw","_admin", "_admin"]

INFO REPORT <6524.162.0> 2011-11-30 18:47:52
===============================================================================
isasl_sync init buckets: []

INFO REPORT <6524.162.0> 2011-11-30 18:47:52
===============================================================================
Writing isasl passwd file: "/opt/membase/var/lib/membase/data/isasl.pw"

PROGRESS REPORT <6524.157.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,ns_config_sup} started [{pid,<6524.162.0>}, {name,ns_config_isasl_sync}, {mfargs,{ns_config_isasl_sync,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

PROGRESS REPORT <6524.157.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,ns_config_sup} started [{pid,<6524.164.0>}, {name,ns_config_log}, {mfargs,{ns_config_log,start_link,[]}},
{restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

PROGRESS REPORT <6524.54.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,ns_server_cluster_sup} started [{pid,<6524.157.0>}, {name,ns_config_sup}, {mfargs,{ns_config_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]

PROGRESS REPORT <6524.165.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,ns_server_sup} started [{pid,<6524.166.0>}, {name,ns_log}, {mfargs,{ns_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

PROGRESS REPORT <6524.165.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,ns_server_sup} started [{pid,<6524.167.0>}, {name,ns_log_events}, {mfargs,{gen_event,start_link,[{local,ns_log_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

PROGRESS REPORT <6524.168.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,ns_node_disco_sup} started [{pid,<6524.169.0>}, {name,ns_node_disco_events}, {mfargs,{gen_event,start_link,[{local,ns_node_disco_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

INFO REPORT <6524.170.0> 2011-11-30 18:47:52
===============================================================================
Initting ns_node_disco with []

INFO REPORT <6524.171.0> 2011-11-30 18:47:52
===============================================================================
ns_node_disco cookie_sync

INFO REPORT <6524.171.0> 2011-11-30 18:47:52
===============================================================================
ns_log: logging ns_node_disco:2:Node 'ns_1@127.0.0.1' synchronized otp cookie sgzcoaesvptwzitb from cluster

INFO REPORT <6524.171.0> 2011-11-30 18:47:52
===============================================================================
ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: sgzcoaesvptwzitb

INFO REPORT <6524.171.0> 2011-11-30 18:47:52
===============================================================================
ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: sgzcoaesvptwzitb

PROGRESS REPORT <6524.168.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,ns_node_disco_sup} started [{pid,<6524.170.0>}, {name,ns_node_disco}, {mfargs,{ns_node_disco,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

PROGRESS REPORT <6524.168.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,ns_node_disco_sup} started [{pid,<6524.173.0>}, {name,ns_node_disco_log}, {mfargs,{ns_node_disco_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

PROGRESS REPORT <6524.168.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,ns_node_disco_sup} started [{pid,<6524.174.0>}, {name,ns_node_disco_conf_events}, {mfargs,{ns_node_disco_conf_events,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

INFO REPORT <6524.175.0> 2011-11-30 18:47:52
===============================================================================
ns_1@127.0.0.1:<6524.175.0>:ns_config_rep:56: init pulling

INFO REPORT <6524.175.0>
2011-11-30 18:47:52
===============================================================================
ns_1@127.0.0.1:<6524.175.0>:ns_config_rep:58: init pushing

INFO REPORT <6524.175.0> 2011-11-30 18:47:52
===============================================================================
ns_1@127.0.0.1:<6524.175.0>:ns_config_rep:62: init reannouncing

INFO REPORT <6524.158.0> 2011-11-30 18:47:52
===============================================================================
config change: auto_failover_cfg -> [{enabled,false},{timeout,30},{max_nodes,1},{count,0}]

INFO REPORT <6524.158.0> 2011-11-30 18:47:52
===============================================================================
config change: buckets -> [{configs,[]}]

INFO REPORT <6524.158.0> 2011-11-30 18:47:52
===============================================================================
config change: email_alerts -> [{recipients,["root@localhost"]}, {sender,"membase@localhost"}, {enabled,true}, {email_server,[{user,[]}, {pass,[]}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts,[auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small]}]

INFO REPORT <6524.158.0> 2011-11-30 18:47:52
===============================================================================
config change: memory_quota -> 16090

INFO REPORT <6524.158.0> 2011-11-30 18:47:52
===============================================================================
ns_node_disco_conf_events config on nodes_wanted

INFO REPORT <6524.158.0> 2011-11-30 18:47:52
===============================================================================
config change: nodes_wanted -> ['ns_1@127.0.0.1']

INFO REPORT <6524.158.0> 2011-11-30 18:47:52
===============================================================================
ns_node_disco_conf_events config on otp

INFO REPORT <6524.158.0> 2011-11-30 18:47:52
===============================================================================
config change: otp -> [{cookie,sgzcoaesvptwzitb}]

INFO REPORT <6524.177.0> 2011-11-30 18:47:52
===============================================================================
ns_node_disco cookie_sync

INFO REPORT <6524.158.0> 2011-11-30 18:47:52
===============================================================================
config change: port_servers -> [{moxi,"/opt/membase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{rest,port}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/membase/bin/memcached", ["-X","/opt/membase/lib/memcached/stdin_term_handler.so","-p", {"~B",[port]}, "-E","/opt/membase/lib/memcached/bucket_engine.so","-B","binary", "-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]

INFO REPORT <6524.178.0>
2011-11-30 18:47:52
===============================================================================
ns_node_disco cookie_sync

INFO REPORT <6524.158.0> 2011-11-30 18:47:52
===============================================================================
config change: replication -> [{enabled,true}]

INFO REPORT <6524.158.0> 2011-11-30 18:47:52
===============================================================================
config change: rest -> [{port,8091}]

INFO REPORT <6524.158.0> 2011-11-30 18:47:52
===============================================================================
config change: rest_creds -> ********

INFO REPORT <6524.158.0> 2011-11-30 18:47:52
===============================================================================
config change: {node,'ns_1@127.0.0.1',config_version} -> {1,7,2}

INFO REPORT <6524.158.0> 2011-11-30 18:47:52
===============================================================================
config change: {node,'ns_1@127.0.0.1',isasl} -> [{path,"/opt/membase/var/lib/membase/data/isasl.pw"}]

INFO REPORT <6524.158.0> 2011-11-30 18:47:52
===============================================================================
config change: {node,'ns_1@127.0.0.1',membership} -> active

INFO REPORT <6524.158.0> 2011-11-30 18:47:52
===============================================================================
config change: {node,'ns_1@127.0.0.1',memcached} -> [{port,11210}, {dbdir,"/opt/membase/var/lib/membase/data"}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine,"/opt/membase/lib/memcached/bucket_engine.so"}, {engines, [{membase, [{engine,"/opt/membase/lib/memcached/ep.so"}, {initfile,"/opt/membase/etc/membase/init.sql"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false;shardpattern=%d/%b-%i.mb;db_strategy=multiMTVBDB"}]}, {memcached, [{engine,"/opt/membase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {verbosity,[]}]

INFO REPORT <6524.158.0> 2011-11-30 18:47:52
===============================================================================
config change: {node,'ns_1@127.0.0.1',moxi} -> [{port,11211},{verbosity,[]}]

INFO REPORT <6524.158.0> 2011-11-30 18:47:52
===============================================================================
config change: {node,'ns_1@127.0.0.1',ns_log} -> [{filename,"/opt/membase/var/lib/membase/data/ns_log"}]

INFO REPORT <6524.158.0> 2011-11-30 18:47:52
===============================================================================
config change: {node,'ns_1@127.0.0.1',rest} -> [{port,8091},{port_meta,global}]

INFO REPORT <6524.158.0> 2011-11-30 18:47:52
===============================================================================
ns_node_disco_conf_events config all

INFO REPORT <6524.177.0> 2011-11-30 18:47:52
===============================================================================
ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: sgzcoaesvptwzitb

INFO REPORT <6524.178.0> 2011-11-30 18:47:52
===============================================================================
ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: sgzcoaesvptwzitb

INFO REPORT <6524.177.0> 2011-11-30 18:47:52
===============================================================================
ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: sgzcoaesvptwzitb

INFO REPORT <6524.178.0> 2011-11-30 18:47:52
===============================================================================
ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'],
with cookie: sgzcoaesvptwzitb

INFO REPORT <6524.175.0> 2011-11-30 18:47:52
===============================================================================
Pushing config

PROGRESS REPORT <6524.168.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,ns_node_disco_sup} started [{pid,<6524.175.0>}, {name,ns_config_rep}, {mfargs,{ns_config_rep,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

PROGRESS REPORT <6524.165.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,ns_server_sup} started [{pid,<6524.168.0>}, {name,ns_node_disco_sup}, {mfargs,{ns_node_disco_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]

PROGRESS REPORT <6524.165.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,ns_server_sup} started [{pid,<6524.180.0>}, {name,ns_tick_event}, {mfargs,{gen_event,start_link,[{local,ns_tick_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

INFO REPORT <6524.175.0> 2011-11-30 18:47:52
===============================================================================
Pushing config done

INFO REPORT <6524.182.0> 2011-11-30 18:47:52
===============================================================================
ns_1@127.0.0.1:<6524.182.0>:mb_master:97: I'm the only node, so I'm the master.

INFO REPORT <6524.183.0> 2011-11-30 18:47:52
===============================================================================
ns_1@127.0.0.1:<6524.183.0>:misc:785: start_singleton(gen_fsm, ns_orchestrator, [], []): started as <6524.184.0> on 'ns_1@127.0.0.1'

PROGRESS REPORT <6524.183.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,mb_master_sup} started [{pid,<6524.184.0>}, {name,ns_orchestrator}, {mfargs,{ns_orchestrator,start_link,[]}}, {restart_type,permanent}, {shutdown,20}, {child_type,worker}]

INFO REPORT <6524.183.0> 2011-11-30 18:47:52
===============================================================================
ns_1@127.0.0.1:<6524.183.0>:misc:785: start_singleton(gen_server, ns_tick, [], []): started as <6524.185.0> on 'ns_1@127.0.0.1'

PROGRESS REPORT <6524.183.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,mb_master_sup} started [{pid,<6524.185.0>}, {name,ns_tick}, {mfargs,{ns_tick,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}]

INFO REPORT <6524.186.0> 2011-11-30 18:47:52
===============================================================================
ns_1@127.0.0.1:<6524.186.0>:auto_failover:120: init auto_failover.
INFO REPORT <6524.183.0> 2011-11-30 18:47:52
===============================================================================
ns_1@127.0.0.1:<6524.183.0>:misc:785: start_singleton(gen_server, auto_failover, [], []): started as <6524.186.0> on 'ns_1@127.0.0.1'

PROGRESS REPORT <6524.183.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,mb_master_sup} started [{pid,<6524.186.0>}, {name,auto_failover}, {mfargs,{auto_failover,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}]

PROGRESS REPORT <6524.165.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,ns_server_sup} started [{pid,<6524.182.0>}, {name,mb_master}, {mfargs,{mb_master,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]

PROGRESS REPORT <6524.165.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,ns_server_sup} started [{pid,<6524.187.0>}, {name,buckets_events}, {mfargs,{gen_event,start_link,[{local,buckets_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

PROGRESS REPORT <6524.188.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,ns_mail_sup} started [{pid,<6524.189.0>}, {name,ns_mail}, {mfargs,{ns_mail,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

PROGRESS REPORT <6524.188.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,ns_mail_sup} started [{pid,<6524.190.0>}, {name,ns_mail_log}, {mfargs,{ns_mail_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

PROGRESS REPORT <6524.165.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,ns_server_sup} started [{pid,<6524.188.0>}, {name,ns_mail_sup}, {mfargs,{ns_mail_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]

INFO REPORT <6524.167.0> 2011-11-30 18:47:52
===============================================================================
ns_1@127.0.0.1:<6524.167.0>:ns_mail_log:45: ns_mail_log started up

PROGRESS REPORT <6524.165.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,ns_server_sup} started [{pid,<6524.191.0>}, {name,ns_heart}, {mfargs,{ns_heart,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

PROGRESS REPORT <6524.165.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,ns_server_sup} started [{pid,<6524.193.0>}, {name,ns_doctor}, {mfargs,{ns_doctor,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

ERROR REPORT <6524.191.0> 2011-11-30 18:47:52
===============================================================================
ns_1@127.0.0.1:<6524.191.0>:ns_heart:137: Failed to grab system stats: {'EXIT',{noproc,{gen_server,call, [{'stats_reader-@system','ns_1@127.0.0.1'}, {latest,"minute"}]}}}

INFO REPORT <6524.193.0> 2011-11-30 18:47:52
===============================================================================
ns_1@127.0.0.1:<6524.193.0>:ns_doctor:82: Got initial status
[{'ns_1@127.0.0.1', [{last_heard,{1322,678872,469864}}, {active_buckets,[]}, {ready_buckets,[]},
{replication,[]}, {memory, [{total,22685120}, {processes,4295040}, {processes_used,4282472}, {system,18390080}, {atom,730537}, {atom_used,700654}, {binary,889632}, {code,6751955}, {ets,557264}]}, {system_stats, [{cpu_utilization_rate,0}, {swap_total,0}, {swap_used,0}]}, {interesting_stats,[]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.5"}, {mnesia,"4.4.17"}, {kernel,"2.14.3"}, {sasl,"2.1.9.3"}, {ns_server,"1.7.2r-20-g6604356"}, {stdlib,"1.17.3"}]}, {system_arch,"x86_64-unknown-linux-gnu"}, {wall_clock,1}, {memory_data,{17946181632,407089152,{<6524.7.0>,426272}}}, {disk_data, [{"/",8256952,15}, {"/dev",8754940,1}, {"/run",3505116,1}, {"/run/lock",5120,0}, {"/run/shm",8762784,0}, {"/mnt",423135208,1}]}, {meminfo, <<"MemTotal: 17525568 kB\nMemFree: 17120336 kB\nBuffers: 98228 kB\nCached: 50488 kB\nSwapCached: 0 kB\nActive: 136372 kB\nInactive: 41396 kB\nActive(anon): 29076 kB\nInactive(anon): 140 kB\nActive(file): 107296 kB\nInactive(file): 41256 kB\nUnevictable: 0 kB\nMlocked: 0 kB\nSwapTotal: 0 kB\nSwapFree: 0 kB\nDirty: 2228 kB\nWriteback: 0 kB\nAnonPages: 29128 kB\nMapped: 6272 kB\nShmem: 156 kB\nSlab: 17776 kB\nSReclaimable: 12212 kB\nSUnreclaim: 5564 kB\nKernelStack: 720 kB\nPageTables: 1452 kB\nNFS_Unstable: 0 kB\nBounce: 0 kB\nWritebackTmp: 0 kB\nCommitLimit: 8762784 kB\nCommitted_AS: 101212 kB\nVmallocTotal: 34359738367 kB\nVmallocUsed: 64000 kB\nVmallocChunk: 34359674252 kB\nHardwareCorrupted: 0 kB\nAnonHugePages: 0 kB\nHugePages_Total: 0\nHugePages_Free: 0\nHugePages_Rsvd: 0\nHugePages_Surp: 0\nHugepagesize: 2048 kB\nDirectMap4k: 17920000 kB\nDirectMap2M: 0 kB\n">>}, {system_memory_data, [{system_total_memory,17946181632}, {free_swap,0}, {total_swap,0}, {cached_memory,51699712}, {buffered_memory,100585472}, {free_memory,17531224064}, {total_memory,17946181632}]}, {statistics, [{wall_clock,{1613,1268}}, {context_switches,{8099,0}}, {garbage_collection,{1208,2652333,0}}, {io,{{input,6510238},{output,1536283}}}, {reductions,{1352936,1352936}}, {run_queue,0}, {runtime,{320,320}}]}]}]

PROGRESS REPORT <6524.195.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,menelaus_sup} started [{pid,<6524.196.0>}, {name,menelaus_web}, {mfargs,{menelaus_web,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}]

PROGRESS REPORT <6524.195.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,menelaus_sup} started [{pid,<6524.198.0>}, {name,menelaus_event}, {mfargs,{menelaus_event,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}]

PROGRESS REPORT <6524.195.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,menelaus_sup} started [{pid,<6524.199.0>}, {name,hot_keys_keeper}, {mfargs,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}]

PROGRESS REPORT <6524.195.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,menelaus_sup} started [{pid,<6524.200.0>}, {name,menelaus_web_alerts_srv}, {mfargs,{menelaus_web_alerts_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}]

INFO REPORT <6524.165.0> 2011-11-30 18:47:52
===============================================================================
ns_log: logging menelaus_sup:1:Membase Server
has started on web port 8091 on node 'ns_1@127.0.0.1'.

PROGRESS REPORT <6524.165.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,ns_server_sup} started [{pid,<6524.195.0>}, {name,menelaus}, {mfargs,{menelaus_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]

PROGRESS REPORT <6524.201.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,ns_port_sup} started [{pid,<6524.202.0>}, {name,ns_port_init}, {mfargs,{ns_port_init,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}]

INFO REPORT <6524.203.0> 2011-11-30 18:47:52
===============================================================================
starting ns_port_server with delay of 5000

PROGRESS REPORT <6524.201.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,ns_port_sup} started [{pid,<6524.203.0>}, {name, {moxi,"/opt/membase/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",[]}, {"MOXI_SASL_PLAIN_PWD",[]}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]}}, {mfargs, {supervisor_cushion,start_link, [moxi,5000,ns_port_server,start_link, [moxi,"/opt/membase/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",[]}, {"MOXI_SASL_PLAIN_PWD",[]}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]]]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}]

INFO REPORT <6524.205.0> 2011-11-30 18:47:52
===============================================================================
starting ns_port_server with delay of 5000

PROGRESS REPORT <6524.201.0> 2011-11-30 18:47:52
===============================================================================
supervisor {local,ns_port_sup} started [{pid,<6524.205.0>}, {name, {memcached,"/opt/membase/bin/memcached", ["-X","/opt/membase/lib/memcached/stdin_term_handler.so", "-p","11210","-E", "/opt/membase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", "admin=_admin;default_bucket_name=default;auto_create=false", []], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE", "/opt/membase/var/lib/membase/data/isasl.pw"}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status, port_server_send_eol,stream]}}, {mfargs, {supervisor_cushion,start_link, [memcached,5000,ns_port_server,start_link, [memcached,"/opt/membase/bin/memcached", ["-X","/opt/membase/lib/memcached/stdin_term_handler.so", "-p","11210","-E", "/opt/membase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e",
"admin=_admin;default_bucket_name=default;auto_create=false", []], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE", "/opt/membase/var/lib/membase/data/isasl.pw"}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status, port_server_send_eol,stream]]]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}] PROGRESS REPORT <6524.165.0> 2011-11-30 18:47:52 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<6524.201.0>}, {name,ns_port_sup}, {mfargs,{ns_port_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,60000}, {child_type,worker}] PROGRESS REPORT <6524.165.0> 2011-11-30 18:47:52 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<6524.207.0>}, {name,ns_stats_event}, {mfargs,{gen_event,start_link,[{local,ns_stats_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <6524.165.0> 2011-11-30 18:47:52 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<6524.208.0>}, {name,ns_bucket_worker}, {mfargs,{work_queue,start_link,[ns_bucket_worker]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <6524.165.0> 2011-11-30 18:47:52 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<6524.209.0>}, {name,ns_bucket_sup}, {mfargs,{ns_bucket_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] PROGRESS REPORT <6524.165.0> 2011-11-30 18:47:52 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<6524.210.0>}, {name,system_stats_collector}, {mfargs,{system_stats_collector,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <6524.165.0> 2011-11-30 18:47:52 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<6524.211.0>}, {name,{stats_archiver,"@system"}}, {mfargs,{stats_archiver,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <6524.165.0> 2011-11-30 18:47:52 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<6524.212.0>}, {name,{stats_reader,"@system"}}, {mfargs,{stats_reader,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <6524.165.0> 2011-11-30 18:47:52 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<6524.213.0>}, {name,ns_moxi_sup_work_queue}, {mfargs,{work_queue,start_link,[ns_moxi_sup_work_queue]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <6524.165.0> 2011-11-30 18:47:52 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<6524.214.0>}, {name,ns_moxi_sup}, {mfargs,{ns_moxi_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] PROGRESS REPORT <6524.54.0> 2011-11-30 18:47:52 =============================================================================== supervisor {local,ns_server_cluster_sup} started [{pid,<6524.165.0>}, 
{name,ns_server_sup}, {mfargs,{ns_server_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]

PROGRESS REPORT <6524.7.0> 2011-11-30 18:47:52
===============================================================================
application ns_server started_at 'ns_1@127.0.0.1'

PROGRESS REPORT <6524.219.0> 2011-11-30 18:47:55
===============================================================================
supervisor {local,inet_gethost_native_sup} started [{pid,<6524.220.0>},{mfa,{inet_gethost_native,init,[[]]}}]

PROGRESS REPORT <6524.26.0> 2011-11-30 18:47:55
===============================================================================
supervisor {local,kernel_safe_sup} started [{pid,<6524.219.0>}, {name,inet_gethost_native_sup}, {mfargs,{inet_gethost_native,start_link,[]}}, {restart_type,temporary}, {shutdown,1000}, {child_type,worker}]

INFO REPORT <6524.193.0> 2011-11-30 18:48:52
===============================================================================
ns_1@127.0.0.1:<6524.193.0>:ns_doctor:86: Current node statuses:
[{'ns_1@127.0.0.1', [{last_heard,{1322,678932,271947}}, {active_buckets,[]}, {ready_buckets,[]}, {replication,[]}, {memory, [{total,24546640}, {processes,5462376}, {processes_used,5446936}, {system,19084264}, {atom,779745}, {atom_used,759280}, {binary,916168}, {code,7280616}, {ets,632168}]}, {system_stats, [{cpu_utilization_rate,0.24630541871921183}, {swap_total,0}, {swap_used,0}]}, {interesting_stats,[]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.5"}, {mnesia,"4.4.17"}, {kernel,"2.14.3"}, {sasl,"2.1.9.3"}, {ns_server,"1.7.2r-20-g6604356"}, {stdlib,"1.17.3"}]}, {system_arch,"x86_64-unknown-linux-gnu"}, {wall_clock,61}, {memory_data,{17946181632,424046592,{<6524.7.0>,601336}}}, {disk_data, [{"/",8256952,15}, {"/dev",8754940,1}, {"/run",3505116,1}, {"/run/lock",5120,0}, {"/run/shm",8762784,0}, {"/mnt",423135208,1}]}, {meminfo, <<"MemTotal: 17525568 kB\nMemFree: 17111592 kB\nBuffers: 98252 kB\nCached: 53796 kB\nSwapCached: 0 kB\nActive: 141944 kB\nInactive: 43528 kB\nActive(anon): 33448 kB\nInactive(anon): 140 kB\nActive(file): 108496 kB\nInactive(file): 43388 kB\nUnevictable: 0 kB\nMlocked: 0 kB\nSwapTotal: 0 kB\nSwapFree: 0 kB\nDirty: 16 kB\nWriteback: 0 kB\nAnonPages: 33492 kB\nMapped: 7356 kB\nShmem: 156 kB\nSlab: 18372 kB\nSReclaimable: 12572 kB\nSUnreclaim: 5800 kB\nKernelStack: 856 kB\nPageTables: 1640 kB\nNFS_Unstable: 0 kB\nBounce: 0 kB\nWritebackTmp: 0 kB\nCommitLimit: 8762784 kB\nCommitted_AS: 224452 kB\nVmallocTotal: 34359738367 kB\nVmallocUsed: 64000 kB\nVmallocChunk: 34359674252 kB\nHardwareCorrupted: 0 kB\nAnonHugePages: 0 kB\nHugePages_Total: 0\nHugePages_Free: 0\nHugePages_Rsvd: 0\nHugePages_Surp: 0\nHugepagesize: 2048 kB\nDirectMap4k: 17920000 kB\nDirectMap2M: 0 kB\n">>}, {system_memory_data, [{system_total_memory,17946181632}, {free_swap,0}, {total_swap,0}, {cached_memory,55087104}, {buffered_memory,100610048}, {free_memory,17522270208}, {total_memory,17946181632}]}, {statistics, [{wall_clock,{46614,1}}, {context_switches,{13514,0}}, {garbage_collection,{2180,4374458,0}}, {io,{{input,7005512},{output,1624996}}}, {reductions,{2029549,206674}}, {run_queue,0}, {runtime,{450,30}}]}]}]

INFO REPORT <6524.193.0> 2011-11-30 18:49:52
===============================================================================
ns_1@127.0.0.1:<6524.193.0>:ns_doctor:86: Current node statuses:
[{'ns_1@127.0.0.1', [{last_heard,{1322,678992,272008}}, {active_buckets,[]}, {ready_buckets,[]}, {replication,[]}, {memory,
[{total,25672032}, {processes,6307456}, {processes_used,6296928}, {system,19364576}, {atom,780553}, {atom_used,760742}, {binary,1114312}, {code,7314281}, {ets,664080}]}, {system_stats, [{cpu_utilization_rate,0.24937655860349128}, {swap_total,0}, {swap_used,0}]}, {interesting_stats,[]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.5"}, {mnesia,"4.4.17"}, {kernel,"2.14.3"}, {sasl,"2.1.9.3"}, {ns_server,"1.7.2r-20-g6604356"}, {stdlib,"1.17.3"}]}, {system_arch,"x86_64-unknown-linux-gnu"}, {wall_clock,121}, {memory_data,{17946181632,426209280,{<6524.7.0>,601336}}}, {disk_data, [{"/",8256952,15}, {"/dev",8754940,1}, {"/run",3505116,1}, {"/run/lock",5120,0}, {"/run/shm",8762784,0}, {"/mnt",423135208,1}]}, {meminfo, <<"MemTotal: 17525568 kB\nMemFree: 17111476 kB\nBuffers: 98252 kB\nCached: 53876 kB\nSwapCached: 0 kB\nActive: 142172 kB\nInactive: 43552 kB\nActive(anon): 33608 kB\nInactive(anon): 140 kB\nActive(file): 108564 kB\nInactive(file): 43412 kB\nUnevictable: 0 kB\nMlocked: 0 kB\nSwapTotal: 0 kB\nSwapFree: 0 kB\nDirty: 4 kB\nWriteback: 0 kB\nAnonPages: 33616 kB\nMapped: 7324 kB\nShmem: 156 kB\nSlab: 18380 kB\nSReclaimable: 12580 kB\nSUnreclaim: 5800 kB\nKernelStack: 840 kB\nPageTables: 1576 kB\nNFS_Unstable: 0 kB\nBounce: 0 kB\nWritebackTmp: 0 kB\nCommitLimit: 8762784 kB\nCommitted_AS: 223736 kB\nVmallocTotal: 34359738367 kB\nVmallocUsed: 64000 kB\nVmallocChunk: 34359674252 kB\nHardwareCorrupted: 0 kB\nAnonHugePages: 0 kB\nHugePages_Total: 0\nHugePages_Free: 0\nHugePages_Rsvd: 0\nHugePages_Surp: 0\nHugepagesize: 2048 kB\nDirectMap4k: 17920000 kB\nDirectMap2M: 0 kB\n">>}, {system_memory_data, [{system_total_memory,17946181632}, {free_swap,0}, {total_swap,0}, {cached_memory,55169024}, {buffered_memory,100610048}, {free_memory,17522151424}, {total_memory,17946181632}]}, {statistics, [{wall_clock,{106614,1}}, {context_switches,{19640,0}}, {garbage_collection,{3324,6486237,0}}, {io,{{input,7053305},{output,1714494}}}, {reductions,{2865954,211126}}, {run_queue,0}, {runtime,{600,50}}]}]}]

INFO REPORT <6524.158.0> 2011-11-30 18:50:19
===============================================================================
config change: {node,'ns_1@127.0.0.1',memcached} -> [{dbdir,"/opt/membase/var/lib/membase/data"}, {port,11210}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine,"/opt/membase/lib/memcached/bucket_engine.so"}, {engines, [{membase, [{engine,"/opt/membase/lib/memcached/ep.so"}, {initfile,"/opt/membase/etc/membase/init.sql"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false;shardpattern=%d/%b-%i.mb;db_strategy=multiMTVBDB"}]}, {memcached, [{engine,"/opt/membase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {verbosity,[]}]

INFO REPORT <6524.158.0> 2011-11-30 18:50:19
===============================================================================
ns_node_disco_conf_events config all

INFO REPORT <6524.175.0> 2011-11-30 18:50:19
===============================================================================
Pushing config

INFO REPORT <6524.175.0> 2011-11-30 18:50:19
===============================================================================
Pushing config done

INFO REPORT <6524.158.0> 2011-11-30 18:50:19
===============================================================================
config change: memory_quota -> 15000

INFO REPORT <6524.158.0> 2011-11-30 18:50:19
===============================================================================
ns_node_disco_conf_events config all

INFO REPORT <6524.175.0> 2011-11-30
18:50:19
===============================================================================
Pushing config

INFO REPORT <6524.175.0> 2011-11-30 18:50:19
===============================================================================
Pushing config done

INFO REPORT <6524.456.0> 2011-11-30 18:50:37
===============================================================================
ns_log: logging menelaus_web:12:Created bucket "default" of type: membase

INFO REPORT <6524.158.0> 2011-11-30 18:50:37
===============================================================================
config change: buckets -> [{configs,[{"default", [{sasl_password,[]}, {num_replicas,1}, {ram_quota,104857600}, {auth_type,sasl}, {type,membase}, {num_vbuckets,1024}, {servers,[]}]}]}]

INFO REPORT <6524.162.0> 2011-11-30 18:50:37
===============================================================================
Writing isasl passwd file: "/opt/membase/var/lib/membase/data/isasl.pw"

INFO REPORT <6524.158.0> 2011-11-30 18:50:37
===============================================================================
ns_node_disco_conf_events config all

INFO REPORT <6524.175.0> 2011-11-30 18:50:37
===============================================================================
Pushing config

INFO REPORT <6524.175.0> 2011-11-30 18:50:37
===============================================================================
Pushing config done

INFO REPORT <6524.208.0> 2011-11-30 18:50:37
===============================================================================
ns_1@127.0.0.1:<6524.208.0>:ns_bucket_sup:75: Starting new child: {{per_bucket_sup,"default"}, {single_bucket_sup,start_link,["default"]}, permanent, infinity, supervisor, [single_bucket_sup]}

INFO REPORT <6524.594.0> 2011-11-30 18:50:37
===============================================================================
ns_1@127.0.0.1:<6524.594.0>:ns_janitor:308: Waiting for "default" on ['ns_1@127.0.0.1']

INFO REPORT <6524.158.0> 2011-11-30 18:50:37
===============================================================================
config change: buckets -> [{configs,[{"default", [{sasl_password,[]}, {num_replicas,1}, {ram_quota,104857600}, {auth_type,sasl}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@127.0.0.1']}]}]}]

PROGRESS REPORT <6524.209.0> 2011-11-30 18:50:37
===============================================================================
supervisor {local,ns_bucket_sup} started [{pid,<6524.598.0>}, {name,{per_bucket_sup,"default"}}, {mfargs,{single_bucket_sup,start_link,["default"]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]

INFO REPORT <6524.158.0> 2011-11-30 18:50:37
===============================================================================
ns_node_disco_conf_events config all

INFO REPORT <6524.175.0> 2011-11-30 18:50:37
===============================================================================
Pushing config

INFO REPORT <6524.175.0> 2011-11-30 18:50:37
===============================================================================
Pushing config done

INFO REPORT <6524.204.0> 2011-11-30 18:50:38
===============================================================================
moxi<0.204.0>: 2011-11-30 18:53:24: (agent_config.c.705) ERROR: bad JSON configuration from http://127.0.0.1:8091/pools/default/saslBucketsStreaming: Empty serverList ({
moxi<0.204.0>: "name": "default",
moxi<0.204.0>: "nodeLocator": "vbucket",
moxi<0.204.0>: "saslPassword": "",
moxi<0.204.0>: "nodes": [],
moxi<0.204.0>: "vBucketServerMap": {
moxi<0.204.0>:
"hashAlgorithm": "CRC", moxi<0.204.0>: "numReplicas": 1, moxi<0.204.0>: "serverList": [], moxi<0.204.0>: "vBucketMap": [] ERROR REPORT <6524.204.0> 2011-11-30 18:50:38 =============================================================================== ns_1@127.0.0.1:<6524.204.0>:ns_port_server:161: Dropped 26 log lines from moxi INFO REPORT <6524.594.0> 2011-11-30 18:50:38 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:308: Waiting for "default" on ['ns_1@127.0.0.1'] INFO REPORT <6524.600.0> 2011-11-30 18:50:38 =============================================================================== ns_1@127.0.0.1:<6524.600.0>:ns_memcached:504: Created bucket "default" with config string "ht_size=3079;ht_locks=5;db_shards=4;tap_noop_interval=20;max_txn_size=1000;max_size=104857600;initfile=/opt/membase/etc/membase/init.sql;tap_keepalive=300;dbname=/opt/membase/var/lib/membase/data/default-data/default;vb0=false;waitforwarmup=false;failpartialwarmup=false;shardpattern=%d/%b-%i.mb;db_strategy=multiMTVBDB;" PROGRESS REPORT <6524.599.0> 2011-11-30 18:50:38 =============================================================================== supervisor {local,'single_bucket_sup-default'} started [{pid,<6524.600.0>}, {name,{ns_memcached,"default"}}, {mfargs,{ns_memcached,start_link,["default"]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] PROGRESS REPORT <6524.599.0> 2011-11-30 18:50:38 =============================================================================== supervisor {local,'single_bucket_sup-default'} started [{pid,<6524.606.0>}, {name,{ns_vbm_sup,"default"}}, {mfargs,{ns_vbm_sup,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <6524.599.0> 2011-11-30 18:50:38 =============================================================================== supervisor {local,'single_bucket_sup-default'} started [{pid,<6524.607.0>}, {name,{stats_collector,"default"}}, {mfargs,{stats_collector,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <6524.599.0> 2011-11-30 18:50:38 =============================================================================== supervisor {local,'single_bucket_sup-default'} started [{pid,<6524.608.0>}, {name,{stats_archiver,"default"}}, {mfargs,{stats_archiver,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] INFO REPORT <6524.67.0> 2011-11-30 18:50:38 =============================================================================== ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:109: Creating table 'stats_archiver-default-minute' PROGRESS REPORT <6524.599.0> 2011-11-30 18:50:38 =============================================================================== supervisor {local,'single_bucket_sup-default'} started [{pid,<6524.609.0>}, {name,{stats_reader,"default"}}, {mfargs,{stats_reader,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <6524.599.0> 2011-11-30 18:50:38 =============================================================================== supervisor {local,'single_bucket_sup-default'} started [{pid,<6524.612.0>}, {name,{failover_safeness_level,"default"}}, {mfargs,{failover_safeness_level,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] INFO REPORT <6524.67.0> 2011-11-30 18:50:38 =============================================================================== 
INFO REPORT <6524.67.0> 2011-11-30 18:50:38  ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:199: Mnesia table event:
  {write,{schema,'stats_archiver-default-minute',
          [{name,'stats_archiver-default-minute'},{type,ordered_set},
           {ram_copies,[]},{disc_copies,['ns_1@127.0.0.1']},{disc_only_copies,[]},
           {load_order,0},{access_mode,read_write},{index,[]},{snmp,[]},
           {local_content,true},{record_name,stat_entry},
           {attributes,[timestamp,values]},{user_properties,[]},{frag_properties,[]},
           {cookie,{{1322,679038,629611},'ns_1@127.0.0.1'}},{version,{{2,0},[]}}]},
   {tid,436,<6524.611.0>}}

INFO REPORT <6524.67.0> 2011-11-30 18:50:38  ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:199: Mnesia table event:
  {write,{schema,'stats_archiver-default-minute',
          [{name,'stats_archiver-default-minute'},{type,ordered_set},
           {ram_copies,[]},{disc_copies,['ns_1@127.0.0.1']},{disc_only_copies,[]},
           {load_order,0},{access_mode,read_write},{index,[]},{snmp,[]},
           {local_content,true},{record_name,stat_entry},
           {attributes,[timestamp,values]},{user_properties,[]},{frag_properties,[]},
           {cookie,{{1322,679038,629611},'ns_1@127.0.0.1'}},{version,{{2,0},[]}}]},
   {tid,436,<6524.611.0>}}

INFO REPORT <6524.67.0> 2011-11-30 18:50:38  ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:109: Creating table 'stats_archiver-default-hour'

INFO REPORT <6524.67.0> 2011-11-30 18:50:38  ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:199: Mnesia table event:
  {write,{schema,'stats_archiver-default-hour',
          [{name,'stats_archiver-default-hour'},{type,ordered_set},
           {ram_copies,[]},{disc_copies,['ns_1@127.0.0.1']},{disc_only_copies,[]},
           {load_order,0},{access_mode,read_write},{index,[]},{snmp,[]},
           {local_content,true},{record_name,stat_entry},
           {attributes,[timestamp,values]},{user_properties,[]},{frag_properties,[]},
           {cookie,{{1322,679038,645541},'ns_1@127.0.0.1'}},{version,{{2,0},[]}}]},
   {tid,437,<6524.619.0>}}

INFO REPORT <6524.67.0> 2011-11-30 18:50:38  ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:199: Mnesia table event:
  {write,{schema,'stats_archiver-default-hour',
          [{name,'stats_archiver-default-hour'},{type,ordered_set},
           {ram_copies,[]},{disc_copies,['ns_1@127.0.0.1']},{disc_only_copies,[]},
           {load_order,0},{access_mode,read_write},{index,[]},{snmp,[]},
           {local_content,true},{record_name,stat_entry},
           {attributes,[timestamp,values]},{user_properties,[]},{frag_properties,[]},
           {cookie,{{1322,679038,645541},'ns_1@127.0.0.1'}},{version,{{2,0},[]}}]},
   {tid,437,<6524.619.0>}}

INFO REPORT <6524.67.0> 2011-11-30 18:50:38  ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:109: Creating table 'stats_archiver-default-day'

INFO REPORT <6524.67.0> 2011-11-30 18:50:38  ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:199: Mnesia table event:
  {write,{schema,'stats_archiver-default-day',
          [{name,'stats_archiver-default-day'},{type,ordered_set},
           {ram_copies,[]},{disc_copies,['ns_1@127.0.0.1']},{disc_only_copies,[]},
           {load_order,0},{access_mode,read_write},{index,[]},{snmp,[]},
           {local_content,true},{record_name,stat_entry},
           {attributes,[timestamp,values]},{user_properties,[]},{frag_properties,[]},
           {cookie,{{1322,679038,657103},'ns_1@127.0.0.1'}},{version,{{2,0},[]}}]},
   {tid,438,<6524.626.0>}}

INFO REPORT <6524.67.0> 2011-11-30 18:50:38  ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:199: Mnesia table event:
  {write,{schema,'stats_archiver-default-day',
          [{name,'stats_archiver-default-day'},{type,ordered_set},
           {ram_copies,[]},{disc_copies,['ns_1@127.0.0.1']},{disc_only_copies,[]},
           {load_order,0},{access_mode,read_write},{index,[]},{snmp,[]},
           {local_content,true},{record_name,stat_entry},
           {attributes,[timestamp,values]},{user_properties,[]},{frag_properties,[]},
           {cookie,{{1322,679038,657103},'ns_1@127.0.0.1'}},{version,{{2,0},[]}}]},
   {tid,438,<6524.626.0>}}

INFO REPORT <6524.67.0> 2011-11-30 18:50:38  ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:109: Creating table 'stats_archiver-default-week'

INFO REPORT <6524.67.0> 2011-11-30 18:50:38  ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:199: Mnesia table event:
  {write,{schema,'stats_archiver-default-week',
          [{name,'stats_archiver-default-week'},{type,ordered_set},
           {ram_copies,[]},{disc_copies,['ns_1@127.0.0.1']},{disc_only_copies,[]},
           {load_order,0},{access_mode,read_write},{index,[]},{snmp,[]},
           {local_content,true},{record_name,stat_entry},
           {attributes,[timestamp,values]},{user_properties,[]},{frag_properties,[]},
           {cookie,{{1322,679038,663042},'ns_1@127.0.0.1'}},{version,{{2,0},[]}}]},
   {tid,439,<6524.633.0>}}

INFO REPORT <6524.67.0> 2011-11-30 18:50:38  ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:199: Mnesia table event:
  {write,{schema,'stats_archiver-default-week',
          [{name,'stats_archiver-default-week'},{type,ordered_set},
           {ram_copies,[]},{disc_copies,['ns_1@127.0.0.1']},{disc_only_copies,[]},
           {load_order,0},{access_mode,read_write},{index,[]},{snmp,[]},
           {local_content,true},{record_name,stat_entry},
           {attributes,[timestamp,values]},{user_properties,[]},{frag_properties,[]},
           {cookie,{{1322,679038,663042},'ns_1@127.0.0.1'}},{version,{{2,0},[]}}]},
   {tid,439,<6524.633.0>}}

INFO REPORT <6524.67.0> 2011-11-30 18:50:38  ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:109: Creating table 'stats_archiver-default-month'

INFO REPORT <6524.67.0> 2011-11-30 18:50:38  ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:199: Mnesia table event:
  {write,{schema,'stats_archiver-default-month',
          [{name,'stats_archiver-default-month'},{type,ordered_set},
           {ram_copies,[]},{disc_copies,['ns_1@127.0.0.1']},{disc_only_copies,[]},
           {load_order,0},{access_mode,read_write},{index,[]},{snmp,[]},
           {local_content,true},{record_name,stat_entry},
           {attributes,[timestamp,values]},{user_properties,[]},{frag_properties,[]},
           {cookie,{{1322,679038,668880},'ns_1@127.0.0.1'}},{version,{{2,0},[]}}]},
   {tid,440,<6524.640.0>}}
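[Editor's note] Each archiving period (minute, hour, day, week, month, year; the write events continue below) gets its own local, disc-backed ordered_set table, and the schema fields in these events map directly onto mnesia:create_table/2 options. A sketch using only options visible in the events, not ns_server's actual code:

    %% Sketch: recreate one archive table from the schema fields logged here.
    create_archive(Tab) ->
        {atomic, ok} =
            mnesia:create_table(Tab, [{type, ordered_set},
                                      {disc_copies, [node()]},
                                      {local_content, true},
                                      {record_name, stat_entry},
                                      {attributes, [timestamp, values]}]).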
INFO REPORT <6524.67.0> 2011-11-30 18:50:38  ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:199: Mnesia table event:
  {write,{schema,'stats_archiver-default-month',
          [{name,'stats_archiver-default-month'},{type,ordered_set},
           {ram_copies,[]},{disc_copies,['ns_1@127.0.0.1']},{disc_only_copies,[]},
           {load_order,0},{access_mode,read_write},{index,[]},{snmp,[]},
           {local_content,true},{record_name,stat_entry},
           {attributes,[timestamp,values]},{user_properties,[]},{frag_properties,[]},
           {cookie,{{1322,679038,668880},'ns_1@127.0.0.1'}},{version,{{2,0},[]}}]},
   {tid,440,<6524.640.0>}}

INFO REPORT <6524.67.0> 2011-11-30 18:50:38  ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:109: Creating table 'stats_archiver-default-year'

INFO REPORT <6524.67.0> 2011-11-30 18:50:38  ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:199: Mnesia table event:
  {write,{schema,'stats_archiver-default-year',
          [{name,'stats_archiver-default-year'},{type,ordered_set},
           {ram_copies,[]},{disc_copies,['ns_1@127.0.0.1']},{disc_only_copies,[]},
           {load_order,0},{access_mode,read_write},{index,[]},{snmp,[]},
           {local_content,true},{record_name,stat_entry},
           {attributes,[timestamp,values]},{user_properties,[]},{frag_properties,[]},
           {cookie,{{1322,679038,674986},'ns_1@127.0.0.1'}},{version,{{2,0},[]}}]},
   {tid,441,<6524.647.0>}}

INFO REPORT <6524.67.0> 2011-11-30 18:50:38  ns_1@127.0.0.1:<6524.67.0>:mb_mnesia:199: Mnesia table event:
  {write,{schema,'stats_archiver-default-year',
          [{name,'stats_archiver-default-year'},{type,ordered_set},
           {ram_copies,[]},{disc_copies,['ns_1@127.0.0.1']},{disc_only_copies,[]},
           {load_order,0},{access_mode,read_write},{index,[]},{snmp,[]},
           {local_content,true},{record_name,stat_entry},
           {attributes,[timestamp,values]},{user_properties,[]},{frag_properties,[]},
           {cookie,{{1322,679038,674986},'ns_1@127.0.0.1'}},{version,{{2,0},[]}}]},
   {tid,441,<6524.647.0>}}

ERROR REPORT <6524.191.0> 2011-11-30 18:50:38  ns_1@127.0.0.1:<6524.191.0>:ns_heart:156: Failed to get stats for bucket: "default":
  {error,{exit,{badarg,[{erlang,hd,[[]]},
                        {stats_reader,'-do_handle_call/3-fun-0-',2},
                        {mnesia_tm,non_transaction,5},
                        {stats_reader,do_handle_call,3},
                        {stats_reader,handle_call,3},
                        {gen_server,handle_msg,5},
                        {proc_lib,init_p_do_apply,3}]}}}

INFO REPORT <6524.600.0> 2011-11-30 18:50:38  ns_log: logging ns_memcached:1:Bucket "default" loaded on node 'ns_1@127.0.0.1' in 0 seconds.
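[Editor's note] The badarg above (repeated once more at 18:50:39 below) is evidently hd/1 applied to an empty list inside stats_reader: the archive tables were created milliseconds earlier and hold no samples yet, so the heartbeat's first stats read hits hd([]). It is self-healing once the first sample is archived. The failing shape and the obvious guard, sketched; stats_reader's actual code may differ:

    %% Sketch of the race: reading the newest sample from a just-created,
    %% still-empty archive table. hd([]) is the badarg in the report above.
    latest_sample(Tab) ->
        case mnesia:dirty_last(Tab) of
            '$end_of_table' -> {error, no_samples};  % no sample archived yet
            TS              -> {ok, hd(mnesia:dirty_read(Tab, TS))}
        end.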
ERROR REPORT <6524.191.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.191.0>:ns_heart:156: Failed to get stats for bucket: "default":
  {error,{exit,{badarg,[{erlang,hd,[[]]},
                        {stats_reader,'-do_handle_call/3-fun-0-',2},
                        {mnesia_tm,non_transaction,5},
                        {stats_reader,do_handle_call,3},
                        {stats_reader,handle_call,3},
                        {gen_server,handle_msg,5},
                        {proc_lib,init_p_do_apply,3}]}}}

INFO REPORT <6524.607.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.607.0>:stats_collector:83: Stats for bucket "default":
  accepting_conns 1
  auth_cmds 0
  auth_errors 0
  bucket_active_conns 1
  bucket_conns 1
  bytes_read 96
  bytes_written 24099
  cas_badval 0
  cas_hits 0
  cas_misses 0
  cmd_flush 0
  cmd_get 0
  cmd_set 0
  conn_yields 0
  connection_structures 11
  curr_connections 11
  curr_items 0
  curr_items_tot 0
  daemon_connections 10
  decr_hits 0
  decr_misses 0
  delete_hits 0
  delete_misses 0
  ep_bg_fetched 0
  ep_commit_num 0
  ep_commit_time 0
  ep_commit_time_total 0
  ep_data_age 0
  ep_data_age_highwat 0
  ep_db_cleaner_status complete
  ep_db_strategy multiMTVBDB
  ep_dbinit 1
  ep_dbname /opt/membase/var/lib/membase/data/default-data/default
  ep_dbshards 4
  ep_diskqueue_drain 0
  ep_diskqueue_fill 0
  ep_diskqueue_items 0
  ep_diskqueue_memory 0
  ep_diskqueue_pending 0
  ep_exp_pager_stime 3600
  ep_expired 0
  ep_flush_all false
  ep_flush_duration 0
  ep_flush_duration_highwat 0
  ep_flush_duration_total 0
  ep_flush_preempts 0
  ep_flusher_state running
  ep_flusher_todo 0
  ep_io_num_read 0
  ep_io_num_write 0
  ep_io_read_bytes 0
  ep_io_write_bytes 0
  ep_item_begin_failed 0
  ep_item_commit_failed 0
  ep_item_flush_expired 0
  ep_item_flush_failed 0
  ep_items_rm_from_checkpoints 0
  ep_kv_size 0
  ep_latency_arith_cmd 0
  ep_latency_get_cmd 0
  ep_latency_store_cmd 0
  ep_max_data_size 104857600
  ep_max_txn_size 1000
  ep_mem_high_wat 78643200
  ep_mem_low_wat 62914560
  ep_min_data_age 0
  ep_num_active_non_resident 0
  ep_num_checkpoint_remover_runs 0
  ep_num_eject_failures 0
  ep_num_eject_replicas 0
  ep_num_expiry_pager_runs 0
  ep_num_non_resident 0
  ep_num_not_my_vbuckets 0
  ep_num_pager_runs 0
  ep_num_value_ejects 0
  ep_onlineupdate false
  ep_onlineupdate_revert_add 0
  ep_onlineupdate_revert_delete 0
  ep_onlineupdate_revert_update 0
  ep_oom_errors 0
  ep_overhead 640
  ep_pending_ops 0
  ep_pending_ops_max 0
  ep_pending_ops_max_duration 0
  ep_pending_ops_total 0
  ep_queue_age_cap 900
  ep_queue_size 0
  ep_storage_age 0
  ep_storage_age_highwat 0
  ep_storage_type featured
  ep_store_max_concurrency 10
  ep_store_max_readers 9
  ep_store_max_readwrite 1
  ep_tap_bg_fetch_requeued 0
  ep_tap_bg_fetched 0
  ep_tap_keepalive 300
  ep_tmp_oom_errors 0
  ep_too_old 0
  ep_too_young 0
  ep_total_cache_size 0
  ep_total_del_items 0
  ep_total_enqueued 0
  ep_total_new_items 0
  ep_total_persisted 0
  ep_uncommitted_items 0
  ep_value_size 0
  ep_vb_total 0
  ep_vbucket_del 0
  ep_vbucket_del_fail 0
  ep_version 1.7.1.1_39_g4bd26a3
  ep_warmed_up 0
  ep_warmup true
  ep_warmup_dups 0
  ep_warmup_oom 0
  ep_warmup_thread complete
  ep_warmup_time 15688
  get_hits 0
  get_misses 0
  incr_hits 0
  incr_misses 0
  libevent 2.0.11-stable
  limit_maxbytes 67108864
  listen_disabled_num 0
  mem_used 640
  pid 758
  pointer_size 64
  rejected_conns 0
  rusage_system 0.060003
  rusage_user 0.320020
  threads 4
  time 1322679038
  total_connections 11
  uptime 167
  vb_active_curr_items 0
  vb_active_eject 0
  vb_active_ht_memory 0
  vb_active_itm_memory 0
  vb_active_num 0
  vb_active_num_non_resident 0
  vb_active_ops_create 0
  vb_active_ops_delete 0
  vb_active_ops_reject 0
  vb_active_ops_update 0
  vb_active_perc_mem_resident 0
  vb_active_queue_age 0
  vb_active_queue_drain 0
  vb_active_queue_fill 0
  vb_active_queue_memory 0
  vb_active_queue_pending 0
  vb_active_queue_size 0
  vb_dead_num 0
  vb_pending_curr_items 0
  vb_pending_eject 0
  vb_pending_ht_memory 0
  vb_pending_itm_memory 0
  vb_pending_num 0
  vb_pending_num_non_resident 0
  vb_pending_ops_create 0
  vb_pending_ops_delete 0
  vb_pending_ops_reject 0
  vb_pending_ops_update 0
  vb_pending_perc_mem_resident 0
  vb_pending_queue_age 0
  vb_pending_queue_drain 0
  vb_pending_queue_fill 0
  vb_pending_queue_memory 0
  vb_pending_queue_pending 0
  vb_pending_queue_size 0
  vb_replica_curr_items 0
  vb_replica_eject 0
  vb_replica_ht_memory 0
  vb_replica_itm_memory 0
  vb_replica_num 0
  vb_replica_num_non_resident 0
  vb_replica_ops_create 0
  vb_replica_ops_delete 0
  vb_replica_ops_reject 0
  vb_replica_ops_update 0
  vb_replica_perc_mem_resident 0
  vb_replica_queue_age 0
  vb_replica_queue_drain 0
  vb_replica_queue_fill 0
  vb_replica_queue_memory 0
  vb_replica_queue_pending 0
  vb_replica_queue_size 0
  version 1.4.4_461_gf99c147

INFO REPORT <6524.158.0> 2011-11-30 18:50:39  config change:
  settings -> [{stats,[{send_stats,true}]}]

INFO REPORT <6524.158.0> 2011-11-30 18:50:39  ns_node_disco_conf_events config all

INFO REPORT <6524.175.0> 2011-11-30 18:50:39  Pushing config

INFO REPORT <6524.175.0> 2011-11-30 18:50:39  Pushing config done

INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 0 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 1 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 2 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 3 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 4 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 5 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 6 in "default" on 'ns_1@127.0.0.1' from missing to active.
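[Editor's note] The stats_collector dump above is the engine's full "stats" output, captured about a minute after startup. The same counters can be pulled ad hoc over the plain memcached ASCII protocol. A sketch; it assumes the ASCII protocol is reachable on this node's direct memcached port 11210 and that an unauthenticated connection lands in the default bucket, which matches this node's 1.7-era config:

    %% Sketch: stream the "stats" output line by line until END.
    dump_stats() ->
        {ok, S} = gen_tcp:connect("127.0.0.1", 11210,
                                  [binary, {packet, line}, {active, false}]),
        ok = gen_tcp:send(S, <<"stats\r\n">>),
        print_lines(S).

    print_lines(S) ->
        {ok, Line} = gen_tcp:recv(S, 0),
        case Line of
            <<"END\r\n">> -> gen_tcp:close(S);
            _             -> io:format("~s", [Line]), print_lines(S)
        end.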
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 7 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 8 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 9 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 10 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 11 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 12 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 13 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 14 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 15 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 16 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 17 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 18 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 19 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 20 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 21 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 22 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 23 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 24 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 25 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 26 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 27 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 28 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 29 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 30 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 31 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 32 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 33 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.158.0> 2011-11-30 18:50:39  config change:
  buckets -> [{configs,[{"default",
                         [{sasl_password,[]},
                          {num_replicas,1},
                          {ram_quota,104857600},
                          {auth_type,sasl},
                          {type,membase},
                          {num_vbuckets,1024},
                          {servers,['ns_1@127.0.0.1']},
                          {map,[['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
                                ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
                                ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
                                ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
                                ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
                                ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
                                ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
                                ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
                                ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
                                ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
                                ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
                                ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
                                ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
                                ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
                                ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
                                ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
                                ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
                                ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
                                ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
                                ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
                                ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined],
                                ['ns_1@127.0.0.1'|...], [...]|...]}]}]}]

INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 34 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 35 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 36 in "default" on 'ns_1@127.0.0.1' from missing to active.
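[Editor's note] The map that just appeared in the bucket config (truncated by the log's term pretty-printer) is nothing exotic: with a single server and num_replicas set to 1, every one of the 1024 chains is the active node followed by an unfilled replica slot. A sketch of how such a trivial map can be built; initial_map/3 is a hypothetical helper, not ns_server source:

    %% Sketch: a single-node vbucket map is NumVBuckets copies of the chain
    %% [ActiveNode | undefined replica slots].
    initial_map(Node, NumVBuckets, NumReplicas) ->
        Chain = [Node | lists:duplicate(NumReplicas, undefined)],
        lists:duplicate(NumVBuckets, Chain).

    %% initial_map('ns_1@127.0.0.1', 1024, 1) yields the map logged above.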
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 37 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 38 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 39 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 40 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 41 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 42 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 43 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 44 in "default" on 'ns_1@127.0.0.1' from missing to active.

INFO REPORT <6524.158.0> 2011-11-30 18:50:39  ns_node_disco_conf_events config all

INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 45 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 46 in "default" on 'ns_1@127.0.0.1' from missing to active.

INFO REPORT <6524.175.0> 2011-11-30 18:50:39  Pushing config

INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 47 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 48 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 49 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 50 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 51 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 52 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 53 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 54 in "default" on 'ns_1@127.0.0.1' from missing to active.

INFO REPORT <6524.175.0> 2011-11-30 18:50:39  Pushing config done

INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 55 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 56 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 57 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 58 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 59 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 60 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 61 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 62 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 63 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 64 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 65 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 66 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 67 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 68 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 69 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 70 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 71 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 72 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 73 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 74 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 75 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 76 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 77 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 78 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 79 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 80 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 81 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 82 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 83 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 84 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 85 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 86 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 87 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 88 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 89 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 90 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 91 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 92 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 93 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 94 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 95 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 96 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 97 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 98 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 99 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 100 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 101 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 102 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 103 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 104 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 105 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 106 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 107 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 108 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 109 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 110 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 111 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 112 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 113 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 114 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 115 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 116 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 117 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 118 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 119 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 120 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 121 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 122 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 123 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 124 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 125 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 126 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 127 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 128 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 129 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 130 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 131 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 132 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 133 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 134 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 135 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 136 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 137 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 138 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 139 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 140 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 141 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 142 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 143 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 144 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 145 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 146 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 147 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 148 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 149 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 150 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 151 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 152 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 153 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 154 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 155 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 156 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 157 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 158 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 159 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 160 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 161 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 162 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 163 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 164 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 165 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 166 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 167 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 168 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 169 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 170 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 171 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 172 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 173 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 174 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 175 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 176 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 177 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 178 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 179 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 180 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 181 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 182 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 183 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 184 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 185 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 186 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 187 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39  ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 188 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 189 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 190 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 191 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 192 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 193 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 194 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 195 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 196 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 197 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 198 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 199 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 200 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 201 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 202 in "default" on 'ns_1@127.0.0.1' from missing to active. 
INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 203 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 204 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 205 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 206 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 207 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 208 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 209 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 210 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 211 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 212 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 213 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 214 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 215 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 216 in "default" on 'ns_1@127.0.0.1' from missing to active. 
INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 217 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 218 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 219 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 220 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 221 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 222 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 223 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 224 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 225 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 226 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 227 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 228 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 229 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 230 in "default" on 'ns_1@127.0.0.1' from missing to active. 
INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 231 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 232 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 233 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 234 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 235 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 236 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 237 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 238 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 239 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 240 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 241 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 242 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 243 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 244 in "default" on 'ns_1@127.0.0.1' from missing to active. 
INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 245 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 246 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 247 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 248 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 249 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 250 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 251 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 252 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 253 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 254 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 255 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 256 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 257 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 258 in "default" on 'ns_1@127.0.0.1' from missing to active. 
INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 259 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 260 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 261 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 262 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 263 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 264 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 265 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 266 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 267 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 268 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 269 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 270 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 271 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 272 in "default" on 'ns_1@127.0.0.1' from missing to active. 
INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 273 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 274 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 275 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 276 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 277 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 278 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 279 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 280 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 281 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 282 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 283 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 284 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 285 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 286 in "default" on 'ns_1@127.0.0.1' from missing to active. 
INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 287 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 288 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 289 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 290 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 291 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 292 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 293 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 294 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 295 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 296 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 297 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 298 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 299 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 300 in "default" on 'ns_1@127.0.0.1' from missing to active. 
INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 301 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 302 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 303 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 304 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 305 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 306 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 307 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 308 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 309 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 310 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 311 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 312 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 313 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 314 in "default" on 'ns_1@127.0.0.1' from missing to active. 
INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 315 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 316 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 317 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 318 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 319 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 320 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 321 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 322 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 323 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 324 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 325 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 326 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 327 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 328 in "default" on 'ns_1@127.0.0.1' from missing to active. 
INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 329 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 330 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 331 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 332 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 333 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 334 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 335 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 336 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 337 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 338 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 339 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 340 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 341 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 342 in "default" on 'ns_1@127.0.0.1' from missing to active. 
INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 343 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 344 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 345 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 346 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 347 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 348 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 349 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 350 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 351 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 352 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 353 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 354 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 355 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 356 in "default" on 'ns_1@127.0.0.1' from missing to active. 
INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 357 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 358 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 359 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 360 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 361 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 362 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 363 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 364 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 365 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 366 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 367 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 368 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 369 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 370 in "default" on 'ns_1@127.0.0.1' from missing to active. 
INFO REPORT <6524.594.0> 2011-11-30 18:50:39
===============================================================================
ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 371 in "default" on 'ns_1@127.0.0.1' from missing to active.

[... 292 further INFO REPORT records from <6524.594.0>, all at 2011-11-30 18:50:39 and identical except for the vbucket index, as ns_janitor:183 sets vbuckets 372 through 663 in "default" on 'ns_1@127.0.0.1' from missing to active ...]

INFO REPORT <6524.594.0> 2011-11-30 18:50:39
===============================================================================
ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 664 in "default" on 'ns_1@127.0.0.1' from missing to active.
INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 665 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 666 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 667 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 668 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 669 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 670 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 671 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 672 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 673 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 674 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 675 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 676 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 677 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 678 in "default" on 'ns_1@127.0.0.1' from missing to active. 
INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 679 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 680 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 681 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 682 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 683 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 684 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 685 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 686 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 687 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 688 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 689 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 690 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 691 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 692 in "default" on 'ns_1@127.0.0.1' from missing to active. 
INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 693 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 694 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 695 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 696 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 697 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 698 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 699 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 700 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 701 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 702 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 703 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 704 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 705 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 706 in "default" on 'ns_1@127.0.0.1' from missing to active. 
INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 707 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 708 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.206.0> 2011-11-30 18:50:39 =============================================================================== memcached<0.206.0>: Extension support isn't implemented in this version of bucket_engine INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 709 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 710 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 711 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 712 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 713 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 714 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 715 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 716 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 717 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 718 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 719 in "default" on 'ns_1@127.0.0.1' from missing to active. 
INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 720 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 721 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 722 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 723 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 724 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 725 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 726 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 727 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 728 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 729 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 730 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 731 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 732 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 733 in "default" on 'ns_1@127.0.0.1' from missing to active. 
INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 734 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 735 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 736 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 737 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 738 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 739 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 740 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 741 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 742 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 743 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 744 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 745 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 746 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 747 in "default" on 'ns_1@127.0.0.1' from missing to active. 
INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 748 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 749 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 750 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 751 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 752 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 753 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 754 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 755 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 756 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 757 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 758 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 759 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 760 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 761 in "default" on 'ns_1@127.0.0.1' from missing to active. 
INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 762 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 763 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 764 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 765 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 766 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 767 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 768 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 769 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 770 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 771 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 772 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 773 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 774 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 775 in "default" on 'ns_1@127.0.0.1' from missing to active. 
INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 776 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 777 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 778 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 779 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 780 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 781 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 782 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 783 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 784 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 785 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 786 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 787 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 788 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 789 in "default" on 'ns_1@127.0.0.1' from missing to active. 
INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 790 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 791 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 792 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 793 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 794 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 795 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 796 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 797 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 798 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 799 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 800 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 801 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 802 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 803 in "default" on 'ns_1@127.0.0.1' from missing to active. 
INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 804 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 805 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 806 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 807 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 808 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 809 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 810 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 811 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 812 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 813 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 814 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 815 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 816 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 817 in "default" on 'ns_1@127.0.0.1' from missing to active. 
INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 818 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 819 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 820 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 821 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 822 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 823 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 824 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 825 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 826 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 827 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 828 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 829 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 830 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 831 in "default" on 'ns_1@127.0.0.1' from missing to active. 
INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 832 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 833 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 834 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 835 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 836 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 837 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 838 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 839 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 840 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 841 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 842 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 843 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 844 in "default" on 'ns_1@127.0.0.1' from missing to active. INFO REPORT <6524.594.0> 2011-11-30 18:50:39 =============================================================================== ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 845 in "default" on 'ns_1@127.0.0.1' from missing to active. 
INFO REPORT <6524.594.0> 2011-11-30 18:50:39 ===============================================================================
ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 846 in "default" on 'ns_1@127.0.0.1' from missing to active.

[The identical ns_janitor:183 INFO REPORT repeats once per vbucket, for vbuckets 847 through 1022, all at 2011-11-30 18:50:39, each setting the vbucket in "default" on 'ns_1@127.0.0.1' from missing to active.]

INFO REPORT <6524.594.0> 2011-11-30 18:50:39 ===============================================================================
ns_1@127.0.0.1:<6524.594.0>:ns_janitor:183: Setting vbucket 1023 in "default" on 'ns_1@127.0.0.1' from missing to active.
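For orientation: the run of ns_janitor INFO REPORTs above is the janitor bringing every vbucket of the "default" bucket from missing to active after warmup. A minimal Erlang sketch of that kind of repair loop follows; set_vbucket_state/4 is a hypothetical stand-in for the real call into memcached, not ns_server's actual function.

    %% Illustrative sketch only. For each vbucket whose current state is
    %% missing, log the transition and ask memcached to activate it.
    -module(janitor_sketch).
    -export([activate_missing/3]).

    activate_missing(Bucket, Node, VBucketStates) ->
        lists:foreach(
          fun({VBucket, missing}) ->
                  error_logger:info_msg(
                    "Setting vbucket ~B in ~p on ~p from missing to active.~n",
                    [VBucket, Bucket, Node]),
                  ok = set_vbucket_state(Node, Bucket, VBucket, active);
             ({_VBucket, _OtherState}) ->
                  ok
          end,
          VBucketStates).

    %% Hypothetical helper; the real janitor drives memcached's
    %% SET_VBUCKET command over the binary protocol.
    set_vbucket_state(_Node, _Bucket, _VBucket, active) ->
        ok.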
INFO REPORT <6524.204.0> 2011-11-30 18:50:39 ===============================================================================
moxi<0.204.0>: 2011-11-30 18:53:25: (agent_config.c.705) ERROR: bad JSON configuration from http://127.0.0.1:8091/pools/default/saslBucketsStreaming: Number of buckets must be a power of two > 0 and <= MAX_BUCKETS ({
moxi<0.204.0>: "name": "default",
moxi<0.204.0>: "nodeLocator": "vbucket",
moxi<0.204.0>: "saslPassword": "",
moxi<0.204.0>: "nodes": [{
moxi<0.204.0>: "replication": 1,
moxi<0.204.0>: "clusterMembership": "active",
moxi<0.204.0>: "status": "warmup",
moxi<0.204.0>: "hostname": "127.0.0.1:8091",
moxi<0.204.0>: "clusterCompatibility": 1,

ERROR REPORT <6524.204.0> 2011-11-30 18:50:39 ===============================================================================
ns_1@127.0.0.1:<6524.204.0>:ns_port_server:161: Dropped 38 log lines from moxi
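The moxi ERROR above is moxi rejecting a vBucket map streamed to it while the node was still in "warmup": the map it parsed did not satisfy moxi's invariant that the bucket count be a power of two, greater than zero and at most MAX_BUCKETS. A sketch of that invariant in Erlang, with the MAX_BUCKETS bound passed in as an assumed parameter:

    %% Power-of-two test: N band (N - 1) is zero exactly when N has a
    %% single bit set, i.e. when N is a power of two.
    valid_bucket_count(N, MaxBuckets)
      when is_integer(N), N > 0, N =< MaxBuckets ->
        N band (N - 1) =:= 0;
    valid_bucket_count(_N, _MaxBuckets) ->
        false.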
INFO REPORT <6524.158.0> 2011-11-30 18:50:51 ===============================================================================
unsupervising port: {moxi,"/opt/membase/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",[]}, {"MOXI_SASL_PLAIN_PWD",[]}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]}

INFO REPORT <6524.197.0> 2011-11-30 18:50:51 ===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <6524.204.0> 2011-11-30 18:50:51 ===============================================================================
moxi<0.204.0>: EOL on stdin. Exiting

INFO REPORT <6524.158.0> 2011-11-30 18:50:51 ===============================================================================
supervising port: {moxi,"/opt/membase/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR","Administrator"}, {"MOXI_SASL_PLAIN_PWD","test1234"}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]}

INFO REPORT <6524.692.0> 2011-11-30 18:50:51 ===============================================================================
starting ns_port_server with delay of 5000

PROGRESS REPORT <6524.201.0> 2011-11-30 18:50:51 ===============================================================================
supervisor {local,ns_port_sup} started [{pid,<6524.692.0>}, {name, {moxi,"/opt/membase/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR","Administrator"}, {"MOXI_SASL_PLAIN_PWD","test1234"}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]}}, {mfargs, {supervisor_cushion,start_link, [moxi,5000,ns_port_server,start_link, [moxi,"/opt/membase/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR","Administrator"}, {"MOXI_SASL_PLAIN_PWD","test1234"}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]]]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}]
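The unsupervising/supervising port specs above are ns_server restarting moxi as an Erlang port, here to pick up the new REST credentials in its environment. Most of the listed options (use_stdio, exit_status, stderr_to_stdout, stream, {env, ...}) are standard erlang:open_port/2 options; a minimal sketch of launching moxi that way follows. port_server_send_eol is ns_server-internal and has no open_port equivalent, so it is omitted, and the argument list is abbreviated.

    %% Sketch: spawn the moxi executable with arguments and environment,
    %% reading its output as a byte stream and getting its exit status.
    start_moxi(Url) ->
        erlang:open_port(
          {spawn_executable, "/opt/membase/bin/moxi"},
          [{args, ["-z", "url=" ++ Url,
                   "-p", "0", "-Y", "y", "-O", "stderr"]},
           {env, [{"EVENT_NOSELECT", "1"},
                  {"MOXI_SASL_PLAIN_USR", "Administrator"},
                  {"MOXI_SASL_PLAIN_PWD", "test1234"}]},
           use_stdio, exit_status, stderr_to_stdout, stream]).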
INFO REPORT <6524.158.0> 2011-11-30 18:50:51 ===============================================================================
config change: rest -> [{port,8091}]

INFO REPORT <6524.158.0> 2011-11-30 18:50:51 ===============================================================================
ns_node_disco_conf_events config all

INFO REPORT <6524.175.0> 2011-11-30 18:50:51 ===============================================================================
Pushing config

INFO REPORT <6524.175.0> 2011-11-30 18:50:51 ===============================================================================
Pushing config done

INFO REPORT <6524.158.0> 2011-11-30 18:50:51 ===============================================================================
config change: rest_creds -> ********

INFO REPORT <6524.158.0> 2011-11-30 18:50:51 ===============================================================================
ns_node_disco_conf_events config all

INFO REPORT <6524.175.0> 2011-11-30 18:50:51 ===============================================================================
Pushing config

INFO REPORT <6524.175.0> 2011-11-30 18:50:51 ===============================================================================
Pushing config done

INFO REPORT <6524.193.0> 2011-11-30 18:50:52 ===============================================================================
ns_1@127.0.0.1:<6524.193.0>:ns_doctor:86: Current node statuses: [{'ns_1@127.0.0.1', [{last_heard,{1322,679047,272594}}, {active_buckets,["default"]}, {ready_buckets,["default"]}, {replication,[{"default",0.0}]}, {memory, [{total,29066928}, {processes,8988296}, {processes_used,8979864}, {system,20078632}, {atom,822681}, {atom_used,817365}, {binary,1200128}, {code,7766853}, {ets,793088}]}, {system_stats, [{cpu_utilization_rate,0.2604166666666667}, {swap_total,0}, {swap_used,0}]}, {interesting_stats, [{curr_items,0},{curr_items_tot,0},{vb_replica_curr_items,0}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.5"}, {mnesia,"4.4.17"}, {kernel,"2.14.3"}, {sasl,"2.1.9.3"}, {ns_server,"1.7.2r-20-g6604356"}, {stdlib,"1.17.3"}]}, {system_arch,"x86_64-unknown-linux-gnu"}, {wall_clock,176}, {memory_data,{17946181632,426209280,{<6524.7.0>,601336}}}, {disk_data, [{"/",8256952,15}, {"/dev",8754940,1}, {"/run",3505116,1}, {"/run/lock",5120,0}, {"/run/shm",8762784,0}, {"/mnt",423135208,1}]}, {meminfo, <<"MemTotal: 17525568 kB\nMemFree: 17107968 kB\nBuffers: 98252 kB\nCached: 55472 kB\nSwapCached: 0 kB\nActive: 143720 kB\nInactive: 45080 kB\nActive(anon): 35092 kB\nInactive(anon): 140 kB\nActive(file): 108628 kB\nInactive(file): 44940 kB\nUnevictable: 0 kB\nMlocked: 0 kB\nSwapTotal: 0 kB\nSwapFree: 0 kB\nDirty: 68 kB\nWriteback: 0 kB\nAnonPages: 35124 kB\nMapped: 7352 kB\nShmem: 156 kB\nSlab: 18568 kB\nSReclaimable: 12784 kB\nSUnreclaim: 5784 kB\nKernelStack: 840 kB\nPageTables: 1576 kB\nNFS_Unstable: 0 kB\nBounce: 0 kB\nWritebackTmp: 0 kB\nCommitLimit: 8762784 kB\nCommitted_AS: 224472 kB\nVmallocTotal: 34359738367 kB\nVmallocUsed: 64000 kB\nVmallocChunk: 34359674252 kB\nHardwareCorrupted: 0 kB\nAnonHugePages: 0 kB\nHugePages_Total: 0\nHugePages_Free: 0\nHugePages_Rsvd: 0\nHugePages_Surp: 0\nHugepagesize: 2048 kB\nDirectMap4k: 17920000 kB\nDirectMap2M: 0 kB\n">>}, {system_memory_data, [{system_total_memory,17946181632}, {free_swap,0}, {total_swap,0}, {cached_memory,56803328}, {buffered_memory,100610048}, {free_memory,17518559232}, {total_memory,17946181632}]}, {statistics, [{wall_clock,{166614,1}}, {context_switches,{27931,0}}, {garbage_collection,{4818,9943325,0}}, {io,{{input,8363692},{output,3086289}}}, {reductions,{4230486,226483}}, {run_queue,0}, {runtime,{860,60}}]}]}]

INFO REPORT <6524.693.0> 2011-11-30 18:50:52 ===============================================================================
moxi<0.693.0>: 2011-11-30 18:50:51: (cproxy_config.c.317) env: MOXI_SASL_PLAIN_USR (13)
moxi<0.693.0>: 2011-11-30 18:50:51: (cproxy_config.c.326) env: MOXI_SASL_PLAIN_PWD (8)

INFO REPORT <6524.193.0> 2011-11-30 18:51:52 ===============================================================================
ns_1@127.0.0.1:<6524.193.0>:ns_doctor:86: Current node statuses: [{'ns_1@127.0.0.1', [{last_heard,{1322,679107,272284}}, {active_buckets,["default"]}, {ready_buckets,["default"]}, {replication,[{"default",0.0}]}, {memory, [{total,32051672}, {processes,11352392}, {processes_used,11329008}, {system,20699280}, {atom,830761}, {atom_used,829980}, {binary,1258240}, {code,7912495}, {ets,1196800}]}, {system_stats, [{cpu_utilization_rate,0.26246719160104987}, {swap_total,0}, {swap_used,0}]}, {interesting_stats, [{curr_items,0},{curr_items_tot,0},{vb_replica_curr_items,0}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.5"}, {mnesia,"4.4.17"}, {kernel,"2.14.3"}, {sasl,"2.1.9.3"}, {ns_server,"1.7.2r-20-g6604356"}, {stdlib,"1.17.3"}]}, {system_arch,"x86_64-unknown-linux-gnu"}, {wall_clock,236}, {memory_data,{17946181632,502136832,{<6524.197.0>,1343280}}}, {disk_data, [{"/",8256952,15}, {"/dev",8754940,1}, {"/run",3505116,1}, {"/run/lock",5120,0}, {"/run/shm",8762784,0}, {"/mnt",423135208,1}]}, {meminfo, <<"MemTotal: 17525568 kB\nMemFree: 17032664 kB\nBuffers: 98268 kB\nCached: 60212 kB\nSwapCached: 0 kB\nActive: 215412 kB\nInactive: 48164 kB\nActive(anon): 105132 kB\nInactive(anon): 140 kB\nActive(file): 110280 kB\nInactive(file): 48024 kB\nUnevictable: 0 kB\nMlocked: 0 kB\nSwapTotal: 0 kB\nSwapFree: 0 kB\nDirty: 140 kB\nWriteback: 0 kB\nAnonPages: 105120 kB\nMapped: 8908 kB\nShmem: 156 kB\nSlab: 18796 kB\nSReclaimable: 12944 kB\nSUnreclaim: 5852 kB\nKernelStack: 872 kB\nPageTables: 1752 kB\nNFS_Unstable: 0 kB\nBounce: 0 kB\nWritebackTmp: 0 kB\nCommitLimit: 8762784 kB\nCommitted_AS: 328200 kB\nVmallocTotal: 34359738367 kB\nVmallocUsed: 64000 kB\nVmallocChunk: 34359674252 kB\nHardwareCorrupted: 0 kB\nAnonHugePages: 0 kB\nHugePages_Total: 0\nHugePages_Free: 0\nHugePages_Rsvd: 0\nHugePages_Surp: 0\nHugepagesize: 2048 kB\nDirectMap4k: 17920000 kB\nDirectMap2M: 0 kB\n">>}, {system_memory_data, [{system_total_memory,17946181632}, {free_swap,0}, {total_swap,0}, {cached_memory,61657088}, {buffered_memory,100626432}, {free_memory,17441447936}, {total_memory,17946181632}]}, {statistics, [{wall_clock,{226614,0}}, {context_switches,{54323,0}}, {garbage_collection,{9405,30317042,0}}, {io,{{input,9244335},{output,4792721}}}, {reductions,{17004394,3398547}}, {run_queue,0}, {runtime,{1820,200}}]}]}]

INFO REPORT <6524.607.0> 2011-11-30 18:52:19 ===============================================================================
ns_1@127.0.0.1:<6524.607.0>:stats_collector:83: Stats for bucket "default":
accepting_conns 1 auth_cmds 0 auth_errors 0 bucket_active_conns 1 bucket_conns 1 bytes_read 35915 bytes_written 1309591 cas_badval 0 cas_hits 0 cas_misses 0 cmd_flush 0 cmd_get 0 cmd_set 0 conn_yields 0 connection_structures 11 curr_connections 11 curr_items 0 curr_items_tot 0 daemon_connections 10 decr_hits 0 decr_misses 0 delete_hits 0 delete_misses 0 ep_bg_fetched 0 ep_commit_num 0 ep_commit_time 0 ep_commit_time_total 0 ep_data_age 0 ep_data_age_highwat 0 ep_db_cleaner_status complete ep_db_strategy multiMTVBDB ep_dbinit 1 ep_dbname /opt/membase/var/lib/membase/data/default-data/default ep_dbshards 4 ep_diskqueue_drain 0 ep_diskqueue_fill 0 ep_diskqueue_items 0 ep_diskqueue_memory 0 ep_diskqueue_pending 0 ep_exp_pager_stime 3600 ep_expired 0 ep_flush_all false ep_flush_duration 0 ep_flush_duration_highwat 0 ep_flush_duration_total 0 ep_flush_preempts 0 ep_flusher_state running ep_flusher_todo 0 ep_io_num_read 0 ep_io_num_write 0 ep_io_read_bytes 0 ep_io_write_bytes 0 ep_item_begin_failed 0 ep_item_commit_failed 0 ep_item_flush_expired 0 ep_item_flush_failed 0 ep_items_rm_from_checkpoints 0 ep_kv_size 32768 ep_latency_arith_cmd 0 ep_latency_get_cmd 0 ep_latency_store_cmd 0 ep_max_data_size 104857600 ep_max_txn_size 1000 ep_mem_high_wat 78643200 ep_mem_low_wat 62914560 ep_min_data_age 0 ep_num_active_non_resident 0 ep_num_checkpoint_remover_runs 20 ep_num_eject_failures 0 ep_num_eject_replicas 0 ep_num_expiry_pager_runs 0 ep_num_non_resident 0 ep_num_not_my_vbuckets 0 ep_num_pager_runs 0 ep_num_value_ejects 0 ep_onlineupdate false ep_onlineupdate_revert_add 0
ep_onlineupdate_revert_delete 0 ep_onlineupdate_revert_update 0 ep_oom_errors 0 ep_overhead 26935936 ep_pending_ops 0 ep_pending_ops_max 0 ep_pending_ops_max_duration 0 ep_pending_ops_total 0 ep_queue_age_cap 900 ep_queue_size 0 ep_storage_age 0 ep_storage_age_highwat 0 ep_storage_type featured ep_store_max_concurrency 10 ep_store_max_readers 9 ep_store_max_readwrite 1 ep_tap_bg_fetch_requeued 0 ep_tap_bg_fetched 0 ep_tap_keepalive 300 ep_tmp_oom_errors 0 ep_too_old 0 ep_too_young 0 ep_total_cache_size 0 ep_total_del_items 0 ep_total_enqueued 0 ep_total_new_items 0 ep_total_persisted 0 ep_uncommitted_items 0 ep_value_size 32768 ep_vb_total 1024 ep_vbucket_del 0 ep_vbucket_del_fail 0 ep_version 1.7.1.1_39_g4bd26a3 ep_warmed_up 0 ep_warmup true ep_warmup_dups 0 ep_warmup_oom 0 ep_warmup_thread complete ep_warmup_time 15688 get_hits 0 get_misses 0 incr_hits 0 incr_misses 0 libevent 2.0.11-stable limit_maxbytes 67108864 listen_disabled_num 0 mem_used 26968704 pid 758 pointer_size 64 rejected_conns 0 rusage_system 0.212013 rusage_user 0.568035 threads 4 time 1322679138 total_connections 11 uptime 267 vb_active_curr_items 0 vb_active_eject 0 vb_active_ht_memory 25673728 vb_active_itm_memory 0 vb_active_num 1024 vb_active_num_non_resident 0 vb_active_ops_create 0 vb_active_ops_delete 0 vb_active_ops_reject 0 vb_active_ops_update 0 vb_active_perc_mem_resident 0 vb_active_queue_age 0 vb_active_queue_drain 0 vb_active_queue_fill 0 vb_active_queue_memory 0 vb_active_queue_pending 0 vb_active_queue_size 0 vb_dead_num 0 vb_pending_curr_items 0 vb_pending_eject 0 vb_pending_ht_memory 0 vb_pending_itm_memory 0 vb_pending_num 0 vb_pending_num_non_resident 0 vb_pending_ops_create 0 vb_pending_ops_delete 0 vb_pending_ops_reject 0 vb_pending_ops_update 0 vb_pending_perc_mem_resident 0 vb_pending_queue_age 0 vb_pending_queue_drain 0 vb_pending_queue_fill 0 vb_pending_queue_memory 0 vb_pending_queue_pending 0 vb_pending_queue_size 0 vb_replica_curr_items 0 vb_replica_eject 0 vb_replica_ht_memory 0 vb_replica_itm_memory 0 vb_replica_num 0 vb_replica_num_non_resident 0 vb_replica_ops_create 0 vb_replica_ops_delete 0 vb_replica_ops_reject 0 vb_replica_ops_update 0 vb_replica_perc_mem_resident 0 vb_replica_queue_age 0 vb_replica_queue_drain 0 vb_replica_queue_fill 0 vb_replica_queue_memory 0 vb_replica_queue_pending 0 vb_replica_queue_size 0 version 1.4.4_461_gf99c147

INFO REPORT <6524.193.0> 2011-11-30 18:52:52 ===============================================================================
ns_1@127.0.0.1:<6524.193.0>:ns_doctor:86: Current node statuses: [{'ns_1@127.0.0.1', [{last_heard,{1322,679167,272448}}, {active_buckets,["default"]}, {ready_buckets,["default"]}, {replication,[{"default",0.0}]}, {memory, [{total,33800904}, {processes,13054520}, {processes_used,13030184}, {system,20746384}, {atom,830761}, {atom_used,829980}, {binary,1151696}, {code,7912495}, {ets,1349728}]}, {system_stats, [{cpu_utilization_rate,0.25839793281653745}, {swap_total,0}, {swap_used,0}]}, {interesting_stats, [{curr_items,0},{curr_items_tot,0},{vb_replica_curr_items,0}]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.5"}, {mnesia,"4.4.17"}, {kernel,"2.14.3"}, {sasl,"2.1.9.3"}, {ns_server,"1.7.2r-20-g6604356"}, {stdlib,"1.17.3"}]}, {system_arch,"x86_64-unknown-linux-gnu"}, {wall_clock,296}, {memory_data,{17946181632,505495552,{<6524.545.0>,1943480}}}, {disk_data, [{"/",8256952,15}, {"/dev",8754940,1}, {"/run",3505116,1}, {"/run/lock",5120,0}, {"/run/shm",8762784,0}, {"/mnt",423135208,1}]}, {meminfo, <<"MemTotal: 17525568
kB\nMemFree: 17029440 kB\nBuffers: 98268 kB\nCached: 60984 kB\nSwapCached: 0 kB\nActive: 217572 kB\nInactive: 48820 kB\nActive(anon): 107152 kB\nInactive(anon): 140 kB\nActive(file): 110420 kB\nInactive(file): 48680 kB\nUnevictable: 0 kB\nMlocked: 0 kB\nSwapTotal: 0 kB\nSwapFree: 0 kB\nDirty: 48 kB\nWriteback: 0 kB\nAnonPages: 107136 kB\nMapped: 8908 kB\nShmem: 156 kB\nSlab: 18812 kB\nSReclaimable: 12960 kB\nSUnreclaim: 5852 kB\nKernelStack: 872 kB\nPageTables: 1752 kB\nNFS_Unstable: 0 kB\nBounce: 0 kB\nWritebackTmp: 0 kB\nCommitLimit: 8762784 kB\nCommitted_AS: 331792 kB\nVmallocTotal: 34359738367 kB\nVmallocUsed: 64000 kB\nVmallocChunk: 34359674252 kB\nHardwareCorrupted: 0 kB\nAnonHugePages: 0 kB\nHugePages_Total: 0\nHugePages_Free: 0\nHugePages_Rsvd: 0\nHugePages_Surp: 0\nHugepagesize: 2048 kB\nDirectMap4k: 17920000 kB\nDirectMap2M: 0 kB\n">>}, {system_memory_data, [{system_total_memory,17946181632}, {free_swap,0}, {total_swap,0}, {cached_memory,62447616}, {buffered_memory,100626432}, {free_memory,17438146560}, {total_memory,17946181632}]}, {statistics, [{wall_clock,{286614,0}}, {context_switches,{74603,0}}, {garbage_collection,{12767,49567666,0}}, {io,{{input,9385681},{output,5643297}}}, {reductions,{29371251,3406106}}, {run_queue,0}, {runtime,{2560,200}}]}]}]

INFO REPORT <6524.64.0> 2011-11-30 18:53:25 ===============================================================================
ns_1@127.0.0.1:<6524.64.0>:ns_cluster:90: handling add_node("10.13.41.47", 8091, ..)

INFO REPORT <6524.64.0> 2011-11-30 18:53:28 ===============================================================================
ns_1@127.0.0.1:<6524.64.0>:ns_cluster:253: Decided to change address to "10.13.41.69"

INFO REPORT <3.170.0> 2011-11-30 18:53:28 ===============================================================================
ns_log: logging ns_node_disco:5:Node nonode@nohost saw that node 'ns_1@127.0.0.1' went down.
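The ns_node_disco "went down" line above (and the matching "came up" line further on) is driven by Erlang's built-in node monitoring: distribution is stopped here as a prelude to renaming the node, and subscribed processes receive nodedown/nodeup messages. A minimal sketch of that subscription:

    %% Subscribe to node up/down events and log one transition, in the
    %% spirit of the ns_node_disco messages in this dump.
    log_node_transition() ->
        ok = net_kernel:monitor_nodes(true),
        receive
            {nodeup, Node} ->
                error_logger:info_msg("Node ~p came up.~n", [Node]);
            {nodedown, Node} ->
                error_logger:info_msg("Node ~p went down.~n", [Node])
        end.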
INFO REPORT <3.57.0> 2011-11-30 18:53:28 ===============================================================================
Adjusted IP to "10.13.41.69"

INFO REPORT <3.57.0> 2011-11-30 18:53:28 ===============================================================================
nonode@nohost:<3.57.0>:dist_manager:105: Attempting to bring up net_kernel with name 'ns_1@10.13.41.69'

INFO REPORT <3.169.0> 2011-11-30 18:53:28 ===============================================================================
ns_node_disco_log: nodes changed: []

PROGRESS REPORT <3.1336.0> 2011-11-30 18:53:28 ===============================================================================
supervisor {local,net_sup} started [{pid,<3.1337.0>}, {name,erl_epmd}, {mfargs,{erl_epmd,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}]

PROGRESS REPORT <3.1336.0> 2011-11-30 18:53:28 ===============================================================================
supervisor {local,net_sup} started [{pid,<3.1338.0>}, {name,auth}, {mfargs,{auth,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}]

PROGRESS REPORT <0.1336.0> 2011-11-30 18:53:28 ===============================================================================
supervisor {local,net_sup} started [{pid,<0.1339.0>}, {name,net_kernel}, {mfargs,{net_kernel,start_link,[['ns_1@10.13.41.69',longnames]]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}]

PROGRESS REPORT <0.11.0> 2011-11-30 18:53:28 ===============================================================================
supervisor {local,kernel_sup} started [{pid,<0.1336.0>}, {name,net_sup_dynamic}, {mfargs,{erl_distribution,start_link, [['ns_1@10.13.41.69',longnames]]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,supervisor}]

INFO REPORT <0.170.0> 2011-11-30 18:53:28 ===============================================================================
ns_log: logging ns_node_disco:4:Node 'ns_1@10.13.41.69' saw that node 'ns_1@10.13.41.69' came up.

INFO REPORT <0.57.0> 2011-11-30 18:53:28 ===============================================================================
Re-setting cookie {sgzcoaesvptwzitb,'ns_1@10.13.41.69'}

INFO REPORT <0.57.0> 2011-11-30 18:53:28 ===============================================================================
saving ip config to "/opt/membase/var/lib/membase/ip"

INFO REPORT <0.57.0> 2011-11-30 18:53:28 ===============================================================================
save_address_config: ok

INFO REPORT <0.67.0> 2011-11-30 18:53:28 ===============================================================================
ns_1@10.13.41.69:<0.67.0>:mb_mnesia:132: Renaming node from 'ns_1@127.0.0.1' to 'ns_1@10.13.41.69'.

INFO REPORT <0.7.0> 2011-11-30 18:53:28 ===============================================================================
application mnesia exited stopped type temporary

INFO REPORT <0.67.0> 2011-11-30 18:53:28 ===============================================================================
ns_1@10.13.41.69:<0.67.0>:mb_mnesia:232: Found backup. Restoring Mnesia database.
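The dist_manager sequence above (Adjusted IP, bring up net_kernel, Re-setting cookie) is the standard way to rename a live Erlang node: stop distribution, restart it under the new name, and restore the cluster cookie. A compressed sketch, using the node name and cookie that appear in the log; error handling is omitted.

    %% Sketch of renaming a running node, as dist_manager does above.
    rename_node() ->
        ok = net_kernel:stop(),                                %% drop 'ns_1@127.0.0.1'
        {ok, _NetKernelPid} =
            net_kernel:start(['ns_1@10.13.41.69', longnames]), %% come back up renamed
        true = erlang:set_cookie(node(), sgzcoaesvptwzitb),    %% re-set the cookie
        ok.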
ERROR REPORT <0.608.0> 2011-11-30 18:53:28 =============================================================================== ** Generic server 'stats_archiver-default' terminating ** Last message in was {cascade,minute,hour,4} ** When Server state == {state,"default"} ** Reason for termination == ** {{badmatch,{aborted,{node_not_running,'ns_1@10.13.41.69'}}}, [{stats_archiver,cascade,4}, {stats_archiver,do_handle_info,2}, {stats_archiver,handle_info,2}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]} CRASH REPORT <0.608.0> 2011-11-30 18:53:28 =============================================================================== Crashing process initial_call {stats_archiver,init,['Argument__1']} pid <0.608.0> registered_name 'stats_archiver-default' error_info {exit,{{badmatch,{aborted,{node_not_running,'ns_1@10.13.41.69'}}}, [{stats_archiver,cascade,4}, {stats_archiver,do_handle_info,2}, {stats_archiver,handle_info,2}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]}, [{gen_server,terminate,6},{proc_lib,init_p_do_apply,3}]} ancestors ['single_bucket_sup-default',<0.598.0>] messages [] links [<0.207.0>,<0.599.0>,<0.53.0>] dictionary [] trap_exit false status running heap_size 6765 stack_size 24 reductions 954312 SUPERVISOR REPORT <0.599.0> 2011-11-30 18:53:28 =============================================================================== Reporting supervisor {local,'single_bucket_sup-default'} Child process errorContext child_terminated reason {{badmatch,{aborted,{node_not_running,'ns_1@10.13.41.69'}}}, [{stats_archiver,cascade,4}, {stats_archiver,do_handle_info,2}, {stats_archiver,handle_info,2}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]} pid <0.608.0> name {stats_archiver,"default"} mfargs {stats_archiver,start_link,["default"]} restart_type permanent shutdown 1000 child_type worker PROGRESS REPORT <0.599.0> 2011-11-30 18:53:28 =============================================================================== supervisor {local,'single_bucket_sup-default'} started [{pid,<0.1356.0>}, {name,{stats_archiver,"default"}}, {mfargs,{stats_archiver,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <0.599.0> 2011-11-30 18:53:28 =============================================================================== supervisor {local,'single_bucket_sup-default'} started [{pid,<0.1357.0>}, {name,{stats_reader,"default"}}, {mfargs,{stats_reader,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <0.599.0> 2011-11-30 18:53:28 =============================================================================== supervisor {local,'single_bucket_sup-default'} started [{pid,<0.1358.0>}, {name,{failover_safeness_level,"default"}}, {mfargs,{failover_safeness_level,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <0.1362.0> 2011-11-30 18:53:28 =============================================================================== supervisor {local,mnesia_sup} started [{pid,<0.1363.0>}, {name,mnesia_event}, {mfargs,{mnesia_sup,start_event,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}] PROGRESS REPORT <0.1364.0> 2011-11-30 18:53:28 =============================================================================== supervisor {local,mnesia_kernel_sup} started [{pid,<0.1365.0>}, {name,mnesia_monitor}, {mfargs,{mnesia_monitor,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] PROGRESS REPORT <0.1364.0> 
2011-11-30 18:53:28 =============================================================================== supervisor {local,mnesia_kernel_sup} started [{pid,<0.1366.0>}, {name,mnesia_subscr}, {mfargs,{mnesia_subscr,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] PROGRESS REPORT <0.1364.0> 2011-11-30 18:53:28 =============================================================================== supervisor {local,mnesia_kernel_sup} started [{pid,<0.1367.0>}, {name,mnesia_locker}, {mfargs,{mnesia_locker,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] PROGRESS REPORT <0.1364.0> 2011-11-30 18:53:28 =============================================================================== supervisor {local,mnesia_kernel_sup} started [{pid,<0.1368.0>}, {name,mnesia_recover}, {mfargs,{mnesia_recover,start,[]}}, {restart_type,permanent}, {shutdown,180000}, {child_type,worker}] INFO REPORT <0.182.0> 2011-11-30 18:53:28 =============================================================================== ns_1@10.13.41.69:<0.182.0>:mb_master:160: Node changed name from 'ns_1@127.0.0.1' to 'ns_1@10.13.41.69'. Updating state. PROGRESS REPORT <0.1364.0> 2011-11-30 18:53:28 =============================================================================== supervisor {local,mnesia_kernel_sup} started [{pid,<0.1369.0>}, {name,mnesia_tm}, {mfargs,{mnesia_tm,start,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}] PROGRESS REPORT <0.1364.0> 2011-11-30 18:53:28 =============================================================================== supervisor {local,mnesia_kernel_sup} started [{pid,<0.1440.0>}, {name,mnesia_checkpoint_sup}, {mfargs,{mnesia_checkpoint_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] ERROR REPORT <0.211.0> 2011-11-30 18:53:28 =============================================================================== ** Generic server 'stats_archiver-@system' terminating ** Last message in was {stats,"@system", {stat_entry,1322679208266, [{cpu_idle_ms,3870}, {cpu_local_ms,3980}, {cpu_utilization_rate,2.763819095477387}, {mem_actual_free,17601146880}, {mem_actual_used,345034752}, {mem_free,17438400512}, {mem_total,17946181632}, {mem_used,507781120}, {swap_total,0}, {swap_used,0}]}} ** When Server state == {state,"@system"} ** Reason for termination == ** {{badmatch,{aborted,{no_exists,'stats_archiver-@system-minute'}}}, [{stats_archiver,do_handle_info,2}, {stats_archiver,handle_info,2}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]} PROGRESS REPORT <0.1364.0> 2011-11-30 18:53:28 =============================================================================== supervisor {local,mnesia_kernel_sup} started [{pid,<0.1441.0>}, {name,mnesia_snmp_sup}, {mfargs,{mnesia_snmp_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] CRASH REPORT <0.211.0> 2011-11-30 18:53:28 =============================================================================== Crashing process initial_call {stats_archiver,init,['Argument__1']} pid <0.211.0> registered_name 'stats_archiver-@system' error_info {exit, {{badmatch,{aborted,{no_exists,'stats_archiver-@system-minute'}}}, [{stats_archiver,do_handle_info,2}, {stats_archiver,handle_info,2}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]}, [{gen_server,terminate,6},{proc_lib,init_p_do_apply,3}]} ancestors [ns_server_sup,ns_server_cluster_sup,<0.51.0>] messages [{stats,"default", {stat_entry,1322679208266, [{bytes_read,86}, {bytes_written,8129}, 
{cas_badval,0}, {cas_hits,0}, {cas_misses,0}, {cmd_get,0}, {cmd_set,0}, {curr_connections,11}, {curr_items,0}, {curr_items_tot,0}, {decr_hits,0}, {decr_misses,0}, {delete_hits,0}, {delete_misses,0}, {disk_write_queue,0}, {ep_bg_fetched,0}, {ep_diskqueue_drain,0}, {ep_diskqueue_fill,0}, {ep_diskqueue_items,0}, {ep_flusher_todo,0}, {ep_ht_memory,25673728}, {ep_kv_size,32768}, {ep_max_data_size,104857600}, {ep_num_non_resident,0}, {ep_num_value_ejects,0}, {ep_num_value_ejects,0}, {ep_oom_errors,0}, {ep_ops_create,0}, {ep_ops_update,0}, {ep_overhead,26935936}, {ep_queue_size,0}, {ep_tap_rebalance_count,0}, {ep_tap_rebalance_qlen,0}, {ep_tap_rebalance_queue_backfillremaining,0}, {ep_tap_rebalance_queue_backoff,0}, {ep_tap_rebalance_queue_drain,0}, {ep_tap_rebalance_queue_fill,0}, {ep_tap_rebalance_queue_itemondisk,0}, {ep_tap_rebalance_total_backlog_size,0}, {ep_tap_replica_count,0}, {ep_tap_replica_qlen,0}, {ep_tap_replica_queue_backfillremaining,0}, {ep_tap_replica_queue_backoff,0}, {ep_tap_replica_queue_drain,0}, {ep_tap_replica_queue_fill,0}, {ep_tap_replica_queue_itemondisk,0}, {ep_tap_replica_total_backlog_size,0}, {ep_tap_total_count,0}, {ep_tap_total_qlen,0}, {ep_tap_total_queue_backfillremaining,0}, {ep_tap_total_queue_backoff,0}, {ep_tap_total_queue_drain,0}, {ep_tap_total_queue_fill,0}, {ep_tap_total_queue_itemondisk,0}, {ep_tap_total_total_backlog_size,0}, {ep_tap_user_count,0}, {ep_tap_user_qlen,0}, {ep_tap_user_queue_backfillremaining,0}, {ep_tap_user_queue_backoff,0}, {ep_tap_user_queue_drain,0}, {ep_tap_user_queue_fill,0}, {ep_tap_user_queue_itemondisk,0}, {ep_tap_user_total_backlog_size,0}, {ep_tmp_oom_errors,0}, {ep_vb_total,1024}, {evictions,0}, {get_hits,0}, {get_misses,0}, {incr_hits,0}, {incr_misses,0}, {mem_used,26968704}, {misses,0}, {ops,0}, {vb_active_eject,0}, {vb_active_ht_memory,25673728}, {vb_active_itm_memory,0}, {vb_active_num,1024}, {vb_active_num_non_resident,0}, {vb_active_ops_create,0}, {vb_active_ops_update,0}, {vb_active_queue_age,0}, {vb_active_queue_drain,0}, {vb_active_queue_fill,0}, {vb_active_queue_size,0}, {vb_pending_curr_items,0}, {vb_pending_eject,0}, {vb_pending_ht_memory,0}, {vb_pending_itm_memory,0}, {vb_pending_num,0}, {vb_pending_num_non_resident,0}, {vb_pending_ops_create,0}, {vb_pending_ops_update,0}, {vb_pending_queue_age,0}, {vb_pending_queue_drain,0}, {vb_pending_queue_fill,0}, {vb_pending_queue_size,0}, {vb_replica_curr_items,0}, {vb_replica_eject,0}, {vb_replica_ht_memory,0}, {vb_replica_itm_memory,0}, {vb_replica_num,0}, {vb_replica_num_non_resident,0}, {vb_replica_ops_create,0}, {vb_replica_ops_update,0}, {vb_replica_queue_age,0}, {vb_replica_queue_drain,0}, {vb_replica_queue_fill,0}, {vb_replica_queue_size,0}, {vb_total_queue_age,0}]}}] links [<0.165.0>,<0.207.0>,<0.53.0>] dictionary [] trap_exit false status running heap_size 987 stack_size 24 reductions 1652374 SUPERVISOR REPORT <0.165.0> 2011-11-30 18:53:28 =============================================================================== Reporting supervisor {local,ns_server_sup} Child process errorContext child_terminated reason {{badmatch,{aborted,{no_exists,'stats_archiver-@system-minute'}}}, [{stats_archiver,do_handle_info,2}, {stats_archiver,handle_info,2}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]} pid <0.211.0> name {stats_archiver,"@system"} mfargs {stats_archiver,start_link,["@system"]} restart_type permanent shutdown 1000 child_type worker PROGRESS REPORT <0.1364.0> 2011-11-30 18:53:28 
=============================================================================== supervisor {local,mnesia_kernel_sup} started [{pid,<0.1442.0>}, {name,mnesia_controller}, {mfargs,{mnesia_controller,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] PROGRESS REPORT <0.165.0> 2011-11-30 18:53:28 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<0.1443.0>}, {name,{stats_archiver,"@system"}}, {mfargs,{stats_archiver,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <0.1364.0> 2011-11-30 18:53:28 =============================================================================== supervisor {local,mnesia_kernel_sup} started [{pid,<0.1444.0>}, {name,mnesia_late_loader}, {mfargs,{mnesia_late_loader,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}] PROGRESS REPORT <0.1362.0> 2011-11-30 18:53:28 =============================================================================== supervisor {local,mnesia_sup} started [{pid,<0.1364.0>}, {name,mnesia_kernel_sup}, {mfargs,{mnesia_kernel_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] PROGRESS REPORT <0.7.0> 2011-11-30 18:53:28 =============================================================================== application mnesia started_at 'ns_1@10.13.41.69' INFO REPORT <0.67.0> 2011-11-30 18:53:28 =============================================================================== ns_1@10.13.41.69:<0.67.0>:mb_mnesia:245: Current config: [{access_module,mnesia}, {auto_repair,true}, {backup_module,mnesia_backup}, {checkpoints,[]}, {db_nodes,['ns_1@10.13.41.69']}, {debug,verbose}, {directory,"/opt/membase/var/lib/membase/mnesia"}, {dump_log_load_regulation,false}, {dump_log_time_threshold,180000}, {dump_log_update_in_place,true}, {dump_log_write_threshold,1000}, {embedded_mnemosyne,false}, {event_module,mnesia_event}, {extra_db_nodes,[]}, {fallback_activated,false}, {held_locks,[]}, {ignore_fallback_at_startup,false}, {fallback_error_function,{mnesia,lkill}}, {is_running,yes}, {local_tables,['stats_archiver-default-week','stats_archiver-@system-week', 'stats_archiver-default-day','stats_archiver-@system-day', 'stats_archiver-default-month','stats_archiver-@system-month', local_config,'stats_archiver-@system-minute', 'stats_archiver-default-minute',cluster, 'stats_archiver-default-year','stats_archiver-@system-year', 'stats_archiver-default-hour','stats_archiver-@system-hour', schema]}, {lock_queue,[]}, {log_version,"4.3"}, {master_node_tables,[]}, {max_wait_for_decision,10000}, {protocol_version,{7,6}}, {running_db_nodes,['ns_1@10.13.41.69']}, {schema_location,opt_disc}, {schema_version,{3,0}}, {subscribers,[<0.1363.0>,<0.67.0>]}, {tables,['stats_archiver-default-week','stats_archiver-@system-week', 'stats_archiver-default-day','stats_archiver-@system-day', 'stats_archiver-default-month','stats_archiver-@system-month', local_config,'stats_archiver-@system-minute', 'stats_archiver-default-minute',cluster, 'stats_archiver-default-year','stats_archiver-@system-year', 'stats_archiver-default-hour','stats_archiver-@system-hour',schema]}, {transaction_commits,2}, {transaction_failures,1}, {transaction_log_writes,0}, {transaction_restarts,0}, {transactions,[]}, {use_dir,true}, {core_dir,false}, {no_table_loaders,2}, {dc_dump_limit,4}, {send_compressed,0}, {version,"4.4.17"}] Peers: ['ns_1@10.13.41.69'] INFO REPORT <0.67.0> 2011-11-30 18:53:28 
=============================================================================== ns_1@10.13.41.69:<0.67.0>:mb_mnesia:166: Info from Mnesia: {mnesia_checkpoint,{{1322,679208,130485},'ns_1@127.0.0.1'}} starting: <0.1330.0> INFO REPORT <0.67.0> 2011-11-30 18:53:28 =============================================================================== ns_1@10.13.41.69:<0.67.0>:mb_mnesia:166: Info from Mnesia: {mnesia_checkpoint,{{1322,679208,130485},'ns_1@127.0.0.1'}} terminated: shutdown INFO REPORT <0.67.0> 2011-11-30 18:53:28 =============================================================================== ns_1@10.13.41.69:<0.67.0>:mb_mnesia:166: Info from Mnesia: mnesia_controller terminated: shutdown INFO REPORT <0.67.0> 2011-11-30 18:53:28 =============================================================================== ns_1@10.13.41.69:<0.67.0>:mb_mnesia:166: Info from Mnesia: mnesia_tm terminated: shutdown INFO REPORT <0.67.0> 2011-11-30 18:53:28 =============================================================================== ns_1@10.13.41.69:<0.67.0>:mb_mnesia:166: Info from Mnesia: mnesia_recover terminated: shutdown INFO REPORT <0.67.0> 2011-11-30 18:53:28 =============================================================================== ns_1@10.13.41.69:<0.67.0>:mb_mnesia:166: Info from Mnesia: mnesia_locker terminated: shutdown INFO REPORT <0.67.0> 2011-11-30 18:53:28 =============================================================================== ns_1@10.13.41.69:<0.67.0>:mb_mnesia:169: Saw Mnesia go down on 'ns_1@10.13.41.69' ERROR REPORT <0.6.0> 2011-11-30 18:53:28 =============================================================================== Truncated log event: {info_msg,<0.50.0>, {<0.159.0>,"renaming node conf ~p -> ~p:~n ~p ->~n ~p~n", [buckets,buckets, [{configs,[{"default", [{sasl_password,[]}, {num_replicas,1}, {ram_quota,104857600}, {auth_type,sasl}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@127.0.0.1']}, {map,[['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], 
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1'|...], [...]|...]}]}]}], [{configs,[{"default", [{sasl_password,[]}, {num_replicas,1}, {ram_quota,104857600}, {auth_type,sasl}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.13.41.69']}, {map,[['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69'|...], [...]|...]}]}]}]]}} INFO REPORT <0.159.0> 2011-11-30 18:53:28 =============================================================================== renaming node conf {node,'ns_1@127.0.0.1',memcached} -> {node, 'ns_1@10.13.41.69', memcached}: [{dbdir,"/opt/membase/var/lib/membase/data"}, {port,11210}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine,"/opt/membase/lib/memcached/bucket_engine.so"}, {engines, [{membase, 
[{engine,"/opt/membase/lib/memcached/ep.so"}, {initfile,"/opt/membase/etc/membase/init.sql"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false;shardpattern=%d/%b-%i.mb;db_strategy=multiMTVBDB"}]}, {memcached, [{engine,"/opt/membase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {verbosity,[]}] -> [{dbdir,"/opt/membase/var/lib/membase/data"}, {port,11210}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine,"/opt/membase/lib/memcached/bucket_engine.so"}, {engines, [{membase, [{engine,"/opt/membase/lib/memcached/ep.so"}, {initfile,"/opt/membase/etc/membase/init.sql"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false;shardpattern=%d/%b-%i.mb;db_strategy=multiMTVBDB"}]}, {memcached, [{engine,"/opt/membase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {verbosity,[]}] ERROR REPORT <0.1356.0> 2011-11-30 18:53:28 =============================================================================== ** Generic server 'stats_archiver-default' terminating ** Last message in was {stats,"default", {stat_entry,1322679208266, [{bytes_read,86}, {bytes_written,8129}, {cas_badval,0}, {cas_hits,0}, {cas_misses,0}, {cmd_get,0}, {cmd_set,0}, {curr_connections,11}, {curr_items,0}, {curr_items_tot,0}, {decr_hits,0}, {decr_misses,0}, {delete_hits,0}, {delete_misses,0}, {disk_write_queue,0}, {ep_bg_fetched,0}, {ep_diskqueue_drain,0}, {ep_diskqueue_fill,0}, {ep_diskqueue_items,0}, {ep_flusher_todo,0}, {ep_ht_memory,25673728}, {ep_kv_size,32768}, {ep_max_data_size,104857600}, {ep_num_non_resident,0}, {ep_num_value_ejects,0}, {ep_num_value_ejects,0}, {ep_oom_errors,0}, {ep_ops_create,0}, {ep_ops_update,0}, {ep_overhead,26935936}, {ep_queue_size,0}, {ep_tap_rebalance_count,0}, {ep_tap_rebalance_qlen,0}, {ep_tap_rebalance_queue_backfillremaining,0}, {ep_tap_rebalance_queue_backoff,0}, {ep_tap_rebalance_queue_drain,0}, {ep_tap_rebalance_queue_fill,0}, {ep_tap_rebalance_queue_itemondisk,0}, {ep_tap_rebalance_total_backlog_size,0}, {ep_tap_replica_count,0}, {ep_tap_replica_qlen,0}, {ep_tap_replica_queue_backfillremaining,0}, {ep_tap_replica_queue_backoff,0}, {ep_tap_replica_queue_drain,0}, {ep_tap_replica_queue_fill,0}, {ep_tap_replica_queue_itemondisk,0}, {ep_tap_replica_total_backlog_size,0}, {ep_tap_total_count,0}, {ep_tap_total_qlen,0}, {ep_tap_total_queue_backfillremaining,0}, {ep_tap_total_queue_backoff,0}, {ep_tap_total_queue_drain,0}, {ep_tap_total_queue_fill,0}, {ep_tap_total_queue_itemondisk,0}, {ep_tap_total_total_backlog_size,0}, {ep_tap_user_count,0}, {ep_tap_user_qlen,0}, {ep_tap_user_queue_backfillremaining,0}, {ep_tap_user_queue_backoff,0}, {ep_tap_user_queue_drain,0}, {ep_tap_user_queue_fill,0}, {ep_tap_user_queue_itemondisk,0}, {ep_tap_user_total_backlog_size,0}, {ep_tmp_oom_errors,0}, {ep_vb_total,1024}, {evictions,0}, {get_hits,0}, {get_misses,0}, {incr_hits,0}, {incr_misses,0}, {mem_used,26968704}, {misses,0}, {ops,0}, {vb_active_eject,0}, {vb_active_ht_memory,25673728}, {vb_active_itm_memory,0}, {vb_active_num,1024}, {vb_active_num_non_resident,0}, {vb_active_ops_create,0}, {vb_active_ops_update,0}, {vb_active_queue_age,0}, {vb_active_queue_drain,0}, {vb_active_queue_fill,0}, {vb_active_queue_size,0}, {vb_pending_curr_items,0}, {vb_pending_eject,0}, {vb_pending_ht_memory,0}, {vb_pending_itm_memory,0}, {vb_pending_num,0}, {vb_pending_num_non_resident,0}, {vb_pending_ops_create,0}, {vb_pending_ops_update,0}, {vb_pending_queue_age,0}, {vb_pending_queue_drain,0}, {vb_pending_queue_fill,0}, 
{vb_pending_queue_size,0}, {vb_replica_curr_items,0}, {vb_replica_eject,0}, {vb_replica_ht_memory,0}, {vb_replica_itm_memory,0}, {vb_replica_num,0}, {vb_replica_num_non_resident,0}, {vb_replica_ops_create,0}, {vb_replica_ops_update,0}, {vb_replica_queue_age,0}, {vb_replica_queue_drain,0}, {vb_replica_queue_fill,0}, {vb_replica_queue_size,0}, {vb_total_queue_age,0}]}} ** When Server state == {state,"default"} ** Reason for termination == ** {{badmatch,{aborted,{no_exists,'stats_archiver-default-minute'}}}, [{stats_archiver,do_handle_info,2}, {stats_archiver,handle_info,2}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]} CRASH REPORT <0.1356.0> 2011-11-30 18:53:28 =============================================================================== Crashing process initial_call {stats_archiver,init,['Argument__1']} pid <0.1356.0> registered_name 'stats_archiver-default' error_info {exit, {{badmatch,{aborted,{no_exists,'stats_archiver-default-minute'}}}, [{stats_archiver,do_handle_info,2}, {stats_archiver,handle_info,2}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]}, [{gen_server,terminate,6},{proc_lib,init_p_do_apply,3}]} ancestors ['single_bucket_sup-default',<0.598.0>] messages [] links [<0.207.0>,<0.599.0>,<0.53.0>] dictionary [] trap_exit false status running heap_size 2584 stack_size 24 reductions 1610 INFO REPORT <0.159.0> 2011-11-30 18:53:28 =============================================================================== renaming node conf nodes_wanted -> nodes_wanted: ['ns_1@127.0.0.1'] -> ['ns_1@10.13.41.69'] SUPERVISOR REPORT <0.599.0> 2011-11-30 18:53:28 =============================================================================== Reporting supervisor {local,'single_bucket_sup-default'} Child process errorContext child_terminated reason {{badmatch,{aborted,{no_exists,'stats_archiver-default-minute'}}}, [{stats_archiver,do_handle_info,2}, {stats_archiver,handle_info,2}, {gen_server,handle_msg,5}, {proc_lib,init_p_do_apply,3}]} pid <0.1356.0> name {stats_archiver,"default"} mfargs {stats_archiver,start_link,["default"]} restart_type permanent shutdown 1000 child_type worker INFO REPORT <0.159.0> 2011-11-30 18:53:28 =============================================================================== renaming node conf {node,'ns_1@127.0.0.1',config_version} -> {node, 'ns_1@10.13.41.69', config_version}: {1,7,2} -> {1,7,2} INFO REPORT <0.159.0> 2011-11-30 18:53:28 =============================================================================== renaming node conf {node,'ns_1@127.0.0.1',isasl} -> {node,'ns_1@10.13.41.69', isasl}: [{path,"/opt/membase/var/lib/membase/data/isasl.pw"}] -> [{path,"/opt/membase/var/lib/membase/data/isasl.pw"}] INFO REPORT <0.159.0> 2011-11-30 18:53:28 =============================================================================== renaming node conf {node,'ns_1@127.0.0.1',membership} -> {node, 'ns_1@10.13.41.69', membership}: active -> active INFO REPORT <0.159.0> 2011-11-30 18:53:28 =============================================================================== renaming node conf {node,'ns_1@127.0.0.1',moxi} -> {node,'ns_1@10.13.41.69', moxi}: [{port,11211},{verbosity,[]}] -> [{port,11211},{verbosity,[]}] INFO REPORT <0.159.0> 2011-11-30 18:53:28 =============================================================================== renaming node conf {node,'ns_1@127.0.0.1',ns_log} -> {node, 'ns_1@10.13.41.69', ns_log}: [{filename,"/opt/membase/var/lib/membase/data/ns_log"}] -> [{filename,"/opt/membase/var/lib/membase/data/ns_log"}] INFO REPORT <0.159.0> 
2011-11-30 18:53:28 =============================================================================== renaming node conf {node,'ns_1@127.0.0.1',rest} -> {node,'ns_1@10.13.41.69', rest}: [{port,8091},{port_meta,global}] -> [{port,8091},{port_meta,global}] PROGRESS REPORT <0.165.0> 2011-11-30 18:53:28 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<0.1456.0>}, {name,ns_doctor}, {mfargs,{ns_doctor,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] INFO REPORT <0.64.0> 2011-11-30 18:53:28 =============================================================================== ns_1@10.13.41.69:<0.64.0>:ns_cluster:260: Renamed node. New name is 'ns_1@10.13.41.69'. ERROR REPORT <0.191.0> 2011-11-30 18:53:28 =============================================================================== ns_1@10.13.41.69:<0.191.0>:ns_heart:137: Failed to grab system stats: {error,{exit,{aborted,{no_exists,['stats_archiver-@system-minute']}}}} ERROR REPORT <0.191.0> 2011-11-30 18:53:28 =============================================================================== ns_1@10.13.41.69:<0.191.0>:ns_heart:156: Failed to get stats for bucket: "default": {'EXIT',{noproc,{gen_server,call, [{'stats_reader-default','ns_1@10.13.41.69'}, {latest,minute}]}}} PROGRESS REPORT <0.599.0> 2011-11-30 18:53:28 =============================================================================== supervisor {local,'single_bucket_sup-default'} started [{pid,<0.1458.0>}, {name,{stats_archiver,"default"}}, {mfargs,{stats_archiver,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <0.599.0> 2011-11-30 18:53:28 =============================================================================== supervisor {local,'single_bucket_sup-default'} started [{pid,<0.1463.0>}, {name,{stats_reader,"default"}}, {mfargs,{stats_reader,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <0.599.0> 2011-11-30 18:53:28 =============================================================================== supervisor {local,'single_bucket_sup-default'} started [{pid,<0.1464.0>}, {name,{failover_safeness_level,"default"}}, {mfargs,{failover_safeness_level,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] INFO REPORT <0.1456.0> 2011-11-30 18:53:28 =============================================================================== ns_1@10.13.41.69:<0.1456.0>:ns_doctor:82: Got initial status [{'ns_1@10.13.41.69', [{last_heard, {1322,679208, 402490}}, {active_buckets, ["default"]}, {ready_buckets, ["default"]}, {replication, [{"default", 0.0}]}, {memory, [{total, 32211888}, {processes, 12030552}, {processes_used, 12007016}, {system, 20181336}, {atom, 831569}, {atom_used, 830248}, {binary, 1120512}, {code, 7920010}, {ets, 785000}]}, {system_stats, [{cpu_utilization_rate, 0}, {swap_total, 0}, {swap_used, 0}]}, {interesting_stats, []}, {cluster_compatibility_version, 1}, {version, [{os_mon, "2.2.5"}, {mnesia, "4.4.17"}, {kernel, "2.14.3"}, {sasl, "2.1.9.3"}, {ns_server, "1.7.2r-20-g6604356"}, {stdlib, "1.17.3"}]}, {system_arch, "x86_64-unknown-linux-gnu"}, {wall_clock, 337}, {memory_data, {17946181632, 508162048, {<0.545.0>, 2172736}}}, {disk_data, [{"/", 8256952,15}, {"/dev", 8754940,1}, {"/run", 3505116,1}, {"/run/lock", 5120,0}, {"/run/shm", 8762784,0}, {"/mnt", 423135208, 1}]}, {meminfo, <<"MemTotal: 17525568 kB\nMemFree: 17030068 kB\nBuffers: 
98268 kB\nCached: 61580 kB\nSwapCached: 0 kB\nActive: 216820 kB\nInactive: 49272 kB\nActive(anon): 106304 kB\nInactive(anon): 140 kB\nActive(file): 110516 kB\nInactive(file): 49132 kB\nUnevictable: 0 kB\nMlocked: 0 kB\nSwapTotal: 0 kB\nSwapFree: 0 kB\nDirty: 164 kB\nWriteback: 0 kB\nAnonPages: 106232 kB\nMapped: 8908 kB\nShmem: 156 kB\nSlab: 18828 kB\nSReclaimable: 12976 kB\nSUnreclaim: 5852 kB\nKernelStack: 872 kB\nPageTables: 1756 kB\nNFS_Unstable: 0 kB\nBounce: 0 kB\nWritebackTmp: 0 kB\nCommitLimit: 8762784 kB\nCommitted_AS: 332148 kB\nVmallocTotal: 34359738367 kB\nVmallocUsed: 64000 kB\nVmallocChunk: 34359674252 kB\nHardwareCorrupted: 0 kB\nAnonHugePages: 0 kB\nHugePages_Total: 0\nHugePages_Free: 0\nHugePages_Rsvd: 0\nHugePages_Surp: 0\nHugepagesize: 2048 kB\nDirectMap4k: 17920000 kB\nDirectMap2M: 0 kB\n">>}, {system_memory_data, [{system_total_memory, 17946181632}, {free_swap, 0}, {total_swap, 0}, {cached_memory, 63057920}, {buffered_memory, 100626432}, {free_memory, 17438789632}, {total_memory, 17946181632}]}, {statistics, [{wall_clock, {331624,1}}, {context_switches, {89358,0}}, {garbage_collection, {15555, 63704093, 0}}, {io, {{input, 9486622}, {output, 6290172}}}, {reductions, {38243947, 2601327}}, {run_queue, 1}, {runtime, {3100, 160}}]}]}] INFO REPORT <0.158.0> 2011-11-30 18:53:28 =============================================================================== config change: {node,'ns_1@10.13.41.69',rest} -> [{port,8091},{port_meta,global}] INFO REPORT <0.158.0> 2011-11-30 18:53:28 =============================================================================== config change: {node,'ns_1@10.13.41.69',ns_log} -> [{filename,"/opt/membase/var/lib/membase/data/ns_log"}] INFO REPORT <0.158.0> 2011-11-30 18:53:28 =============================================================================== config change: {node,'ns_1@10.13.41.69',moxi} -> [{port,11211},{verbosity,[]}] INFO REPORT <0.158.0> 2011-11-30 18:53:28 =============================================================================== config change: {node,'ns_1@10.13.41.69',membership} -> active INFO REPORT <0.158.0> 2011-11-30 18:53:28 =============================================================================== config change: {node,'ns_1@10.13.41.69',isasl} -> [{path,"/opt/membase/var/lib/membase/data/isasl.pw"}] INFO REPORT <0.158.0> 2011-11-30 18:53:28 =============================================================================== config change: {node,'ns_1@10.13.41.69',config_version} -> {1,7,2} INFO REPORT <0.158.0> 2011-11-30 18:53:28 =============================================================================== ns_node_disco_conf_events config on nodes_wanted INFO REPORT <0.158.0> 2011-11-30 18:53:28 =============================================================================== config change: nodes_wanted -> ['ns_1@10.13.41.69'] INFO REPORT <0.182.0> 2011-11-30 18:53:28 =============================================================================== ns_1@10.13.41.69:<0.182.0>:mb_master:374: List of peers has changed from ['ns_1@127.0.0.1'] to ['ns_1@10.13.41.69'] INFO REPORT <0.182.0> 2011-11-30 18:53:28 =============================================================================== ns_1@10.13.41.69:<0.182.0>:mb_master:378: List of new peers has changed from ['ns_1@127.0.0.1'] to [] INFO REPORT <0.1468.0> 2011-11-30 18:53:28 =============================================================================== ns_node_disco cookie_sync INFO REPORT <0.1468.0> 2011-11-30 18:53:28 
=============================================================================== ns_node_disco: nodes_wanted updated: ['ns_1@10.13.41.69'], with cookie: sgzcoaesvptwzitb INFO REPORT <0.1468.0> 2011-11-30 18:53:28 =============================================================================== ns_node_disco: nodes_wanted pong: ['ns_1@10.13.41.69'], with cookie: sgzcoaesvptwzitb INFO REPORT <0.158.0> 2011-11-30 18:53:28 =============================================================================== config change: {node,'ns_1@10.13.41.69',memcached} -> [{dbdir,"/opt/membase/var/lib/membase/data"}, {port,11210}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine,"/opt/membase/lib/memcached/bucket_engine.so"}, {engines, [{membase, [{engine,"/opt/membase/lib/memcached/ep.so"}, {initfile,"/opt/membase/etc/membase/init.sql"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false;shardpattern=%d/%b-%i.mb;db_strategy=multiMTVBDB"}]}, {memcached, [{engine,"/opt/membase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {verbosity,[]}] INFO REPORT <0.158.0> 2011-11-30 18:53:28 =============================================================================== config change: buckets -> [{configs,[{"default", [{sasl_password,[]}, {num_replicas,1}, {ram_quota,104857600}, {auth_type,sasl}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.13.41.69']}, {map,[['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], 
['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69',undefined], ['ns_1@10.13.41.69'|...], [...]|...]}]}]}] INFO REPORT <0.158.0> 2011-11-30 18:53:28 =============================================================================== ns_node_disco_conf_events config all ERROR REPORT <0.64.0> 2011-11-30 18:53:28 =============================================================================== ns_1@10.13.41.69:<0.64.0>:stats_reader:185: Bad replies: [{'ns_1@10.13.41.69', {error, {exit, {aborted, {no_exists, ['stats_archiver-default-minute']}}}}}] INFO REPORT <0.64.0> 2011-11-30 18:53:28 =============================================================================== ns_1@10.13.41.69:<0.64.0>:ns_cluster:298: Posting node info to engage_cluster on {"10.13.41.47", 8091}: {struct, [{availableStorage, {struct, [{hdd, [{struct, [{path,<<"/">>}, {sizeKBytes,8256952}, {usagePercent,15}]}, {struct, [{path,<<"/dev">>}, {sizeKBytes,8754940}, {usagePercent,1}]}, {struct, [{path,<<"/run">>}, {sizeKBytes,3505116}, {usagePercent,1}]}, {struct, [{path,<<"/run/lock">>}, {sizeKBytes,5120}, {usagePercent,0}]}, {struct, [{path,<<"/run/shm">>}, {sizeKBytes,8762784}, {usagePercent,0}]}, {struct, [{path,<<"/mnt">>}, {sizeKBytes,423135208}, {usagePercent,1}]}]}]}}, {memoryQuota,15000}, {storageTotals, {struct, [{ram, {struct, [{usedByData,0}, {total,17946181632}, {quotaTotal,15728640000}, {used,508162048}]}}, {hdd, {struct, [{usedByData,1808192}, {total,8455118848}, {quotaTotal,8455118848}, {used,1268267827}, {free,7186851021}]}}]}}, {storage, {struct, [{ssd,[]}, {hdd, [{struct, [{path,<<"/opt/membase/var/lib/membase/data">>}, {quotaMb,none}, {state,ok}]}]}]}}, {systemStats, {struct,[{cpu_utilization_rate,0},{swap_total,0},{swap_used,0}]}}, {interestingStats,{struct,[]}}, {uptime,<<"337">>}, {memoryTotal,17946181632}, {memoryFree,17438019584}, {mcdMemoryReserved,13691}, {mcdMemoryAllocated,13691}, {otpNode,<<"ns_1@10.13.41.69">>}, {otpCookie,<<"sgzcoaesvptwzitb">>}, {clusterMembership,<<"active">>}, {status,<<"healthy">>}, {hostname,<<"10.13.41.69:8091">>}, {clusterCompatibility,1}, {version,<<"1.7.2r-20-g6604356">>}, {os,<<"x86_64-unknown-linux-gnu">>}, {ports,{struct,[{proxy,11211},{direct,11210}]}}]} INFO REPORT <0.175.0> 2011-11-30 18:53:28 =============================================================================== Pushing config INFO REPORT <0.175.0> 2011-11-30 18:53:28 =============================================================================== Pushing config done PROGRESS REPORT <0.1547.0> 2011-11-30 18:53:28 =============================================================================== supervisor {local,inets_sup} started [{pid,<0.1548.0>}, {name,ftp_sup}, {mfargs,{ftp_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] PROGRESS REPORT <0.1550.0> 2011-11-30 18:53:28 =============================================================================== supervisor {local,httpc_profile_sup} started [{pid,<0.1551.0>}, {name,httpc_manager}, {mfargs,{httpc_manager,start_link, [default,only_session_cookies,inets]}}, {restart_type,permanent}, {shutdown,4000}, 
{child_type,worker}] PROGRESS REPORT <0.1549.0> 2011-11-30 18:53:28 =============================================================================== supervisor {local,httpc_sup} started [{pid,<0.1550.0>}, {name,httpc_profile_sup}, {mfargs, {httpc_profile_sup,start_link, [[{httpc,{default,only_session_cookies}}]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] PROGRESS REPORT <0.1549.0> 2011-11-30 18:53:28 =============================================================================== supervisor {local,httpc_sup} started [{pid,<0.1552.0>}, {name,httpc_handler_sup}, {mfargs,{httpc_handler_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] PROGRESS REPORT <0.1547.0> 2011-11-30 18:53:28 =============================================================================== supervisor {local,inets_sup} started [{pid,<0.1549.0>}, {name,httpc_sup}, {mfargs,{httpc_sup,start_link, [[{httpc,{default,only_session_cookies}}]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] PROGRESS REPORT <0.1547.0> 2011-11-30 18:53:28 =============================================================================== supervisor {local,inets_sup} started [{pid,<0.1554.0>}, {name,httpd_sup}, {mfargs,{httpd_sup,start_link,[[]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] PROGRESS REPORT <0.1547.0> 2011-11-30 18:53:28 =============================================================================== supervisor {local,inets_sup} started [{pid,<0.1555.0>}, {name,tftp_sup}, {mfargs,{tftp_sup,start_link,[[]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] PROGRESS REPORT <0.7.0> 2011-11-30 18:53:28 =============================================================================== application inets started_at 'ns_1@10.13.41.69' INFO REPORT <0.64.0> 2011-11-30 18:53:31 =============================================================================== ns_1@10.13.41.69:<0.64.0>:ns_cluster:304: Reply from engage_cluster on {"10.13.41.47", 8091}: {ok,{struct,[{<<"availableStorage">>, {struct,[{<<"hdd">>, [{struct,[{<<"path">>,<<"/">>}, {<<"sizeKBytes">>,8256952}, {<<"usagePercent">>,15}]}, {struct,[{<<"path">>,<<"/dev">>}, {<<"sizeKBytes">>,8754940}, {<<"usagePercent">>,1}]}, {struct,[{<<"path">>,<<"/run">>}, {<<"sizeKBytes">>,3505116}, {<<"usagePercent">>,1}]}, {struct,[{<<"path">>,<<"/run/lock">>}, {<<"sizeKBytes">>,5120}, {<<"usagePercent">>,0}]}, {struct,[{<<"path">>,<<"/run/shm">>}, {<<"sizeKBytes">>,8762784}, {<<"usagePercent">>,0}]}, {struct,[{<<"path">>,<<"/mnt">>}, {<<"sizeKBytes">>,423135208}, {<<"usagePercent">>,1}]}]}]}}, {<<"memoryQuota">>,16090}, {<<"storageTotals">>, {struct,[{<<"ram">>, {struct,[{<<"usedByData">>,0}, {<<"total">>,17946181632.0}, {<<"quotaTotal">>,16871587840.0}, {<<"used">>,425013248}]}}, {<<"hdd">>, {struct,[{<<"usedByData">>,0}, {<<"total">>,8455118848.0}, {<<"quotaTotal">>,8455118848.0}, {<<"used">>,1268267827}, {<<"free">>,7186851021.0}]}}]}}, {<<"storage">>, {struct,[{<<"ssd">>,[]}, {<<"hdd">>, [{struct,[{<<"path">>, <<"/opt/membase/var/lib/membase/data">>}, {<<"quotaMb">>,<<"none">>}, {<<"state">>,<<"ok">>}]}]}]}}, {<<"systemStats">>, {struct,[{<<"cpu_utilization_rate">>,0}, {<<"swap_total">>,0}, {<<"swap_used">>,0}]}}, {<<"interestingStats">>,{struct,[]}}, {<<"uptime">>,<<"351">>}, {<<"memoryTotal">>,17946181632.0}, {<<"memoryFree">>,17521168384.0}, {<<"mcdMemoryReserved">>,13691}, {<<"mcdMemoryAllocated">>,13691}, {<<"otpNode">>,<<"ns_1@10.13.41.47">>}, 
{<<"otpCookie">>,<<"sgzcoaesvptwzitb">>}, {<<"clusterMembership">>,<<"active">>}, {<<"status">>,<<"healthy">>}, {<<"hostname">>,<<"10.13.41.47:8091">>}, {<<"clusterCompatibility">>,1}, {<<"version">>,<<"1.7.2r-20-g6604356">>}, {<<"os">>,<<"x86_64-unknown-linux-gnu">>}, {<<"ports">>, {struct,[{<<"proxy">>,11211},{<<"direct">>,11210}]}}]}} INFO REPORT <0.64.0> 2011-11-30 18:53:31 =============================================================================== ns_1@10.13.41.69:<0.64.0>:ns_cluster:369: port_please("ns_1", "10.13.41.47") = 21100 INFO REPORT <0.64.0> 2011-11-30 18:53:31 =============================================================================== ns_1@10.13.41.69:<0.64.0>:ns_cluster:466: Started node add transaction by adding node 'ns_1@10.13.41.47' to nodes_wanted INFO REPORT <0.158.0> 2011-11-30 18:53:31 =============================================================================== ns_node_disco_conf_events config on nodes_wanted INFO REPORT <0.158.0> 2011-11-30 18:53:31 =============================================================================== config change: nodes_wanted -> ['ns_1@10.13.41.47','ns_1@10.13.41.69'] INFO REPORT <0.182.0> 2011-11-30 18:53:31 =============================================================================== ns_1@10.13.41.69:<0.182.0>:mb_master:374: List of peers has changed from ['ns_1@10.13.41.69'] to ['ns_1@10.13.41.47', 'ns_1@10.13.41.69'] INFO REPORT <0.182.0> 2011-11-30 18:53:31 =============================================================================== ns_1@10.13.41.69:<0.182.0>:mb_master:424: Switching from normal to compatible mode INFO REPORT <0.1562.0> 2011-11-30 18:53:31 =============================================================================== ns_node_disco cookie_sync INFO REPORT <0.1562.0> 2011-11-30 18:53:31 =============================================================================== ns_node_disco: nodes_wanted updated: ['ns_1@10.13.41.47','ns_1@10.13.41.69'], with cookie: sgzcoaesvptwzitb INFO REPORT <0.158.0> 2011-11-30 18:53:31 =============================================================================== ns_node_disco_conf_events config all INFO REPORT <0.175.0> 2011-11-30 18:53:31 =============================================================================== Pushing config INFO REPORT <0.175.0> 2011-11-30 18:53:31 =============================================================================== Pushing config done INFO REPORT <0.158.0> 2011-11-30 18:53:31 =============================================================================== config change: {node,'ns_1@10.13.41.47',membership} -> inactiveAdded INFO REPORT <0.158.0> 2011-11-30 18:53:31 =============================================================================== ns_node_disco_conf_events config all INFO REPORT <0.175.0> 2011-11-30 18:53:31 =============================================================================== Pushing config INFO REPORT <0.175.0> 2011-11-30 18:53:31 =============================================================================== Pushing config done INFO REPORT <0.170.0> 2011-11-30 18:53:31 =============================================================================== ns_log: logging ns_node_disco:4:Node 'ns_1@10.13.41.69' saw that node 'ns_1@10.13.41.47' came up. INFO REPORT <0.169.0> 2011-11-30 18:53:31 =============================================================================== Detected a new nodes (['ns_1@10.13.41.47','ns_1@10.13.41.69']). Moving config around. 
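[Editor's note] The cookie_sync / "nodes_wanted pong" exchange above is ordinary Erlang distribution plumbing: every node in nodes_wanted must share the same atom cookie before net_adm:ping/1 returns pong. Roughly, with the cookie value taken from the log:

    %% Sketch: align the local cookie, then ping every wanted node.
    sync_and_ping(NodesWanted) ->
        true = erlang:set_cookie(node(), sgzcoaesvptwzitb),
        [{N, net_adm:ping(N)} || N <- NodesWanted].

Applied to ['ns_1@10.13.41.47','ns_1@10.13.41.69'] this yields pong per reachable node, which is what the "nodes_wanted pong" report records.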
INFO REPORT <0.169.0> 2011-11-30 18:53:31 =============================================================================== ns_node_disco_log: nodes changed: ['ns_1@10.13.41.47','ns_1@10.13.41.69'] INFO REPORT <0.175.0> 2011-11-30 18:53:31 =============================================================================== ns_1@10.13.41.69:<0.175.0>:ns_config_rep:186: Replicating config to/from: ['ns_1@10.13.41.47'] INFO REPORT <0.175.0> 2011-11-30 18:53:31 =============================================================================== ns_1@10.13.41.69:<0.175.0>:ns_config_rep:257: Pulling config from: 'ns_1@10.13.41.47' INFO REPORT <0.1562.0> 2011-11-30 18:53:31 =============================================================================== ns_node_disco: nodes_wanted pong: ['ns_1@10.13.41.47','ns_1@10.13.41.69'], with cookie: sgzcoaesvptwzitb INFO REPORT <0.64.0> 2011-11-30 18:53:31 =============================================================================== ns_1@10.13.41.69:<0.64.0>:ns_cluster:436: Posting the following to complete_join on "10.13.41.47:8091": {struct, [{<<"targetNode">>,'ns_1@10.13.41.47'}, {availableStorage, {struct, [{hdd, [{struct, [{path,<<"/">>}, {sizeKBytes,8256952}, {usagePercent,15}]}, {struct, [{path,<<"/dev">>}, {sizeKBytes,8754940}, {usagePercent,1}]}, {struct, [{path,<<"/run">>}, {sizeKBytes,3505116}, {usagePercent,1}]}, {struct, [{path,<<"/run/lock">>}, {sizeKBytes,5120}, {usagePercent,0}]}, {struct, [{path,<<"/run/shm">>}, {sizeKBytes,8762784}, {usagePercent,0}]}, {struct, [{path,<<"/mnt">>}, {sizeKBytes,423135208}, {usagePercent,1}]}]}]}}, {memoryQuota,15000}, {storageTotals, {struct, [{ram, {struct, [{usedByData,26968704}, {total,17946181632}, {quotaTotal,15728640000}, {used,508162048}]}}, {hdd, {struct, [{usedByData,1808192}, {total,8455118848}, {quotaTotal,8455118848}, {used,1268267827}, {free,7186851021}]}}]}}, {storage, {struct, [{ssd,[]}, {hdd, [{struct, [{path,<<"/opt/membase/var/lib/membase/data">>}, {quotaMb,none}, {state,ok}]}]}]}}, {systemStats, {struct,[{cpu_utilization_rate,0},{swap_total,0},{swap_used,0}]}}, {interestingStats,{struct,[]}}, {uptime,<<"337">>}, {memoryTotal,17946181632}, {memoryFree,17438019584}, {mcdMemoryReserved,13691}, {mcdMemoryAllocated,13691}, {otpNode,<<"ns_1@10.13.41.69">>}, {otpCookie,<<"sgzcoaesvptwzitb">>}, {clusterMembership,<<"active">>}, {status,<<"healthy">>}, {hostname,<<"10.13.41.69:8091">>}, {clusterCompatibility,1}, {version,<<"1.7.2r-20-g6604356">>}, {os,<<"x86_64-unknown-linux-gnu">>}, {ports,{struct,[{proxy,11211},{direct,11210}]}}]} INFO REPORT <0.175.0> 2011-11-30 18:53:31 =============================================================================== ns_log: logging ns_config:3:Conflicting configuration changes to field nodes_wanted: [{'_vclock',[{'ns_1@10.13.41.47',{1,63489898411}}]},'ns_1@10.13.41.47'] and [{'_vclock',[{'ns_1@10.13.41.69',{2,63489898411}}]}, 'ns_1@10.13.41.47','ns_1@10.13.41.69'], choosing the former. INFO REPORT <0.175.0> 2011-11-30 18:53:31 =============================================================================== ns_1@10.13.41.69:<0.175.0>:ns_config_rep:191: config pull_and_push done. 
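[Editor's note] engage_cluster and complete_join are HTTP POSTs of the node-info JSON to the peer's REST port, carried out with the inets httpc client whose supervisors started a few reports earlier; the {struct, ...} wrappers in the log are the mochijson2-style term encoding of that JSON. A hedged sketch of such a call; the /engageCluster path is an assumption, only the host, port, and body shape come from the log:

    %% Sketch: POST this node's info to a peer's cluster-manager REST API.
    post_node_info(Host, Port, JsonBody) ->
        inets:start(),    %% returns {error,{already_started,inets}} if running; ignored here
        Url = "http://" ++ Host ++ ":" ++ integer_to_list(Port)
                        ++ "/engageCluster",      %% assumed endpoint path
        httpc:request(post,
                      {Url, [], "application/json", JsonBody},
                      [{timeout, 30000}], []).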
INFO REPORT <0.158.0> 2011-11-30 18:53:31
===============================================================================
ns_node_disco_conf_events config on nodes_wanted

INFO REPORT <0.158.0> 2011-11-30 18:53:31
===============================================================================
config change: nodes_wanted -> ['ns_1@10.13.41.47']

INFO REPORT <0.182.0> 2011-11-30 18:53:31
===============================================================================
ns_1@10.13.41.69:<0.182.0>:mb_master:374: List of peers has changed from ['ns_1@10.13.41.47', 'ns_1@10.13.41.69'] to ['ns_1@10.13.41.47']

INFO REPORT <0.182.0> 2011-11-30 18:53:31
===============================================================================
ns_1@10.13.41.69:<0.182.0>:mb_master:178: Master has been demoted. Peers = ['ns_1@10.13.41.47']

INFO REPORT <0.1576.0> 2011-11-30 18:53:31
===============================================================================
ns_node_disco cookie_sync

INFO REPORT <0.1576.0> 2011-11-30 18:53:31
===============================================================================
ns_node_disco: nodes_wanted updated: ['ns_1@10.13.41.47'], with cookie: sgzcoaesvptwzitb

INFO REPORT <0.158.0> 2011-11-30 18:53:31
===============================================================================
config change: {node,'ns_1@10.13.41.47',config_version} -> {1,7,2}

INFO REPORT <0.1576.0> 2011-11-30 18:53:31
===============================================================================
ns_node_disco: nodes_wanted pong: ['ns_1@10.13.41.47'], with cookie: sgzcoaesvptwzitb

INFO REPORT <0.1576.0> 2011-11-30 18:53:31
===============================================================================
ns_1@10.13.41.69:<0.1576.0>:ns_node_disco:189: We've been shunned (nodes_wanted = ['ns_1@10.13.41.47']). Leaving cluster.
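[Editor's note] "We've been shunned" is the orderly-eviction path: the config pulled from 10.13.41.47 (whose single-entry nodes_wanted won the vclock conflict logged above) no longer lists this node, so it removes itself. The check amounts to something like the following sketch; it is not the actual ns_node_disco source, and leave_cluster/0 is a hypothetical stand-in:

    maybe_shun(NodesWanted) ->
        case lists:member(node(), NodesWanted) of
            true  -> ok;
            false ->
                error_logger:info_msg("We've been shunned (nodes_wanted = ~p). "
                                      "Leaving cluster.~n", [NodesWanted]),
                leave_cluster()
        end.

    %% Hypothetical stand-in for the real leave sequence.
    leave_cluster() ->
        ok.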
INFO REPORT <0.158.0> 2011-11-30 18:53:31
===============================================================================
config change: {node,'ns_1@10.13.41.47',isasl} -> [{path,"/opt/membase/var/lib/membase/data/isasl.pw"}]

INFO REPORT <0.158.0> 2011-11-30 18:53:31
===============================================================================
config change: {node,'ns_1@10.13.41.47',membership} -> active

INFO REPORT <0.158.0> 2011-11-30 18:53:31
===============================================================================
config change: {node,'ns_1@10.13.41.47',memcached} -> [{port,11210}, {dbdir,"/opt/membase/var/lib/membase/data"}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine,"/opt/membase/lib/memcached/bucket_engine.so"}, {engines, [{membase, [{engine,"/opt/membase/lib/memcached/ep.so"}, {initfile,"/opt/membase/etc/membase/init.sql"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false;shardpattern=%d/%b-%i.mb;db_strategy=multiMTVBDB"}]}, {memcached, [{engine,"/opt/membase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {verbosity,[]}]

INFO REPORT <0.158.0> 2011-11-30 18:53:31
===============================================================================
config change: {node,'ns_1@10.13.41.47',moxi} -> [{port,11211},{verbosity,[]}]

INFO REPORT <0.158.0> 2011-11-30 18:53:31
===============================================================================
config change: {node,'ns_1@10.13.41.47',ns_log} -> [{filename,"/opt/membase/var/lib/membase/data/ns_log"}]

INFO REPORT <0.158.0> 2011-11-30 18:53:31
===============================================================================
config change: {node,'ns_1@10.13.41.47',rest} -> [{port,8091},{port_meta,global}]

INFO REPORT <0.158.0> 2011-11-30 18:53:31
===============================================================================
ns_node_disco_conf_events config all

INFO REPORT <0.175.0> 2011-11-30 18:53:31
===============================================================================
Pushing config

INFO REPORT <0.175.0> 2011-11-30 18:53:31
===============================================================================
Pushing config done

INFO REPORT <0.64.0> 2011-11-30 18:53:32
===============================================================================
ns_1@10.13.41.69:<0.64.0>:ns_cluster:442: Reply from complete_join on "10.13.41.47:8091": {ok,[]}

INFO REPORT <0.64.0> 2011-11-30 18:53:32
===============================================================================
ns_1@10.13.41.69:<0.64.0>:ns_cluster:92: add_node("10.13.41.47", 8091, ..) -> {ok, 'ns_1@10.13.41.47'}

INFO REPORT <0.64.0> 2011-11-30 18:53:32
===============================================================================
ns_log: logging ns_cluster:1:Node 'ns_1@10.13.41.69' is leaving cluster.
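[Editor's note] Leaving the cluster explains the remainder of the log: the "default" bucket shuts down, moxi and memcached exit on stdin EOL, and Mnesia is stopped and restarted from scratch. For a bare Mnesia node the reset portion would look roughly like this, under the assumption (consistent with the restart sequence below) that the old schema is discarded:

    %% Sketch: drop local Mnesia state and restart with a fresh schema.
    reset_local_mnesia() ->
        stopped = mnesia:stop(),
        ok = mnesia:delete_schema([node()]),      %% removes on-disk tables
        ok = mnesia:create_schema([node()]),
        ok = mnesia:start().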
INFO REPORT <0.598.0> 2011-11-30 18:53:32
===============================================================================
ns_1@10.13.41.69:<0.598.0>:single_bucket_sup:27: Delegating exit {'EXIT', <0.209.0>, shutdown} to child supervisor: <0.599.0>

INFO REPORT <0.600.0> 2011-11-30 18:53:32
===============================================================================
ns_log: logging ns_memcached:2:Shutting down bucket "default" on 'ns_1@10.13.41.69' for server shutdown

INFO REPORT <0.598.0> 2011-11-30 18:53:33
===============================================================================
ns_1@10.13.41.69:<0.598.0>:single_bucket_sup:24: per-bucket supervisor for "default" died with reason shutdown

INFO REPORT <0.545.0> 2011-11-30 18:53:33
===============================================================================
menelaus_web streaming socket closed by client

INFO REPORT <0.693.0> 2011-11-30 18:53:33
===============================================================================
moxi<0.693.0>: EOL on stdin. Exiting

ERROR REPORT <0.191.0> 2011-11-30 18:53:33
===============================================================================
ns_1@10.13.41.69:<0.191.0>:ns_heart:137: Failed to grab system stats: {'EXIT',{noproc,{gen_server,call, [{'stats_reader-@system','ns_1@10.13.41.69'}, {latest,"minute"}]}}}

ERROR REPORT <0.191.0> 2011-11-30 18:53:33
===============================================================================
ns_1@10.13.41.69:<0.191.0>:ns_heart:156: Failed to get stats for bucket: "default": {'EXIT',{noproc,{gen_server,call, [{'stats_reader-default','ns_1@10.13.41.69'}, {latest,minute}]}}}

ERROR REPORT <0.1278.0> 2011-11-30 18:53:33
===============================================================================
ns_1@10.13.41.69:<0.1278.0>:stats_reader:191: Some nodes didn't respond: ['ns_1@10.13.41.47']

ERROR REPORT <0.1278.0> 2011-11-30 18:53:33
===============================================================================
ns_1@10.13.41.69:<0.1278.0>:stats_reader:191: Some nodes didn't respond: ['ns_1@10.13.41.69']

INFO REPORT <0.206.0> 2011-11-30 18:53:34
===============================================================================
memcached<0.206.0>: EOL on stdin.
Exiting

INFO REPORT <0.7.0> 2011-11-30 18:53:34
===============================================================================
application mnesia exited stopped type temporary

PROGRESS REPORT <0.1601.0> 2011-11-30 18:53:34
===============================================================================
supervisor {local,mnesia_sup} started [{pid,<0.1602.0>}, {name,mnesia_event}, {mfargs,{mnesia_sup,start_event,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}]

PROGRESS REPORT <0.1603.0> 2011-11-30 18:53:34
===============================================================================
supervisor {local,mnesia_kernel_sup} started [{pid,<0.1604.0>}, {name,mnesia_monitor}, {mfargs,{mnesia_monitor,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}]

PROGRESS REPORT <0.1603.0> 2011-11-30 18:53:34
===============================================================================
supervisor {local,mnesia_kernel_sup} started [{pid,<0.1605.0>}, {name,mnesia_subscr}, {mfargs,{mnesia_subscr,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}]

PROGRESS REPORT <0.1603.0> 2011-11-30 18:53:34
===============================================================================
supervisor {local,mnesia_kernel_sup} started [{pid,<0.1606.0>}, {name,mnesia_locker}, {mfargs,{mnesia_locker,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}]

PROGRESS REPORT <0.1603.0> 2011-11-30 18:53:34
===============================================================================
supervisor {local,mnesia_kernel_sup} started [{pid,<0.1607.0>}, {name,mnesia_recover}, {mfargs,{mnesia_recover,start,[]}}, {restart_type,permanent}, {shutdown,180000}, {child_type,worker}]

PROGRESS REPORT <0.1603.0> 2011-11-30 18:53:34
===============================================================================
supervisor {local,mnesia_kernel_sup} started [{pid,<0.1608.0>}, {name,mnesia_tm}, {mfargs,{mnesia_tm,start,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}]

PROGRESS REPORT <0.1603.0> 2011-11-30 18:53:34
===============================================================================
supervisor {local,mnesia_kernel_sup} started [{pid,<0.1609.0>}, {name,mnesia_checkpoint_sup}, {mfargs,{mnesia_checkpoint_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]

PROGRESS REPORT <0.1603.0> 2011-11-30 18:53:34
===============================================================================
supervisor {local,mnesia_kernel_sup} started [{pid,<0.1610.0>}, {name,mnesia_snmp_sup}, {mfargs,{mnesia_snmp_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]

PROGRESS REPORT <0.1603.0> 2011-11-30 18:53:34
===============================================================================
supervisor {local,mnesia_kernel_sup} started [{pid,<0.1611.0>}, {name,mnesia_controller}, {mfargs,{mnesia_controller,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}]

PROGRESS REPORT <0.1603.0> 2011-11-30 18:53:34
===============================================================================
supervisor {local,mnesia_kernel_sup} started [{pid,<0.1612.0>}, {name,mnesia_late_loader}, {mfargs,{mnesia_late_loader,start,[]}}, {restart_type,permanent}, {shutdown,3000}, {child_type,worker}]

PROGRESS REPORT <0.1601.0> 2011-11-30 18:53:34
===============================================================================
supervisor {local,mnesia_sup} started [{pid,<0.1603.0>},
{name,mnesia_kernel_sup}, {mfargs,{mnesia_kernel_sup,start,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]

PROGRESS REPORT <0.7.0> 2011-11-30 18:53:34
===============================================================================
application mnesia started_at 'ns_1@10.13.41.69'

INFO REPORT <0.67.0> 2011-11-30 18:53:34
===============================================================================
ns_1@10.13.41.69:<0.67.0>:mb_mnesia:409: Committed schema to disk.

INFO REPORT <0.67.0> 2011-11-30 18:53:34
===============================================================================
ns_1@10.13.41.69:<0.67.0>:mb_mnesia:166: Info from Mnesia: mnesia_controller terminated: shutdown

INFO REPORT <0.67.0> 2011-11-30 18:53:34
===============================================================================
ns_1@10.13.41.69:<0.67.0>:mb_mnesia:166: Info from Mnesia: mnesia_tm terminated: shutdown

INFO REPORT <0.67.0> 2011-11-30 18:53:34
===============================================================================
ns_1@10.13.41.69:<0.67.0>:mb_mnesia:166: Info from Mnesia: mnesia_recover terminated: shutdown

INFO REPORT <0.67.0> 2011-11-30 18:53:34
===============================================================================
ns_1@10.13.41.69:<0.67.0>:mb_mnesia:166: Info from Mnesia: mnesia_locker terminated: shutdown

INFO REPORT <0.67.0> 2011-11-30 18:53:34
===============================================================================
ns_1@10.13.41.69:<0.67.0>:mb_mnesia:169: Saw Mnesia go down on 'ns_1@10.13.41.69'

INFO REPORT <0.67.0> 2011-11-30 18:53:34
===============================================================================
ns_1@10.13.41.69:<0.67.0>:mb_mnesia:199: Mnesia table event: {write,{schema,schema, [{name,schema}, {type,set}, {ram_copies,[]}, {disc_copies,['ns_1@10.13.41.69']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,false}, {record_name,schema}, {attributes,[table,cstruct]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1322,679214,71685},'ns_1@10.13.41.69'}}, {version,{{3,0},{'ns_1@10.13.41.69',{1322,679214,72236}}}}]}, {tid,3,<0.1615.0>}}

INFO REPORT <0.159.0> 2011-11-30 18:53:34
===============================================================================
ns_1@10.13.41.69:<0.159.0>:ns_config_default:226: Upgrading config from 1.6 to 1.7

INFO REPORT <0.159.0> 2011-11-30 18:53:34
===============================================================================
ns_1@10.13.41.69:<0.159.0>:ns_config:413: Upgrading config by changes: [{set,{node,'ns_1@10.13.41.69',config_version},{1,7}}, {set, {node,'ns_1@10.13.41.69',memcached}, [{'_vclock',[{'ns_1@10.13.41.69',{1,63489898414}}]}, {port,11210}, {dbdir,"/opt/membase/var/lib/membase/data"}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine,"/opt/membase/lib/memcached/bucket_engine.so"}, {engines, [{membase, [{engine,"/opt/membase/lib/memcached/ep.so"}, {initfile,"/opt/membase/etc/membase/init.sql"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false;shardpattern=%d/%b-%i.mb;db_strategy=multiMTVBDB"}]}, {memcached, [{engine,"/opt/membase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {verbosity,[]}]}, {set, {node,'ns_1@10.13.41.69',ns_log}, [{'_vclock',[{'ns_1@10.13.41.69',{1,63489898414}}]}, {filename,"/opt/membase/var/lib/membase/data/ns_log"}]}, {set,port_servers, [{moxi,"/opt/membase/bin/moxi", ["-Z",
{"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{rest,port}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout, stream]}, {memcached,"/opt/membase/bin/memcached", ["-X","/opt/membase/lib/memcached/stdin_term_handler.so","-p", {"~B",[port]}, "-E","/opt/membase/lib/memcached/bucket_engine.so","-B","binary", "-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}, {set, {node,'ns_1@10.13.41.69',isasl}, [{'_vclock',[{'ns_1@10.13.41.69',{1,63489898414}}]}, {path,"/opt/membase/var/lib/membase/data/isasl.pw"}]}, {set,directory,"/opt/membase/var/lib/membase/config"}] INFO REPORT <0.159.0> 2011-11-30 18:53:34 =============================================================================== ns_1@10.13.41.69:<0.159.0>:ns_config_default:262: Upgrading config from 1.7 to 1.7.1 INFO REPORT <0.159.0> 2011-11-30 18:53:34 =============================================================================== ns_1@10.13.41.69:<0.159.0>:ns_config:413: Upgrading config by changes: [{set,{node,'ns_1@10.13.41.69',config_version},{1,7,1}}, {set,email_alerts, [{recipients,["root@localhost"]}, {sender,"membase@localhost"}, {enabled,true}, {email_server,[{user,[]}, {pass,[]}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts,[auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down, auto_failover_cluster_too_small]}]}, {set,auto_failover_cfg, [{enabled,false},{timeout,30},{max_nodes,1},{count,0}]}] INFO REPORT <0.159.0> 2011-11-30 18:53:34 =============================================================================== ns_1@10.13.41.69:<0.159.0>:ns_config_default:273: Upgrading config from 1.7.1 to 1.7.2 INFO REPORT <0.159.0> 2011-11-30 18:53:34 =============================================================================== ns_1@10.13.41.69:<0.159.0>:ns_config_default:300: Setting global and per-node rest port to 8091 INFO REPORT <0.159.0> 2011-11-30 18:53:34 =============================================================================== ns_1@10.13.41.69:<0.159.0>:ns_config:413: Upgrading config by changes: [{set,{node,'ns_1@10.13.41.69',config_version},{1,7,2}}, {set,rest,[{port,8091}]}, {set,{node,'ns_1@10.13.41.69',rest},[{port,8091},{port_meta,global}]}] INFO REPORT <0.159.0> 2011-11-30 18:53:34 =============================================================================== ns_1@10.13.41.69:<0.159.0>:ns_config:447: Upgraded initial config: {config, {full,"/opt/membase/etc/membase/config",undefined,ns_config_default}, [[], [{directory,"/opt/membase/var/lib/membase/config"}, {nodes_wanted,['ns_1@10.13.41.69']}, {{node,'ns_1@10.13.41.69',membership},active}, {rest,[{port,8091}]}, {{node,'ns_1@10.13.41.69',rest},[{port,8091},{port_meta,global}]}, {rest_creds,[{creds,[]}]}, {{node,'ns_1@10.13.41.69',isasl}, 
[{path,"/opt/membase/var/lib/membase/data/isasl.pw"}]}, {{node,'ns_1@10.13.41.69',memcached}, [{port,11210}, {dbdir,"/opt/membase/var/lib/membase/data"}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine,"/opt/membase/lib/memcached/bucket_engine.so"}, {engines, [{membase, [{engine,"/opt/membase/lib/memcached/ep.so"}, {initfile,"/opt/membase/etc/membase/init.sql"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false;shardpattern=%d/%b-%i.mb;db_strategy=multiMTVBDB"}]}, {memcached, [{engine,"/opt/membase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {verbosity,[]}]}, {memory_quota,16090}, {buckets,[{configs,[]}]}, {{node,'ns_1@10.13.41.69',moxi},[{port,11211},{verbosity,[]}]}, {port_servers, [{moxi,"/opt/membase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{rest,port}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR", {"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD", {"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout, stream]}, {memcached,"/opt/membase/bin/memcached", ["-X","/opt/membase/lib/memcached/stdin_term_handler.so","-p", {"~B",[port]}, "-E","/opt/membase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}, {{node,'ns_1@10.13.41.69',ns_log}, [{filename,"/opt/membase/var/lib/membase/data/ns_log"}]}, {email_alerts, [{recipients,["root@localhost"]}, {sender,"membase@localhost"}, {enabled,true}, {email_server, [{user,[]}, {pass,[]}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down, auto_failover_cluster_too_small]}]}, {replication,[{enabled,true}]}, {auto_failover_cfg, [{enabled,false},{timeout,30},{max_nodes,1},{count,0}]}]], [[{directory,"/opt/membase/var/lib/membase/config"}, {{node,'ns_1@10.13.41.69',config_version},{1,7,2}}, {auto_failover_cfg, [{enabled,false},{timeout,30},{max_nodes,1},{count,0}]}, {buckets,[{configs,[]}]}, {email_alerts, [{recipients,["root@localhost"]}, {sender,"membase@localhost"}, {enabled,true}, {email_server, [{user,[]}, {pass,[]}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down, auto_failover_cluster_too_small]}]}, {memory_quota,16090}, {nodes_wanted,['ns_1@10.13.41.69']}, {port_servers, [{moxi,"/opt/membase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{rest,port}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, 
{"MOXI_SASL_PLAIN_USR", {"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD", {"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout, stream]}, {memcached,"/opt/membase/bin/memcached", ["-X","/opt/membase/lib/memcached/stdin_term_handler.so","-p", {"~B",[port]}, "-E","/opt/membase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}, {replication,[{enabled,true}]}, {rest,[{port,8091}]}, {rest_creds,[{creds,[]}]}, {{node,'ns_1@10.13.41.69',isasl}, [{'_vclock',[{'ns_1@10.13.41.69',{1,63489898414}}]}, {path,"/opt/membase/var/lib/membase/data/isasl.pw"}]}, {{node,'ns_1@10.13.41.69',membership},active}, {{node,'ns_1@10.13.41.69',memcached}, [{'_vclock',[{'ns_1@10.13.41.69',{1,63489898414}}]}, {port,11210}, {dbdir,"/opt/membase/var/lib/membase/data"}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine,"/opt/membase/lib/memcached/bucket_engine.so"}, {engines, [{membase, [{engine,"/opt/membase/lib/memcached/ep.so"}, {initfile,"/opt/membase/etc/membase/init.sql"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false;shardpattern=%d/%b-%i.mb;db_strategy=multiMTVBDB"}]}, {memcached, [{engine,"/opt/membase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {verbosity,[]}]}, {{node,'ns_1@10.13.41.69',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@10.13.41.69',ns_log}, [{'_vclock',[{'ns_1@10.13.41.69',{1,63489898414}}]}, {filename,"/opt/membase/var/lib/membase/data/ns_log"}]}, {{node,'ns_1@10.13.41.69',rest},[{port,8091},{port_meta,global}]}]], ns_config_default, {ns_config,save_config_sync,[]}, undefined,false} INFO REPORT <0.158.0> 2011-11-30 18:53:34 =============================================================================== config change: rest -> [{port,8091}] INFO REPORT <0.158.0> 2011-11-30 18:53:34 =============================================================================== config change: {node,'ns_1@10.13.41.69',rest} -> [{port,8091},{port_meta,global}] INFO REPORT <0.158.0> 2011-11-30 18:53:34 =============================================================================== config change: nodes_wanted -> ['ns_1@10.13.41.69'] INFO REPORT <0.158.0> 2011-11-30 18:53:34 =============================================================================== config change: otp -> [{cookie,rizwejgdhowfisdt}] INFO REPORT <0.64.0> 2011-11-30 18:53:34 =============================================================================== ns_1@10.13.41.69:<0.64.0>:ns_storage_conf:337: Result of deleting db directory: "/opt/membase/var/lib/membase/data/default-data": ok INFO REPORT <0.64.0> 2011-11-30 18:53:34 =============================================================================== ns_1@10.13.41.69:<0.64.0>:ns_cluster:131: Leaving cluster PROGRESS REPORT <0.1634.0> 2011-11-30 18:53:35 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<0.1635.0>}, {name,ns_log}, {mfargs,{ns_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <0.1634.0> 2011-11-30 18:53:35 =============================================================================== supervisor 
{local,ns_server_sup} started [{pid,<0.1636.0>}, {name,ns_log_events}, {mfargs,{gen_event,start_link,[{local,ns_log_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

PROGRESS REPORT <0.1637.0> 2011-11-30 18:53:35
===============================================================================
supervisor {local,ns_node_disco_sup} started [{pid,<0.1638.0>}, {name,ns_node_disco_events}, {mfargs,{gen_event,start_link,[{local,ns_node_disco_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

INFO REPORT <0.1639.0> 2011-11-30 18:53:35
===============================================================================
Initting ns_node_disco with []

INFO REPORT <0.1640.0> 2011-11-30 18:53:35
===============================================================================
ns_node_disco cookie_sync

INFO REPORT <0.1640.0> 2011-11-30 18:53:35
===============================================================================
ns_node_disco: nodes_wanted updated: ['ns_1@10.13.41.69'], with cookie: rizwejgdhowfisdt

INFO REPORT <0.1640.0> 2011-11-30 18:53:35
===============================================================================
ns_node_disco: nodes_wanted pong: ['ns_1@10.13.41.69'], with cookie: rizwejgdhowfisdt

PROGRESS REPORT <0.1637.0> 2011-11-30 18:53:35
===============================================================================
supervisor {local,ns_node_disco_sup} started [{pid,<0.1639.0>}, {name,ns_node_disco}, {mfargs,{ns_node_disco,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

PROGRESS REPORT <0.1637.0> 2011-11-30 18:53:35
===============================================================================
supervisor {local,ns_node_disco_sup} started [{pid,<0.1642.0>}, {name,ns_node_disco_log}, {mfargs,{ns_node_disco_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

PROGRESS REPORT <0.1637.0> 2011-11-30 18:53:35
===============================================================================
supervisor {local,ns_node_disco_sup} started [{pid,<0.1643.0>}, {name,ns_node_disco_conf_events}, {mfargs,{ns_node_disco_conf_events,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

INFO REPORT <0.1638.0> 2011-11-30 18:53:35
===============================================================================
ns_node_disco_log: nodes changed: ['ns_1@10.13.41.69']

INFO REPORT <0.1644.0> 2011-11-30 18:53:35
===============================================================================
ns_1@10.13.41.69:<0.1644.0>:ns_config_rep:56: init pulling

INFO REPORT <0.1644.0> 2011-11-30 18:53:35
===============================================================================
ns_1@10.13.41.69:<0.1644.0>:ns_config_rep:58: init pushing

INFO REPORT <0.1644.0> 2011-11-30 18:53:35
===============================================================================
ns_1@10.13.41.69:<0.1644.0>:ns_config_rep:62: init reannouncing

INFO REPORT <0.158.0> 2011-11-30 18:53:35
===============================================================================
ns_node_disco_conf_events config on otp

INFO REPORT <0.158.0> 2011-11-30 18:53:35
===============================================================================
config change: otp -> [{cookie,rizwejgdhowfisdt}]

INFO REPORT <0.158.0> 2011-11-30 18:53:35
===============================================================================
ns_node_disco_conf_events config on nodes_wanted

INFO REPORT <0.158.0> 2011-11-30 18:53:35
===============================================================================
config change: nodes_wanted -> ['ns_1@10.13.41.69']

INFO REPORT <0.158.0> 2011-11-30 18:53:35
===============================================================================
config change: {node,'ns_1@10.13.41.69',rest} -> [{port,8091},{port_meta,global}]

INFO REPORT <0.158.0> 2011-11-30 18:53:35
===============================================================================
config change: rest -> [{port,8091}]

INFO REPORT <0.158.0> 2011-11-30 18:53:35
===============================================================================
config change: directory -> "/opt/membase/var/lib/membase/config"

INFO REPORT <0.158.0> 2011-11-30 18:53:35
===============================================================================
config change: {node,'ns_1@10.13.41.69',config_version} -> {1,7,2}

INFO REPORT <0.1646.0> 2011-11-30 18:53:35
===============================================================================
ns_node_disco cookie_sync

INFO REPORT <0.158.0> 2011-11-30 18:53:35
===============================================================================
config change: auto_failover_cfg -> [{enabled,false},{timeout,30},{max_nodes,1},{count,0}]

INFO REPORT <0.1648.0> 2011-11-30 18:53:35
===============================================================================
ns_node_disco cookie_sync

INFO REPORT <0.1646.0> 2011-11-30 18:53:35
===============================================================================
ns_node_disco: nodes_wanted updated: ['ns_1@10.13.41.69'], with cookie: rizwejgdhowfisdt

INFO REPORT <0.1648.0> 2011-11-30 18:53:35
===============================================================================
ns_node_disco: nodes_wanted updated: ['ns_1@10.13.41.69'], with cookie: rizwejgdhowfisdt

INFO REPORT <0.1646.0> 2011-11-30 18:53:35
===============================================================================
ns_node_disco: nodes_wanted pong: ['ns_1@10.13.41.69'], with cookie: rizwejgdhowfisdt

INFO REPORT <0.1648.0> 2011-11-30 18:53:35
===============================================================================
ns_node_disco: nodes_wanted pong: ['ns_1@10.13.41.69'], with cookie: rizwejgdhowfisdt

INFO REPORT <0.158.0> 2011-11-30 18:53:35
===============================================================================
config change: buckets -> [{configs,[]}]

PROGRESS REPORT <0.1637.0> 2011-11-30 18:53:35
===============================================================================
supervisor {local,ns_node_disco_sup} started [{pid,<0.1644.0>}, {name,ns_config_rep}, {mfargs,{ns_config_rep,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

INFO REPORT <0.158.0> 2011-11-30 18:53:35
===============================================================================
config change: email_alerts -> [{recipients,["root@localhost"]}, {sender,"membase@localhost"}, {enabled,true}, {email_server,[{user,[]}, {pass,[]}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts,[auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small]}]

PROGRESS REPORT <0.1634.0> 2011-11-30 18:53:35
===============================================================================
supervisor {local,ns_server_sup} started [{pid,<0.1637.0>}, {name,ns_node_disco_sup}, {mfargs,{ns_node_disco_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]

INFO REPORT <0.158.0> 2011-11-30 18:53:35
===============================================================================
config change: memory_quota -> 16090

PROGRESS REPORT <0.1634.0> 2011-11-30 18:53:35
===============================================================================
supervisor {local,ns_server_sup} started [{pid,<0.1649.0>}, {name,ns_tick_event}, {mfargs,{gen_event,start_link,[{local,ns_tick_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

INFO REPORT <0.162.0> 2011-11-30 18:53:35
===============================================================================
Writing isasl passwd file: "/opt/membase/var/lib/membase/data/isasl.pw"

INFO REPORT <0.158.0> 2011-11-30 18:53:35
===============================================================================
config change: port_servers -> [{moxi,"/opt/membase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{rest,port}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/membase/bin/memcached", ["-X","/opt/membase/lib/memcached/stdin_term_handler.so","-p", {"~B",[port]}, "-E","/opt/membase/lib/memcached/bucket_engine.so","-B","binary", "-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]

INFO REPORT <0.158.0> 2011-11-30 18:53:35
===============================================================================
config change: replication -> [{enabled,true}]

INFO REPORT <0.158.0> 2011-11-30 18:53:35
===============================================================================
config change: rest_creds -> ********

INFO REPORT <0.158.0> 2011-11-30 18:53:35
===============================================================================
config change: {node,'ns_1@10.13.41.69',isasl} -> [{path,"/opt/membase/var/lib/membase/data/isasl.pw"}]

INFO REPORT <0.158.0> 2011-11-30 18:53:35
===============================================================================
config change: {node,'ns_1@10.13.41.69',membership} -> active

INFO REPORT <0.158.0> 2011-11-30 18:53:35
===============================================================================
config change: {node,'ns_1@10.13.41.69',memcached} -> [{port,11210}, {dbdir,"/opt/membase/var/lib/membase/data"}, {admin_user,"_admin"}, {admin_pass,"_admin"}, {bucket_engine,"/opt/membase/lib/memcached/bucket_engine.so"}, {engines, [{membase, [{engine,"/opt/membase/lib/memcached/ep.so"}, {initfile,"/opt/membase/etc/membase/init.sql"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false;shardpattern=%d/%b-%i.mb;db_strategy=multiMTVBDB"}]}, {memcached, [{engine,"/opt/membase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {verbosity,[]}]

INFO REPORT <0.158.0> 2011-11-30 18:53:35
===============================================================================
config change: {node,'ns_1@10.13.41.69',moxi} -> [{port,11211},{verbosity,[]}]

INFO REPORT <0.158.0> 2011-11-30 18:53:35
===============================================================================
config change: {node,'ns_1@10.13.41.69',ns_log} -> [{filename,"/opt/membase/var/lib/membase/data/ns_log"}]

INFO REPORT <0.158.0> 2011-11-30 18:53:35
===============================================================================
ns_node_disco_conf_events config all

INFO REPORT <0.1644.0> 2011-11-30 18:53:35
===============================================================================
Pushing config

INFO REPORT <0.1650.0> 2011-11-30 18:53:35
===============================================================================
ns_1@10.13.41.69:<0.1650.0>:mb_master:97: I'm the only node, so I'm the master.

INFO REPORT <0.1644.0> 2011-11-30 18:53:35
===============================================================================
Pushing config done

INFO REPORT <0.1652.0> 2011-11-30 18:53:35
===============================================================================
ns_1@10.13.41.69:<0.1652.0>:misc:785: start_singleton(gen_fsm, ns_orchestrator, [], []): started as <0.1653.0> on 'ns_1@10.13.41.69'

PROGRESS REPORT <0.1652.0> 2011-11-30 18:53:35
===============================================================================
supervisor {local,mb_master_sup} started [{pid,<0.1653.0>}, {name,ns_orchestrator}, {mfargs,{ns_orchestrator,start_link,[]}}, {restart_type,permanent}, {shutdown,20}, {child_type,worker}]

INFO REPORT <0.1652.0> 2011-11-30 18:53:35
===============================================================================
ns_1@10.13.41.69:<0.1652.0>:misc:785: start_singleton(gen_server, ns_tick, [], []): started as <0.1655.0> on 'ns_1@10.13.41.69'

PROGRESS REPORT <0.1652.0> 2011-11-30 18:53:35
===============================================================================
supervisor {local,mb_master_sup} started [{pid,<0.1655.0>}, {name,ns_tick}, {mfargs,{ns_tick,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}]

INFO REPORT <0.1656.0> 2011-11-30 18:53:35
===============================================================================
ns_1@10.13.41.69:<0.1656.0>:auto_failover:120: init auto_failover.
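The start_singleton lines above show the master node starting ns_orchestrator and ns_tick exactly once for the whole cluster; with a single-node peer list, mb_master simply elects itself. A rough sketch of the register-or-reuse pattern those lines suggest (race handling elided; singleton_sketch is illustrative, not the actual misc:start_singleton code):

    -module(singleton_sketch).
    -export([start_singleton/2]).

    %% Illustrative sketch: start M:F(A...) once per cluster by
    %% registering the pid under a global name; if the name is already
    %% registered somewhere, reuse that pid instead of starting anew.
    start_singleton(Name, {M, F, A}) ->
        case global:whereis_name(Name) of
            undefined ->
                {ok, Pid} = apply(M, F, A),
                yes = global:register_name(Name, Pid),  % assumes we won any race
                {ok, Pid};
            Pid when is_pid(Pid) ->
                {ok, Pid}
        end.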
INFO REPORT <0.1652.0> 2011-11-30 18:53:35
===============================================================================
ns_1@10.13.41.69:<0.1652.0>:misc:785: start_singleton(gen_server, auto_failover, [], []): started as <0.1656.0> on 'ns_1@10.13.41.69'

PROGRESS REPORT <0.1652.0> 2011-11-30 18:53:35
===============================================================================
supervisor {local,mb_master_sup} started [{pid,<0.1656.0>}, {name,auto_failover}, {mfargs,{auto_failover,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}]

PROGRESS REPORT <0.1634.0> 2011-11-30 18:53:35
===============================================================================
supervisor {local,ns_server_sup} started [{pid,<0.1650.0>}, {name,mb_master}, {mfargs,{mb_master,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]

PROGRESS REPORT <0.1634.0> 2011-11-30 18:53:35
===============================================================================
supervisor {local,ns_server_sup} started [{pid,<0.1657.0>}, {name,buckets_events}, {mfargs,{gen_event,start_link,[{local,buckets_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

PROGRESS REPORT <0.1658.0> 2011-11-30 18:53:35
===============================================================================
supervisor {local,ns_mail_sup} started [{pid,<0.1659.0>}, {name,ns_mail}, {mfargs,{ns_mail,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

PROGRESS REPORT <0.1658.0> 2011-11-30 18:53:35
===============================================================================
supervisor {local,ns_mail_sup} started [{pid,<0.1660.0>}, {name,ns_mail_log}, {mfargs,{ns_mail_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

INFO REPORT <0.1636.0> 2011-11-30 18:53:35
===============================================================================
ns_1@10.13.41.69:<0.1636.0>:ns_mail_log:45: ns_mail_log started up

PROGRESS REPORT <0.1634.0> 2011-11-30 18:53:35
===============================================================================
supervisor {local,ns_server_sup} started [{pid,<0.1658.0>}, {name,ns_mail_sup}, {mfargs,{ns_mail_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]

PROGRESS REPORT <0.1634.0> 2011-11-30 18:53:35
===============================================================================
supervisor {local,ns_server_sup} started [{pid,<0.1661.0>}, {name,ns_heart}, {mfargs,{ns_heart,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

PROGRESS REPORT <0.1634.0> 2011-11-30 18:53:35
===============================================================================
supervisor {local,ns_server_sup} started [{pid,<0.1662.0>}, {name,ns_doctor}, {mfargs,{ns_doctor,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}]

PROGRESS REPORT <0.1664.0> 2011-11-30 18:53:35
===============================================================================
supervisor {local,menelaus_sup} started [{pid,<0.1666.0>}, {name,menelaus_web}, {mfargs,{menelaus_web,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}]

PROGRESS REPORT <0.1664.0> 2011-11-30 18:53:35
===============================================================================
supervisor {local,menelaus_sup} started [{pid,<0.1668.0>}, {name,menelaus_event}, {mfargs,{menelaus_event,start_link,[]}}, {restart_type,permanent}, {shutdown,5000},
{child_type,worker}]

PROGRESS REPORT <0.1664.0> 2011-11-30 18:53:35
===============================================================================
supervisor {local,menelaus_sup} started [{pid,<0.1669.0>}, {name,hot_keys_keeper}, {mfargs,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}]

PROGRESS REPORT <0.1664.0> 2011-11-30 18:53:35
===============================================================================
supervisor {local,menelaus_sup} started [{pid,<0.1670.0>}, {name,menelaus_web_alerts_srv}, {mfargs,{menelaus_web_alerts_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}]

INFO REPORT <0.1634.0> 2011-11-30 18:53:35
===============================================================================
ns_log: logging menelaus_sup:1:Membase Server has started on web port 8091 on node 'ns_1@10.13.41.69'.

PROGRESS REPORT <0.1634.0> 2011-11-30 18:53:35
===============================================================================
supervisor {local,ns_server_sup} started [{pid,<0.1664.0>}, {name,menelaus}, {mfargs,{menelaus_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]

ERROR REPORT <0.1661.0> 2011-11-30 18:53:35
===============================================================================
ns_1@10.13.41.69:<0.1661.0>:ns_heart:137: Failed to grab system stats: {'EXIT',{noproc,{gen_server,call, [{'stats_reader-@system','ns_1@10.13.41.69'}, {latest,"minute"}]}}}

INFO REPORT <0.1662.0> 2011-11-30 18:53:35
===============================================================================
ns_1@10.13.41.69:<0.1662.0>:ns_doctor:82: Got initial status [{'ns_1@10.13.41.69', [{last_heard, {1322,679215, 119273}}, {active_buckets, []}, {ready_buckets, []}, {replication, []}, {memory, [{total, 27821904}, {processes, 7218208}, {processes_used, 7177136}, {system, 20603696}, {atom, 900817}, {atom_used, 876987}, {binary, 964976}, {code, 8612851}, {ets, 547224}]}, {system_stats, [{cpu_utilization_rate, 0}, {swap_total, 0}, {swap_used, 0}]}, {interesting_stats, []}, {cluster_compatibility_version, 1}, {version, [{os_mon, "2.2.5"}, {mnesia, "4.4.17"}, {inets, "5.5.2"}, {kernel, "2.14.3"}, {sasl, "2.1.9.3"}, {ns_server, "1.7.2r-20-g6604356"}, {stdlib, "1.17.3"}]}, {system_arch, "x86_64-unknown-linux-gnu"}, {wall_clock, 344}, {memory_data, {17946181632, 508162048, {<0.545.0>, 2172736}}}, {disk_data, [{"/", 8256952,15}, {"/dev", 8754940,1}, {"/run", 3505116,1}, {"/run/lock", 5120,0}, {"/run/shm", 8762784,0}, {"/mnt", 423135208, 1}]}, {meminfo, <<"MemTotal: 17525568 kB\nMemFree: 17098376 kB\nBuffers: 98272 kB\nCached: 57968 kB\nSwapCached: 0 kB\nActive: 151492 kB\nInactive: 46848 kB\nActive(anon): 42116 kB\nInactive(anon): 140 kB\nActive(file): 109376 kB\nInactive(file): 46708 kB\nUnevictable: 0 kB\nMlocked: 0 kB\nSwapTotal: 0 kB\nSwapFree: 0 kB\nDirty: 252 kB\nWriteback: 0 kB\nAnonPages: 42048 kB\nMapped: 6224 kB\nShmem: 156 kB\nSlab: 18860 kB\nSReclaimable: 13120 kB\nSUnreclaim: 5740 kB\nKernelStack: 704 kB\nPageTables: 1380 kB\nNFS_Unstable: 0 kB\nBounce: 0 kB\nWritebackTmp: 0 kB\nCommitLimit: 8762784 kB\nCommitted_AS: 113016 kB\nVmallocTotal: 34359738367 kB\nVmallocUsed: 64000 kB\nVmallocChunk: 34359674252 kB\nHardwareCorrupted: 0 kB\nAnonHugePages: 0 kB\nHugePages_Total: 0\nHugePages_Free: 0\nHugePages_Rsvd: 0\nHugePages_Surp: 0\nHugepagesize: 2048 kB\nDirectMap4k: 17920000 kB\nDirectMap2M: 0 kB\n">>}, {system_memory_data, [{system_total_memory, 17946181632}, {free_swap, 0}, {total_swap,
0}, {cached_memory, 59359232}, {buffered_memory, 100630528}, {free_memory, 17508737024}, {total_memory, 17946181632}]}, {statistics, [{wall_clock, {344460, 1864}}, {context_switches, {101777,0}}, {garbage_collection, {17619, 72179732, 0}}, {io, {{input, 12311899}, {output, 9779485}}}, {reductions, {41597063, 564456}}, {run_queue, 0}, {runtime, {3460, 90}}]}]}]

PROGRESS REPORT <0.1671.0> 2011-11-30 18:53:35
===============================================================================
supervisor {local,ns_port_sup} started [{pid,<0.1672.0>}, {name,ns_port_init}, {mfargs,{ns_port_init,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}]

INFO REPORT <0.1673.0> 2011-11-30 18:53:35
===============================================================================
starting ns_port_server with delay of 5000

PROGRESS REPORT <0.1671.0> 2011-11-30 18:53:35
===============================================================================
supervisor {local,ns_port_sup} started [{pid,<0.1673.0>}, {name, {moxi,"/opt/membase/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",[]}, {"MOXI_SASL_PLAIN_PWD",[]}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]}}, {mfargs, {supervisor_cushion,start_link, [moxi,5000,ns_port_server,start_link, [moxi,"/opt/membase/bin/moxi", ["-Z", "port_listen=11211,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", "-z", "url=http://127.0.0.1:8091/pools/default/saslBucketsStreaming", "-p","0","-Y","y","-O","stderr",[]], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",[]}, {"MOXI_SASL_PLAIN_PWD",[]}]}, use_stdio,exit_status,port_server_send_eol, stderr_to_stdout,stream]]]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}]

INFO REPORT <0.1675.0> 2011-11-30 18:53:35
===============================================================================
starting ns_port_server with delay of 5000

PROGRESS REPORT <0.1671.0> 2011-11-30 18:53:35
===============================================================================
supervisor {local,ns_port_sup} started [{pid,<0.1675.0>}, {name, {memcached,"/opt/membase/bin/memcached", ["-X","/opt/membase/lib/memcached/stdin_term_handler.so", "-p","11210","-E", "/opt/membase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", "admin=_admin;default_bucket_name=default;auto_create=false", []], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE", "/opt/membase/var/lib/membase/data/isasl.pw"}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status, port_server_send_eol,stream]}}, {mfargs, {supervisor_cushion,start_link, [memcached,5000,ns_port_server,start_link, [memcached,"/opt/membase/bin/memcached", ["-X","/opt/membase/lib/memcached/stdin_term_handler.so", "-p","11210","-E", "/opt/membase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", "admin=_admin;default_bucket_name=default;auto_create=false", []], [{env,
[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE", "/opt/membase/var/lib/membase/data/isasl.pw"}, {"ISASL_DB_CHECK_TIME","1"}]}, use_stdio,stderr_to_stdout,exit_status, port_server_send_eol,stream]]]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}] PROGRESS REPORT <0.1634.0> 2011-11-30 18:53:35 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<0.1671.0>}, {name,ns_port_sup}, {mfargs,{ns_port_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,60000}, {child_type,worker}] PROGRESS REPORT <0.1634.0> 2011-11-30 18:53:35 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<0.1677.0>}, {name,ns_stats_event}, {mfargs,{gen_event,start_link,[{local,ns_stats_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <0.1634.0> 2011-11-30 18:53:35 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<0.1678.0>}, {name,ns_bucket_worker}, {mfargs,{work_queue,start_link,[ns_bucket_worker]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <0.1634.0> 2011-11-30 18:53:35 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<0.1679.0>}, {name,ns_bucket_sup}, {mfargs,{ns_bucket_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] PROGRESS REPORT <0.1634.0> 2011-11-30 18:53:35 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<0.1680.0>}, {name,system_stats_collector}, {mfargs,{system_stats_collector,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] INFO REPORT <0.67.0> 2011-11-30 18:53:35 =============================================================================== ns_1@10.13.41.69:<0.67.0>:mb_mnesia:109: Creating table 'stats_archiver-@system-minute' PROGRESS REPORT <0.1634.0> 2011-11-30 18:53:35 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<0.1682.0>}, {name,{stats_archiver,"@system"}}, {mfargs,{stats_archiver,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <0.1634.0> 2011-11-30 18:53:35 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<0.1685.0>}, {name,{stats_reader,"@system"}}, {mfargs,{stats_reader,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <0.1634.0> 2011-11-30 18:53:35 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<0.1686.0>}, {name,ns_moxi_sup_work_queue}, {mfargs,{work_queue,start_link,[ns_moxi_sup_work_queue]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] PROGRESS REPORT <0.1634.0> 2011-11-30 18:53:35 =============================================================================== supervisor {local,ns_server_sup} started [{pid,<0.1687.0>}, {name,ns_moxi_sup}, {mfargs,{ns_moxi_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] PROGRESS REPORT <0.54.0> 2011-11-30 18:53:35 
===============================================================================
supervisor {local,ns_server_cluster_sup} started [{pid,<0.1634.0>}, {name,ns_server_sup}, {mfargs,{ns_server_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}]

INFO REPORT <0.67.0> 2011-11-30 18:53:35
===============================================================================
ns_1@10.13.41.69:<0.67.0>:mb_mnesia:199: Mnesia table event: {write,{schema,'stats_archiver-@system-minute', [{name,'stats_archiver-@system-minute'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@10.13.41.69']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1322,679215,131210},'ns_1@10.13.41.69'}}, {version,{{2,0},[]}}]}, {tid,4,<0.1684.0>}}

INFO REPORT <0.67.0> 2011-11-30 18:53:35
===============================================================================
ns_1@10.13.41.69:<0.67.0>:mb_mnesia:199: Mnesia table event: {write,{schema,'stats_archiver-@system-minute', [{name,'stats_archiver-@system-minute'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@10.13.41.69']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1322,679215,131210},'ns_1@10.13.41.69'}}, {version,{{2,0},[]}}]}, {tid,4,<0.1684.0>}}

INFO REPORT <0.67.0> 2011-11-30 18:53:35
===============================================================================
ns_1@10.13.41.69:<0.67.0>:mb_mnesia:109: Creating table 'stats_archiver-@system-hour'

INFO REPORT <0.67.0> 2011-11-30 18:53:35
===============================================================================
ns_1@10.13.41.69:<0.67.0>:mb_mnesia:199: Mnesia table event: {write,{schema,'stats_archiver-@system-hour', [{name,'stats_archiver-@system-hour'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@10.13.41.69']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1322,679215,140789},'ns_1@10.13.41.69'}}, {version,{{2,0},[]}}]}, {tid,5,<0.1694.0>}}

INFO REPORT <0.67.0> 2011-11-30 18:53:35
===============================================================================
ns_1@10.13.41.69:<0.67.0>:mb_mnesia:199: Mnesia table event: {write,{schema,'stats_archiver-@system-hour', [{name,'stats_archiver-@system-hour'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@10.13.41.69']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1322,679215,140789},'ns_1@10.13.41.69'}}, {version,{{2,0},[]}}]}, {tid,5,<0.1694.0>}}

INFO REPORT <0.67.0> 2011-11-30 18:53:35
===============================================================================
ns_1@10.13.41.69:<0.67.0>:mb_mnesia:109: Creating table 'stats_archiver-@system-day'

INFO REPORT <0.67.0> 2011-11-30 18:53:35
===============================================================================
ns_1@10.13.41.69:<0.67.0>:mb_mnesia:199: Mnesia table event: {write,{schema,'stats_archiver-@system-day',
[{name,'stats_archiver-@system-day'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@10.13.41.69']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1322,679215,146399},'ns_1@10.13.41.69'}}, {version,{{2,0},[]}}]}, {tid,6,<0.1701.0>}}

INFO REPORT <0.67.0> 2011-11-30 18:53:35
===============================================================================
ns_1@10.13.41.69:<0.67.0>:mb_mnesia:199: Mnesia table event: {write,{schema,'stats_archiver-@system-day', [{name,'stats_archiver-@system-day'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@10.13.41.69']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1322,679215,146399},'ns_1@10.13.41.69'}}, {version,{{2,0},[]}}]}, {tid,6,<0.1701.0>}}

INFO REPORT <0.67.0> 2011-11-30 18:53:35
===============================================================================
ns_1@10.13.41.69:<0.67.0>:mb_mnesia:109: Creating table 'stats_archiver-@system-week'

INFO REPORT <0.67.0> 2011-11-30 18:53:35
===============================================================================
ns_1@10.13.41.69:<0.67.0>:mb_mnesia:199: Mnesia table event: {write,{schema,'stats_archiver-@system-week', [{name,'stats_archiver-@system-week'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@10.13.41.69']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1322,679215,151700},'ns_1@10.13.41.69'}}, {version,{{2,0},[]}}]}, {tid,7,<0.1708.0>}}

INFO REPORT <0.67.0> 2011-11-30 18:53:35
===============================================================================
ns_1@10.13.41.69:<0.67.0>:mb_mnesia:199: Mnesia table event: {write,{schema,'stats_archiver-@system-week', [{name,'stats_archiver-@system-week'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@10.13.41.69']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1322,679215,151700},'ns_1@10.13.41.69'}}, {version,{{2,0},[]}}]}, {tid,7,<0.1708.0>}}

INFO REPORT <0.67.0> 2011-11-30 18:53:35
===============================================================================
ns_1@10.13.41.69:<0.67.0>:mb_mnesia:109: Creating table 'stats_archiver-@system-month'

INFO REPORT <0.67.0> 2011-11-30 18:53:35
===============================================================================
ns_1@10.13.41.69:<0.67.0>:mb_mnesia:199: Mnesia table event: {write,{schema,'stats_archiver-@system-month', [{name,'stats_archiver-@system-month'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@10.13.41.69']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1322,679215,157073},'ns_1@10.13.41.69'}}, {version,{{2,0},[]}}]}, {tid,8,<0.1715.0>}}

INFO REPORT <0.67.0> 2011-11-30 18:53:35
===============================================================================
ns_1@10.13.41.69:<0.67.0>:mb_mnesia:199: Mnesia table event: {write,{schema,'stats_archiver-@system-month', [{name,'stats_archiver-@system-month'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@10.13.41.69']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1322,679215,157073},'ns_1@10.13.41.69'}}, {version,{{2,0},[]}}]}, {tid,8,<0.1715.0>}}

INFO REPORT <0.67.0> 2011-11-30 18:53:35
===============================================================================
ns_1@10.13.41.69:<0.67.0>:mb_mnesia:109: Creating table 'stats_archiver-@system-year'

INFO REPORT <0.67.0> 2011-11-30 18:53:35
===============================================================================
ns_1@10.13.41.69:<0.67.0>:mb_mnesia:199: Mnesia table event: {write,{schema,'stats_archiver-@system-year', [{name,'stats_archiver-@system-year'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@10.13.41.69']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1322,679215,162573},'ns_1@10.13.41.69'}}, {version,{{2,0},[]}}]}, {tid,9,<0.1722.0>}}

INFO REPORT <0.67.0> 2011-11-30 18:53:35
===============================================================================
ns_1@10.13.41.69:<0.67.0>:mb_mnesia:199: Mnesia table event: {write,{schema,'stats_archiver-@system-year', [{name,'stats_archiver-@system-year'}, {type,ordered_set}, {ram_copies,[]}, {disc_copies,['ns_1@10.13.41.69']}, {disc_only_copies,[]}, {load_order,0}, {access_mode,read_write}, {index,[]}, {snmp,[]}, {local_content,true}, {record_name,stat_entry}, {attributes,[timestamp,values]}, {user_properties,[]}, {frag_properties,[]}, {cookie,{{1322,679215,162573},'ns_1@10.13.41.69'}}, {version,{{2,0},[]}}]}, {tid,9,<0.1722.0>}}

INFO REPORT <0.1662.0> 2011-11-30 18:54:35
===============================================================================
ns_1@10.13.41.69:<0.1662.0>:ns_doctor:86: Current node statuses: [{'ns_1@10.13.41.69', [{last_heard,{1322,679270,118990}}, {active_buckets,[]}, {ready_buckets,[]}, {replication,[]}, {memory, [{total,29154696}, {processes,8446336}, {processes_used,8417432}, {system,20708360}, {atom,900817}, {atom_used,876987}, {binary,965488}, {code,8612851}, {ets,648320}]}, {system_stats,[{cpu_utilization_rate,0.0},{swap_total,0},{swap_used,0}]}, {interesting_stats,[]}, {cluster_compatibility_version,1}, {version, [{os_mon,"2.2.5"}, {mnesia,"4.4.17"}, {inets,"5.5.2"}, {kernel,"2.14.3"}, {sasl,"2.1.9.3"}, {ns_server,"1.7.2r-20-g6604356"}, {stdlib,"1.17.3"}]}, {system_arch,"x86_64-unknown-linux-gnu"}, {wall_clock,399}, {memory_data,{17946181632,438226944,{<0.6.0>,1113888}}}, {disk_data, [{"/",8256952,15}, {"/dev",8754940,1}, {"/run",3505116,1}, {"/run/lock",5120,0}, {"/run/shm",8762784,0}, {"/mnt",423135208,1}]}, {meminfo, <<"MemTotal: 17525568 kB\nMemFree: 17097580 kB\nBuffers: 98272 kB\nCached: 58076 kB\nSwapCached: 0 kB\nActive: 151796 kB\nInactive: 46904 kB\nActive(anon): 42376 kB\nInactive(anon): 140 kB\nActive(file): 109420 kB\nInactive(file): 46764 kB\nUnevictable: 0 kB\nMlocked: 0 kB\nSwapTotal: 0 kB\nSwapFree: 0 kB\nDirty: 40 kB\nWriteback: 0 kB\nAnonPages: 42360 kB\nMapped: 7412 kB\nShmem: 156 kB\nSlab: 18900 kB\nSReclaimable: 13112 kB\nSUnreclaim: 5788 kB\nKernelStack: 840
kB\nPageTables: 1604 kB\nNFS_Unstable: 0 kB\nBounce: 0 kB\nWritebackTmp: 0 kB\nCommitLimit: 8762784 kB\nCommitted_AS: 230736 kB\nVmallocTotal: 34359738367 kB\nVmallocUsed: 64000 kB\nVmallocChunk: 34359674252 kB\nHardwareCorrupted: 0 kB\nAnonHugePages: 0 kB\nHugePages_Total: 0\nHugePages_Free: 0\nHugePages_Rsvd: 0\nHugePages_Surp: 0\nHugepagesize: 2048 kB\nDirectMap4k: 17920000 kB\nDirectMap2M: 0 kB\n">>}, {system_memory_data, [{system_total_memory,17946181632}, {free_swap,0}, {total_swap,0}, {cached_memory,59469824}, {buffered_memory,100630528}, {free_memory,17507921920}, {total_memory,17946181632}]}, {statistics, [{wall_clock,{389460,0}}, {context_switches,{107167,0}}, {garbage_collection,{18961,74197921,0}}, {io,{{input,12370959},{output,9887658}}}, {reductions,{42366461,203414}}, {run_queue,0}, {runtime,{3610,50}}]}]}]
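The ns_doctor dumps above are plain per-node proplists, so pulling a single figure out of one is just nested lists:keyfind calls. A hedged sketch (status_probe is a hypothetical helper, not part of ns_doctor):

    -module(status_probe).
    -export([free_memory/1]).

    %% Illustrative sketch: extract free_memory (in bytes) from one
    %% node's status proplist as dumped by ns_doctor above.
    free_memory(NodeStatus) ->
        case lists:keyfind(system_memory_data, 1, NodeStatus) of
            {system_memory_data, Mem} ->
                case lists:keyfind(free_memory, 1, Mem) of
                    {free_memory, Bytes} -> {ok, Bytes};
                    false -> {error, not_found}
                end;
            false ->
                {error, not_found}
        end.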